diff --git a/.gitignore b/.gitignore
index bd4d3380b4efc83d01763bc201bfe58990d290e5..e7974642e543357ddb6160e73a1a191e2ca194f7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -24,3 +24,6 @@ _citools/
 _work/
 .mypy_cache/
 .pytest_cache/
+.pixi/
+pixi.lock
+pixi.toml
diff --git a/doc/nitpick-exceptions.txt b/doc/nitpick-exceptions.txt
index 0af3c739ade938184477a2ec5f052bc59b5eec42..fbcc656d720d86ee56b2f6d086f433f90c01e203 100644
--- a/doc/nitpick-exceptions.txt
+++ b/doc/nitpick-exceptions.txt
@@ -3,7 +3,6 @@
 # SPDX-License-Identifier: BSD-3-Clause
 
 py:class conda_build.config.Config
-py:class pkg_resources.Requirement
+py:class packaging.requirements.Requirement
 py:class os._Environ
 py:class _io.StringIO
-py:class packaging.requirements.Requirement
diff --git a/src/idiap_devtools/gitlab/release.py b/src/idiap_devtools/gitlab/release.py
index e14656a2d9134d85c93fb7c181a54560a1703a67..8a485db660e064d0ccb36ea478c9dc5c1077f06d 100644
--- a/src/idiap_devtools/gitlab/release.py
+++ b/src/idiap_devtools/gitlab/release.py
@@ -12,11 +12,11 @@ from distutils.version import StrictVersion
 
 import gitlab
 import gitlab.v4.objects
+import packaging.requirements
 import packaging.version
 import tomlkit
 
 from git import Repo
-from pkg_resources import Requirement
 
 from idiap_devtools.profile import Profile
 
@@ -95,7 +95,7 @@ def _update_readme(
 
 def _pin_versions_of_packages_list(
     packages_list: list[str],
-    dependencies_versions: list[Requirement],
+    dependencies_versions: list[packaging.requirements.Requirement],
 ) -> list[str]:
     """Add its version to each package according to a dictionary of versions.
 
@@ -127,15 +127,15 @@ def _pin_versions_of_packages_list(
     # Check that there is not the same dependency twice in the pins
     seen = set()
     for d in dependencies_versions:
-        if d.key in seen:
+        if d.name in seen:
             raise NotImplementedError(
                 "Pinning with more than one specification per dependency not"
                 "supported."
             )
-        seen.add(d.key)
+        seen.add(d.name)
 
     # Make it easier to retrieve the dependency pin for each package.
-    dependencies_dict = {d.key: d for d in dependencies_versions}
+    dependencies_dict = {d.name: d for d in dependencies_versions}
 
     results = []
 
@@ -144,31 +144,31 @@ def _pin_versions_of_packages_list(
         results.append(package)
 
         # Get the dependency package version specifier if already present.
-        pkg_req = Requirement.parse(package)
+        pkg_req = packaging.requirements.Requirement(package)
 
         if pkg_req.url is not None:
             logger.warning(
                 "Ignoring dependency '%s' as it is specified with a url (%s).",
-                pkg_req.key,
+                pkg_req.name,
                 pkg_req.url,
             )
 
         # Retrieve this dependency's constraint Requirement object
-        desired_pin = dependencies_dict.get(pkg_req.key)
+        desired_pin = dependencies_dict.get(pkg_req.name)
 
         if desired_pin is None:
             logger.warning(
                 "Dependency '%s' is not available in constraints. Skipping "
                 "pinning. Consider adding this package to your dev-profile "
                 "constraints file.",
-                pkg_req.key,
+                pkg_req.name,
             )
             continue
 
         # A Requirement is composed of:
-        #   key[extras]@ url ; marker
+        #   name[extras]@ url ; marker
         # Or
-        #   key[extras]specifier; marker
+        #   name[extras]specifier; marker
         # Where extras and marker are optional
 
         # The following handles those different fields
@@ -176,29 +176,26 @@ def _pin_versions_of_packages_list(
         if desired_pin.url is not None:
             logger.info(
                 "Pinning of %s will be done with a URL (%s).",
-                pkg_req.key,
+                pkg_req.name,
                 desired_pin.url,
             )
         else:
             # Build the 'specs' field
-            if len(desired_pin.specs) == 0:
+            if not desired_pin.specifier:
                 logger.warning(
                     "Dependency '%s' has no version specifier in constraints "
                     "'%s'. Skipping pinning.",
-                    pkg_req.key,
+                    pkg_req.name,
                     desired_pin,
                 )
                 continue
 
             # If version specifiers are already present in that dependency
-            if len(pkg_req.specs) > 0:
+            if pkg_req.specifier:
                 raise ValueError(
                     f"You cannot specify a version for the dependency {pkg_req}"
                 )
-            desired_specs = desired_pin.specs
-
-            # Set the version of that dependency to the pinned one.
-            specs_str = ",".join("".join(s) for s in desired_specs)
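+            # Set the version of that dependency to the pinned one.  A
+            # SpecifierSet renders as a comma-separated list of version
+            # specifiers when converted to a string.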
+            specs_str = str(desired_pin.specifier)
 
         # Build the 'marker' field
         if pkg_req.marker is not None:
@@ -225,7 +222,7 @@ def _pin_versions_of_packages_list(
         if desired_pin.url is not None:
             final_str = "".join(
                 (
-                    pkg_req.key,
+                    pkg_req.name,
                     extras_str,
                     "@ ",
                     desired_pin.url,
@@ -235,11 +232,13 @@ def _pin_versions_of_packages_list(
             )
         else:
             final_str = "".join(
-                (pkg_req.key, extras_str, specs_str, marker_str)
+                (pkg_req.name, extras_str, specs_str, marker_str)
             )
 
         # Replace the package specification with the pinned version
-        packages_list[pkg_id] = str(Requirement.parse(final_str))
+        packages_list[pkg_id] = str(
+            packaging.requirements.Requirement(final_str)
+        )
         logger.debug("Package pinned: %s", packages_list[pkg_id])
 
     return packages_list
diff --git a/src/idiap_devtools/profile.py b/src/idiap_devtools/profile.py
index b242241344b342f2453797c3c754ae230a78efdd..acc9fd828a028d235055d302e3793a79d77c2261 100644
--- a/src/idiap_devtools/profile.py
+++ b/src/idiap_devtools/profile.py
@@ -6,7 +6,7 @@ import io
 import pathlib
 import typing
 
-import pkg_resources
+import packaging.requirements
 import tomli
 import xdg
 import yaml
@@ -325,11 +325,13 @@ class Profile:
             for p, v in package_pins.items()
         }
 
-    def python_constraints(self) -> list[pkg_resources.Requirement] | None:
+    def python_constraints(
+        self,
+    ) -> list[packaging.requirements.Requirement] | None:
         """Return a list of Python requirements given the current profile."""
         content = self.get_file_contents(("python", "constraints"))
 
         if content is None:
             return None
 
-        return list(pkg_resources.parse_requirements(content))
+        return [
+            packaging.requirements.Requirement(line)
+            for line in content.splitlines()
+            if line.strip() and not line.strip().startswith("#")
+        ]
diff --git a/src/idiap_devtools/scripts/cli.py b/src/idiap_devtools/scripts/cli.py
index 55c17e9eb3997c398f260fea5836607f26f65eba..77fb9e4f451f27496bf1f5a7856a388e19b0caf3 100644
--- a/src/idiap_devtools/scripts/cli.py
+++ b/src/idiap_devtools/scripts/cli.py
@@ -8,6 +8,7 @@ from ..click import AliasedGroup
 from .env import env
 from .fullenv import fullenv
 from .gitlab import gitlab
+from .pixi import pixi
 from .update_pins import update_pins
 
 
@@ -23,4 +24,5 @@ def cli():
 cli.add_command(env)
 cli.add_command(fullenv)
 cli.add_command(gitlab)
+cli.add_command(pixi)
 cli.add_command(update_pins)
diff --git a/src/idiap_devtools/scripts/fullenv.py b/src/idiap_devtools/scripts/fullenv.py
index a39bdf21d0fb7ec2f179606844e268025d5d3983..936647873f69cede039afb383f7b1d34dac7be30 100644
--- a/src/idiap_devtools/scripts/fullenv.py
+++ b/src/idiap_devtools/scripts/fullenv.py
@@ -123,8 +123,7 @@ def fullenv(
     python_packages = [
         k
         for k in python_packages
-        if python_to_conda.get(k.project_name, k.project_name)
-        not in conda_packages
+        if python_to_conda.get(k.name, k.name) not in conda_packages
     ]
 
     data: dict[str, typing.Any] = dict(channels=["conda-forge"])
diff --git a/src/idiap_devtools/scripts/pixi.py b/src/idiap_devtools/scripts/pixi.py
new file mode 100644
index 0000000000000000000000000000000000000000..78edebf9bec313743cd687a11928a6ea22d347e1
--- /dev/null
+++ b/src/idiap_devtools/scripts/pixi.py
@@ -0,0 +1,250 @@
+# Copyright © 2022 Idiap Research Institute <contact@idiap.ch>
+#
+# SPDX-License-Identifier: BSD-3-Clause
+
+import pathlib
+import sys
+
+import click
+
+from ..click import PreserveIndentCommand, validate_profile, verbosity_option
+from ..logging import setup
+
+logger = setup(__name__.split(".", 1)[0])
+
+
+@click.command(
+    cls=PreserveIndentCommand,
+    epilog="""
+Examples:
+
+  1. Creates a **draft** pixi configuration file for a project you just checked
+     out:
+
+    .. code:: sh
+
+       $ devtool pixi -vv .
+       $ pixi run python
+       ...
+       >>>
+
+  2. Creates a draft pixi configuration file for a project you checked out in
+     the directory ``my-project``:
+
+    .. code:: sh
+
+       $ devtool pixi -vv my-project
+       $ cd my-project
+       $ pixi run python
+       ...
+       >>>
+
+  .. tip::
+
+     You may hand-edit the output file ``pixi.toml`` to adjust details, or to
+     add conda or Python packages you'd like to have in your work
+     environment. An example would be adding debuggers such as ``pdbpp`` to
+     the installation plan.
+
+""",
+)
+@click.argument(
+    "project-dir",
+    nargs=1,
+    required=True,
+    type=click.Path(path_type=pathlib.Path),
+)
+@click.option(
+    "-P",
+    "--profile",
+    default="default",
+    show_default=True,
+    callback=validate_profile,
+    help="Directory containing the development profile (and a file named "
+    "profile.toml), or the name of a configuration key pointing to the "
+    "development profile to use",
+)
+@click.option(
+    "-p",
+    "--python",
+    default=("%d.%d" % sys.version_info[:2]),
+    show_default=True,
+    help="Version of python to build the environment for",
+)
+@verbosity_option(logger=logger)
+def pixi(
+    project_dir,
+    profile,
+    python,
+    **_,
+) -> None:
+    """Create a pixi recipe for a project."""
+
+    import shutil
+
+    import packaging.requirements
+
+    from ..profile import Profile
+
+    the_profile = Profile(profile)
+
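+    # reverse the profile's conda -> Python name mapping, so that PyPI
+    # project names can be translated to their conda counterparts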
+    python_to_conda = {
+        v: k
+        for k, v in the_profile.data["conda"]["to_python"].items()
+        if not k.startswith("__")
+    }
+
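+    # version pins for the requested Python version, taken from the
+    # development profile's conda constraints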
+    version = the_profile.conda_constraints(python)
+    assert version is not None
+
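+    # translates a list of PyPI-style requirement strings into a mapping of
+    # package name -> version specification suitable for a pixi dependency
+    # table, merging the profile's pins with any specifiers already present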
+    def _make_requirement_dict(requirements: list[str]) -> dict[str, str]:
+        retval: dict[str, str] = {}
+        for k in requirements:
+            pr = packaging.requirements.Requirement(k)
+            # prefer the conda name for this package, if the profile maps one
+            name = python_to_conda.get(pr.name, pr.name)
+            # use the profile's conda pin for this package, if there is one
+            if name in version:
+                retval[name] = version[name]
+            if pr.specifier:
+                if name in retval:
+                    retval[name] = ",".join((retval[name], str(pr.specifier)))
+                else:
+                    retval[name] = str(pr.specifier)
+            retval.setdefault(name, "*")
+        return retval
+
+    # loads the pyproject.toml file (required to build the configuration)
+    pyproject_path = project_dir / "pyproject.toml"
+    if not pyproject_path.exists():
+        raise click.ClickException(
+            f"Cannot find {pyproject_path}: a pyproject.toml file is "
+            "required to generate a pixi configuration"
+        )
+
+    import tomli
+
+    pyproject = tomli.load(pyproject_path.open("rb"))
+
+    # build output TOML pixi file
+    config = {}
+    config["project"] = dict(
+        name=pyproject["project"]["name"],
+        authors=[
+            f"{k['name']} <{k['email']}>"
+            for k in pyproject["project"].get("authors", [])
+            + pyproject["project"].get("maintainers", [])
+        ],
+        description=pyproject["project"]["description"],
+        # license=pyproject["project"]["license"]["text"],
+        readme="README.md",
+        homepage=pyproject["project"]["urls"]["homepage"],
+        repository=pyproject["project"]["urls"]["repository"],
+        documentation=pyproject["project"]["urls"]["documentation"],
+    )
+
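+    # channels and platforms: channels are taken from the selected
+    # development profile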
+    conda_config = the_profile.conda_config(
+        python=python, public=True, stable=True
+    )
+    config["project"]["channels"] = conda_config.channels
+    config["project"]["platforms"] = ["linux-64", "osx-arm64"]
+
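+    # runtime dependencies: pin the interpreter to the requested major.minor
+    # series and convert the project's PyPI dependencies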
+    config["dependencies"] = {"python": python + ".*"}
+    config["dependencies"].update(
+        _make_requirement_dict(
+            pyproject.get("project", {}).get("dependencies", [])
+        )
+    )
+
+    cmds = []
+
+    # setup standardized build procedure
+    config.setdefault("feature", {}).setdefault("build", {})["dependencies"] = (
+        _make_requirement_dict(
+            pyproject.get("build-system", {}).get("requires", [])
+        )
+    )
+    # add pip so that the build works
+    config["feature"]["build"]["dependencies"].update(
+        _make_requirement_dict(["pip"])
+    )
+    config["feature"]["build"]["tasks"] = dict(
+        build="pip install --no-build-isolation --no-dependencies --editable ."
+    )
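+    # make sure the default environment contains the build feature, so that
+    # `pixi run build` works out of the box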
+    config.setdefault("environments", {}).setdefault("default", []).insert(
+        0, "build"
+    )
+    cmds.append("To install, run: `pixi run build`")
+
+    # adds optional features
+    for feature, deps in (
+        pyproject.get("project", {}).get("optional-dependencies", {}).items()
+    ):
+        config.setdefault("feature", {}).setdefault(feature, {})[
+            "dependencies"
+        ] = _make_requirement_dict(deps)
+
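+        # if this feature pulls in pre-commit, expose quality-assurance tasks
+        # and a dedicated "qa" environment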
+        if "pre-commit" in config["feature"][feature]["dependencies"]:
+            config["feature"][feature]["tasks"] = {
+                "qa": "pre-commit run --all-files",
+                "qa-install": "pre-commit install",
+            }
+            # this feature can be separated from the rest
+            config.setdefault("environments", {})["qa"] = [feature]
+            cmds.append(
+                "To install pre-commit hook, run: `pixi run qa-install`"
+            )
+            cmds.append("To run quality-assurance, run: `pixi run qa`")
+
+            # if ruff is part of pre-commit configuration, then also add that
+            # dependence to the qa stack
+            precommit_config = project_dir / ".pre-commit-config.yaml"
+            if (
+                precommit_config.exists()
+                and "ruff" in precommit_config.open().read()
+                and "ruff" not in config["feature"][feature]["dependencies"]
+            ):
+                config["feature"][feature]["dependencies"]["ruff"] = "*"
+                config["feature"][feature]["tasks"]["ruff"] = "ruff check"
+                cmds.append("To run a simple ruff check, run: `pixi run ruff`")
+
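+        # if this feature pulls in sphinx, expose a documentation build task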
+        if "sphinx" in config["feature"][feature]["dependencies"]:
+            config["feature"][feature]["tasks"] = {
+                "doc": {
+                    "cmd": "rm -rf doc/api && rm -rf html && sphinx-build -aEW doc html",
+                    "depends_on": "build",
+                }
+            }
+            # this feature needs to have the package installed
+            config.setdefault("environments", {}).setdefault(
+                "default", []
+            ).insert(0, feature)
+            cmds.append("To do build docs, run: `pixi run doc`")
+
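+        # if this feature pulls in pytest, expose a test-running task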
+        if "pytest" in config["feature"][feature]["dependencies"]:
+            config["feature"][feature]["tasks"] = {
+                "test": {
+                    "cmd": "pytest -sv tests/",
+                    "depends_on": "build",
+                }
+            }
+            # this feature needs to have the package installed
+            config.setdefault("environments", {}).setdefault(
+                "default", []
+            ).insert(0, feature)
+            cmds.append("To do run test, run: `pixi run test`")
+
+    # backup previous installation plan, if one exists
+    output = project_dir / "pixi.toml"
+    if output.exists():
+        backup = output.parent / (output.name + "~")
+        shutil.copy(output, backup)
+
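+    # dump the assembled configuration and print follow-up instructions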
+    with output.open("w") as f:
+        import tomlkit
+
+        tomlkit.dump(config, f)
+        click.secho(
+            f"pixi configuration recorded at {str(output)}",
+            fg="yellow",
+            bold=True,
+        )
+        for k in cmds:
+            click.secho(k, fg="yellow", bold=True)
diff --git a/tests/test_release.py b/tests/test_release.py
index 1b5099c3917e57cd486d055f11818d73df043faf..1d1f493d3816dceaadb81f3435fdc2662686767f 100644
--- a/tests/test_release.py
+++ b/tests/test_release.py
@@ -5,7 +5,7 @@
 import pytest
 
 from idiap_devtools.gitlab import release
-from pkg_resources import Requirement
+from packaging.requirements import Requirement
 
 
 def test_pinning_no_constraints():