Skip to content

Instantly share code, notes, and snippets.

@nicktimko
Last active January 22, 2020 21:28
Show Gist options
  • Save nicktimko/c6e8e0e668c50baff653f34bcae4c91b to your computer and use it in GitHub Desktop.
Save nicktimko/c6e8e0e668c50baff653f34bcae4c91b to your computer and use it in GitHub Desktop.
Pipenv to pip-tools converter
#!/usr/bin/env python3
"""
Convert a Pipfile/[.lock] to a pip-tools requirements.(in|txt) file. While it's
only slightly annoying to convert the Pipfile to a requirements.in (which we
do) the primary goal is to convert the *locked* file with all the versions and
hashes, without *updating* any of them. This will allow bisection of problems
to the conversion from Pipenv to Pip-tools without confounding from myriad
updates to all the libraries used within.
To validate/clean up the new file, you can run the build command which will
use the lock file and inspect what needs to be updated (hopefully nothing),
and should just change the "# via ?" lines and maybe re-order some packages.
$ pip-compile --quiet --generate-hashes --output-file=requirements.txt requirements.in
After using this to convert the source/lock file, here's a recommended Make
target to add:
requirements.txt: requirements.in
pip-compile \\
--quiet \\
--generate-hashes \\
--output-file=requirements.txt \\
requirements.in
"""
import argparse
import json
import pathlib
import os
import subprocess
import sys
import typing
import toml
PIPENV_SOURCE = "Pipfile"
PIPENV_LOCK = "Pipfile.lock"
TARGET_BASENAME_DEFAULT = "requirements"


def transform_source_file(
    directory: pathlib.Path,
    *,
    target_basename: str = TARGET_BASENAME_DEFAULT,
    force_overwrite: bool = False,
    dev: bool = False,
) -> typing.Set[str]:
    """Convert a Pipfile into a pip-tools ``<target_basename>.in`` file.

    Args:
        directory: Folder containing the Pipfile; output is written there too.
        target_basename: Stem of the output file (``<stem>.in``).
        force_overwrite: Overwrite an existing output file instead of raising.
        dev: Also include the ``dev-packages`` section.

    Returns:
        The direct dependency names, casefolded for easier lookup.

    Raises:
        FileExistsError: If the output exists and *force_overwrite* is false.
    """
    with open(directory / PIPENV_SOURCE, mode="r") as f:
        env_source = toml.load(f)

    package_groups = [env_source["packages"]]
    if dev:
        package_groups.append(env_source["dev-packages"])

    deps: typing.Set[str] = set()
    lines: typing.List[str] = []
    for n, pg in enumerate(package_groups):
        if n and dev:
            # visually separate the dev-only section in the generated file
            lines.append("\n# dev-packages\n")
        for pkg, constraint in pg.items():
            if constraint == "*":
                # "any version" needs no specifier in a .in file
                constraint = ""
            lines.append(f"{pkg}{constraint}\n")
            deps.add(pkg.casefold())

    # "x" mode makes accidental clobbering raise FileExistsError
    mode = "w" if force_overwrite else "x"
    with open(directory / (target_basename + ".in"), mode=mode) as f:
        f.writelines(lines)
    return deps


def transform_lock_file(
    directory: pathlib.Path,
    *,
    target_basename: str = TARGET_BASENAME_DEFAULT,
    direct_deps: typing.Optional[typing.Set[str]] = None,
    force_overwrite: bool = False,
    dev: bool = False,
) -> None:
    """Convert a Pipfile.lock into a pinned+hashed ``<target_basename>.txt``.

    Versions and hashes are copied verbatim from the lock file so that
    nothing gets *updated* during the migration to pip-tools.

    Args:
        directory: Folder containing Pipfile.lock; output is written there too.
        target_basename: Stem of the output file (``<stem>.txt``).
        direct_deps: Casefolded direct-dependency names (as returned by
            ``transform_source_file``); any package not in it is tagged
            ``# via ?``. ``None`` disables the tagging.  (Was previously an
            implicit-Optional annotation ``typing.Set[str] = None``.)
        force_overwrite: Overwrite an existing output file instead of raising.
        dev: Also include the ``develop`` section of the lock file.

    Raises:
        ValueError: If a package source has ``verify_ssl`` disabled.
        FileExistsError: If the output exists and *force_overwrite* is false.
    """
    with open(directory / PIPENV_LOCK, mode="r") as f:
        env_lock = json.load(f)

    lines = [
        "#\n"
        "# This file is autogenerated by pipenv2tools (NOT pip-compile)\n"
        "# To update, run:\n"
        "#\n"
        f"#    pip-compile --generate-hashes --output-file={target_basename}.txt {target_basename}.in\n"
        "#\n"
    ]

    for n, source in enumerate(env_lock["_meta"]["sources"]):
        # pip accepts only one primary index; the rest become extras
        flag = "--extra-index-url" if n > 0 else "--index-url"
        if not source["verify_ssl"]:
            # was a bare assert, which is silently stripped under ``python -O``
            raise ValueError(f"source with verify_ssl disabled: {source['url']}")
        lines.append(f"{flag} {source['url']}\n")
    lines.append("\n")

    package_groups = [env_lock["default"]]
    if dev:
        package_groups.append(env_lock["develop"])
    for pg in package_groups:
        for pkg, info in pg.items():
            pl = [f"{pkg}{info['version']}"]
            for hash_ in info["hashes"]:
                pl.append(f"    --hash={hash_}")
            if direct_deps and pkg.casefold() not in direct_deps:
                # this could probably directly append to lines and skip the '\\\n'
                # join, but pip-compile emits reqs.txt with the line-continuation
                # to the comment.
                pl.append("    # via ?")
            lines.append(" \\\n".join(pl) + "\n")

    lines.append(
        "\n# WARNING: This file was created via pipenv2tools so may be broken.\n"
    )
    mode = "w" if force_overwrite else "x"
    with open(directory / (target_basename + ".txt"), mode=mode) as f:
        f.writelines(lines)
class RegenerationError(RuntimeError):
    """Raised when pip-compile cannot be run or exits non-zero."""


def regenerate(directory: pathlib.Path) -> None:
    """Run pip-compile in *directory* to validate/clean the converted files.

    Args:
        directory: Working directory containing requirements.in.

    Raises:
        RegenerationError: If pip-compile is not on the PATH or fails.
    """
    args = [
        "pip-compile",
        "--generate-hashes",
        "--quiet",
        "--output-file=requirements.txt",
        "requirements.in",
    ]
    try:
        subprocess.run(args, check=True, cwd=directory)
    except FileNotFoundError as exc:
        # BUG FIX: the missing executable's name lives in exc.filename, not in
        # exc.args — the old comparison `msg == "No such file or
        # directory: 'pip-compile'"` could never match, so the friendly error
        # was unreachable and users got a raw traceback instead.
        if exc.filename == args[0]:
            raise RegenerationError("pip-compile not found on the path") from exc
        raise
    except subprocess.CalledProcessError as exc:
        raise RegenerationError(
            f"pip-compile returned a non-zero exit code {exc.returncode}, see output"
        ) from exc
def main() -> int:
    """CLI entry point: convert Pipfile[.lock] in the cwd to requirements files.

    Returns:
        A process exit status: 0 on success, 1 on failure.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter, description=__doc__
    )
    parser.add_argument(
        "-f",
        "--force",
        action="store_true",
        help="force overwrite destination requirements files (if they exist)",
    )
    parser.add_argument(
        "-D", "--no-dev", action="store_true", help="Don't include dev dependencies"
    )
    parser.add_argument(
        "-G",
        "--no-regenerate",
        action="store_true",
        help="By default, the converted output will be run through pip-tools to "
        "validate and clean it up. this requires the network and a little time, "
        "however. So this will skip that step",
    )
    parser.add_argument(
        "-b",
        "--basename",
        type=str,
        default=TARGET_BASENAME_DEFAULT,
        help="Basename of the output files.",
    )
    args = parser.parse_args()

    # operate on the current working directory, not the script's location
    path = pathlib.Path(os.getcwd())
    try:
        # BUG FIX: args.basename was parsed but never forwarded, so the
        # -b/--basename option previously had no effect on the output names.
        direct_deps = transform_source_file(
            path,
            target_basename=args.basename,
            force_overwrite=args.force,
            dev=not args.no_dev,
        )
        transform_lock_file(
            path,
            target_basename=args.basename,
            force_overwrite=args.force,
            direct_deps=direct_deps,
            dev=not args.no_dev,
        )
    except FileExistsError as exc:
        print(
            "An output file already exists, aborting. Specify --force to overwrite.\n",
            exc,
            file=sys.stderr,
        )
        return 1
    if not args.no_regenerate:
        try:
            # NOTE: regenerate always runs pip-compile against the default
            # "requirements" basename, regardless of --basename.
            regenerate(path)
        except RegenerationError as exc:
            print(
                "Failed to regenerate a 'clean' reqs.txt, "
                f"but the Pipfile was converted to reqs.(txt|in).\n Reason: {exc}",
                file=sys.stderr,
            )
            return 1
    return 0
# Script entry point: propagate main()'s return value as the process exit code.
if __name__ == "__main__":
    sys.exit(main())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment