Skip to content

Instantly share code, notes, and snippets.

@andrew-d
Created December 10, 2018 09:06
Show Gist options
  • Save andrew-d/9015223355618158ee9d5ef09dc2115f to your computer and use it in GitHub Desktop.
Very WIP script to update dependencies for gvisor in Nix
#!/usr/bin/env python
from __future__ import print_function
import re
import sys
import json
import random
import shutil
import tempfile
import subprocess
from os import path
try:
from shlex import quote as shellquote
except ImportError:
from pipes import quote as shellquote
# Do-nothing rule that loads Labels as strings
def Label(s):
    """Identity stand-in for Bazel's Label() so resolved.bzl can be exec'd."""
    return s
def random_sha256():
    """Return a random sha256 in Nix base-32 form, used as a placeholder.

    The placeholder is later replaced with the real hash reported by a
    nix-build hash-mismatch error (see main).
    """
    hex_digits = "0123456789abcdef"
    fake_hash = ''.join(random.choice(hex_digits) for _ in range(64))
    return sha256_to_base32(fake_hash)
def sha256_to_base32(h):
    """Convert a hex sha256 `h` to Nix's base-32 form via `nix-hash`.

    Fix: `subprocess.check_output` returns bytes on Python 3; decode so a
    str is returned on both Python 2 and 3 (otherwise later '%s' formatting
    into the Nix templates would emit "b'...'").
    """
    out = subprocess.check_output(
        ['nix-hash', '--type', 'sha256', '--to-base32', h])
    return out.decode('utf-8').strip()
# Kind of hacky, but this mostly works, assuming that we stick all dependencies
# in the same block, named by their repository name.
def translate_label(l):
    """Rewrite a Bazel label '@repo//pkg:file' as a Nix path '${repo}/pkg/file'."""
    parts = l.split('/', 2)
    repo_part = parts[0]
    assert repo_part.startswith('@')
    repo = repo_part[1:]
    # Only the first ':' separates package from target; turn it into '/'.
    rel = parts[2].replace(':', '/', 1)
    return '${%s}/%s' % (repo, rel)
# Nix expression emitted for each Bazel go_repository rule.
GO_REPOSITORY_TEMPLATE = """
{name} = fetchFromGitHubAndGazelle {{
owner = "{owner}";
repo = "{repo}";
rev = "{rev}";
sha256 = "{sha256}";
}};
"""


def parse_go_repository(name, importpath, commit):
    """Render a go_repository rule as a fetchFromGitHubAndGazelle call.

    A placeholder sha256 is emitted; the real hash is filled in later by
    main() from nix-build's hash-mismatch output.
    """
    # TODO(andrew-d): support more than github
    if not importpath.startswith('github.com/'):
        raise NotImplementedError("importpath not supported")
    pieces = importpath.split('/')
    return GO_REPOSITORY_TEMPLATE.format(
        name=name,
        owner=pieces[1],
        repo=pieces[2],
        rev=commit,
        sha256=random_sha256(),
    )
# Nix fetchzip expression emitted for each http_archive rule.
HTTP_ARCHIVE_TEMPLATE = """
{name} = fetchzip {{
url = "{url}";
sha256 = "{sha256}";
stripRoot = false;
}};
"""


def parse_http_archive(name, url, sha256):
    """Render an http_archive rule as a fetchzip call.

    The incoming `sha256` is ignored in favor of a random placeholder,
    matching the other rule translators; main() later patches in the hash
    that nix actually reports.
    """
    fields = {
        'name': name,
        'url': url,
        'sha256': random_sha256(),
    }
    return HTTP_ARCHIVE_TEMPLATE.format(**fields)
# Nix fetchurl expression emitted for each http repo rule; the archive is
# unpacked, optionally stripped and patched, inside postFetch.
HTTP_REPO_TEMPLATE = """
{name} = fetchurl {{
urls = {urls};
sha256 = "{sha256}";
recursiveHash = true;
downloadToTemp = true;
postFetch = ''
set -ex
{postFetch}
'';
}};
"""


def _format_patches(patches, patch_args):
    """Build the shell commands that apply `patches` inside $unpackDir.

    Returns the literal comment '# no patches' when there is nothing to do.
    """
    if patches is None:
        return '# no patches'
    if patch_args is not None:
        patch_args = ' '.join(shellquote(x) for x in patch_args)
    patch = 'cd "$unpackDir"\n'
    for p in patches:
        patch += 'patch {args} < {path}\n'.format(
            args=(patch_args or ''),
            path=translate_label(p),
        )
    return patch


def parse_http_repo_rule(name, url=None, urls=None, strip_prefix=None, type=None, patches=None, patch_args=None):
    """Render an http repo rule as a fetchurl call with a custom postFetch.

    Exactly one of `url`/`urls` must be given; only type == 'zip' archives
    are supported. A placeholder sha256 is emitted and fixed up later.

    Bug fix: the original validated the url/urls mutual exclusion AFTER
    normalizing `url` into `urls`, so every call that passed only `url`
    was incorrectly rejected. Validate first, then normalize.
    """
    if url is not None and urls is not None:
        raise Exception("may only have one of 'url' or 'urls' in: %s" % (name,))
    if url is None and urls is None:
        raise Exception("must have at least one of 'url' or 'urls' in: %s" % (name,))
    if urls is None:
        urls = [url]
    if strip_prefix is not None:
        stripPrefix = """
cd "$TMPDIR"
mkdir unpacked-stripped
shopt -s dotglob
mv "$unpackDir/{0}"/* unpacked-stripped/
shopt -u dotglob
unpackDir="$TMPDIR/unpacked-stripped"
""".format(strip_prefix)
    else:
        stripPrefix = '# no strip_prefix'
    if type == 'zip':
        unpack = '${unzip}/bin/unzip -qq "$downloadedFile"'
    else:
        raise NotImplementedError("type %r not supported" % (type,))
    patch = _format_patches(patches, patch_args)
    postFetch = """
unpackDir="$TMPDIR/unpacked"
mkdir "$unpackDir" && cd "$unpackDir"
{unpack}
{stripPrefix}
touch "$unpackDir/WORKSPACE"
{patch}
cd "$TMPDIR"
mv "$unpackDir" "$out"
""".format(unpack=unpack, stripPrefix=stripPrefix, patch=patch)
    return HTTP_REPO_TEMPLATE.format(
        name=name,
        sha256=random_sha256(),
        urls=json.dumps(urls),  # close enough :)
        postFetch=postFetch.strip().replace('\n', '\n '),
    )
# Nix fetchgit expression emitted for each git repository rule.
GIT_REPO_TEMPLATE = """
{name} = fetchgit {{
url = "{url}";
rev = "{rev}";
sha256 = "{sha256}";
postFetch = ''
set -ex
{postFetch}
'';
}};
"""


def parse_git_repo_rule(name, remote=None, commit=None, patches=None, patch_args=None):
    """Render a git repository rule as a fetchgit call.

    Patches (if any) are applied inside postFetch; a placeholder sha256 is
    emitted and replaced later with the hash nix reports.
    """
    if patches is None:
        patch = '# no patches'
    else:
        if patch_args is not None:
            patch_args = ' '.join(shellquote(x) for x in patch_args)
        pieces = ['cd "$unpackDir"\n']
        for p in patches:
            pieces.append('patch {args} < {path}\n'.format(
                args=(patch_args or ''),
                path=translate_label(p),
            ))
        patch = ''.join(pieces)
    postFetch = 'cd "$out"\ntouch "$out/WORKSPACE"\n{patch}\n'.format(patch=patch)
    return GIT_REPO_TEMPLATE.format(
        name=name,
        url=remote,
        rev=commit,
        sha256=random_sha256(),
        postFetch=postFetch.strip().replace('\n', '\n '),
    )
# Matches nix's hash-mismatch error for fixed-output derivations; used to
# discover the real hash for each placeholder we generated.
HASH_MISMATCH_RE = re.compile(
    r"fixed-output derivation produced path '[^']+' with sha256 hash "
    r"'(?P<actual>[^']+)' instead of the expected hash '(?P<expected>[^']+)'")


def _nix_build(tfile, extra_args=()):
    """Run nix-build on the generated file; return combined stdout+stderr.

    Fix: universal_newlines=True so the captured output is str on Python 3
    as well as Python 2 (the original split bytes on '\\n', a TypeError
    under Python 3).
    """
    proc = subprocess.Popen([
        'nix-build',
        '-E', 'with import <nixpkgs> { }; callPackage ' + tfile + ' { }',
        '--no-out-link',
        '--keep-going',
        '-j1',
    ] + list(extra_args),
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
        universal_newlines=True)
    stdout, _ = proc.communicate()
    return stdout


def _fix_hashes(build_output, depsFile):
    """Replace each placeholder hash nix reported a mismatch for.

    Returns the updated depsFile text. (Extracted: the original duplicated
    this loop verbatim for the initial and final builds.)
    """
    for line in build_output.split('\n'):
        m = HASH_MISMATCH_RE.match(line)
        if m is not None:
            print("replace %r with %r" % (m.group("expected"), m.group("actual")),
                  file=sys.stderr)
            depsFile = depsFile.replace(m.group("expected"), m.group("actual"))
    return depsFile


def main():
    """Translate resolved.bzl into a Nix expression and print it to stdout."""
    # Load by evaluating the file into our globals; this defines `resolved`.
    # NOTE: exec of local file contents -- fine for a dev script, never for
    # untrusted input.
    with open("resolved.bzl") as f:
        exec(f.read(), globals())
    nix_rules = []
    # Parse rules, dispatching on the original Bazel rule class.
    for rule in resolved:
        ty = rule['original_rule_class']
        if ty == '@bazel_gazelle//internal:go_repository.bzl%go_repository':
            nix_rules.append(parse_go_repository(**rule['original_attributes']))
        elif ty == '@bazel_tools//tools/build_defs/repo:http.bzl%http_archive':
            nix_rules.append(parse_http_archive(**rule['original_attributes']))
        elif ty == '@bazel_tools//tools/build_defs/repo:http.bzl%repo_rule':
            nix_rules.append(parse_http_repo_rule(**rule['original_attributes']))
        elif ty == '@bazel_tools//tools/build_defs/repo:git.bzl%repo_rule':
            nix_rules.append(parse_git_repo_rule(**rule['original_attributes']))
        else:
            print("ignoring unknown rule type: %s" % (ty,), file=sys.stderr)
    joinedRules = '\n\n'.join(x.strip() for x in nix_rules)
    depsFile = """
# This file contains all Bazel repositories, manually translated to Nix code so
# that we can perform a build without any network access.
#
# When changing this file, it's helpful to run a gvisor build using the
# following command, which generates a "resolved.bzl" file containing all
# dependencies:
# bazel sync --experimental_repository_cache=$PWD/my_cache --experimental_repository_resolved_file=./resolved.bzl
#
{{
# From stdenv
fetchFromGitHub
, fetchgit
, fetchurl
, fetchzip
, buildGoPackage
, unzip
}}:
let
# Actual Bazel dependencies.
deps = rec {{
{rules}
}};
# Build gazelle using the same dependencies.
gazelle = buildGoPackage rec {{
name = "bazel-gazelle-${{version}}";
version = "2018-11-10";
goPackagePath = "github.com/bazelbuild/bazel-gazelle";
goDeps = ./gazelle-deps.nix;
subPackages = [ "cmd/gazelle" ];
src = deps.bazel_gazelle;
}};
# Wrapper around `fetchFromGitHub` that calls `gazelle` on the fetched
# repository.
# TODO(andrew-d): make this work on non-GitHub dependencies
fetchFromGitHubAndGazelle = args @ {{ owner, repo, ... }}: fetchFromGitHub (args // {{
extraPostFetch = ''
cd $out
${{gazelle}}/bin/gazelle \\
-go_prefix "github.com/${{owner}}/${{repo}}" \\
-repo_root "$PWD"
touch WORKSPACE
'' + (if args ? "extraPostFetch" then args.extraPostFetch else "");
}});
in deps
""".format(
        rules=joinedRules.replace('\n', '\n '),
    ).strip()
    # Write to temporary file
    tdir = tempfile.mkdtemp()
    tfile = path.join(tdir, 'default.nix')
    with open(tfile, 'w') as f:
        f.write(depsFile)
    # Copy gazelle deps file to the temporary directory
    shutil.copy2('gazelle-deps.nix', path.join(tdir, 'gazelle-deps.nix'))
    # TODO: figure out how to not manually-encode this ordering
    for dep in ['io_bazel_rules_go', 'bazel_gazelle']:
        print("performing initial build for: %s" % (dep,), file=sys.stderr)
        build_output = _nix_build(tfile, ['-A', dep])
        # Replace our bad hash with the expected hash, then re-write the
        # temp file so the next build sees the fix.
        depsFile = _fix_hashes(build_output, depsFile)
        with open(tfile, 'w') as f:
            f.write(depsFile)
    # Re-run the build for all dependencies
    print("performing final build", file=sys.stderr)
    build_output = _nix_build(tfile)
    depsFile = _fix_hashes(build_output, depsFile)
    # Strip trailing whitespace, and then print everything
    print('\n'.join(l.rstrip() for l in depsFile.split('\n')))


if __name__ == "__main__":
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment