Tiltfile aspect
tiltfile(
    name = "example",
    images = {
        "//example:image": "example/image",
    },
    k8s_objects = [
        "//example:k8s",
    ],
    template = "Tiltfile.template",
)

sh_binary(
    name = "tilt",
    srcs = ["tilt_wrapper.sh"],
    data = select({
        "@bazel_tools//src/conditions:darwin": ["@tilt_mac_x86_64//:tilt"],
        "//conditions:default": ["@tilt_linux_x86_64//:tilt"],
    }) + [":example"],
    visibility = ["//:__pkg__"],
)
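Note that the BUILD snippet above omits the load statement for the custom rule. Assuming the rule and aspect below are saved as tiltfile.bzl in the same package (the file name is an assumption, not stated in the gist), the BUILD file would start with:

    load(":tiltfile.bzl", "tiltfile")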
load("@bazel_skylib//lib:structs.bzl", "structs")
load("@bazel_skylib//lib:types.bzl", "types")
SourceFiles = provider(fields = ["files"])
def _source_files_impl(target, ctx):
files = []
transitive_depsets = []
# skip external dependencies, we only care about things in our workspace
if target.label.workspace_root.startswith("external/"):
return []
# try to determine the BUILD file for this target
if hasattr(ctx.rule.attr, "generator_location") and ctx.rule.attr.generator_location:
build_file = ctx.rule.attr.generator_location.split(":")[0]
files.append(build_file)
# record any file dependencies of this target. this is our main job.
file_attrs = structs.to_dict(ctx.rule.files)
for k, v in file_attrs.items():
files += [f.path for f in v]
# transitively record any files from targets that this target depends on
attr = structs.to_dict(ctx.rule.attr)
for k, v in attr.items():
if types.is_list(v):
for v2 in v:
if type(v2) == "Target" and SourceFiles in v2:
transitive_depsets.append(v2[SourceFiles].files)
elif type(v) == "Target" and SourceFiles in v:
transitive_depsets.append(v[SourceFiles].files)
return [
SourceFiles(
files = depset(
files,
transitive = transitive_depsets,
),
),
]
_source_files = aspect(
implementation = _source_files_impl,
doc = "Walk the dependency tree of a target and record all file dependencies.",
attr_aspects = ["*"],
)
def _deps_for_target(target):
    return [
        f
        for f in target[SourceFiles].files.to_list()
        if not (
            f.startswith("bazel-out/") or
            f.startswith("external/")
        )
    ]

def _normalized_target(target):
    l = target.label
    return "//%s:%s" % (l.package, l.name)

def _tiltfile_impl(ctx):
    # collect dependencies for each image target
    image_targets = []
    for image, name in ctx.attr.images.items():
        image_targets.append({
            "ref": name,
            "command": "bazel run %s" % _normalized_target(image),
            "deps": _deps_for_target(image),
            "live_update_sync": ctx.attr.live_update_sync.get(name, []),
            "live_update_fall_back_on": ctx.attr.live_update_fall_back_on.get(name, []),
        })

    # collect dependencies for each k8s_object target
    k8s_targets = []
    for k8s_obj in ctx.attr.k8s_objects:
        k8s_targets.append({
            "command": "bazel run %s" % _normalized_target(k8s_obj),
            "deps": _deps_for_target(k8s_obj),
        })

    # format everything we collected into a JSON blob
    targets_json = struct(
        images = image_targets,
        k8s = k8s_targets,
    ).to_json()

    ctx.actions.expand_template(
        template = ctx.file.template,
        output = ctx.outputs.tf,
        substitutions = {
            "%{targets}": targets_json,
        },
    )

    return [
        DefaultInfo(
            runfiles = ctx.runfiles(files = [ctx.outputs.tf]),
        ),
    ]
tiltfile = rule(
    implementation = _tiltfile_impl,
    doc = """Generate a Tiltfile for a set of image and k8s_object targets.

    To generate the Tiltfile, we walk the dependency tree of each image and
    k8s_object target, and create a JSON object that contains all the info
    that Tilt needs for each target.

    For a single image dependency and k8s_object, the JSON object might look
    like this:

        {
          "images": [
            {
              "ref": "my_app/image",
              "command": "bazel run //my_app:image",
              "deps": [
                "my_app/BUILD",
                "my_app/source_file_1",
                "some_lib/source_file_2"
              ]
            }
          ],
          "k8s": [
            {
              "command": "bazel run //my_app:k8s",
              "deps": [
                "my_app/BUILD",
                "my_app/k8s.yaml",
                "k8s/tilt.yaml"
              ]
            }
          ]
        }

    Any occurrences of "%{targets}" in the provided Tiltfile template are
    replaced with this JSON object, in raw format. To consume the object, the
    Tiltfile template should use `decode_json`, like this:

        TARGETS = decode_json('%{targets}')

    In a basic implementation, each of the objects in `images` can be used
    for a Tilt `custom_build` command, and each of the objects in `k8s` can
    be used for a `k8s_yaml(local())` invocation.
    """,
    attrs = {
        "images": attr.label_keyed_string_dict(
            doc = "Mapping of image targets to Docker image names.",
            aspects = [_source_files],
        ),
        "k8s_objects": attr.label_list(
            doc = "List of k8s_object targets to deploy.",
            aspects = [_source_files],
        ),
        "template": attr.label(
            mandatory = True,
            allow_single_file = True,
        ),
        "live_update_sync": attr.string_list_dict(
            doc = """Mapping of image names to [local, remote] path pairs for live update.

            For more details, see: https://docs.tilt.dev/api.html#api.sync

            These paths are naively injected into the corresponding image target
            in the generated JSON blob, and can be used by the Tiltfile template.
            """,
        ),
        "live_update_fall_back_on": attr.string_list_dict(
            doc = """Mapping of image names to files that require a full rebuild.

            This is only necessary when live_update_sync is specified and some
            files in the synced local path require a full image rebuild.

            These paths are naively injected into the corresponding image target
            in the generated JSON blob, and can be used by the Tiltfile template.
            """,
        ),
    },
    outputs = {
        "tf": "%{name}.tiltfile",
    },
)
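The BUILD example at the top does not exercise the live update attributes. Below is a minimal sketch of how they might be wired up; the local path example/src, the container path /app/src, and the requirements.txt file are illustrative assumptions, not values from the gist. The dictionary keys are the Docker image names (the string values of the images dict), since the rule looks them up with live_update_sync.get(name, []).

    tiltfile(
        name = "example",
        images = {
            "//example:image": "example/image",
        },
        k8s_objects = [
            "//example:k8s",
        ],
        template = "Tiltfile.template",
        # [local prefix, container prefix] pair consumed by the template's sync() calls
        # (paths are illustrative).
        live_update_sync = {
            "example/image": ["example/src", "/app/src"],
        },
        # changes to these (illustrative) files trigger a full image rebuild instead of
        # a live sync.
        live_update_fall_back_on = {
            "example/image": ["example/src/requirements.txt"],
        },
    )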
#!/usr/bin/env bash
set -euo pipefail

if [[ "$OSTYPE" == "darwin"* ]]; then
  TILT="external/tilt_mac_x86_64/tilt"
else
  TILT="external/tilt_linux_x86_64/tilt"
fi
TILT="${PWD}/${TILT}"

# copy tiltfile to workspace root. we need to do this because paths in a
# tiltfile are resolved relative to the tiltfile directory.
cp -f tools/tilt/example.tiltfile "$BUILD_WORKSPACE_DIRECTORY"
cd "$BUILD_WORKSPACE_DIRECTORY"

if [ $# -eq 0 ]; then
  # no args provided, run `tilt up` by default
  exec "$TILT" up --file=example.tiltfile
else
  exec "$TILT" "$@"
fi
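For reference, a rough sketch of how the wrapper would typically be invoked, assuming the BUILD file above lives in a tools/tilt package (the path implied by the cp command in the script):

    # no arguments: the wrapper runs `tilt up --file=example.tiltfile` from the workspace root
    bazel run //tools/tilt:tilt

    # anything after `--` is passed straight through to the tilt binary
    bazel run //tools/tilt:tilt -- down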
k8s_resource_assembly_version(2)

TARGETS = decode_json("""%{targets}""")

for image in TARGETS["images"]:
    live_update = []
    if image["live_update_sync"]:
        # figure out the source file path to container file path mapping.
        # generally this is something like "backend/src" to "/app/src".
        src, dest = image["live_update_sync"]

        # handle any paths that are explicitly configured to fall back to a
        # full rebuild
        live_update += [
            fall_back_on(f)
            for f in image["live_update_fall_back_on"]
        ]

        # use live sync for any paths that match the source file path
        live_update += [
            sync(f, f.replace(src, dest))
            for f in image["deps"]
            if f.startswith(src)
        ]

    custom_build(
        ref = image["ref"],
        command = image["command"],
        deps = image["deps"],
        tag = "latest",
        disable_push = True,
        live_update = live_update,
    )

for k8s in TARGETS["k8s"]:
    for f in k8s["deps"]:
        watch_file(f)

    k8s_yaml(local(k8s["command"]))
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "tilt_linux_x86_64",
build_file_content = "exports_files(['tilt'])",
sha256 = "9139e76faa40c559baabcf5a9bed6c89b6554532e9dd95abde9a0229827fea5c",
urls = ["https://github.com/windmilleng/tilt/releases/download/v0.7.13/tilt.0.7.13.linux.x86_64.tar.gz"],
)
http_archive(
name = "tilt_mac_x86_64",
build_file_content = "exports_files(['tilt'])",
sha256 = "659db55ccdb12a5f77ca640680c6069a161343229d9c2aecbfc26c01442aa8e5",
urls = ["https://github.com/windmilleng/tilt/releases/download/v0.7.13/tilt.0.7.13.mac.x86_64.tar.gz"],
)
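Putting the pieces together, the files above presumably map onto a workspace layout roughly like the following; the tools/tilt package path is inferred from the wrapper script and the tiltfile.bzl name is an assumption:

    WORKSPACE                      # http_archive entries for the tilt binaries
    tools/tilt/BUILD               # tiltfile() and sh_binary() targets
    tools/tilt/tiltfile.bzl        # SourceFiles aspect and tiltfile rule
    tools/tilt/tilt_wrapper.sh
    tools/tilt/Tiltfile.template
    example/BUILD                  # defines //example:image and //example:k8s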