Vendor dependencies

Let's see how I like this workflow.
This commit is contained in:
John Doty 2022-12-19 08:27:18 -08:00
parent 34d1830413
commit 9c435dc440
7500 changed files with 1665121 additions and 99 deletions

View file

@ -0,0 +1,762 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:local_only.bzl", "link_cxx_binary_locally")
load("@prelude//cxx:cxx_link_utility.bzl", "make_link_args")
load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo")
load(
"@prelude//cxx:linker.bzl",
"get_default_shared_library_name",
"get_shared_library_name_linker_flags",
)
load(
"@prelude//linking:link_info.bzl",
"LinkArgs",
"LinkStyle", #@unused Used as a type
"get_link_args",
)
load("@prelude//utils:set.bzl", "set")
load(
":build_params.bzl",
"BuildParams", # @unused Used as a type
"CrateType",
"Emit",
"crate_type_codegen",
"crate_type_linked",
"emit_needs_codegen",
"output_filename",
)
load(
":failure_filter.bzl",
"RustFailureFilter",
"failure_filter",
)
load(
":link_info.bzl",
"RustLinkInfo",
"inherited_non_rust_link_info",
"normalize_crate",
"resolve_deps",
"style_info",
)
load(":rust_toolchain.bzl", "ctx_toolchain_info")
# Struct for sharing common args between rustc and rustdoc
# (rustdoc just relays bunch of the same args to rustc when trying to gen docs)
CommonArgsInfo = record(
    # The fully-assembled shared command-line arguments.
    args = field("cmd_args"),
    # Per-flavour subdirectory keeping output filenames distinct.
    subdir = field(str.type),
    # Filename fragment used for temporary/intermediate files (argsfiles etc).
    tempfile = field(str.type),
    # Short human-readable flavour description, used in action identifiers.
    short_cmd = field(str.type),
    # True when this emit needs no codegen (metadata/"check"-style build).
    is_check = field(bool.type),
    # Crate name -> providing dependency target (for the unused-deps linter).
    crate_map = field({str.type: "label"}),
)
# Compile info which is reusable between multiple compilation command performed
# by the same rule.
CompileContext = record(
    # Symlink root containing all sources.
    symlinked_srcs = field("artifact"),
    # Linker args to pass the linker wrapper to rustc.
    linker_args = field("cmd_args"),
    # Clippy wrapper (wrapping clippy-driver so it has the same CLI as rustc)
    clippy_wrapper = field("cmd_args"),
    # Memoized common args for reuse
    common_args = field({(CrateType.type, Emit.type, LinkStyle.type): CommonArgsInfo.type}),
)
# Result of one rustc invocation.
RustcOutput = record(
    # Emitted artifacts, keyed by emit kind.
    outputs = field({Emit.type: "artifact"}),
    # Diagnostic artifacts ("<name>.json"/"<name>.txt"), keyed by filename.
    diag = field({str.type: "artifact"}),
)
# Build the per-rule CompileContext: the source symlink tree plus the linker
# and clippy wrapper scripts that every compile action reuses.
def compile_context(ctx: "context") -> CompileContext.type:
    # Map each plain source to its short path, then overlay the explicit
    # mappings (mapped_srcs maps source artifact -> destination path, so it
    # gets inverted here).
    srcs_map = {}
    for src in ctx.attrs.srcs:
        srcs_map[src.short_path] = src
    for artifact, path in ctx.attrs.mapped_srcs.items():
        srcs_map[path] = artifact
    return CompileContext(
        symlinked_srcs = ctx.actions.symlinked_dir("__srcs", srcs_map),
        linker_args = _linker_args(ctx),
        clippy_wrapper = _clippy_wrapper(ctx),
        common_args = {},
    )
# Run rustdoc (via the rustc_action wrapper) over this crate and return the
# generated documentation directory artifact.
def generate_rustdoc(
        ctx: "context",
        compile_ctx: CompileContext.type,
        crate: str.type,
        # link style doesn't matter, but caller should pass in build params
        # with static-pic (to get best cache hits for deps)
        params: BuildParams.type,
        default_roots: [str.type],
        document_private_items: bool.type) -> "artifact":
    toolchain_info = ctx_toolchain_info(ctx)
    common_args = _compute_common_args(
        ctx = ctx,
        compile_ctx = compile_ctx,
        # to make sure we get the rmeta's generated for the crate dependencies,
        # rather than full .rlibs
        emit = Emit("metadata"),
        crate = crate,
        params = params,
        link_style = params.dep_link_style,
        default_roots = default_roots,
    )
    subdir = common_args.subdir + "_rustdoc"
    # The whole rustdoc output directory is declared as a single artifact.
    output = ctx.actions.declare_output(subdir)
    plain_env, path_env = _process_env(ctx)
    # Invoke rustdoc through the toolchain's rustc_action wrapper so env vars
    # (plain and path-canonicalized) are applied the same way as for rustc.
    rustdoc_cmd = cmd_args(
        toolchain_info.rustc_action,
        [cmd_args("--env=", k, "=", v, delimiter = "") for k, v in plain_env.items()],
        [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()],
        cmd_args(str(ctx.label.raw_target()), format = "--env=RUSTDOC_BUCK_TARGET={}"),
        toolchain_info.rustdoc,
        toolchain_info.rustdoc_flags,
        ctx.attrs.rustdoc_flags,
        common_args.args,
        cmd_args(output.as_output(), format = "--out-dir={}"),
    )
    if document_private_items:
        rustdoc_cmd.add("--document-private-items")
    # Point cross-crate doc links at the configured HTML root for every
    # same-cell Rust dependency.
    url_prefix = toolchain_info.extern_html_root_url_prefix
    for rust_dependency in resolve_deps(ctx):
        dep = rust_dependency.dep
        if dep.label.cell != ctx.label.cell:
            # TODO: support a different extern_html_root_url_prefix per cell
            continue
        if rust_dependency.name:
            # Explicit dep name overrides the crate's own name.
            name = normalize_crate(rust_dependency.name)
        else:
            # Non-Rust deps (no RustLinkInfo) are skipped.
            info = dep.get(RustLinkInfo)
            if info == None:
                continue
            name = info.crate
        rustdoc_cmd.add(
            "--extern-html-root-url={}={}/{}:{}"
                .format(name, url_prefix, dep.label.package, dep.label.name),
        )
    # rustdoc itself and the source tree are inputs, not command-line args.
    rustdoc_cmd.hidden(toolchain_info.rustdoc, compile_ctx.symlinked_srcs)
    ctx.actions.run(rustdoc_cmd, category = "rustdoc")
    return output
# Generate multiple compile artifacts so that distinct sets of artifacts can be
# generated concurrently.
def rust_compile_multi(
        ctx: "context",
        compile_ctx: CompileContext.type,
        emits: [Emit.type],
        crate: str.type,
        params: BuildParams.type,
        link_style: LinkStyle.type,
        default_roots: [str.type],
        extra_link_args: [""] = [],
        predeclared_outputs: {Emit.type: "artifact"} = {},
        extra_flags: [[str.type, "resolved_macro"]] = [],
        is_binary: bool.type = False) -> [RustcOutput.type]:
    # Fan out one rust_compile() per requested emit kind; everything else
    # about the compilation is identical across the set.
    return [
        rust_compile(
            ctx = ctx,
            compile_ctx = compile_ctx,
            emit = one_emit,
            crate = crate,
            params = params,
            link_style = link_style,
            default_roots = default_roots,
            extra_link_args = extra_link_args,
            predeclared_outputs = predeclared_outputs,
            extra_flags = extra_flags,
            is_binary = is_binary,
        )
        for one_emit in emits
    ]
# Generate a compilation action. A single instance of rustc can emit
# numerous output artifacts, so return an artifact object for each of
# them.
def rust_compile(
        ctx: "context",
        compile_ctx: CompileContext.type,
        emit: Emit.type,
        crate: str.type,
        params: BuildParams.type,
        link_style: LinkStyle.type,
        default_roots: [str.type],
        extra_link_args: [""] = [],
        predeclared_outputs: {Emit.type: "artifact"} = {},
        extra_flags: [[str.type, "resolved_macro"]] = [],
        is_binary: bool.type = False) -> RustcOutput.type:
    toolchain_info = ctx_toolchain_info(ctx)
    # Lints split into the set rustc consumes and the clippy-only set.
    lints, clippy_lints = _lint_flags(ctx)
    # Args shared with rustdoc; memoized per (crate_type, emit, link_style).
    common_args = _compute_common_args(
        ctx = ctx,
        compile_ctx = compile_ctx,
        emit = emit,
        crate = crate,
        params = params,
        link_style = link_style,
        default_roots = default_roots,
    )
    rustc_cmd = cmd_args(
        common_args.args,
        # Strip the symlink-tree prefix from paths baked into the output.
        cmd_args("--remap-path-prefix=", compile_ctx.symlinked_srcs, "/=", ctx.label.package, delimiter = ""),
        compile_ctx.linker_args,
        # Report unused --extern crates in the notification stream
        ["--json=unused-externs-silent", "-Wunused-crate-dependencies"] if toolchain_info.report_unused_deps else [],
        extra_flags,
        lints,
    )
    # Only crate types that actually invoke the linker (and only non-check
    # builds) need native link args.
    if crate_type_linked(params.crate_type) and not common_args.is_check:
        subdir = common_args.subdir
        tempfile = common_args.tempfile
        # If this crate type has an associated native dep link style, include deps
        # of that style.
        (link_args, hidden, _dwo_dir_unused_in_rust) = make_link_args(
            ctx,
            [
                LinkArgs(flags = extra_link_args),
                get_link_args(
                    inherited_non_rust_link_info(ctx),
                    link_style,
                ),
            ],
            "{}-{}".format(subdir, tempfile),
        )
        # Native link args go to the linker wrapper via an @argsfile.
        linker_argsfile, _ = ctx.actions.write(
            "{}/__{}_linker_args.txt".format(subdir, tempfile),
            link_args,
            allow_args = True,
        )
        rustc_cmd.add(cmd_args(linker_argsfile, format = "-Clink-arg=@{}"))
        rustc_cmd.hidden(hidden)
    # If we're using failure filtering then we need to make sure the final
    # artifact location is the predeclared one since its specific path may have
    # already been encoded into the other compile args (eg rpath). So we still
    # let rustc_emits generate its own output artifacts, and then make sure we
    # use the predeclared one as the output after the failure filter action
    # below. Otherwise we'll use the predeclared outputs directly.
    if toolchain_info.failure_filter:
        outputs, emit_args = _rustc_emits(
            ctx = ctx,
            emit = emit,
            predeclared_outputs = {},
            subdir = common_args.subdir,
            crate = crate,
            params = params,
        )
    else:
        outputs, emit_args = _rustc_emits(
            ctx = ctx,
            emit = emit,
            predeclared_outputs = predeclared_outputs,
            subdir = common_args.subdir,
            crate = crate,
            params = params,
        )
    # The main rustc invocation. build_status is None unless failure
    # filtering is enabled (see _rustc_invoke).
    (diag, build_status) = _rustc_invoke(
        ctx = ctx,
        compile_ctx = compile_ctx,
        prefix = "{}/{}".format(common_args.subdir, common_args.tempfile),
        rustc_cmd = cmd_args(toolchain_info.compiler, rustc_cmd, emit_args),
        diag = "diag",
        outputs = outputs.values(),
        short_cmd = common_args.short_cmd,
        is_binary = is_binary,
        crate_map = common_args.crate_map,
    )
    # Add clippy diagnostic targets for check builds
    if common_args.is_check:
        # We don't really need the outputs from this build, just to keep the artifact accounting straight
        clippy_out, clippy_emit_args = _rustc_emits(ctx, emit, {}, common_args.subdir + "-clippy", crate, params)
        # Same command but run through the clippy wrapper instead of rustc.
        (clippy_diag, _) = _rustc_invoke(
            ctx = ctx,
            compile_ctx = compile_ctx,
            prefix = "{}/{}".format(common_args.subdir, common_args.tempfile),
            rustc_cmd = cmd_args(compile_ctx.clippy_wrapper, rustc_cmd, clippy_lints, clippy_emit_args),
            diag = "clippy",
            outputs = clippy_out.values(),
            short_cmd = common_args.short_cmd,
            is_binary = False,
            crate_map = common_args.crate_map,
        )
        # Merge "clippy.json"/"clippy.txt" into the diag map.
        diag.update(clippy_diag)
    if toolchain_info.failure_filter:
        # Filter each output through a failure filter
        filtered_outputs = {}
        # NOTE: `emit` here shadows the function parameter.
        for (emit, output) in outputs.items():
            # This is only needed when this action's output is being used as an
            # input, so we only need standard diagnostics (clippy is always
            # asked for explicitly).
            stderr = diag["diag.txt"]
            filter_prov = RustFailureFilter(buildstatus = build_status, required = output, stderr = stderr)
            filtered_outputs[emit] = failure_filter(
                ctx = ctx,
                prefix = "{}/{}".format(common_args.subdir, emit.value),
                predecl_out = predeclared_outputs.get(emit),
                failprov = filter_prov,
                short_cmd = common_args.short_cmd,
            )
    else:
        filtered_outputs = outputs
    return RustcOutput(outputs = filtered_outputs, diag = diag)
# --extern <crate>=<path> for direct dependencies
# -Ldependency=<dir> for transitive dependencies
# For native dependencies, we use -Clink-arg=@argsfile
#
# Returns a pair of:
# - cmd_args carrying the --extern/-Ldependency flags (the transitive-dep
#   symlink dirs are declared as a side effect of building these args)
# - the mapping from crate names back to targets (needed so that a deps
#   linter knows what deps need fixing)
def _dependency_args(
        ctx: "context",
        subdir: str.type,
        crate_type: CrateType.type,
        link_style: LinkStyle.type,
        is_check: bool.type) -> ("cmd_args", {str.type: "label"}):
    args = cmd_args()
    transitive_deps = {}
    crate_targets = {}
    for x in resolve_deps(ctx):
        # An explicit name on the dep entry overrides the crate's own name.
        crate = x.name and normalize_crate(x.name)
        dep = x.dep
        # Rust dependency; native deps (no RustLinkInfo) are linked via the
        # linker argsfile instead, so skip them here.
        info = dep.get(RustLinkInfo)
        if info == None:
            continue
        crate = crate or info.crate
        style = style_info(info, link_style)
        # Use rmeta dependencies whenever possible because they
        # should be cheaper to produce.
        if is_check or (ctx_toolchain_info(ctx).pipelined and not crate_type_codegen(crate_type)):
            artifact = style.rmeta
            transitive_artifacts = style.transitive_rmeta_deps
        else:
            artifact = style.rlib
            transitive_artifacts = style.transitive_deps
        # Per-dependency flags are rendered as a "flag1,flag2:" prefix on the
        # --extern value.
        flags = ""
        if x.flags != []:
            flags = ",".join(x.flags) + ":"
        args.add(cmd_args("--extern=", flags, crate, "=", artifact, delimiter = ""))
        crate_targets[crate] = dep.label
        # Unwanted transitive_deps have already been excluded
        transitive_deps.update(transitive_artifacts)
    # Add as many -Ldependency dirs as we need to avoid name conflicts
    deps_dirs = [{}]
    for dep in transitive_deps.keys():
        name = dep.basename
        if name in deps_dirs[-1]:
            deps_dirs.append({})
        deps_dirs[-1][name] = dep
    for idx, srcs in enumerate(deps_dirs):
        deps_dir = "{}-deps{}-{}".format(subdir, ("-check" if is_check else ""), idx)
        dep_link_dir = ctx.actions.symlinked_dir(deps_dir, srcs)
        args.add(cmd_args(dep_link_dir, format = "-Ldependency={}"))
    return (args, crate_targets)
# Render one lint level's worth of flags: picks either the "clippy::" lints
# or the plain rustc lints, each formatted as -A<lint>/-D<lint>/-W<lint>.
def _lintify(flag: str.type, clippy: bool.type, lints: ["resolved_macro"]) -> "cmd_args":
    selected = [lint for lint in lints if str(lint).startswith("\"clippy::") == clippy]
    return cmd_args(selected, format = "-{}{{}}".format(flag))
# Build the (rustc lints, clippy lints) flag pair from the toolchain's
# allow/deny/warn lint lists.
def _lint_flags(ctx: "context") -> ("cmd_args", "cmd_args"):
    toolchain_info = ctx_toolchain_info(ctx)
    lint_sets = [
        ("A", toolchain_info.allow_lints),
        ("D", toolchain_info.deny_lints),
        ("W", toolchain_info.warn_lints),
    ]
    plain = cmd_args([_lintify(flag, False, lints) for flag, lints in lint_sets])
    clippy = cmd_args([_lintify(flag, True, lints) for flag, lints in lint_sets])
    return (plain, clippy)
# Canonicalize "-g" (which rustdoc doesn't understand) into its documented
# equivalent "-Cdebuginfo=2". Rewrites the list in place and returns it.
def _rustc_flags(flags: [[str.type, "resolved_macro"]]) -> [[str.type, "resolved_macro"]]:
    for position, flag_arg in enumerate(flags):
        # Note: matches the str() rendering of a resolved flag, quotes included.
        if str(flag_arg) == '"-g"':
            flags[position] = "-Cdebuginfo=2"
    return flags
# Compute which are common to both rustc and rustdoc
def _compute_common_args(
        ctx: "context",
        compile_ctx: CompileContext.type,
        emit: Emit.type,
        crate: str.type,
        params: BuildParams.type,
        link_style: LinkStyle.type,
        default_roots: [str.type]) -> CommonArgsInfo.type:
    crate_type = params.crate_type
    # Memoized per (crate_type, emit, link_style) in the CompileContext.
    args_key = (crate_type, emit, link_style)
    if args_key in compile_ctx.common_args:
        return compile_ctx.common_args[args_key]
    # Keep filenames distinct in per-flavour subdirs
    subdir = "{}-{}-{}-{}".format(crate_type.value, params.reloc_model.value, link_style.value, emit.value)
    # Included in tempfiles
    tempfile = "{}-{}".format(crate, emit.value)
    srcs = ctx.attrs.srcs
    mapped_srcs = ctx.attrs.mapped_srcs
    # All source paths as they appear in the symlink tree; used to infer the
    # crate root when `crate_root` isn't set explicitly.
    all_srcs = map(lambda s: s.short_path, srcs) + mapped_srcs.values()
    crate_root = ctx.attrs.crate_root or _crate_root(all_srcs, crate, default_roots)
    is_check = not emit_needs_codegen(emit)
    dependency_args, crate_map = _dependency_args(
        ctx = ctx,
        subdir = subdir,
        crate_type = crate_type,
        link_style = link_style,
        is_check = is_check,
    )
    if crate_type == CrateType("proc-macro"):
        # proc-macro crates implicitly depend on the compiler's proc_macro lib.
        dependency_args.add("--extern=proc_macro")
    if crate_type == CrateType("cdylib") and not is_check:
        # Set the shared library name (eg -soname) via the C++ toolchain's
        # linker, passed through rustc as -Clink-arg.
        linker_info = ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info
        shlib_name = get_default_shared_library_name(linker_info, ctx.label)
        dependency_args.add(cmd_args(
            get_shared_library_name_linker_flags(linker_info.type, shlib_name),
            format = "-Clink-arg={}",
        ))
    toolchain_info = ctx_toolchain_info(ctx)
    # Edition comes from the rule, falling back to the toolchain default;
    # analysis fails if neither is set.
    edition = ctx.attrs.edition or toolchain_info.default_edition or \
              fail("missing 'edition' attribute, and there is no 'default_edition' set by the toolchain")
    args = cmd_args(
        # First positional arg: path to the crate root inside the symlink tree.
        cmd_args(compile_ctx.symlinked_srcs, "/", crate_root, delimiter = ""),
        "--crate-name={}".format(crate),
        "--crate-type={}".format(crate_type.value),
        "-Crelocation-model={}".format(params.reloc_model.value),
        "--edition={}".format(edition),
        # Full label as primary disambiguator between same-named crates.
        "-Cmetadata={}".format(_metadata(ctx.label)[0]),
        # Make diagnostics json with the option to extract rendered text
        "--error-format=json",
        "--json=diagnostic-rendered-ansi",
        ["-Cprefer-dynamic=yes"] if crate_type == CrateType("dylib") else [],
        ["--target={}".format(toolchain_info.rustc_target_triple)] if toolchain_info.rustc_target_triple else [],
        _rustc_flags(toolchain_info.rustc_flags),
        _rustc_flags(toolchain_info.rustc_check_flags) if is_check else [],
        _rustc_flags(ctx.attrs.rustc_flags),
        cmd_args(ctx.attrs.features, format = '--cfg=feature="{}"'),
        dependency_args,
    )
    common_args = CommonArgsInfo(
        args = args,
        subdir = subdir,
        tempfile = tempfile,
        short_cmd = "{},{},{}".format(crate_type.value, params.reloc_model.value, emit.value),
        is_check = is_check,
        crate_map = crate_map,
    )
    # Cache for subsequent calls with the same key.
    compile_ctx.common_args[args_key] = common_args
    return common_args
# Return wrapper script for clippy-driver to make sure sysroot is set right
# We need to make sure clippy is using the same sysroot - compiler, std libraries -
# as rustc itself, so explicitly invoke rustc to get the path. This is a
# (small - ~15ms per invocation) perf hit but only applies when generating
# specifically requested clippy diagnostics.
def _clippy_wrapper(ctx: "context") -> "cmd_args":
    clippy_driver = cmd_args(ctx_toolchain_info(ctx).clippy_driver)
    rustc = cmd_args(ctx_toolchain_info(ctx).compiler)
    wrapper_file, _ = ctx.actions.write(
        ctx.actions.declare_output("__clippy_driver_wrapper.sh"),
        [
            "#!/bin/bash",
            # Force clippy to be clippy: https://github.com/rust-lang/rust-clippy/blob/e405c68b3c1265daa9a091ed9b4b5c5a38c0c0ba/src/driver.rs#L334
            "export __CLIPPY_INTERNAL_TESTS=true",
            # Query rustc for the sysroot so clippy-driver matches it exactly.
            cmd_args(rustc, format = "export SYSROOT=$({} --print=sysroot)"),
            cmd_args(clippy_driver, format = "{} \"$@\"\n"),
        ],
        is_executable = True,
        allow_args = True,
    )
    # The driver and compiler are runtime inputs of the wrapper, not args.
    return cmd_args(wrapper_file).hidden(clippy_driver, rustc)
# This is a hack because we need to pass the linker to rustc
# using -Clinker=path and there is currently no way of doing this
# without an artifact. We create a wrapper (which is an artifact),
# and add -Clinker=
def _linker_args(ctx: "context") -> "cmd_args":
    # Linker binary plus toolchain- and rule-level linker flags, in that order.
    linker_info = ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info
    linker = cmd_args(
        linker_info.linker,
        linker_info.linker_flags or [],
        ctx.attrs.linker_flags,
    )
    # Now we create a wrapper to actually run the linker. Use $(cat <<heredoc) to
    # combine the multiline command into a single logical command.
    wrapper, _ = ctx.actions.write(
        ctx.actions.declare_output("__linker_wrapper.sh"),
        [
            "#!/bin/bash",
            # Shell-quote each element and join with line continuations.
            cmd_args(cmd_args(_shell_quote(linker), delimiter = " \\\n"), format = "{} \"$@\"\n"),
        ],
        is_executable = True,
        allow_args = True,
    )
    # The real linker command is a runtime input of the wrapper script.
    return cmd_args(wrapper, format = "-Clinker={}").hidden(linker)
# Shell-quote every element of `args` so they survive embedding in a script.
def _shell_quote(args: "cmd_args") -> "cmd_args":
    quoted = cmd_args(args, quote = "shell")
    return quoted
# Returns the full label and its hash. The full label is used for `-Cmetadata`
# which provided the primary disambiguator for two otherwise identically named
# crates. The hash is added to the filename to give them a lower likelihood of
# duplicate names, but it doesn't matter if they collide.
def _metadata(label: "label") -> (str.type, str.type):
    full = str(label.raw_target())
    hashed = hash(full)
    if hashed < 0:
        hashed = -hashed
    # Zero-pad the hex rendering to (at least) 8 characters.
    hex_str = "%x" % hashed
    padding = "0" * (8 - len(hex_str))
    return (full, padding + hex_str)
# Infer the crate root source file: either a conventional root name
# (eg main.rs/lib.rs from default_roots) or "<crate>.rs". Fails unless
# exactly one candidate matches.
def _crate_root(
        srcs: [str.type],
        crate: str.type,
        default_roots: [str.type]) -> str.type:
    matches = set()
    named_root = crate + ".rs"
    for src in srcs:
        basename = src.split("/")[-1]
        if basename == named_root or basename in default_roots:
            matches.add(src)
    if matches.size() == 1:
        return matches.list()[0]
    fail("Could not infer crate_root. candidates=%s\nAdd 'crate_root = \"src/example.rs\"' to your attributes to disambiguate." % matches.list())
# Take a desired output and work out how to convince rustc to generate it
def _rustc_emits(
        ctx: "context",
        emit: Emit.type,
        predeclared_outputs: {Emit.type: "artifact"},
        subdir: str.type,
        crate: str.type,
        params: BuildParams.type) -> ({Emit.type: "artifact"}, "cmd_args"):
    toolchain_info = ctx_toolchain_info(ctx)
    crate_type = params.crate_type
    # Metadata for pipelining needs has enough info to be used as an input
    # for dependents. To do this reliably, we actually emit "link" but
    # suppress actual codegen with -Zno-codegen.
    #
    # We don't bother to do this with "codegen" crates - ie, ones which are
    # linked into an artifact like binaries and dylib, since they're not
    # used as a pipelined dependency input.
    pipeline_meta = emit == Emit("metadata") and \
                    toolchain_info.pipelined and \
                    not crate_type_codegen(crate_type)
    emit_args = cmd_args()
    if emit in predeclared_outputs:
        # Caller already declared where this artifact must land.
        output = predeclared_outputs[emit]
    else:
        if emit == Emit("save-analysis"):
            # save-analysis output name is fixed by rustc; only the dir moves.
            filename = "{}/save-analysis/{}{}.json".format(subdir, params.prefix, crate)
        else:
            # Extra filename hash lowers the chance of same-named artifacts
            # from different targets colliding (see _metadata).
            extra_hash = "-" + _metadata(ctx.label)[1]
            emit_args.add("-Cextra-filename={}".format(extra_hash))
            if pipeline_meta:
                # Make sure hollow rlibs are distinct from real ones
                filename = subdir + "/hollow/" + output_filename(crate, Emit("link"), params, extra_hash)
            else:
                filename = subdir + "/" + output_filename(crate, emit, params, extra_hash)
        output = ctx.actions.declare_output(filename)
    outputs = {emit: output}
    if pipeline_meta:
        # If we're doing a pipelined build, instead of emitting an actual rmeta
        # we emit a "hollow" .rlib - ie, it only contains lib.rmeta and no object
        # code. It should contain full information needed by any dependent
        # crate which is generating code (MIR, etc).
        # Requires https://github.com/rust-lang/rust/pull/86045
        emit_args.add(
            cmd_args(output.as_output(), format = "--emit=link={}"),
            "-Zno-codegen",
        )
    elif emit == Emit("expand"):
        # "expand" is a pseudo-emit implemented via -Zunpretty.
        emit_args.add(
            "-Zunpretty=expanded",
            cmd_args(output.as_output(), format = "-o{}"),
        )
    elif emit == Emit("save-analysis"):
        emit_args.add(
            "--emit=metadata",
            "-Zsave-analysis",
            # No way to explicitly set the output location except with the output dir
            cmd_args(output.as_output(), format = "--out-dir={}").parent(2),
        )
    else:
        # Assume https://github.com/rust-lang/rust/issues/85356 is fixed (ie
        # https://github.com/rust-lang/rust/pull/85362 is applied)
        emit_args.add(cmd_args("--emit=", emit.value, "=", output.as_output(), delimiter = ""))
    if emit not in (Emit("expand"), Emit("save-analysis")):
        # Catch-all output dir for any additional files rustc produces.
        extra_dir = subdir + "/extras/" + output_filename(crate, emit, params)
        extra_out = ctx.actions.declare_output(extra_dir)
        emit_args.add(cmd_args(extra_out.as_output(), format = "--out-dir={}"))
        if ctx.attrs.incremental_enabled:
            # Incremental state dir, keyed by the configured build mode.
            build_mode = ctx.attrs.incremental_build_mode
            incremental_out = ctx.actions.declare_output("{}/extras/incremental/{}".format(subdir, build_mode))
            incremental_cmd = cmd_args(incremental_out.as_output(), format = "-Cincremental={}")
            emit_args.add(incremental_cmd)
    return (outputs, emit_args)
# Invoke rustc and capture outputs
def _rustc_invoke(
        ctx: "context",
        compile_ctx: CompileContext.type,
        prefix: str.type,
        rustc_cmd: "cmd_args",
        diag: str.type,
        outputs: ["artifact"],
        short_cmd: str.type,
        is_binary: bool.type,
        crate_map: {str.type: "label"}) -> ({str.type: "artifact"}, ["artifact", None]):
    toolchain_info = ctx_toolchain_info(ctx)
    plain_env, path_env = _process_env(ctx)
    # Save diagnostic outputs
    json_diag = ctx.actions.declare_output("{}-{}.json".format(prefix, diag))
    txt_diag = ctx.actions.declare_output("{}-{}.txt".format(prefix, diag))
    # rustc_action is the wrapper that applies env vars, captures diagnostics
    # and enforces required outputs around the real rustc invocation.
    rustc_action = cmd_args(toolchain_info.rustc_action)
    compile_cmd = cmd_args(
        cmd_args(json_diag.as_output(), format = "--diag-json={}"),
        cmd_args(txt_diag.as_output(), format = "--diag-txt={}"),
        "--remap-cwd-prefix=",
        "--buck-target={}".format(ctx.label.raw_target()),
    )
    # Crate-name -> target map for the unused-deps linter.
    for k, v in crate_map.items():
        compile_cmd.add(cmd_args("--crate-map=", k, "=", str(v.raw_target()), delimiter = ""))
    for k, v in plain_env.items():
        # The env variable may have newlines in it (yuk), but when writing them to an @file,
        # we can't escape the newlines. Therefore leave them on the command line
        rustc_action.add(cmd_args("--env=", k, "=", v, delimiter = ""))
    for k, v in path_env.items():
        compile_cmd.add(cmd_args("--path-env=", k, "=", v, delimiter = ""))
    build_status = None
    if toolchain_info.failure_filter:
        # Build status for fail filter
        build_status = ctx.actions.declare_output("{}_build_status-{}.json".format(prefix, diag))
        compile_cmd.add(cmd_args(build_status.as_output(), format = "--failure-filter={}"))
    # Tell the wrapper which artifacts rustc must actually produce.
    for out in outputs:
        compile_cmd.add("--required-output", out.short_path, out.as_output())
    compile_cmd.add(rustc_cmd)
    compile_cmd.hidden(toolchain_info.compiler, compile_ctx.symlinked_srcs)
    # Bulk of the command goes into an @argsfile (env vars excepted, above).
    compile_cmd_file, extra_args = ctx.actions.write("{}-{}.args".format(prefix, diag), compile_cmd, allow_args = True)
    incremental_enabled = ctx.attrs.incremental_enabled
    # Binaries that must link locally, and incremental builds (which need the
    # persistent incremental dir), cannot run remotely.
    local_only = (is_binary and link_cxx_binary_locally(ctx)) or incremental_enabled
    identifier = "{} {} [{}]".format(prefix, short_cmd, diag)
    ctx.actions.run(
        cmd_args(rustc_action, cmd_args(compile_cmd_file, format = "@{}")).hidden(compile_cmd, extra_args),
        local_only = local_only,
        category = "rustc",
        identifier = identifier,
        # Incremental state must survive between builds.
        no_outputs_cleanup = incremental_enabled,
    )
    return ({diag + ".json": json_diag, diag + ".txt": txt_diag}, build_status)
# Separate env settings into "plain" and "with path". Path env vars are often
# used in Rust `include!()` and similar directives, which always interpret the
# path relative to the source file containing the directive. Since paths in env
# vars are often expanded from macros such as `$(location)`, they will be
# cell-relative which will not work properly. To solve this, we canonicalize
# paths to absolute paths so they'll work in any context. Hence the need to
# distinguish path from non-path. (This will not work if the value contains both
# path and non-path content, but we'll burn that bridge when we get to it.)
def _process_env(
        ctx: "context") -> ({str.type: "cmd_args"}, {str.type: "cmd_args"}):
    # Values with artifact inputs go into path_env; pure strings into plain_env.
    path_env = {}
    plain_env = {}
    for key, value in ctx.attrs.env.items():
        value_args = cmd_args(value)
        if len(value_args.inputs) > 0:
            path_env[key] = value_args
        else:
            plain_env[key] = value_args
    return (plain_env, path_env)

View file

@ -0,0 +1,250 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Rules for mapping requirements to options
load(
"@prelude//linking:link_info.bzl",
"LinkStyle",
"Linkage", # @unused Used as a type
)
load("@prelude//utils:utils.bzl", "expect")
# --crate-type=
# Excludes `lib` because we want to explicitly choose the library flavour
CrateType = enum(
    # Binary
    "bin",
    # Rust linkage
    "rlib",
    "dylib",
    "proc-macro",
    # Native linkage
    # (artifacts intended to be consumed by non-Rust code, eg C++)
    "cdylib",
    "staticlib",
)
# Crate type is intended for consumption by Rust code
def crate_type_rust_linkage(crate_type: CrateType.type) -> bool.type:
    rust_linked = ["rlib", "dylib", "proc-macro"]
    return crate_type.value in rust_linked
# Crate type is intended for native linkage (eg C++)
def crate_type_native_linkage(crate_type: CrateType.type) -> bool.type:
    kind = crate_type.value
    return kind == "cdylib" or kind == "staticlib"
# Crate type which invokes the linker
def crate_type_linked(crate_type: CrateType.type) -> bool.type:
    linker_invoking = ["bin", "dylib", "proc-macro", "cdylib"]
    return crate_type.value in linker_invoking
# Crate type which should include transitive deps
def crate_type_transitive_deps(crate_type: CrateType.type) -> bool.type:
    # not sure about staticlib
    wants_transitive = ["rlib", "dylib", "staticlib"]
    return crate_type.value in wants_transitive
# Crate type which should always need codegen
def crate_type_codegen(crate_type: CrateType.type) -> bool.type:
    needs_link = crate_type_linked(crate_type)
    needs_native = crate_type_native_linkage(crate_type)
    return needs_link or needs_native
# -Crelocation-model= from --print relocation-models
RelocModel = enum(
    # Common
    "static",
    "pic",
    # Various obscure types
    "dynamic-no-pic",
    "ropi",
    "rwpi",
    "ropi-rwpi",
    # Let rustc pick its own default.
    "default",
)
# --emit=
# Values correspond to rustc's --emit kinds, plus two pseudo kinds that are
# translated to other flags in _rustc_emits.
Emit = enum(
    "asm",
    "llvm-bc",
    "llvm-ir",
    "obj",
    "metadata",
    "link",
    "dep-info",
    "mir",
    "save-analysis", # pseudo emit alias for metadata + -Zsave-analysis
    "expand", # pseudo emit alias for -Zunpretty=expanded
)
# Emitting this artifact generates code
def emit_needs_codegen(emit: Emit.type) -> bool.type:
    codegen_emits = ["asm", "llvm-bc", "llvm-ir", "obj", "link", "mir"]
    return emit.value in codegen_emits
# Full parameter set describing one build flavour of a crate.
BuildParams = record(
    crate_type = field(CrateType.type),
    reloc_model = field(RelocModel.type),
    dep_link_style = field(LinkStyle.type), # what link_style to use for dependencies
    # XXX This needs to be OS-specific
    # Filename prefix/suffix for the linked ("link" emit) artifact.
    prefix = field(str.type),
    suffix = field(str.type),
)
# The rustc-facing subset of BuildParams (no filename information).
RustcFlags = record(
    crate_type = field(CrateType.type),
    reloc_model = field(RelocModel.type),
    dep_link_style = field(LinkStyle.type),
)
# Filenames used for various emitted forms
# `None` for a prefix or suffix means use the build_param version
# (see output_filename below).
_EMIT_PREFIX_SUFFIX = {
    Emit("asm"): ("", ".s"),
    Emit("llvm-bc"): ("", ".bc"),
    Emit("llvm-ir"): ("", ".ll"),
    Emit("obj"): ("", ".o"),
    Emit("metadata"): ("lib", ".rmeta"), # even binaries get called 'libfoo.rmeta'
    Emit("link"): (None, None), # crate type and reloc model dependent
    Emit("dep-info"): ("", ".d"),
    Emit("mir"): (None, ".mir"),
    Emit("expand"): (None, ".rs"),
    Emit("save-analysis"): (None, ".json"),
}
# Return the filename for a particular emitted artifact type
def output_filename(cratename: str.type, emit: Emit.type, buildparams: BuildParams.type, extra: [str.type, None] = None) -> str.type:
    # Emit-specific prefix/suffix; None defers to the crate-type/platform
    # specific values carried in the build params.
    emit_prefix, emit_suffix = _EMIT_PREFIX_SUFFIX[emit]
    prefix = buildparams.prefix if emit_prefix == None else emit_prefix
    suffix = buildparams.suffix if emit_suffix == None else emit_suffix
    return prefix + cratename + (extra or "") + suffix
# Rule type - 'binary' also covers 'test'
RuleType = enum("binary", "library")
# What language we're generating artifacts to be linked with
LinkageLang = enum("rust", "c++")
# Index keys into _BUILD_PARAMS below; each names one build flavour.
_BINARY_SHARED = 0
_BINARY_PIE = 1
_BINARY_NON_PIE = 2
_NATIVE_LINKABLE_SHARED_OBJECT = 3
_RUST_DYLIB_SHARED = 4
_RUST_PROC_MACRO = 5
_RUST_STATIC_PIC_LIBRARY = 6
_RUST_STATIC_NON_PIC_LIBRARY = 7
_NATIVE_LINKABLE_STATIC_PIC = 8
_NATIVE_LINKABLE_STATIC_NON_PIC = 9
# FIXME: This isn't right! We should be using the target platform, rather than host_info.
# On Windows the GNU linker emits .exe/.dll, while on Linux it emits nothing/.so, use this variable to switch.
#
# At the moment we don't have anything working on Windows, so trying to get cross-compiling working is a step
# further than we want to take. But this will need cleaning up for Windows hosts to be able to cross-compile.
_FIXME_IS_WINDOWS = host_info().os.is_windows
# Executable filename (prefix, suffix) for the given linker platform.
# Unknown platforms fail on the dict lookup, as before.
def _executable_prefix_suffix(platform):
    gnu_pair = ("", ".exe") if _FIXME_IS_WINDOWS else ("", "")
    table = {
        "darwin": ("", ""),
        "gnu": gnu_pair,
        "windows": ("", ".exe"),
    }
    return table[platform]
def _library_prefix_suffix(platform):
    # (prefix, suffix) applied to shared-library output names, keyed by the
    # linker platform flavor.
    affixes = {
        "darwin": ("lib", ".dylib"),
        # A GNU toolchain hosted on Windows produces DLLs (see _FIXME_IS_WINDOWS).
        "gnu": ("", ".dll") if _FIXME_IS_WINDOWS else ("lib", ".so"),
        "windows": ("", ".dll"),
    }
    return affixes[platform]
# For each build kind: the rustc flags (crate type, reloc model, link style
# for dependencies) plus a function from linker platform to the
# (prefix, suffix) applied to the output filename.
_BUILD_PARAMS = {
    _BINARY_SHARED: (RustcFlags(crate_type = CrateType("bin"), reloc_model = RelocModel("pic"), dep_link_style = LinkStyle("shared")), _executable_prefix_suffix),
    _BINARY_PIE: (RustcFlags(crate_type = CrateType("bin"), reloc_model = RelocModel("pic"), dep_link_style = LinkStyle("static_pic")), _executable_prefix_suffix),
    _BINARY_NON_PIE: (RustcFlags(crate_type = CrateType("bin"), reloc_model = RelocModel("static"), dep_link_style = LinkStyle("static")), _executable_prefix_suffix),
    _NATIVE_LINKABLE_SHARED_OBJECT: (RustcFlags(crate_type = CrateType("cdylib"), reloc_model = RelocModel("pic"), dep_link_style = LinkStyle("shared")), _library_prefix_suffix),
    _RUST_DYLIB_SHARED: (RustcFlags(crate_type = CrateType("dylib"), reloc_model = RelocModel("pic"), dep_link_style = LinkStyle("shared")), _library_prefix_suffix),
    _RUST_PROC_MACRO: (RustcFlags(crate_type = CrateType("proc-macro"), reloc_model = RelocModel("pic"), dep_link_style = LinkStyle("static_pic")), _library_prefix_suffix),
    # rlib names don't vary by platform.
    _RUST_STATIC_PIC_LIBRARY: (RustcFlags(crate_type = CrateType("rlib"), reloc_model = RelocModel("pic"), dep_link_style = LinkStyle("static_pic")), lambda _: ("lib", ".rlib")),
    _RUST_STATIC_NON_PIC_LIBRARY: (RustcFlags(crate_type = CrateType("rlib"), reloc_model = RelocModel("static"), dep_link_style = LinkStyle("static")), lambda _: ("lib", ".rlib")),
    _NATIVE_LINKABLE_STATIC_PIC: (RustcFlags(crate_type = CrateType("staticlib"), reloc_model = RelocModel("pic"), dep_link_style = LinkStyle("static_pic")), lambda _: ("lib", "_pic.a")),
    _NATIVE_LINKABLE_STATIC_NON_PIC: (RustcFlags(crate_type = CrateType("staticlib"), reloc_model = RelocModel("static"), dep_link_style = LinkStyle("static")), lambda _: ("lib", ".a")),
}
# Maps (rule type, is proc-macro, link style, preferred linkage, linkage
# language) to the build kind required for that combination. Missing keys are
# reported by `build_params` via `expect`.
_INPUTS = {
    # Binary, shared
    ("binary", False, "shared", "any", "rust"): _BINARY_SHARED,
    ("binary", False, "shared", "shared", "rust"): _BINARY_SHARED,
    ("binary", False, "shared", "static", "rust"): _BINARY_SHARED,
    # Binary, PIE
    ("binary", False, "static_pic", "any", "rust"): _BINARY_PIE,
    ("binary", False, "static_pic", "shared", "rust"): _BINARY_PIE,
    ("binary", False, "static_pic", "static", "rust"): _BINARY_PIE,
    # Binary, non-PIE
    ("binary", False, "static", "any", "rust"): _BINARY_NON_PIE,
    ("binary", False, "static", "shared", "rust"): _BINARY_NON_PIE,
    ("binary", False, "static", "static", "rust"): _BINARY_NON_PIE,
    # Native linkable shared object
    ("library", False, "shared", "any", "c++"): _NATIVE_LINKABLE_SHARED_OBJECT,
    ("library", False, "shared", "shared", "c++"): _NATIVE_LINKABLE_SHARED_OBJECT,
    ("library", False, "static", "shared", "c++"): _NATIVE_LINKABLE_SHARED_OBJECT,
    ("library", False, "static_pic", "shared", "c++"): _NATIVE_LINKABLE_SHARED_OBJECT,
    # Rust dylib shared object
    ("library", False, "shared", "any", "rust"): _RUST_DYLIB_SHARED,
    ("library", False, "shared", "shared", "rust"): _RUST_DYLIB_SHARED,
    ("library", False, "static", "shared", "rust"): _RUST_DYLIB_SHARED,
    ("library", False, "static_pic", "shared", "rust"): _RUST_DYLIB_SHARED,
    # Rust proc-macro (proc-macros ignore link style / preferred linkage)
    ("library", True, "shared", "any", "rust"): _RUST_PROC_MACRO,
    ("library", True, "shared", "shared", "rust"): _RUST_PROC_MACRO,
    ("library", True, "shared", "static", "rust"): _RUST_PROC_MACRO,
    ("library", True, "static", "any", "rust"): _RUST_PROC_MACRO,
    ("library", True, "static", "shared", "rust"): _RUST_PROC_MACRO,
    ("library", True, "static", "static", "rust"): _RUST_PROC_MACRO,
    ("library", True, "static_pic", "any", "rust"): _RUST_PROC_MACRO,
    ("library", True, "static_pic", "shared", "rust"): _RUST_PROC_MACRO,
    ("library", True, "static_pic", "static", "rust"): _RUST_PROC_MACRO,
    # Rust static_pic library
    ("library", False, "shared", "static", "rust"): _RUST_STATIC_PIC_LIBRARY,
    ("library", False, "static_pic", "any", "rust"): _RUST_STATIC_PIC_LIBRARY,
    ("library", False, "static_pic", "static", "rust"): _RUST_STATIC_PIC_LIBRARY,
    # Rust static (non-pic) library
    ("library", False, "static", "any", "rust"): _RUST_STATIC_NON_PIC_LIBRARY,
    ("library", False, "static", "static", "rust"): _RUST_STATIC_NON_PIC_LIBRARY,
    # Native linkable static_pic
    ("library", False, "shared", "static", "c++"): _NATIVE_LINKABLE_STATIC_PIC,
    ("library", False, "static_pic", "any", "c++"): _NATIVE_LINKABLE_STATIC_PIC,
    ("library", False, "static_pic", "static", "c++"): _NATIVE_LINKABLE_STATIC_PIC,
    # Native linkable static non-pic
    ("library", False, "static", "any", "c++"): _NATIVE_LINKABLE_STATIC_NON_PIC,
    ("library", False, "static", "static", "c++"): _NATIVE_LINKABLE_STATIC_NON_PIC,
}
# Compute crate type, relocation model and name mapping given what rule we're
# building, whether it's a proc-macro, linkage information and language.
def build_params(
        rule: RuleType.type,
        proc_macro: bool.type,
        link_style: LinkStyle.type,
        preferred_linkage: Linkage.type,
        lang: LinkageLang.type,
        linker_type: str.type) -> BuildParams.type:
    """Resolve a combination of build inputs to concrete BuildParams.

    Fails (via `expect`) if the combination has no entry in `_INPUTS`.
    """
    key = (rule.value, proc_macro, link_style.value, preferred_linkage.value, lang.value)
    expect(
        key in _INPUTS,
        "missing case for rule_type={} proc_macro={} link_style={} preferred_linkage={} lang={}",
        rule,
        proc_macro,
        link_style,
        preferred_linkage,
        lang,
    )
    (flags, platform_to_affix) = _BUILD_PARAMS[_INPUTS[key]]
    (prefix, suffix) = platform_to_affix(linker_type)
    return BuildParams(
        crate_type = flags.crate_type,
        reloc_model = flags.reloc_model,
        dep_link_style = flags.dep_link_style,
        prefix = prefix,
        suffix = suffix,
    )

# --- (vendoring/diff artifact) file boundary: start of failure_filter.bzl ---
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(":rust_toolchain.bzl", "ctx_toolchain_info")
# Inputs to the fail filter
RustFailureFilter = provider(fields = [
    # Build status json emitted by the rustc invocation
    "buildstatus",
    # Artifact which must be present in the build status for success
    "required",
    # Captured rustc stderr to be streamed through regardless of outcome
    "stderr",
])
# This creates an action which takes a buildstatus json artifact as an input, and a list of other
# artifacts. If all those artifacts are present in the buildstatus as successfully generated, then
# the action will succeed with those artifacts as outputs. Otherwise it fails.
# Either way it streams whatever stderr content there is to stream.
def failure_filter(
        ctx: "context",
        prefix: str.type,
        predecl_out: ["artifact", None],
        failprov: "RustFailureFilter",
        short_cmd: str.type) -> "artifact":
    # The filter tool comes from the Rust toolchain.
    failure_filter_action = ctx_toolchain_info(ctx).failure_filter_action
    buildstatus = failprov.buildstatus
    required = failprov.required
    stderr = failprov.stderr
    # Reuse the caller's pre-declared output if given; otherwise declare one
    # mirroring the required artifact's short path.
    if predecl_out:
        output = predecl_out
    else:
        output = ctx.actions.declare_output("out/" + required.short_path)
    # Note the argv layout: `--required-file` is followed by three positional
    # values — the short name, the input artifact, and the output placeholder.
    cmd = cmd_args(
        failure_filter_action,
        "--stderr",
        stderr,
        "--required-file",
        required.short_path,
        required,
        output.as_output(),
        "--build-status",
        buildstatus,
    )
    ctx.actions.run(cmd, category = "failure_filter", identifier = "{} {}".format(prefix, short_cmd))
    return output

# --- (vendoring/diff artifact) file boundary: start of link_info.bzl ---
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Implementation of the Rust build rules.
load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo")
load(
"@prelude//linking:link_info.bzl",
"LinkStyle",
"MergedLinkInfo",
"merge_link_infos",
)
load(
"@prelude//linking:shared_libraries.bzl",
"SharedLibraryInfo",
)
load("@prelude//utils:platform_flavors_util.bzl", "by_platform")
load("@prelude//utils:utils.bzl", "flatten")
# Override dylib crates to static_pic, so that Rust code is always
# statically linked.
# In v1 we always linked Rust deps statically, even for the "shared" link
# style. That shouldn't be necessary, but fully shared linking needs more
# debugging, so default to v1 behaviour. (Should eventually be controlled
# with the `rust.force_rlib` option.)
FORCE_RLIB = True
# Output of a Rust compilation
RustLinkInfo = provider(fields = [
    # crate - crate name
    "crate",
    # styles - information about each LinkStyle as RustLinkStyleInfo
    # {LinkStyle: RustLinkStyleInfo}
    "styles",
    # Propagate non-rust native linkable dependencies through rust libraries.
    "non_rust_exported_link_deps",
    # Propagate non-rust native linkable info through rust libraries.
    "non_rust_link_info",
    # Propagate non-rust shared libraries through rust libraries.
    "non_rust_shared_libs",
])
# Information which is keyed on link_style
RustLinkStyleInfo = record(
    # Path to library or binary
    rlib = field("artifact"),
    # Transitive dependencies which are relevant to the consumer.
    # This is a dict from artifact to None (we don't have sets).
    transitive_deps = field({"artifact": None}),
    # Path for library metadata (used for check builds or pipelining)
    rmeta = field("artifact"),
    # Transitive rmeta deps, same shape as `transitive_deps`.
    transitive_rmeta_deps = field({"artifact": None}),
)
def style_info(info: RustLinkInfo.type, link_style: LinkStyle.type) -> RustLinkStyleInfo.type:
    """Look up per-link-style info, honouring the FORCE_RLIB override.

    With FORCE_RLIB set, a "shared" request is served by the static_pic
    build, so Rust deps are always linked statically.
    """
    effective_style = link_style
    if FORCE_RLIB and effective_style == LinkStyle("shared"):
        effective_style = LinkStyle("static_pic")
    return info.styles[effective_style]
def cxx_by_platform(ctx: "context", xs: [(str.type, "_a")]) -> "_a":
    # Select the entries matching this target's C++ platform, then flatten
    # the per-platform lists into one.
    toolchain_platform = ctx.attrs._cxx_toolchain[CxxPlatformInfo].name
    selected = by_platform([toolchain_platform], xs)
    return flatten(selected)
# A Rust dependency, as resolved by `resolve_deps`.
RustDependency = record(
    # The actual dependency
    dep = field("dependency"),
    # The local name, if any (for `named_deps`)
    name = field([None, str.type]),
    # Any flags for the dependency (`flagged_deps`), which are passed on to rustc.
    flags = field([str.type]),
)
# Returns all first-order dependencies, resolving the ones from "platform_deps"
def resolve_deps(ctx: "context") -> [RustDependency.type]:
    # The `getattr`s are needed for when we're operating on
    # `prebuilt_rust_library` rules, which don't have those attrs.
    dep_specs = [(None, dep, []) for dep in ctx.attrs.deps + cxx_by_platform(ctx, ctx.attrs.platform_deps)]
    dep_specs.extend([(name, dep, []) for name, dep in getattr(ctx.attrs, "named_deps", {}).items()])
    flagged = getattr(ctx.attrs, "flagged_deps", []) + cxx_by_platform(ctx, getattr(ctx.attrs, "platform_flagged_deps", []))
    dep_specs.extend([(None, dep, flags) for dep, flags in flagged])
    return [
        RustDependency(name = name, dep = dep, flags = flags)
        for name, dep, flags in dep_specs
    ]
# Returns native link dependencies.
def _non_rust_link_deps(ctx: "context") -> ["dependency"]:
    """
    Return all first-order native linkable dependencies of all transitive Rust
    libraries.

    This emulates v1's graph walk, where it traverses through Rust libraries
    looking for non-Rust native link infos (and terminating the search there).
    """
    native_deps = []
    for rust_dep in resolve_deps(ctx):
        dep = rust_dep.dep
        if RustLinkInfo not in dep and MergedLinkInfo in dep:
            native_deps.append(dep)
    return native_deps
# Returns native link infos.
def _non_rust_link_infos(ctx: "context") -> ["MergedLinkInfo"]:
    """
    Return all first-order native link infos of all transitive Rust libraries.

    This emulates v1's graph walk, where it traverses through Rust libraries
    looking for non-Rust native link infos (and terminating the search there).

    MergedLinkInfo is a mapping from link style to all the transitive deps
    rolled up in a tset.
    """
    infos = []
    for dep in _non_rust_link_deps(ctx):
        infos.append(dep[MergedLinkInfo])
    return infos
# Returns native shared library infos.
def _non_rust_shared_lib_infos(ctx: "context") -> ["SharedLibraryInfo"]:
    """
    Return all transitive shared libraries for non-Rust native linkables.

    This emulates v1's graph walk, where it traverses through -- and ignores --
    Rust libraries to collect all transitive shared libraries.
    """
    shlib_infos = []
    for rust_dep in resolve_deps(ctx):
        dep = rust_dep.dep
        if RustLinkInfo not in dep and SharedLibraryInfo in dep:
            shlib_infos.append(dep[SharedLibraryInfo])
    return shlib_infos
# Returns the RustLinkInfo of every first-order dep that provides one.
def _rust_link_infos(ctx: "context") -> ["RustLinkInfo"]:
    infos = []
    for d in resolve_deps(ctx):
        info = d.dep.get(RustLinkInfo)
        if info != None:
            infos.append(info)
    return infos
def normalize_crate(label: str.type) -> str.type:
    # Crate names may not contain '-'; rustc expects '_' in its place.
    return "_".join(label.split("-"))
def inherited_non_rust_exported_link_deps(ctx: "context") -> ["dependency"]:
    # Dedupe by label: direct non-Rust deps, plus those re-exported through
    # transitive Rust libraries. Later entries with the same label win.
    by_label = {}
    for direct in _non_rust_link_deps(ctx):
        by_label[direct.label] = direct
    for rust_info in _rust_link_infos(ctx):
        for exported in rust_info.non_rust_exported_link_deps:
            by_label[exported.label] = exported
    return by_label.values()
def inherited_non_rust_link_info(ctx: "context") -> "MergedLinkInfo":
    # Merge direct native link infos with those propagated through Rust
    # library deps.
    merged = _non_rust_link_infos(ctx) + [d.non_rust_link_info for d in _rust_link_infos(ctx)]
    return merge_link_infos(ctx, merged)
def inherited_non_rust_shared_libs(ctx: "context") -> ["SharedLibraryInfo"]:
    # Direct native shared libs, then those carried by Rust library deps.
    return _non_rust_shared_lib_infos(ctx) + [d.non_rust_shared_libs for d in _rust_link_infos(ctx)]
def attr_crate(ctx: "context") -> str.type:
    # An explicit `crate` attribute wins; otherwise derive the crate name
    # from the target name.
    explicit = ctx.attrs.crate
    if explicit:
        return explicit
    return normalize_crate(ctx.label.name)

# --- (vendoring/diff artifact) file boundary: start of resources.bzl ---
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:paths.bzl", "paths")
load("@prelude//utils:utils.bzl", "expect", "from_named_set")
def rust_attr_resources(ctx: "context") -> {str.type: ("artifact", ["_arglike"])}:
    """
    Return the resources provided by this rule, as a map of resource name to
    a tuple of the resource artifact and any "other" outputs exposed by it.
    """
    result = {}
    for name, resource in from_named_set(ctx.attrs.resources).items():
        if type(resource) == "artifact":
            # Already a plain artifact; nothing extra to materialize.
            artifact = resource
            other = []
        else:
            # A dependency: take its sole default output as the resource.
            info = resource[DefaultInfo]
            expect(
                len(info.default_outputs) == 1,
                "expected exactly one default output from {} ({})".format(resource, info.default_outputs),
            )
            artifact = info.default_outputs[0]
            other = info.other_outputs
        result[paths.join(ctx.label.package, name)] = (artifact, other)
    return result

# --- (vendoring/diff artifact) file boundary: start of rust_binary.bzl ---
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load(
"@prelude//:resources.bzl",
"create_resource_db",
"gather_resources",
)
load("@prelude//cxx:cxx_library_utility.bzl", "cxx_attr_deps")
load("@prelude//cxx:cxx_link_utility.bzl", "executable_shared_lib_arguments")
load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo")
load(
"@prelude//linking:link_info.bzl",
"LinkStyle",
"Linkage",
)
load(
"@prelude//linking:shared_libraries.bzl",
"merge_shared_libraries",
"traverse_shared_library_info",
)
load(
"@prelude//tests:re_utils.bzl",
"get_re_executor_from_props",
)
load("@prelude//utils:utils.bzl", "flatten_dict")
load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info")
load(
":build.bzl",
"compile_context",
"generate_rustdoc",
"rust_compile",
"rust_compile_multi",
)
load(
":build_params.bzl",
"Emit",
"LinkageLang",
"RuleType",
"build_params",
"output_filename",
)
load(
":link_info.bzl",
"attr_crate",
"inherited_non_rust_shared_libs",
)
load(":resources.bzl", "rust_attr_resources")
load(":rust_toolchain.bzl", "ctx_toolchain_info")
def _rust_binary_common(
        ctx: "context",
        default_roots: [str.type],
        extra_flags: [str.type]) -> ([[DefaultInfo.type, RunInfo.type]], "cmd_args"):
    """Shared implementation for `rust_binary` and `rust_test`.

    Builds the binary once per LinkStyle (exposing each as a sub-target),
    plus check/doc/expand/save-analysis auxiliary outputs. Returns the
    providers and the cmd_args to run the binary for the link style the
    target actually requested.
    """
    toolchain_info = ctx_toolchain_info(ctx)
    crate = attr_crate(ctx)
    styles = {}  # link_style -> (output, run args, extra targets, runtime files)
    style_param = {}  # style -> param
    # The link style the final providers are taken from; default static_pic.
    specified_link_style = LinkStyle(ctx.attrs.link_style or "static_pic")
    compile_ctx = compile_context(ctx)
    linker_type = ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info.type
    resources = flatten_dict(gather_resources(
        label = ctx.label,
        resources = rust_attr_resources(ctx),
        deps = cxx_attr_deps(ctx),
    ).values())
    # Build the binary for every link style; each becomes a sub-target.
    for link_style in LinkStyle:
        params = build_params(
            rule = RuleType("binary"),
            proc_macro = False,
            link_style = link_style,
            preferred_linkage = Linkage("any"),
            lang = LinkageLang("rust"),
            linker_type = linker_type,
        )
        style_param[link_style] = params
        # Outputs live under a per-style subdirectory to avoid clashes.
        name = link_style.value + "/" + output_filename(crate, Emit("link"), params)
        output = ctx.actions.declare_output(name)
        # Gather and setup symlink tree of transitive shared library deps.
        shared_libs = {}
        # As per v1, we only setup a shared library symlink tree for the shared
        # link style.
        # XXX need link tree for dylib crates
        if link_style == LinkStyle("shared"):
            shlib_info = merge_shared_libraries(
                ctx.actions,
                deps = inherited_non_rust_shared_libs(ctx),
            )
            for soname, shared_lib in traverse_shared_library_info(shlib_info).items():
                shared_libs[soname] = shared_lib.lib
        extra_link_args, runtime_files, _ = executable_shared_lib_arguments(
            ctx.actions,
            ctx.attrs._cxx_toolchain[CxxToolchainInfo],
            output,
            shared_libs,
        )
        # NOTE(review): this rebinds `extra_flags` inside the loop, so later
        # iterations prepend the toolchain flags again to the already-extended
        # list — appears to rely on the toolchain flags being idempotent.
        extra_flags = toolchain_info.rustc_binary_flags + (extra_flags or [])
        # Compile rust binary.
        link, meta = rust_compile_multi(
            ctx = ctx,
            compile_ctx = compile_ctx,
            emits = [Emit("link"), Emit("metadata")],
            crate = crate,
            params = params,
            link_style = link_style,
            default_roots = default_roots,
            extra_link_args = extra_link_args,
            predeclared_outputs = {Emit("link"): output},
            extra_flags = extra_flags,
            is_binary = True,
        )
        # Running the binary requires its runtime files to be materialized.
        args = cmd_args(link.outputs[Emit("link")]).hidden(runtime_files)
        extra_targets = [("check", meta.outputs[Emit("metadata")])] + meta.diag.items()
        # If we have some resources, write them to the resources JSON file and
        # add it and all resources to "runtime_files" so that we make sure to
        # materialize them with the final binary.
        if resources:
            resources_hidden = [create_resource_db(
                ctx = ctx,
                name = name + ".resources.json",
                binary = output,
                resources = resources,
            )]
            for resource, other in resources.values():
                resources_hidden.append(resource)
                resources_hidden.extend(other)
            args.hidden(resources_hidden)
            runtime_files.extend(resources_hidden)
        styles[link_style] = (link.outputs[Emit("link")], args, extra_targets, runtime_files)
    # Auxiliary outputs built once from the static_pic configuration.
    expand = rust_compile(
        ctx = ctx,
        compile_ctx = compile_ctx,
        emit = Emit("expand"),
        crate = crate,
        params = style_param[LinkStyle("static_pic")],
        link_style = LinkStyle("static_pic"),
        default_roots = default_roots,
        extra_flags = extra_flags,
    )
    save_analysis = rust_compile(
        ctx = ctx,
        compile_ctx = compile_ctx,
        emit = Emit("save-analysis"),
        crate = crate,
        params = style_param[LinkStyle("static_pic")],
        link_style = LinkStyle("static_pic"),
        default_roots = default_roots,
        extra_flags = extra_flags,
    )
    extra_targets += [
        ("doc", generate_rustdoc(
            ctx = ctx,
            compile_ctx = compile_ctx,
            crate = crate,
            params = style_param[LinkStyle("static_pic")],
            default_roots = default_roots,
            document_private_items = True,
        )),
        ("expand", expand.outputs[Emit("expand")]),
        ("save-analysis", save_analysis.outputs[Emit("save-analysis")]),
        ("sources", compile_ctx.symlinked_srcs),
    ]
    sub_targets = {k: [DefaultInfo(default_outputs = [v])] for k, v in extra_targets}
    # Expose each link style as a runnable sub-target.
    for (k, (sub_link, sub_args, _sub_extra, sub_runtime_files)) in styles.items():
        sub_targets[k.value] = [
            DefaultInfo(
                default_outputs = [sub_link],
                other_outputs = sub_runtime_files,
                # Check/save-analysis for each link style?
                # sub_targets = { k: [DefaultInfo(default_outputs = [v])] for k, v in sub_extra }
            ),
            RunInfo(args = sub_args),
        ]
    # Top-level output comes from the specifically-requested link style.
    (link, args, extra_targets, runtime_files) = styles[specified_link_style]
    providers = [
        DefaultInfo(
            default_outputs = [link],
            other_outputs = runtime_files,
            sub_targets = sub_targets,
        ),
    ]
    return (providers, args)
def rust_binary_impl(ctx: "context") -> [[DefaultInfo.type, RunInfo.type]]:
    # A binary is the common build plus a RunInfo so `buck run` works.
    providers, run_args = _rust_binary_common(ctx, ["main.rs"], [])
    providers.append(RunInfo(args = run_args))
    return providers
def rust_test_impl(ctx: "context") -> [[DefaultInfo.type, RunInfo.type, ExternalRunnerTestInfo.type]]:
    """Build a Rust test binary and wrap it with test-runner metadata.

    The binary is produced by `_rust_binary_common`; this adds the
    ExternalRunnerTestInfo (command, env, labels, RE executor).
    """
    toolchain_info = ctx_toolchain_info(ctx)
    # Copy the toolchain's flags before extending: `extra_flags += [...]`
    # below would otherwise append to the toolchain-owned list in place
    # (Starlark `+=` mutates lists), leaking `--test` into every other
    # target sharing this toolchain — or failing outright if the provider's
    # list is frozen.
    extra_flags = list(toolchain_info.rustc_test_flags or [])
    if ctx.attrs.framework:
        # Use rustc's built-in libtest harness.
        extra_flags += ["--test"]
    providers, args = _rust_binary_common(ctx, ["main.rs", "lib.rs"], extra_flags)
    # Setup a RE executor based on the `remote_execution` param.
    re_executor = get_re_executor_from_props(ctx.attrs.remote_execution)
    return inject_test_run_info(
        ctx,
        ExternalRunnerTestInfo(
            type = "rust",
            command = [args],
            env = ctx.attrs.env,
            labels = ctx.attrs.labels,
            contacts = ctx.attrs.contacts,
            default_executor = re_executor,
            # We implicitly make this test via the project root, instead of
            # the cell root (e.g. fbcode root).
            run_from_project_root = re_executor != None,
            use_project_relative_paths = re_executor != None,
        ),
    ) + providers

# --- (vendoring/diff artifact) file boundary: start of rust_library.bzl ---
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
load("@prelude//:resources.bzl", "ResourceInfo", "gather_resources")
load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info")
load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo")
load(
"@prelude//cxx:linker.bzl",
"get_default_shared_library_name",
)
load(
"@prelude//cxx:omnibus.bzl",
"create_linkable_root",
"is_known_omnibus_root",
)
load(
"@prelude//linking:link_groups.bzl",
"merge_link_group_lib_info",
)
load(
"@prelude//linking:link_info.bzl",
"Archive",
"ArchiveLinkable",
"LinkInfo",
"LinkInfos",
"LinkStyle",
"Linkage",
"LinkedObject",
"MergedLinkInfo",
"SharedLibLinkable",
"create_merged_link_info",
"get_actual_link_style",
"merge_link_infos",
)
load(
"@prelude//linking:linkable_graph.bzl",
"AnnotatedLinkableRoot",
"create_linkable_graph",
"create_linkable_graph_node",
"create_linkable_node",
)
load(
"@prelude//linking:shared_libraries.bzl",
"create_shared_libraries",
"merge_shared_libraries",
)
load(
":build.bzl",
"CompileContext", # @unused Used as a type
"RustcOutput", # @unused Used as a type
"compile_context",
"generate_rustdoc",
"rust_compile",
"rust_compile_multi",
)
load(
":build_params.bzl",
"BuildParams", # @unused Used as a type
"Emit",
"LinkageLang",
"RuleType",
"build_params",
"crate_type_transitive_deps",
)
load(
":link_info.bzl",
"RustLinkInfo",
"RustLinkStyleInfo",
"attr_crate",
"inherited_non_rust_exported_link_deps",
"inherited_non_rust_link_info",
"inherited_non_rust_shared_libs",
"resolve_deps",
"style_info",
)
load(":resources.bzl", "rust_attr_resources")
def prebuilt_rust_library_impl(ctx: "context") -> ["provider"]:
    """Expose a prebuilt rlib with the same provider set as `rust_library`."""
    providers = []
    # Default output.
    providers.append(
        DefaultInfo(
            default_outputs = [ctx.attrs.rlib],
        ),
    )
    # Rust link provider: the same prebuilt rlib serves every link style.
    crate = attr_crate(ctx)
    styles = {}
    for style in LinkStyle:
        tdeps, tmetadeps = _compute_transitive_deps(ctx, style)
        styles[style] = RustLinkStyleInfo(
            rlib = ctx.attrs.rlib,
            transitive_deps = tdeps,
            # No separate metadata for a prebuilt rlib: reuse the rlib itself.
            rmeta = ctx.attrs.rlib,
            transitive_rmeta_deps = tmetadeps,
        )
    providers.append(
        RustLinkInfo(
            crate = crate,
            styles = styles,
            non_rust_exported_link_deps = inherited_non_rust_exported_link_deps(ctx),
            non_rust_link_info = inherited_non_rust_link_info(ctx),
            non_rust_shared_libs = merge_shared_libraries(
                ctx.actions,
                deps = inherited_non_rust_shared_libs(ctx),
            ),
        ),
    )
    # Native link provider.
    link = LinkInfo(
        linkables = [ArchiveLinkable(
            archive = Archive(artifact = ctx.attrs.rlib),
            linker_type = "unknown",
        )],
    )
    providers.append(
        create_merged_link_info(
            ctx,
            {link_style: LinkInfos(default = link) for link_style in LinkStyle},
            exported_deps = [d[MergedLinkInfo] for d in ctx.attrs.deps],
            # TODO(agallagher): This matches v1 behavior, but some of these libs
            # have prebuilt DSOs which might be usable.
            preferred_linkage = Linkage("static"),
        ),
    )
    # Native link graph setup.
    linkable_graph = create_linkable_graph(
        ctx,
        node = create_linkable_graph_node(
            ctx,
            linkable_node = create_linkable_node(
                ctx = ctx,
                preferred_linkage = Linkage("static"),
                exported_deps = ctx.attrs.deps,
                link_infos = {link_style: LinkInfos(default = link) for link_style in LinkStyle},
            ),
        ),
        deps = ctx.attrs.deps,
    )
    providers.append(linkable_graph)
    providers.append(merge_link_group_lib_info(deps = ctx.attrs.deps))
    return providers
def rust_library_impl(ctx: "context") -> ["provider"]:
    """Build a Rust library for every (language, link style) combination and
    return the default, Rust, native and resource providers."""
    crate = attr_crate(ctx)
    compile_ctx = compile_context(ctx)
    # Multiple styles and language linkages could generate the same crate types
    # (eg procmacro or using preferred_linkage), so we need to see how many
    # distinct kinds of build we actually need to deal with.
    param_lang, lang_style_param = _build_params_for_styles(ctx)
    artifacts = _build_library_artifacts(ctx, compile_ctx, param_lang)
    rust_param_artifact = {}
    native_param_artifact = {}
    check_artifacts = None
    for (lang, params), (link, meta) in artifacts.items():
        if lang == LinkageLang("rust"):
            # Grab the check output for all kinds of builds to use
            # in the check subtarget. The link style doesn't matter
            # so pick the first.
            if check_artifacts == None:
                check_artifacts = {"check": meta.outputs[Emit("metadata")]}
                check_artifacts.update(meta.diag)
            rust_param_artifact[params] = _handle_rust_artifact(ctx, params, link, meta)
        elif lang == LinkageLang("c++"):
            native_param_artifact[params] = link.outputs[Emit("link")]
        else:
            fail("Unhandled lang {}".format(lang))
    # Doc, macro-expansion and save-analysis outputs are produced once, from
    # the static_pic Rust configuration.
    rustdoc = generate_rustdoc(
        ctx = ctx,
        compile_ctx = compile_ctx,
        crate = crate,
        params = lang_style_param[(LinkageLang("rust"), LinkStyle("static_pic"))],
        default_roots = ["lib.rs"],
        document_private_items = False,
    )
    expand = rust_compile(
        ctx = ctx,
        compile_ctx = compile_ctx,
        emit = Emit("expand"),
        crate = crate,
        params = lang_style_param[(LinkageLang("rust"), LinkStyle("static_pic"))],
        link_style = LinkStyle("static_pic"),
        default_roots = ["lib.rs"],
    )
    save_analysis = rust_compile(
        ctx = ctx,
        compile_ctx = compile_ctx,
        emit = Emit("save-analysis"),
        crate = crate,
        params = lang_style_param[(LinkageLang("rust"), LinkStyle("static_pic"))],
        link_style = LinkStyle("static_pic"),
        default_roots = ["lib.rs"],
    )
    providers = []
    providers += _default_providers(
        lang_style_param = lang_style_param,
        param_artifact = rust_param_artifact,
        rustdoc = rustdoc,
        check_artifacts = check_artifacts,
        expand = expand.outputs[Emit("expand")],
        save_analysis = save_analysis.outputs[Emit("save-analysis")],
        sources = compile_ctx.symlinked_srcs,
    )
    providers += _rust_providers(
        ctx = ctx,
        lang_style_param = lang_style_param,
        param_artifact = rust_param_artifact,
    )
    providers += _native_providers(
        ctx = ctx,
        lang_style_param = lang_style_param,
        param_artifact = native_param_artifact,
    )
    # Resources are exposed for consumers (e.g. binaries) to gather.
    providers.append(ResourceInfo(resources = gather_resources(
        label = ctx.label,
        resources = rust_attr_resources(ctx),
        deps = [dep.dep for dep in resolve_deps(ctx)],
    )))
    return providers
def _build_params_for_styles(ctx: "context") -> (
    {BuildParams.type: [LinkageLang.type]},
    {(LinkageLang.type, LinkStyle.type): BuildParams.type},
):
    """
    For a given rule, return two things:
    - a set of build params we need for all combinations of linkage languages
      and link styles, mapped to which languages they apply to
    - a mapping from linkage language and link style to build params

    This is needed because different combinations may end up using the same set
    of params, and we want to minimize invocations to rustc, both for
    efficiency's sake, but also to avoid duplicate objects being linked
    together.
    """
    param_lang = {}  # param -> [linkage_lang]
    style_param = {}  # (linkage_lang, link_style) -> param
    # The linker type is invariant across combinations; look it up once.
    linker_type = ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info.type
    for linkage_lang in LinkageLang:
        # proc-macros are never linked into C++, so skip that combination.
        if ctx.attrs.proc_macro and linkage_lang == LinkageLang("c++"):
            continue
        for link_style in LinkStyle:
            params = build_params(
                rule = RuleType("library"),
                proc_macro = ctx.attrs.proc_macro,
                link_style = link_style,
                preferred_linkage = Linkage(ctx.attrs.preferred_linkage),
                lang = linkage_lang,
                linker_type = linker_type,
            )
            param_lang[params] = param_lang.get(params, []) + [linkage_lang]
            style_param[(linkage_lang, link_style)] = params
    return (param_lang, style_param)
def _build_library_artifacts(
        ctx: "context",
        compile_ctx: CompileContext.type,
        param_lang: {BuildParams.type: [LinkageLang.type]}) -> {
    (LinkageLang.type, BuildParams.type): (RustcOutput.type, RustcOutput.type),
}:
    """
    Generate the actual actions to build various output artifacts. Given the set
    of parameters we need, return a mapping to the linkable and metadata
    artifacts.
    """
    crate = attr_crate(ctx)
    param_artifact = {}
    # One compile per distinct BuildParams; the result is shared by every
    # language that uses those params.
    for params, langs in param_lang.items():
        link_style = params.dep_link_style
        # Separate actions for each emit type
        #
        # In principle we don't really need metadata for C++-only artifacts,
        # but I don't think it hurts
        link, meta = rust_compile_multi(
            ctx = ctx,
            compile_ctx = compile_ctx,
            emits = [Emit("link"), Emit("metadata")],
            crate = crate,
            params = params,
            link_style = link_style,
            default_roots = ["lib.rs"],
        )
        for lang in langs:
            param_artifact[(lang, params)] = (link, meta)
    return param_artifact
def _handle_rust_artifact(
        ctx: "context",
        params: BuildParams.type,
        link: RustcOutput.type,
        meta: RustcOutput.type) -> RustLinkStyleInfo.type:
    """
    Return the RustLinkStyleInfo for a given set of artifacts. The main
    consideration is computing the right set of dependencies.
    """
    # Only crate types whose consumers care about transitive deps need them
    # computed (specifically, not proc-macros).
    tdeps = {}
    tmetadeps = {}
    if crate_type_transitive_deps(params.crate_type):
        tdeps, tmetadeps = _compute_transitive_deps(ctx, params.dep_link_style)
    linked = link.outputs[Emit("link")]
    if ctx.attrs.proc_macro:
        # Proc macro deps are always the real thing
        return RustLinkStyleInfo(
            rlib = linked,
            transitive_deps = tdeps,
            rmeta = linked,
            transitive_rmeta_deps = tdeps,
        )
    return RustLinkStyleInfo(
        rlib = linked,
        transitive_deps = tdeps,
        rmeta = meta.outputs[Emit("metadata")],
        transitive_rmeta_deps = tmetadeps,
    )
def _default_providers(
        lang_style_param: {(LinkageLang.type, LinkStyle.type): BuildParams.type},
        param_artifact: {BuildParams.type: RustLinkStyleInfo.type},
        rustdoc: "artifact",
        check_artifacts: {str.type: "artifact"},
        expand: "artifact",
        save_analysis: "artifact",
        sources: "artifact") -> ["provider"]:
    """Build the DefaultInfo: check output as default, everything else as
    sub-targets."""
    # One sub-target per link style, exposing that style's rlib...
    targets = {}
    for link_style in LinkStyle:
        rust_info = param_artifact[lang_style_param[(LinkageLang("rust"), link_style)]]
        targets[link_style.value] = rust_info.rlib
    # ...plus the check/diagnostic artifacts and the auxiliary outputs.
    targets.update(check_artifacts)
    targets["doc"] = rustdoc
    targets["sources"] = sources
    targets["expand"] = expand
    targets["save-analysis"] = save_analysis
    sub_targets = {
        name: [DefaultInfo(default_outputs = [artifact])]
        for name, artifact in targets.items()
    }
    # The (fast) metadata check build is the default output.
    return [
        DefaultInfo(
            default_outputs = [check_artifacts["check"]],
            sub_targets = sub_targets,
        ),
    ]
def _rust_providers(
        ctx: "context",
        lang_style_param: {(LinkageLang.type, LinkStyle.type): BuildParams.type},
        param_artifact: {BuildParams.type: RustLinkStyleInfo.type}) -> ["provider"]:
    """
    Return the set of providers for Rust linkage.
    """
    crate = attr_crate(ctx)
    # Per-link-style Rust artifacts for this crate.
    style_info = {
        link_style: param_artifact[lang_style_param[(LinkageLang("rust"), link_style)]]
        for link_style in LinkStyle
    }
    # Inherited link input and shared libraries. As in v1, this only includes
    # non-Rust rules, found by walking through -- and ignoring -- Rust libraries
    # to find non-Rust native linkables and libraries.
    if not ctx.attrs.proc_macro:
        inherited_non_rust_link_deps = inherited_non_rust_exported_link_deps(ctx)
        inherited_non_rust_link = inherited_non_rust_link_info(ctx)
        inherited_non_rust_shlibs = inherited_non_rust_shared_libs(ctx)
    else:
        # proc-macros are just used by the compiler and shouldn't propagate
        # their native deps to the link line of the target.
        inherited_non_rust_link = merge_link_infos(ctx, [])
        inherited_non_rust_shlibs = []
        inherited_non_rust_link_deps = []
    providers = []
    # Create rust library provider.
    providers.append(RustLinkInfo(
        crate = crate,
        styles = style_info,
        non_rust_link_info = inherited_non_rust_link,
        non_rust_exported_link_deps = inherited_non_rust_link_deps,
        non_rust_shared_libs = merge_shared_libraries(
            ctx.actions,
            deps = inherited_non_rust_shlibs,
        ),
    ))
    return providers
def _native_providers(
        ctx: "context",
        lang_style_param: {(LinkageLang.type, LinkStyle.type): BuildParams.type},
        param_artifact: {BuildParams.type: "artifact"}) -> ["provider"]:
    """
    Return the set of providers needed to link Rust as a dependency for native
    (ie C/C++) code, along with relevant dependencies.

    TODO: This currently assumes `staticlib`/`cdylib` behaviour, where all
    dependencies are bundled into the Rust crate itself. We need to break out of
    this mode of operation.
    """
    inherited_non_rust_link_deps = inherited_non_rust_exported_link_deps(ctx)
    inherited_non_rust_link = inherited_non_rust_link_info(ctx)
    inherited_non_rust_shlibs = inherited_non_rust_shared_libs(ctx)
    linker_info = get_cxx_toolchain_info(ctx).linker_info
    linker_type = linker_info.type

    providers = []

    if ctx.attrs.proc_macro:
        # Proc-macros never have a native form
        return providers

    # Per-link-style artifacts built for the "c++" linkage language
    # (staticlib/cdylib outputs).
    libraries = {
        link_style: param_artifact[lang_style_param[(LinkageLang("c++"), link_style)]]
        for link_style in LinkStyle
    }

    # Static styles are wrapped as archives; dynamic styles as shared libs.
    link_infos = {}
    for link_style, arg in libraries.items():
        if link_style in [LinkStyle("static"), LinkStyle("static_pic")]:
            link_infos[link_style] = LinkInfos(default = LinkInfo(linkables = [ArchiveLinkable(archive = Archive(artifact = arg), linker_type = linker_type)]))
        else:
            link_infos[link_style] = LinkInfos(default = LinkInfo(linkables = [SharedLibLinkable(lib = arg)]))

    preferred_linkage = Linkage(ctx.attrs.preferred_linkage)

    # Native link provider.
    providers.append(create_merged_link_info(
        ctx,
        link_infos,
        exported_deps = [inherited_non_rust_link],
        preferred_linkage = preferred_linkage,
    ))

    solibs = {}

    # Add the shared library to the list of shared libs.
    # NOTE(review): `linker_info` was already computed above via
    # `get_cxx_toolchain_info(ctx)`; this re-lookup looks redundant -- confirm
    # both resolve to the same toolchain before deduplicating.
    linker_info = ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info
    shlib_name = get_default_shared_library_name(linker_info, ctx.label)

    # Only add a shared library if we generated one.
    if get_actual_link_style(LinkStyle("shared"), preferred_linkage) == LinkStyle("shared"):
        solibs[shlib_name] = LinkedObject(output = libraries[LinkStyle("shared")])

    # Native shared library provider.
    providers.append(merge_shared_libraries(
        ctx.actions,
        create_shared_libraries(ctx, solibs),
        inherited_non_rust_shlibs,
    ))

    # Create, augment and provide the linkable graph.
    deps_linkable_graph = create_linkable_graph(
        ctx,
        deps = inherited_non_rust_link_deps,
    )

    # Omnibus root provider. The root links the static_pic archive whole so the
    # omnibus body contains all of this crate's code.
    known_omnibus_root = is_known_omnibus_root(ctx)
    linkable_root = create_linkable_root(
        ctx,
        name = get_default_shared_library_name(linker_info, ctx.label),
        link_infos = LinkInfos(
            default = LinkInfo(
                linkables = [ArchiveLinkable(archive = Archive(artifact = libraries[LinkStyle("static_pic")]), linker_type = linker_type, link_whole = True)],
            ),
        ),
        deps = inherited_non_rust_link_deps,
        graph = deps_linkable_graph,
        create_shared_root = known_omnibus_root,
    )
    providers.append(linkable_root)

    roots = {}
    if known_omnibus_root:
        roots[ctx.label] = AnnotatedLinkableRoot(root = linkable_root)

    linkable_graph = create_linkable_graph(
        ctx,
        node = create_linkable_graph_node(
            ctx,
            linkable_node = create_linkable_node(
                ctx = ctx,
                preferred_linkage = preferred_linkage,
                exported_deps = inherited_non_rust_link_deps,
                link_infos = link_infos,
                shared_libs = solibs,
            ),
            roots = roots,
        ),
        children = [deps_linkable_graph],
    )

    providers.append(linkable_graph)
    providers.append(merge_link_group_lib_info(deps = inherited_non_rust_link_deps))

    return providers
# Compute transitive deps. Caller decides whether this is necessary.
def _compute_transitive_deps(ctx: "context", link_style: LinkStyle.type) -> ({"artifact": None}, {"artifact": None}):
    """Collect the transitive rlib and rmeta artifact sets for `link_style`.

    Dicts with None values are used as ordered sets. Deps without a
    RustLinkInfo provider (non-Rust deps) are ignored.
    """
    rlib_set = {}
    rmeta_set = {}
    for dep in resolve_deps(ctx):
        rust_info = dep.dep.get(RustLinkInfo)
        if rust_info == None:
            continue
        dep_style = style_info(rust_info, link_style)
        rlib_set[dep_style.rlib] = None
        rlib_set.update(dep_style.transitive_deps)
        rmeta_set[dep_style.rmeta] = None
        rmeta_set.update(dep_style.transitive_rmeta_deps)
    return (rlib_set, rmeta_set)

View file

@ -0,0 +1,74 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# @unsorted-dict-items
#
# Toolchain attribute names mapped to their default values. Used both to
# declare the fields of RustToolchainInfo and to substitute defaults for
# unset fields in ctx_toolchain_info().
_rust_toolchain_attrs = {
    # Report unused dependencies
    "report_unused_deps": False,
    # Rustc target triple to use
    # https://doc.rust-lang.org/rustc/platform-support.html
    "rustc_target_triple": None,
    # Baseline compiler config
    "rustc_flags": [],
    # Extra flags when building binaries
    "rustc_binary_flags": [],
    # Extra flags for doing check builds
    "rustc_check_flags": [],
    # Extra flags for doing building tests
    "rustc_test_flags": [],
    # Extra flags for rustdoc invocations
    "rustdoc_flags": [],
    # Use rmeta for lib->lib dependencies, and only block
    # linking on rlib crates. The hope is that rmeta builds
    # are quick and this increases effective parallelism.
    # Currently blocked by https://github.com/rust-lang/rust/issues/85401
    "pipelined": False,
    # Filter out failures when we just need diagnostics. That is,
    # a rule which fails with a compilation failure will report
    # success as an RE action, but a "failure filter" action will
    # report the failure if some downstream action needs one of the
    # artifacts. If all you need is diagnostics, then it will report
    # success. This doubles the number of actions, so it should only
    # be explicitly enabled when needed.
    "failure_filter": False,
    # The Rust compiler (rustc)
    "compiler": None,
    # Rust documentation extractor (rustdoc)
    "rustdoc": None,
    # Clippy (linter) version of the compiler
    "clippy_driver": None,
    # Wrapper for rustc in actions
    "rustc_action": None,
    # Failure filter action
    "failure_filter_action": None,
    # The default edition to use, if not specified.
    "default_edition": None,
    # Lints
    "allow_lints": [],
    "deny_lints": [],
    "warn_lints": [],
    # Prefix (/intern/rustdoc in our case) where fbcode crates' docs are hosted.
    # Used for linking types in signatures to their definition in another crate.
    "extern_html_root_url_prefix": "",
}

# One provider field per toolchain attribute declared above.
RustToolchainInfo = provider(fields = _rust_toolchain_attrs.keys())

# Stores "platform"/flavor name used to resolve *platform_* arguments
RustPlatformInfo = provider(fields = [
    "name",
])
def ctx_toolchain_info(ctx: "context") -> "RustToolchainInfo":
    """Resolve the rule's Rust toolchain, filling unset fields with defaults.

    Any field of the configured RustToolchainInfo that is None is replaced by
    its default from _rust_toolchain_attrs, so callers never see None for an
    attribute that has a default.
    """
    toolchain = ctx.attrs._rust_toolchain[RustToolchainInfo]
    resolved = {}
    for field, default in _rust_toolchain_attrs.items():
        value = getattr(toolchain, field)
        resolved[field] = default if value == None else value
    return RustToolchainInfo(**resolved)

View file

@ -0,0 +1,13 @@
prelude = native

# Wrapper script used to invoke rustc (and rustdoc) from actions.
prelude.python_bootstrap_binary(
    name = "rustc_action",
    main = "rustc_action.py",
    visibility = ["PUBLIC"],
)

# Post-processing action that reports success if the compiler produced the
# artifacts a downstream action needs, even if compilation itself failed.
prelude.python_bootstrap_binary(
    name = "failure_filter_action",
    main = "failure_filter_action.py",
    visibility = ["PUBLIC"],
)

View file

@ -0,0 +1,83 @@
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Implement a "failure filter" - that is, look at the output of a previous
# action and see if it failed with respect to downstream actions which need its
# outputs. This is to allow us to report success if rustc generated the artifact
# we needed (ie diagnostics) even if the compilation itself failed.
import argparse
import json
import os
import shutil
import sys
from typing import IO, List, NamedTuple, Optional, Tuple
class Args(NamedTuple):
    """Parsed command-line arguments for the failure filter action."""

    # Open handle on the build-status json produced by the rustc wrapper.
    build_status: IO[str]
    # (SHORT, INPUT, OUTPUT) triples of files that must exist for success.
    required_file: Optional[List[Tuple[str, str, str]]]
    # Captured stderr from the original action, replayed to our stderr.
    stderr: Optional[IO[str]]
def arg_parse() -> Args:
    """Parse this action's command line into an Args tuple."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--build-status", type=argparse.FileType(), required=True)
    parser.add_argument(
        "--required-file",
        action="append",
        nargs=3,
        metavar=("SHORT", "INPUT", "OUTPUT"),
    )
    parser.add_argument("--stderr", type=argparse.FileType())
    return Args(**vars(parser.parse_args()))
def main() -> int:
    """Replay captured stderr, then copy required outputs into place.

    Returns the original build status if any required file is missing from the
    set rustc actually produced, otherwise 0 (success) regardless of the
    original status.
    """
    args = arg_parse()

    # Surface the original compiler stderr to whoever runs this action.
    if args.stderr:
        sys.stderr.write(args.stderr.read())

    build_status = json.load(args.build_status)

    # Copy all required files to output, and fail with the original exit status
    # if any are missing. (Ideally we could just do the copy by referring to the
    # same underlying CAS object, which would avoid having to move the actual
    # bytes around at all.)
    for short, inp, out in args.required_file or []:
        if short not in build_status["files"]:
            print(
                f"Missing required input file {short} ({inp})",
                file=sys.stderr,
            )
            return build_status["status"]
        try:
            # Try a hard link to avoid unnecessary copies
            os.link(inp, out)
        except OSError:
            # Fall back to real copy if that doesn't work
            shutil.copy(inp, out)

    # If all the required files were present, then success regardless of
    # original status.
    return 0


sys.exit(main())

View file

@ -0,0 +1,302 @@
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under both the MIT license found in the
# LICENSE-MIT file in the root directory of this source tree and the Apache
# License, Version 2.0 found in the LICENSE-APACHE file in the root directory
# of this source tree.
# Wrapper for rustc (or similar, like rustdoc). This wrapper does a few pieces
# of post-processing on the json-formatted diagnostics:
# - (preprocessing) resolve env vars referring to paths to absolute paths
# - write the rendered form to a text diagnostic output, and also to stderr
# - annotate unused crate messages with buck target info for downstream tooling,
# and also generated a rendered version
# - generate a build status json when using failure filtering
#
# This is closely coupled to `_rustc_invoke` in `build.bzl`
import argparse
import asyncio
import json
import os
import platform
import shlex
import subprocess
import sys
from pathlib import Path
from typing import Dict, IO, List, NamedTuple, Optional, Tuple
DEBUG = False
def key_value_arg(s: str) -> Tuple[str, str]:
    """Argparse type: split `key=value` at the first `=` into a pair."""
    key, sep, value = s.partition("=")
    if sep:
        return (key, value)
    raise argparse.ArgumentTypeError(f"expected the form `key=value` for `{s}`")
class Args(NamedTuple):
    """Parsed command-line arguments for the rustc wrapper action."""

    # Sink for json-formatted diagnostics (one json object per line).
    diag_json: Optional[IO[str]]
    # Sink for rendered (human-readable) diagnostics.
    diag_txt: Optional[IO[str]]
    # Extra NAME=VALUE environment entries.
    env: Optional[List[Tuple[str, str]]]
    # NAME=PATH environment entries; paths are resolved to absolute.
    path_env: Optional[List[Tuple[str, str]]]
    # Prefix substituted for the cwd via --remap-path-prefix.
    remap_cwd_prefix: Optional[str]
    # Crate-name -> buck-target map for unused crate diagnostics.
    crate_map: Optional[List[Tuple[str, str]]]
    # This crate's own buck target, for unused crate diagnostics.
    buck_target: Optional[str]
    # Sink for the build-status json when failure filtering is enabled.
    failure_filter: Optional[IO[str]]
    # (SHORT, PATH) pairs of outputs rustc is expected to produce.
    required_output: Optional[List[Tuple[str, str]]]
    # The actual compiler command line (everything after our flags).
    rustc: List[str]
def arg_parse() -> Args:
    """Parse the wrapper's own flags; the remainder is the rustc command."""
    # Command line is <action.py> [args] -- rustc command line
    parser = argparse.ArgumentParser(fromfile_prefix_chars="@")
    parser.add_argument(
        "--diag-json",
        type=argparse.FileType("w"),
        help="Json-formatted diagnostic output "
        "(assumes compiler is invoked with --error-format=json)",
    )
    parser.add_argument(
        "--diag-txt",
        type=argparse.FileType("w"),
        help="Rendered text diagnostic output (also streamed to stderr)",
    )
    parser.add_argument(
        "--env",
        action="append",
        type=key_value_arg,
        metavar="NAME=VALUE",
        help="Set environment",
    )
    parser.add_argument(
        "--path-env",
        action="append",
        type=key_value_arg,
        metavar="NAME=PATH",
        help="Set path environment (to be made absolute)",
    )
    parser.add_argument(
        "--remap-cwd-prefix",
        help="Remap paths under the current working directory to this path prefix",
    )
    parser.add_argument(
        "--crate-map",
        action="append",
        type=key_value_arg,
        metavar="CRATE=TARGET",
        help="Crate name to target map for unused crate diagnostics",
    )
    parser.add_argument(
        "--buck-target",
        help="Buck target for crate, used for unused crate diagnostics",
    )
    parser.add_argument(
        "--failure-filter",
        type=argparse.FileType(mode="w"),
        help="Consider a failure as success so long as we got some usable diagnostics",
        metavar="build-status.json",
    )
    parser.add_argument(
        "--required-output",
        action="append",
        nargs=2,
        metavar=("SHORT", "PATH"),
        help="Required output path we expect rustc to generate "
        "(and filled with a placeholder on a filtered failure)",
    )
    # REMAINDER: everything after the wrapper's flags is the compiler invocation.
    parser.add_argument(
        "rustc",
        nargs=argparse.REMAINDER,
        type=str,
        help="Compiler command line",
    )

    return Args(**vars(parser.parse_args()))
async def handle_output(  # noqa: C901
    proc: asyncio.subprocess.Process,
    args: Args,
    crate_map: Dict[str, str],
) -> bool:
    """Stream rustc's stderr, post-processing json diagnostics.

    Non-json lines are passed through to our stderr unchanged. Json
    diagnostics are written to args.diag_json (if set), and their rendered
    form to args.diag_txt (if set) as well as stderr. Unused-crate warnings
    are annotated with buck target info from crate_map.

    Returns True if an error-level diagnostic (or a deny/forbid-level
    unused-crate warning) was seen.
    """
    got_error_diag = False

    proc_stderr = proc.stderr
    assert proc_stderr is not None

    while True:
        line = await proc_stderr.readline()

        # EOF: rustc closed its stderr.
        if line is None or line == b"":
            break

        try:
            diag = json.loads(line)
        except json.JSONDecodeError:
            sys.stderr.buffer.write(line + b"\n")  # Passthrough
            continue

        if DEBUG:
            print(f"diag={repr(diag)}")

        if diag.get("level") == "error":
            got_error_diag = True

        # Add more information to unused crate warnings
        unused_names = diag.get("unused_extern_names", None)

        if unused_names:
            # Treat error-level unused dep warnings as errors
            if diag.get("lint_level") in ("deny", "forbid"):
                got_error_diag = True

            if args.buck_target:
                # Map each unused crate name back to its buck target (when
                # known) so downstream tooling can act on it.
                rendered_unused = []
                for name in unused_names:
                    if name in crate_map:
                        rendered_unused.append("{}: {}".format(crate_map[name], name))
                    else:
                        rendered_unused.append("{}".format(name))
                rendered_unused.sort()
                rendered_unused = "\n ".join(rendered_unused)

                diag["buck_target"] = args.buck_target
                diag["rendered"] = (
                    f"Target `{args.buck_target}` has unused dependencies:\n"
                    f" {rendered_unused}"
                )
                diag["unused_deps"] = {
                    name: crate_map[name] for name in unused_names if name in crate_map
                }

        # Emit json
        if args.diag_json:
            args.diag_json.write(json.dumps(diag, separators=(",", ":")) + "\n")

        # Emit rendered text version
        if "rendered" in diag:
            rendered = diag["rendered"] + "\n"
            if args.diag_txt:
                args.diag_txt.write(rendered)
            sys.stderr.write(rendered)

    return got_error_diag
async def main() -> int:
    """Run rustc with a scrubbed environment and post-process its output.

    Returns the exit status to report to buck: rustc's status, forced to 1 on
    a silent error diagnostic, or 0 under failure filtering when usable
    diagnostics were produced.
    """
    args = arg_parse()

    # Inherit a very limited initial environment, then add the new things
    env = {
        k: os.environ[k]
        for k in [
            "RUSTC_LOG",
            "RUST_BACKTRACE",
            "PATH",
            "PWD",
            "HOME",
            "TMPDIR",
            # Required on Windows
            "TEMP",
            # TODO(andirauter): Required by RE. Remove them when no longer required T119466023
            "EXECUTION_ID",
            "SESSION_ID",
            "CAS_DAEMON_PORT",
            # Required by Dotslash, which is how the Rust toolchain is shipped
            # on Mac.
            "USER",
            "DOTSLASH_CACHE",
        ]
        if k in os.environ
    }
    if args.env:
        env.update(dict(args.env))
    if args.path_env:
        # Path env vars are made absolute so rustc's cwd doesn't matter.
        env.update({k: str(Path(v).resolve()) for k, v in args.path_env})

    crate_map = dict(args.crate_map) if args.crate_map else {}

    if DEBUG:
        print(f"args {repr(args)} env {env} crate_map {crate_map}")

    # `tools/build_defs/rustc_plugin_platform.bzl` hardcodes building
    # proc-macros for x86-64 because we have no way of knowing what
    # architecture the rustc using them is going to eventually run on.
    # Since it's possible that we're on Apple Silicon, force rustc
    # to run under Rosetta so proc-macros are compatible.
    if sys.platform == "darwin" and platform.machine() == "arm64":
        rustc_cmd = ["arch", "-x86_64"] + args.rustc
    else:
        rustc_cmd = args.rustc

    if args.remap_cwd_prefix is not None:
        rustc_cmd.append(
            "--remap-path-prefix={}={}".format(os.getcwd(), args.remap_cwd_prefix)
        )

    # Kick off the action
    proc = await asyncio.create_subprocess_exec(
        *rustc_cmd,
        env=env,
        stdin=subprocess.DEVNULL,
        stdout=None,  # Inherit
        stderr=subprocess.PIPE,
        limit=1_000_000,
    )
    got_error_diag = await handle_output(proc, args, crate_map)
    res = await proc.wait()

    if DEBUG:
        print(
            f"res={repr(res)} "
            f"got_error_diag={got_error_diag} "
            f"args.failure_filter {args.failure_filter}"
        )

    # If rustc is reporting a silent error, make it loud
    if res == 0 and got_error_diag:
        res = 1

    # Check for death by signal - this is always considered a failure
    if res < 0:
        cmdline = " ".join(shlex.quote(arg) for arg in args.rustc)
        print(
            f"Command exited with signal {-res}: command line: {cmdline}",
            file=sys.stderr,
        )
    elif args.failure_filter:
        # If failure filtering is enabled, then getting an error diagnostic is also
        # considered a success. That is, if rustc exited with an error status, but
        # we saw a fatal error diagnostic, we can still report a zero exit status.
        # This still means we had an error if someone wanted one of the other output
        # artifacts as an input, but the failure filter action will handle that by
        # examining the build_status json output.
        required_output = args.required_output or []

        # We need to create a build status output, including the list of output
        # files which were *actually* created. We use the short paths as the
        # logical filename rather than the actual full path, since that won't
        # mean much to a downstream action.
        build_status = {
            "status": res,
            "files": [short for short, path in required_output if Path(path).exists()],
        }
        json.dump(build_status, args.failure_filter)

        # OK to actually report success, but keep buck happy by making sure all
        # the required outputs are present
        if got_error_diag and res != 0:
            for _short, path in required_output:
                path = Path(path)
                if not path.exists():
                    # Placeholder so buck sees every declared output.
                    path.touch()
            res = 0

    return res


sys.exit(asyncio.run(main()))