| //! Implementation of bootstrap, the Rust build system. |
| //! |
| //! This module, and its descendants, are the implementation of the Rust build |
| //! system. Most of this build system is backed by Cargo but the outer layer |
| //! here serves as the ability to orchestrate calling Cargo, sequencing Cargo |
| //! builds, building artifacts like LLVM, etc. The goals of bootstrap are: |
| //! |
| //! * To be an easily understandable, easily extensible, and maintainable build |
| //! system. |
| //! * Leverage standard tools in the Rust ecosystem to build the compiler, aka |
| //! crates.io and Cargo. |
//! * A standard interface to build across all platforms, including MSVC.
| //! |
| //! ## Further information |
| //! |
| //! More documentation can be found in each respective module below, and you can |
| //! also check out the `src/bootstrap/README.md` file for more information. |
| #![cfg_attr(test, allow(unused))] |
| |
| use std::cell::Cell; |
| use std::collections::{BTreeSet, HashMap, HashSet}; |
| use std::fmt::Display; |
| use std::path::{Path, PathBuf}; |
| use std::sync::OnceLock; |
| use std::time::{Instant, SystemTime}; |
| use std::{env, fs, io, str}; |
| |
| use build_helper::ci::gha; |
| use build_helper::exit; |
| use cc::Tool; |
| use termcolor::{ColorChoice, StandardStream, WriteColor}; |
| use utils::build_stamp::BuildStamp; |
| use utils::channel::GitInfo; |
| use utils::exec::ExecutionContext; |
| |
| use crate::core::builder; |
| use crate::core::builder::Kind; |
| use crate::core::config::{BootstrapOverrideLld, DryRun, LlvmLibunwind, TargetSelection, flags}; |
| use crate::utils::exec::{BootstrapCommand, command}; |
| use crate::utils::helpers::{self, dir_is_empty, exe, libdir, set_file_times, split_debuginfo}; |
| |
| mod core; |
| mod utils; |
| |
| pub use core::builder::PathSet; |
| #[cfg(feature = "tracing")] |
| pub use core::builder::STEP_SPAN_TARGET; |
| pub use core::config::flags::{Flags, Subcommand}; |
| pub use core::config::{ChangeId, Config}; |
| |
| #[cfg(feature = "tracing")] |
| use tracing::{instrument, span}; |
| pub use utils::change_tracker::{ |
| CONFIG_CHANGE_HISTORY, find_recent_config_change_ids, human_readable_changes, |
| }; |
| pub use utils::helpers::{PanicTracker, symlink_dir}; |
| #[cfg(feature = "tracing")] |
| pub use utils::tracing::setup_tracing; |
| |
| use crate::core::build_steps::vendor::VENDOR_DIR; |
| |
/// Names of the LLVM tools that bootstrap handles; each entry's comment
/// describes what the tool is used for.
const LLVM_TOOLS: &[&str] = &[
    "llvm-cov", // used to generate coverage report
    "llvm-nm", // used to inspect binaries; it shows symbol names, their sizes and visibility
    "llvm-objcopy", // used to transform ELFs into binary format which flashing tools consume
    "llvm-objdump", // used to disassemble programs
    "llvm-profdata", // used to inspect and merge files generated by profiles
    "llvm-readobj", // used to get information from ELFs/objects that the other tools don't provide
    "llvm-size", // used to print the size of the linker sections of a program
    "llvm-strip", // used to discard symbols from binary files to reduce their size
    "llvm-ar", // used for creating and modifying archive files
    "llvm-as", // used to convert LLVM assembly to LLVM bitcode
    "llvm-dis", // used to disassemble LLVM bitcode
    "llvm-link", // used to link LLVM bitcode
    "llc", // used to compile LLVM bitcode
    "opt", // used to optimize LLVM bitcode
];
| |
/// LLD file names for all flavors (GNU ld, Darwin ld64, MSVC link.exe, and
/// wasm-ld, respectively).
const LLD_FILE_NAMES: &[&str] = &["ld.lld", "ld64.lld", "lld-link", "wasm-ld"];
| |
/// Extra `--check-cfg` to add when building the compiler or tools
/// (Mode restriction, config name, config values (if any)).
///
/// A `None` mode would apply to all modes; every entry here is currently
/// restricted to a specific [`Mode`].
#[expect(clippy::type_complexity)] // It's fine for hard-coded list and type is explained above.
const EXTRA_CHECK_CFGS: &[(Option<Mode>, &str, Option<&[&'static str]>)] = &[
    (Some(Mode::Rustc), "bootstrap", None),
    (Some(Mode::Codegen), "bootstrap", None),
    (Some(Mode::ToolRustcPrivate), "bootstrap", None),
    (Some(Mode::ToolStd), "bootstrap", None),
    (Some(Mode::ToolRustcPrivate), "rust_analyzer", None),
    (Some(Mode::ToolStd), "rust_analyzer", None),
    // Any library specific cfgs like `target_os`, `target_arch` should be put in
    // priority the `[lints.rust.unexpected_cfgs.check-cfg]` table
    // in the appropriate `library/{std,alloc,core}/Cargo.toml`
];
| |
/// A structure representing a Rust compiler.
///
/// Each compiler has a `stage` that it is associated with and a `host` that
/// corresponds to the platform the compiler runs on. This structure is used as
/// a parameter to many methods below.
#[derive(Eq, PartialOrd, Ord, Clone, Copy, Debug)]
pub struct Compiler {
    /// Build stage of this compiler.
    stage: u32,
    /// Triple of the platform this compiler runs on.
    host: TargetSelection,
    /// Indicates whether the compiler was forced to use a specific stage.
    /// This field is ignored in `Hash` and `PartialEq` implementations as only the `stage`
    /// and `host` fields are relevant for those.
    forced_compiler: bool,
}
| |
| impl std::hash::Hash for Compiler { |
| fn hash<H: std::hash::Hasher>(&self, state: &mut H) { |
| self.stage.hash(state); |
| self.host.hash(state); |
| } |
| } |
| |
| impl PartialEq for Compiler { |
| fn eq(&self, other: &Self) -> bool { |
| self.stage == other.stage && self.host == other.host |
| } |
| } |
| |
/// Represents a codegen backend.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)]
pub enum CodegenBackendKind {
    /// The default LLVM-based codegen backend.
    #[default]
    Llvm,
    /// The Cranelift codegen backend.
    Cranelift,
    /// The GCC codegen backend.
    Gcc,
    /// A custom backend identified by its name (see [`CodegenBackendKind::name`]).
    Custom(String),
}
| |
| impl CodegenBackendKind { |
| /// Name of the codegen backend, as identified in the `compiler` directory |
| /// (`rustc_codegen_<name>`). |
| pub fn name(&self) -> &str { |
| match self { |
| CodegenBackendKind::Llvm => "llvm", |
| CodegenBackendKind::Cranelift => "cranelift", |
| CodegenBackendKind::Gcc => "gcc", |
| CodegenBackendKind::Custom(name) => name, |
| } |
| } |
| |
| /// Name of the codegen backend's crate, e.g. `rustc_codegen_cranelift`. |
| pub fn crate_name(&self) -> String { |
| format!("rustc_codegen_{}", self.name()) |
| } |
| |
| pub fn is_llvm(&self) -> bool { |
| matches!(self, Self::Llvm) |
| } |
| |
| pub fn is_cranelift(&self) -> bool { |
| matches!(self, Self::Cranelift) |
| } |
| |
| pub fn is_gcc(&self) -> bool { |
| matches!(self, Self::Gcc) |
| } |
| } |
| |
| impl std::str::FromStr for CodegenBackendKind { |
| type Err = &'static str; |
| |
| fn from_str(s: &str) -> Result<Self, Self::Err> { |
| match s.to_lowercase().as_str() { |
| "" => Err("Invalid empty backend name"), |
| "gcc" => Ok(Self::Gcc), |
| "llvm" => Ok(Self::Llvm), |
| "cranelift" => Ok(Self::Cranelift), |
| _ => Ok(Self::Custom(s.to_string())), |
| } |
| } |
| } |
| |
/// Selects which kinds of tests are run.
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub enum DocTests {
    /// Run normal tests and doc tests (default).
    Yes,
    /// Do not run any doc tests.
    No,
    /// Only run doc tests.
    Only,
}
| |
/// Identifies which Git repository some sources belong to.
pub enum GitRepo {
    /// The main `rust` repository.
    Rustc,
    /// The LLVM repository.
    Llvm,
}
| |
/// Global configuration for the build system.
///
/// This structure transitively contains all configuration for the build system.
/// All filesystem-encoded configuration is in `config`, all flags are in
/// `flags`, and then parsed or probed information is listed in the keys below.
///
/// This structure is a parameter of almost all methods in the build system,
/// although most functions are implemented as free functions rather than
/// methods specifically on this structure itself (to make it easier to
/// organize).
pub struct Build {
    /// User-specified configuration from `bootstrap.toml`.
    config: Config,

    // Version information
    /// Contents of `src/version` (trimmed).
    version: String,

    // Properties derived from the above configuration
    /// Root of the source tree.
    src: PathBuf,
    /// Root of the build output directory.
    out: PathBuf,
    /// Directory containing the running bootstrap executable itself.
    bootstrap_out: PathBuf,
    cargo_info: GitInfo,
    rust_analyzer_info: GitInfo,
    clippy_info: GitInfo,
    miri_info: GitInfo,
    rustfmt_info: GitInfo,
    enzyme_info: GitInfo,
    in_tree_llvm_info: GitInfo,
    in_tree_gcc_info: GitInfo,
    /// Whether the stage0 compiler has the same `major.minor` release as the
    /// in-tree version (detected in `Build::new`).
    local_rebuild: bool,
    fail_fast: bool,
    doc_tests: DocTests,
    verbosity: usize,

    /// Build triple for the pre-compiled snapshot compiler.
    host_target: TargetSelection,
    /// Which triples to produce a compiler toolchain for.
    hosts: Vec<TargetSelection>,
    /// Which triples to build libraries (core/alloc/std/test/proc_macro) for.
    targets: Vec<TargetSelection>,

    // Paths into the stage0 (initial) toolchain.
    initial_rustc: PathBuf,
    initial_rustdoc: PathBuf,
    initial_cargo: PathBuf,
    initial_lld: PathBuf,
    initial_relative_libdir: PathBuf,
    initial_sysroot: PathBuf,

    // Runtime state filled in later on
    // C/C++ compilers and archiver for all targets
    cc: HashMap<TargetSelection, cc::Tool>,
    cxx: HashMap<TargetSelection, cc::Tool>,
    ar: HashMap<TargetSelection, PathBuf>,
    ranlib: HashMap<TargetSelection, PathBuf>,
    /// Value of the `WASI_SDK_PATH` environment variable, if set.
    wasi_sdk_path: Option<PathBuf>,

    // Miscellaneous
    // allow bidirectional lookups: both name -> path and path -> name
    crates: HashMap<String, Crate>,
    crate_paths: HashMap<PathBuf, String>,
    /// Whether bootstrap runs as root under `sudo` (Unix only; always `false` elsewhere).
    is_sudo: bool,
    /// Lazily computed, cached pre-release version number.
    prerelease_version: Cell<Option<u32>>,

    #[cfg(feature = "build-metrics")]
    metrics: crate::utils::metrics::BuildMetrics,

    #[cfg(feature = "tracing")]
    step_graph: std::cell::RefCell<crate::utils::step_graph::StepGraph>,
}
| |
/// Metadata about a single in-tree crate, gathered from Cargo metadata.
#[derive(Debug, Clone)]
struct Crate {
    // Crate (package) name.
    name: String,
    // Names of the crates this one depends on.
    deps: HashSet<String>,
    // Absolute path to the crate's directory.
    path: PathBuf,
    // Cargo features declared by this crate.
    features: Vec<String>,
}
| |
| impl Crate { |
| fn local_path(&self, build: &Build) -> PathBuf { |
| self.path.strip_prefix(&build.config.src).unwrap().into() |
| } |
| } |
| |
/// When building Rust various objects are handled differently.
///
/// Classifies where a dependency artifact belongs when staging build output.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum DependencyType {
    /// Libraries originating from proc-macros.
    Host,
    /// Typical Rust libraries.
    Target,
    /// Non Rust libraries and objects shipped to ease usage of certain targets.
    TargetSelfContained,
}
| |
/// The various "modes" of invoking Cargo.
///
/// These entries currently correspond to the various output directories of the
/// build system, with each mode generating output in a different directory.
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
pub enum Mode {
    /// Build the standard library, placing output in the "stageN-std" directory.
    Std,

    /// Build librustc, and compiler libraries, placing output in the "stageN-rustc" directory.
    Rustc,

    /// Build a codegen backend for rustc, placing the output in the "stageN-codegen" directory.
    Codegen,

    /// Build a tool, placing output in the "bootstrap-tools"
    /// directory. This is for miscellaneous sets of tools that extend
    /// bootstrap.
    ///
    /// These tools are intended to be only executed on the host system that
    /// invokes bootstrap, and they thus cannot be cross-compiled.
    ///
    /// They are always built using the stage0 compiler, and they
    /// can be compiled with stable Rust.
    ///
    /// These tools also essentially do not participate in staging.
    ToolBootstrap,

    /// Build a cross-compilable helper tool. These tools do not depend on unstable features or
    /// compiler internals, but they might be cross-compilable (so we cannot build them using the
    /// stage0 compiler, unlike `ToolBootstrap`).
    ///
    /// Some of these tools are also shipped in our `dist` archives.
    /// While we could compile them using the stage0 compiler when not cross-compiling, we instead
    /// use the in-tree compiler (and std) to build them, so that we can ship e.g. std security
    /// fixes and avoid depending fully on stage0 for the artifacts that we ship.
    ///
    /// This mode is used e.g. for linkers and linker tools invoked by rustc on its host target.
    ToolTarget,

    /// Build a tool which uses the locally built std, placing output in the
    /// "stageN-tools" directory. Its usage is quite rare; historically it was
    /// needed by compiletest, but now it is mainly used by `test-float-parse`.
    ToolStd,

    /// Build a tool which uses the `rustc_private` mechanism, and thus
    /// the locally built rustc rlib artifacts,
    /// placing the output in the "stageN-tools" directory. This is used for
    /// everything that links to rustc as a library, such as rustdoc, clippy,
    /// rustfmt, miri, etc.
    ToolRustcPrivate,
}
| |
| impl Mode { |
| pub fn is_tool(&self) -> bool { |
| match self { |
| Mode::ToolBootstrap | Mode::ToolRustcPrivate | Mode::ToolStd | Mode::ToolTarget => true, |
| Mode::Std | Mode::Codegen | Mode::Rustc => false, |
| } |
| } |
| |
| pub fn must_support_dlopen(&self) -> bool { |
| match self { |
| Mode::Std | Mode::Codegen => true, |
| Mode::ToolBootstrap |
| | Mode::ToolRustcPrivate |
| | Mode::ToolStd |
| | Mode::ToolTarget |
| | Mode::Rustc => false, |
| } |
| } |
| } |
| |
/// When `rust.rust_remap_debuginfo` is requested, the compiler needs to know how to
/// opportunistically unremap compiler vs non-compiler sources. We use two schemes,
/// [`RemapScheme::Compiler`] and [`RemapScheme::NonCompiler`].
pub enum RemapScheme {
    /// The [`RemapScheme::Compiler`] scheme will remap to `/rustc-dev/{hash}`.
    Compiler,
    /// The [`RemapScheme::NonCompiler`] scheme will remap to `/rustc/{hash}`.
    NonCompiler,
}
| |
/// Distinguishes the C family language being compiled.
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
pub enum CLang {
    /// Plain C.
    C,
    /// C++.
    Cxx,
}
| |
/// Classifies a file for installation/copying purposes (see [`FileType::perms`]
/// and [`FileType::could_have_split_debuginfo`]).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FileType {
    /// An executable binary file (like a `.exe`).
    Executable,
    /// A native, binary library file (like a `.so`, `.dll`, `.a`, `.lib` or `.o`).
    NativeLibrary,
    /// An executable (non-binary) script file (like a `.py` or `.sh`).
    Script,
    /// Any other regular file that is non-executable.
    Regular,
}
| |
| impl FileType { |
| /// Get Unix permissions appropriate for this file type. |
| pub fn perms(self) -> u32 { |
| match self { |
| FileType::Executable | FileType::Script => 0o755, |
| FileType::Regular | FileType::NativeLibrary => 0o644, |
| } |
| } |
| |
| pub fn could_have_split_debuginfo(self) -> bool { |
| match self { |
| FileType::Executable | FileType::NativeLibrary => true, |
| FileType::Script | FileType::Regular => false, |
| } |
| } |
| } |
| |
/// Generates thin forwarding methods on `Build` that delegate to the method of
/// the same name (and signature) on `self.config`.
macro_rules! forward {
    ( $( $fn:ident( $($param:ident: $ty:ty),* ) $( -> $ret:ty)? ),+ $(,)? ) => {
        impl Build {
            $( fn $fn(&self, $($param: $ty),* ) $( -> $ret)? {
                self.config.$fn( $($param),* )
            } )+
        }
    }
}

// `Config` methods commonly accessed through a `Build` reference.
forward! {
    do_if_verbose(f: impl Fn()),
    is_verbose() -> bool,
    create(path: &Path, s: &str),
    remove(f: &Path),
    tempdir() -> PathBuf,
    llvm_link_shared() -> bool,
    download_rustc() -> bool,
}
| |
/// An alternative way of specifying what target and stage is involved in some bootstrap activity.
/// Ideally using a `Compiler` directly should be preferred.
struct TargetAndStage {
    // Target triple involved in the activity.
    target: TargetSelection,
    // Build stage involved in the activity.
    stage: u32,
}
| |
| impl From<(TargetSelection, u32)> for TargetAndStage { |
| fn from((target, stage): (TargetSelection, u32)) -> Self { |
| Self { target, stage } |
| } |
| } |
| |
| impl From<Compiler> for TargetAndStage { |
| fn from(compiler: Compiler) -> Self { |
| Self { target: compiler.host, stage: compiler.stage } |
| } |
| } |
| |
| impl Build { |
    /// Creates a new set of build configuration from the `flags` on the command
    /// line and the filesystem `config`.
    ///
    /// By default all build output will be placed in the current directory.
    ///
    /// # Panics
    ///
    /// Panics if the stage0 toolchain layout cannot be resolved, `src/version`
    /// cannot be read, or (outside of tests) `rustc` is missing next to the
    /// bootstrap executable.
    pub fn new(mut config: Config) -> Build {
        let src = config.src.clone();
        let out = config.out.clone();

        #[cfg(unix)]
        // keep this consistent with the equivalent check in x.py:
        // https://github.com/rust-lang/rust/blob/a8a33cf27166d3eabaffc58ed3799e054af3b0c6/src/bootstrap/bootstrap.py#L796-L797
        let is_sudo = match env::var_os("SUDO_USER") {
            Some(_sudo_user) => {
                // SAFETY: getuid() system call is always successful and no return value is reserved
                // to indicate an error.
                //
                // For more context, see https://man7.org/linux/man-pages/man2/geteuid.2.html
                let uid = unsafe { libc::getuid() };
                uid == 0
            }
            None => false,
        };
        #[cfg(not(unix))]
        let is_sudo = false;

        let rust_info = config.rust_info.clone();
        let cargo_info = config.cargo_info.clone();
        let rust_analyzer_info = config.rust_analyzer_info.clone();
        let clippy_info = config.clippy_info.clone();
        let miri_info = config.miri_info.clone();
        let rustfmt_info = config.rustfmt_info.clone();
        let enzyme_info = config.enzyme_info.clone();
        let in_tree_llvm_info = config.in_tree_llvm_info.clone();
        let in_tree_gcc_info = config.in_tree_gcc_info.clone();

        // Ask the stage0 rustc where its target libdir lives; several initial
        // paths below are derived from it.
        let initial_target_libdir = command(&config.initial_rustc)
            .run_in_dry_run()
            .args(["--print", "target-libdir"])
            .run_capture_stdout(&config)
            .stdout()
            .trim()
            .to_owned();

        let initial_target_dir = Path::new(&initial_target_libdir)
            .parent()
            .unwrap_or_else(|| panic!("{initial_target_libdir} has no parent"));

        let initial_lld = initial_target_dir.join("bin").join("rust-lld");

        let initial_relative_libdir = if cfg!(test) {
            // On tests, bootstrap uses the shim rustc, not the one from the stage0 toolchain.
            PathBuf::default()
        } else {
            // Walk up from `<sysroot>/lib/rustlib/<triple>` to `<sysroot>/lib`,
            // then express it relative to the sysroot.
            let ancestor = initial_target_dir.ancestors().nth(2).unwrap_or_else(|| {
                panic!("Not enough ancestors for {}", initial_target_dir.display())
            });

            ancestor
                .strip_prefix(&config.initial_sysroot)
                .unwrap_or_else(|_| {
                    panic!(
                        "Couldn’t resolve the initial relative libdir from {}",
                        initial_target_dir.display()
                    )
                })
                .to_path_buf()
        };

        let version = std::fs::read_to_string(src.join("src").join("version"))
            .expect("failed to read src/version");
        let version = version.trim();

        let mut bootstrap_out = std::env::current_exe()
            .expect("could not determine path to running process")
            .parent()
            .unwrap()
            .to_path_buf();
        // Since bootstrap is hardlink to deps/bootstrap-*, Solaris can sometimes give
        // path with deps/ which is bad and needs to be avoided.
        if bootstrap_out.ends_with("deps") {
            bootstrap_out.pop();
        }
        if !bootstrap_out.join(exe("rustc", config.host_target)).exists() && !cfg!(test) {
            // this restriction can be lifted whenever https://github.com/rust-lang/rfcs/pull/3028 is implemented
            panic!(
                "`rustc` not found in {}, run `cargo build --bins` before `cargo run`",
                bootstrap_out.display()
            )
        }

        if rust_info.is_from_tarball() && config.description.is_none() {
            config.description = Some("built from a source tarball".to_owned());
        }

        let mut build = Build {
            initial_lld,
            initial_relative_libdir,
            initial_rustc: config.initial_rustc.clone(),
            // rustdoc is expected to sit next to the stage0 rustc binary.
            initial_rustdoc: config
                .initial_rustc
                .with_file_name(exe("rustdoc", config.host_target)),
            initial_cargo: config.initial_cargo.clone(),
            initial_sysroot: config.initial_sysroot.clone(),
            local_rebuild: config.local_rebuild,
            fail_fast: config.cmd.fail_fast(),
            doc_tests: config.cmd.doc_tests(),
            verbosity: config.exec_ctx.verbosity as usize,

            host_target: config.host_target,
            hosts: config.hosts.clone(),
            targets: config.targets.clone(),

            config,
            version: version.to_string(),
            src,
            out,
            bootstrap_out,

            cargo_info,
            rust_analyzer_info,
            clippy_info,
            miri_info,
            rustfmt_info,
            enzyme_info,
            in_tree_llvm_info,
            in_tree_gcc_info,
            cc: HashMap::new(),
            cxx: HashMap::new(),
            ar: HashMap::new(),
            ranlib: HashMap::new(),
            wasi_sdk_path: env::var_os("WASI_SDK_PATH").map(PathBuf::from),
            crates: HashMap::new(),
            crate_paths: HashMap::new(),
            is_sudo,
            prerelease_version: Cell::new(None),

            #[cfg(feature = "build-metrics")]
            metrics: crate::utils::metrics::BuildMetrics::init(),

            #[cfg(feature = "tracing")]
            step_graph: std::cell::RefCell::new(crate::utils::step_graph::StepGraph::default()),
        };

        // If local-rust is the same major.minor as the current version, then force a
        // local-rebuild
        let local_version_verbose = command(&build.initial_rustc)
            .run_in_dry_run()
            .args(["--version", "--verbose"])
            .run_capture_stdout(&build)
            .stdout();
        // Pull the `release: X.Y.Z...` line out of `rustc --version --verbose`.
        let local_release = local_version_verbose
            .lines()
            .filter_map(|x| x.strip_prefix("release:"))
            .next()
            .unwrap()
            .trim();
        if local_release.split('.').take(2).eq(version.split('.').take(2)) {
            build.do_if_verbose(|| println!("auto-detected local-rebuild {local_release}"));
            build.local_rebuild = true;
        }

        build.do_if_verbose(|| println!("finding compilers"));
        utils::cc_detect::fill_compilers(&mut build);
        // When running `setup`, the profile is about to change, so any requirements we have now may
        // be different on the next invocation. Don't check for them until the next time x.py is
        // run. This is ok because `setup` never runs any build commands, so it won't fail if commands are missing.
        //
        // Similarly, for `setup` we don't actually need submodules or cargo metadata.
        if !matches!(build.config.cmd, Subcommand::Setup { .. }) {
            build.do_if_verbose(|| println!("running sanity check"));
            crate::core::sanity::check(&mut build);

            // Make sure we update these before gathering metadata so we don't get an error about missing
            // Cargo.toml files.
            let rust_submodules = ["library/backtrace"];
            for s in rust_submodules {
                build.require_submodule(
                    s,
                    Some(
                        "The submodule is required for the standard library \
                         and the main Cargo workspace.",
                    ),
                );
            }
            // Now, update all existing submodules.
            build.update_existing_submodules();

            build.do_if_verbose(|| println!("learning about cargo"));
            crate::core::metadata::build(&mut build);
        }

        // Create symbolic link to use host sysroot from a consistent path (e.g., in the rust-analyzer config file).
        let build_triple = build.out.join(build.host_target);
        t!(fs::create_dir_all(&build_triple));
        let host = build.out.join("host");
        if host.is_symlink() {
            // Left over from a previous build; overwrite it.
            // This matters if `build.build` has changed between invocations.
            #[cfg(windows)]
            t!(fs::remove_dir(&host));
            #[cfg(not(windows))]
            t!(fs::remove_file(&host));
        }
        t!(
            symlink_dir(&build.config, &build_triple, &host),
            format!("symlink_dir({} => {}) failed", host.display(), build_triple.display())
        );

        build
    }
| |
    /// Updates a submodule, and exits with a failure if submodule management
    /// is disabled and the submodule does not exist.
    ///
    /// The given submodule name should be its path relative to the root of
    /// the main repository.
    ///
    /// The given `err_hint` will be shown to the user if the submodule is not
    /// checked out and submodule management is disabled.
    #[cfg_attr(
        feature = "tracing",
        instrument(
            level = "trace",
            name = "Build::require_submodule",
            skip_all,
            fields(submodule = submodule),
        ),
    )]
    pub fn require_submodule(&self, submodule: &str, err_hint: Option<&str>) {
        // Source tarballs ship with all submodule sources included, so there
        // is nothing to check out.
        if self.rust_info().is_from_tarball() {
            return;
        }

        // When testing bootstrap itself, it is much faster to ignore
        // submodules. Almost all Steps work fine without their submodules.
        if cfg!(test) && !self.config.submodules() {
            return;
        }
        self.config.update_submodule(submodule);
        let absolute_path = self.config.src.join(submodule);
        // An empty directory counts as "not checked out": git leaves empty
        // directories behind for uninitialized submodules.
        if !absolute_path.exists() || dir_is_empty(&absolute_path) {
            let maybe_enable = if !self.config.submodules()
                && self.config.rust_info.is_managed_git_subrepository()
            {
                "\nConsider setting `build.submodules = true` or manually initializing the submodules."
            } else {
                ""
            };
            let err_hint = err_hint.map_or_else(String::new, |e| format!("\n{e}"));
            eprintln!(
                "submodule {submodule} does not appear to be checked out, \
                 but it is required for this step{maybe_enable}{err_hint}"
            );
            exit!(1);
        }
    }
| |
| /// If any submodule has been initialized already, sync it unconditionally. |
| /// This avoids contributors checking in a submodule change by accident. |
| fn update_existing_submodules(&self) { |
| // Avoid running git when there isn't a git checkout, or the user has |
| // explicitly disabled submodules in `bootstrap.toml`. |
| if !self.config.submodules() { |
| return; |
| } |
| let output = helpers::git(Some(&self.src)) |
| .args(["config", "--file"]) |
| .arg(".gitmodules") |
| .args(["--get-regexp", "path"]) |
| .run_capture(self) |
| .stdout(); |
| std::thread::scope(|s| { |
| // Look for `submodule.$name.path = $path` |
| // Sample output: `submodule.src/rust-installer.path src/tools/rust-installer` |
| for line in output.lines() { |
| let submodule = line.split_once(' ').unwrap().1; |
| let config = self.config.clone(); |
| s.spawn(move || { |
| Self::update_existing_submodule(&config, submodule); |
| }); |
| } |
| }); |
| } |
| |
| /// Updates the given submodule only if it's initialized already; nothing happens otherwise. |
| pub fn update_existing_submodule(config: &Config, submodule: &str) { |
| // Avoid running git when there isn't a git checkout. |
| if !config.submodules() { |
| return; |
| } |
| |
| if config.git_info(false, Path::new(submodule)).is_managed_git_subrepository() { |
| config.update_submodule(submodule); |
| } |
| } |
| |
    /// Executes the entire build, as configured by the flags and configuration.
    ///
    /// `format` and `perf` subcommands short-circuit here; everything else
    /// first performs a dry-run self-check and then the actual run (unless a
    /// dry run was requested, in which case only the dry run happens).
    #[cfg_attr(feature = "tracing", instrument(level = "debug", name = "Build::build", skip_all))]
    pub fn build(&mut self) {
        trace!("setting up job management");
        unsafe {
            crate::utils::job::setup(self);
        }

        // Handle hard-coded subcommands.
        {
            #[cfg(feature = "tracing")]
            let _hardcoded_span =
                span!(tracing::Level::DEBUG, "handling hardcoded subcommands (Format, Perf)")
                    .entered();

            match &self.config.cmd {
                Subcommand::Format { check, all } => {
                    return core::build_steps::format::format(
                        &builder::Builder::new(self),
                        *check,
                        *all,
                        &self.config.paths,
                    );
                }
                Subcommand::Perf(args) => {
                    return core::build_steps::perf::perf(&builder::Builder::new(self), args);
                }
                _cmd => {
                    debug!(cmd = ?_cmd, "not a hardcoded subcommand; returning to normal handling");
                }
            }

            debug!("handling subcommand normally");
        }

        if !self.config.dry_run() {
            #[cfg(feature = "tracing")]
            let _real_run_span = span!(tracing::Level::DEBUG, "executing real run").entered();

            // We first do a dry-run. This is a sanity-check to ensure that
            // steps don't do anything expensive in the dry-run.
            {
                #[cfg(feature = "tracing")]
                let _sanity_check_span =
                    span!(tracing::Level::DEBUG, "(1) executing dry-run sanity-check").entered();
                self.config.set_dry_run(DryRun::SelfCheck);
                let builder = builder::Builder::new(self);
                builder.execute_cli();
            }

            // Actual run.
            {
                #[cfg(feature = "tracing")]
                let _actual_run_span =
                    span!(tracing::Level::DEBUG, "(2) executing actual run").entered();
                self.config.set_dry_run(DryRun::Disabled);
                let builder = builder::Builder::new(self);
                builder.execute_cli();
            }
        } else {
            #[cfg(feature = "tracing")]
            let _dry_run_span = span!(tracing::Level::DEBUG, "executing dry run").entered();

            let builder = builder::Builder::new(self);
            builder.execute_cli();
        }

        #[cfg(feature = "tracing")]
        debug!("checking for postponed test failures from `test --no-fail-fast`");

        // Check for postponed failures from `test --no-fail-fast`.
        self.config.exec_ctx().report_failures_and_exit();

        #[cfg(feature = "build-metrics")]
        self.metrics.persist(self);
    }
| |
    /// Git information about the main `rust` checkout.
    fn rust_info(&self) -> &GitInfo {
        &self.config.rust_info
    }
| |
| /// Gets the space-separated set of activated features for the standard library. |
| /// This can be configured with the `std-features` key in bootstrap.toml. |
| fn std_features(&self, target: TargetSelection) -> String { |
| let mut features: BTreeSet<&str> = |
| self.config.rust_std_features.iter().map(|s| s.as_str()).collect(); |
| |
| match self.config.llvm_libunwind(target) { |
| LlvmLibunwind::InTree => features.insert("llvm-libunwind"), |
| LlvmLibunwind::System => features.insert("system-llvm-libunwind"), |
| LlvmLibunwind::No => false, |
| }; |
| |
| if self.config.backtrace { |
| features.insert("backtrace"); |
| } |
| |
| if self.config.profiler_enabled(target) { |
| features.insert("profiler"); |
| } |
| |
| // If zkvm target, generate memcpy, etc. |
| if target.contains("zkvm") { |
| features.insert("compiler-builtins-mem"); |
| } |
| |
| if self.config.llvm_enzyme { |
| features.insert("llvm_enzyme"); |
| } |
| |
| features.into_iter().collect::<Vec<_>>().join(" ") |
| } |
| |
| /// Gets the space-separated set of activated features for the compiler. |
| fn rustc_features(&self, kind: Kind, target: TargetSelection, crates: &[String]) -> String { |
| let possible_features_by_crates: HashSet<_> = crates |
| .iter() |
| .flat_map(|krate| &self.crates[krate].features) |
| .map(std::ops::Deref::deref) |
| .collect(); |
| let check = |feature: &str| -> bool { |
| crates.is_empty() || possible_features_by_crates.contains(feature) |
| }; |
| let mut features = vec![]; |
| if self.config.jemalloc(target) && check("jemalloc") { |
| features.push("jemalloc"); |
| } |
| if (self.config.llvm_enabled(target) || kind == Kind::Check) && check("llvm") { |
| features.push("llvm"); |
| } |
| if self.config.llvm_enzyme { |
| features.push("llvm_enzyme"); |
| } |
| if self.config.llvm_offload { |
| features.push("llvm_offload"); |
| } |
| // keep in sync with `bootstrap/compile.rs:rustc_cargo_env` |
| if self.config.rust_randomize_layout && check("rustc_randomized_layouts") { |
| features.push("rustc_randomized_layouts"); |
| } |
| if self.config.compile_time_deps && kind == Kind::Check { |
| features.push("check_only"); |
| } |
| |
| // If debug logging is on, then we want the default for tracing: |
| // https://github.com/tokio-rs/tracing/blob/3dd5c03d907afdf2c39444a29931833335171554/tracing/src/level_filters.rs#L26 |
| // which is everything (including debug/trace/etc.) |
| // if its unset, if debug_assertions is on, then debug_logging will also be on |
| // as well as tracing *ignoring* this feature when debug_assertions is on |
| if !self.config.rust_debug_logging && check("max_level_info") { |
| features.push("max_level_info"); |
| } |
| |
| features.join(" ") |
| } |
| |
| /// Component directory that Cargo will produce output into (e.g. |
| /// release/debug) |
| fn cargo_dir(&self) -> &'static str { |
| if self.config.rust_optimize.is_release() { "release" } else { "debug" } |
| } |
| |
| fn tools_dir(&self, build_compiler: Compiler) -> PathBuf { |
| let out = self |
| .out |
| .join(build_compiler.host) |
| .join(format!("stage{}-tools-bin", build_compiler.stage + 1)); |
| t!(fs::create_dir_all(&out)); |
| out |
| } |
| |
    /// Returns the root directory for all output generated in a particular
    /// stage when being built with a particular build compiler.
    ///
    /// The mode indicates what the root directory is for.
    fn stage_out(&self, build_compiler: Compiler, mode: Mode) -> PathBuf {
        use std::fmt::Write;

        // Bootstrap tools do not participate in staging, so their directory
        // carries no stage prefix.
        fn bootstrap_tool() -> (Option<u32>, &'static str) {
            (None, "bootstrap-tools")
        }
        fn staged_tool(build_compiler: Compiler) -> (Option<u32>, &'static str) {
            (Some(build_compiler.stage + 1), "tools")
        }

        let (stage, suffix) = match mode {
            // Std is special, stage N std is built with stage N rustc
            Mode::Std => (Some(build_compiler.stage), "std"),
            // The rest of things are built with stage N-1 rustc
            Mode::Rustc => (Some(build_compiler.stage + 1), "rustc"),
            Mode::Codegen => (Some(build_compiler.stage + 1), "codegen"),
            Mode::ToolBootstrap => bootstrap_tool(),
            Mode::ToolStd | Mode::ToolRustcPrivate => (Some(build_compiler.stage + 1), "tools"),
            Mode::ToolTarget => {
                // If we're not cross-compiling (the common case), share the target directory with
                // bootstrap tools to reuse the build cache.
                if build_compiler.stage == 0 {
                    bootstrap_tool()
                } else {
                    staged_tool(build_compiler)
                }
            }
        };
        // Assemble `<out>/<host>/stage<N>-<suffix>` (or `<out>/<host>/<suffix>`
        // when there is no stage prefix).
        let path = self.out.join(build_compiler.host);
        let mut dir_name = String::new();
        if let Some(stage) = stage {
            write!(dir_name, "stage{stage}-").unwrap();
        }
        dir_name.push_str(suffix);
        path.join(dir_name)
    }
| |
| /// Returns the root output directory for all Cargo output in a given stage, |
| /// running a particular compiler, whether or not we're building the |
| /// standard library, and targeting the specified architecture. |
| fn cargo_out(&self, build_compiler: Compiler, mode: Mode, target: TargetSelection) -> PathBuf { |
| self.stage_out(build_compiler, mode).join(target).join(self.cargo_dir()) |
| } |
| |
| /// Root output directory of LLVM for `target` |
| /// |
| /// Note that if LLVM is configured externally then the directory returned |
| /// will likely be empty. |
| fn llvm_out(&self, target: TargetSelection) -> PathBuf { |
| if self.config.llvm_from_ci && self.config.is_host_target(target) { |
| self.config.ci_llvm_root() |
| } else { |
| self.out.join(target).join("llvm") |
| } |
| } |
| |
| fn enzyme_out(&self, target: TargetSelection) -> PathBuf { |
| self.out.join(&*target.triple).join("enzyme") |
| } |
| |
| fn gcc_out(&self, target: TargetSelection) -> PathBuf { |
| self.out.join(&*target.triple).join("gcc") |
| } |
| |
| fn lld_out(&self, target: TargetSelection) -> PathBuf { |
| self.out.join(target).join("lld") |
| } |
| |
| /// Output directory for all documentation for a target |
| fn doc_out(&self, target: TargetSelection) -> PathBuf { |
| self.out.join(target).join("doc") |
| } |
| |
| /// Output directory for all JSON-formatted documentation for a target |
| fn json_doc_out(&self, target: TargetSelection) -> PathBuf { |
| self.out.join(target).join("json-doc") |
| } |
| |
/// Output directory for test artifacts for a target.
fn test_out(&self, target: TargetSelection) -> PathBuf {
    self.out.join(target).join("test")
}
| |
/// Output directory for all compiler documentation for a target
fn compiler_doc_out(&self, target: TargetSelection) -> PathBuf {
    self.out.join(target).join("compiler-doc")
}
| |
| /// Output directory for some generated md crate documentation for a target (temporary) |
| fn md_doc_out(&self, target: TargetSelection) -> PathBuf { |
| self.out.join(target).join("md-doc") |
| } |
| |
| /// Path to the vendored Rust crates. |
| fn vendored_crates_path(&self) -> Option<PathBuf> { |
| if self.config.vendor { Some(self.src.join(VENDOR_DIR)) } else { None } |
| } |
| |
/// Returns the path to `FileCheck` binary for the specified target
fn llvm_filecheck(&self, target: TargetSelection) -> PathBuf {
    let target_config = self.config.target_config.get(&target);
    if let Some(s) = target_config.and_then(|c| c.llvm_filecheck.as_ref()) {
        // An explicitly configured FileCheck path always wins.
        s.to_path_buf()
    } else if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
        // External LLVM: ask its `llvm-config` where binaries live.
        let llvm_bindir = command(s).arg("--bindir").run_capture_stdout(self).stdout();
        let filecheck = Path::new(llvm_bindir.trim()).join(exe("FileCheck", target));
        if filecheck.exists() {
            filecheck
        } else {
            // On Fedora the system LLVM installs FileCheck in the
            // llvm subdirectory of the libdir.
            let llvm_libdir = command(s).arg("--libdir").run_capture_stdout(self).stdout();
            let lib_filecheck =
                Path::new(llvm_libdir.trim()).join("llvm").join(exe("FileCheck", target));
            if lib_filecheck.exists() {
                lib_filecheck
            } else {
                // Return the most normal file name, even though
                // it doesn't exist, so that any error message
                // refers to that.
                filecheck
            }
        }
    } else {
        // LLVM built by bootstrap itself: look inside its build directory.
        let base = self.llvm_out(target).join("build");
        let base = if !self.ninja() && target.is_msvc() {
            // Without Ninja on MSVC, CMake's multi-config generator places
            // binaries in a per-configuration subdirectory of `build`.
            if self.config.llvm_optimize {
                if self.config.llvm_release_debuginfo {
                    base.join("RelWithDebInfo")
                } else {
                    base.join("Release")
                }
            } else {
                base.join("Debug")
            }
        } else {
            base
        };
        base.join("bin").join(exe("FileCheck", target))
    }
}
| |
| /// Directory for libraries built from C/C++ code and shared between stages. |
| fn native_dir(&self, target: TargetSelection) -> PathBuf { |
| self.out.join(target).join("native") |
| } |
| |
| /// Root output directory for rust_test_helpers library compiled for |
| /// `target` |
| fn test_helpers_out(&self, target: TargetSelection) -> PathBuf { |
| self.native_dir(target).join("rust-test-helpers") |
| } |
| |
| /// Adds the `RUST_TEST_THREADS` env var if necessary |
| fn add_rust_test_threads(&self, cmd: &mut BootstrapCommand) { |
| if env::var_os("RUST_TEST_THREADS").is_none() { |
| cmd.env("RUST_TEST_THREADS", self.jobs().to_string()); |
| } |
| } |
| |
| /// Returns the libdir of the snapshot compiler. |
| fn rustc_snapshot_libdir(&self) -> PathBuf { |
| self.rustc_snapshot_sysroot().join(libdir(self.config.host_target)) |
| } |
| |
/// Returns the sysroot of the snapshot compiler.
fn rustc_snapshot_sysroot(&self) -> &Path {
    // Cached via `rustc --print sysroot` on first use; the query runs even in
    // dry-run mode (`run_in_dry_run`) since later steps need a real path.
    // NOTE(review): being a `static`, this caches process-wide — it would also
    // be shared across multiple `Build` instances in one process; confirm that
    // is intended.
    static SYSROOT_CACHE: OnceLock<PathBuf> = OnceLock::new();
    SYSROOT_CACHE.get_or_init(|| {
        command(&self.initial_rustc)
            .run_in_dry_run()
            .args(["--print", "sysroot"])
            .run_capture_stdout(self)
            .stdout()
            .trim()
            .to_owned()
            .into()
    })
}
| |
| fn info(&self, msg: &str) { |
| match self.config.get_dry_run() { |
| DryRun::SelfCheck => (), |
| DryRun::Disabled | DryRun::UserSelected => { |
| println!("{msg}"); |
| } |
| } |
| } |
| |
/// Return a `Group` guard for a [`Step`] that:
/// - Performs `action`
/// - If the action is `Kind::Test`, use [`Build::msg_test`] instead.
/// - On `what`
/// - Where `what` possibly corresponds to a `mode`
/// - `action` is performed with/on the given compiler (`target_and_stage`).
/// - Since for some steps it is not possible to pass a single compiler here, it is also
///   possible to pass the host and stage explicitly.
/// - With a given `target`.
///
/// [`Step`]: crate::core::builder::Step
#[must_use = "Groups should not be dropped until the Step finishes running"]
#[track_caller]
fn msg(
    &self,
    action: impl Into<Kind>,
    what: impl Display,
    mode: impl Into<Option<Mode>>,
    target_and_stage: impl Into<TargetAndStage>,
    target: impl Into<Option<TargetSelection>>,
) -> Option<gha::Group> {
    let target_and_stage = target_and_stage.into();
    let action = action.into();
    assert!(
        action != Kind::Test,
        "Please use `Build::msg_test` instead of `Build::msg(Kind::Test)`"
    );

    // The stage the produced artifact "belongs to", derived from the build
    // compiler's stage and the mode.
    let actual_stage = match mode.into() {
        // Std has the same stage as the compiler that builds it
        Some(Mode::Std) => target_and_stage.stage,
        // Other things have stage corresponding to their build compiler + 1
        Some(
            Mode::Rustc
            | Mode::Codegen
            | Mode::ToolBootstrap
            | Mode::ToolTarget
            | Mode::ToolStd
            | Mode::ToolRustcPrivate,
        )
        | None => target_and_stage.stage + 1,
    };

    let action = action.description();
    let what = what.to_string();
    // Assembles "<action> stage<N> <what> <suffix>", omitting the separating
    // space when `what` is empty.
    let msg = |fmt| {
        let space = if !what.is_empty() { " " } else { "" };
        format!("{action} stage{actual_stage} {what}{space}{fmt}")
    };
    let msg = if let Some(target) = target.into() {
        let build_stage = target_and_stage.stage;
        let host = target_and_stage.target;
        // Abbreviate the suffix when host and target match (the common,
        // non-cross-compiling case).
        if host == target {
            msg(format_args!("(stage{build_stage} -> stage{actual_stage}, {target})"))
        } else {
            msg(format_args!("(stage{build_stage}:{host} -> stage{actual_stage}:{target})"))
        }
    } else {
        msg(format_args!(""))
    };
    self.group(&msg)
}
| |
| /// Return a `Group` guard for a [`Step`] that tests `what` with the given `stage` and `target`. |
| /// Use this instead of [`Build::msg`] for test steps, because for them it is not always clear |
| /// what exactly is a build compiler. |
| /// |
| /// [`Step`]: crate::core::builder::Step |
| #[must_use = "Groups should not be dropped until the Step finishes running"] |
| #[track_caller] |
| fn msg_test( |
| &self, |
| what: impl Display, |
| target: TargetSelection, |
| stage: u32, |
| ) -> Option<gha::Group> { |
| let action = Kind::Test.description(); |
| let msg = format!("{action} stage{stage} {what} ({target})"); |
| self.group(&msg) |
| } |
| |
| /// Return a `Group` guard for a [`Step`] that is only built once and isn't affected by `--stage`. |
| /// |
| /// [`Step`]: crate::core::builder::Step |
| #[must_use = "Groups should not be dropped until the Step finishes running"] |
| #[track_caller] |
| fn msg_unstaged( |
| &self, |
| action: impl Into<Kind>, |
| what: impl Display, |
| target: TargetSelection, |
| ) -> Option<gha::Group> { |
| let action = action.into().description(); |
| let msg = format!("{action} {what} for {target}"); |
| self.group(&msg) |
| } |
| |
| #[track_caller] |
| fn group(&self, msg: &str) -> Option<gha::Group> { |
| match self.config.get_dry_run() { |
| DryRun::SelfCheck => None, |
| DryRun::Disabled | DryRun::UserSelected => Some(gha::group(msg)), |
| } |
| } |
| |
| /// Returns the number of parallel jobs that have been configured for this |
| /// build. |
| fn jobs(&self) -> u32 { |
| self.config.jobs.unwrap_or_else(|| { |
| std::thread::available_parallelism().map_or(1, std::num::NonZeroUsize::get) as u32 |
| }) |
| } |
| |
/// Returns the virtual path that debuginfo for the `which` sources should be
/// remapped to, or `None` if debuginfo remapping is disabled.
fn debuginfo_map_to(&self, which: GitRepo, remap_scheme: RemapScheme) -> Option<String> {
    if !self.config.rust_remap_debuginfo {
        return None;
    }

    match which {
        GitRepo::Rustc => {
            // Prefer the exact commit hash; fall back to the version string
            // when the hash is unknown.
            let sha = self.rust_sha().unwrap_or(&self.version);

            match remap_scheme {
                RemapScheme::Compiler => {
                    // For compiler sources, remap via `/rustc-dev/{sha}` to allow
                    // distinguishing between compiler sources vs library sources, since
                    // `rustc-dev` dist component places them under
                    // `$sysroot/lib/rustlib/rustc-src/rust` as opposed to `rust-src`'s
                    // `$sysroot/lib/rustlib/src/rust`.
                    //
                    // Keep this scheme in sync with `rustc_metadata::rmeta::decoder`'s
                    // `try_to_translate_virtual_to_real`.
                    Some(format!("/rustc-dev/{sha}"))
                }
                RemapScheme::NonCompiler => {
                    // For non-compiler sources, use `/rustc/{sha}` remapping scheme.
                    Some(format!("/rustc/{sha}"))
                }
            }
        }
        GitRepo::Llvm => Some(String::from("/rustc/llvm")),
    }
}
| |
| /// Returns the path to the C compiler for the target specified. |
| fn cc(&self, target: TargetSelection) -> PathBuf { |
| if self.config.dry_run() { |
| return PathBuf::new(); |
| } |
| self.cc[&target].path().into() |
| } |
| |
| /// Returns the internal `cc::Tool` for the C compiler. |
| fn cc_tool(&self, target: TargetSelection) -> Tool { |
| self.cc[&target].clone() |
| } |
| |
| /// Returns the internal `cc::Tool` for the C++ compiler. |
| fn cxx_tool(&self, target: TargetSelection) -> Tool { |
| self.cxx[&target].clone() |
| } |
| |
| /// Returns C flags that `cc-rs` thinks should be enabled for the |
| /// specified target by default. |
| fn cc_handled_clags(&self, target: TargetSelection, c: CLang) -> Vec<String> { |
| if self.config.dry_run() { |
| return Vec::new(); |
| } |
| let base = match c { |
| CLang::C => self.cc[&target].clone(), |
| CLang::Cxx => self.cxx[&target].clone(), |
| }; |
| |
| // Filter out -O and /O (the optimization flags) that we picked up |
| // from cc-rs, that's up to the caller to figure out. |
| base.args() |
| .iter() |
| .map(|s| s.to_string_lossy().into_owned()) |
| .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")) |
| .collect::<Vec<String>>() |
| } |
| |
/// Returns extra C flags that `cc-rs` doesn't handle.
fn cc_unhandled_cflags(
    &self,
    target: TargetSelection,
    which: GitRepo,
    c: CLang,
) -> Vec<String> {
    let mut base = Vec::new();

    // If we're compiling C++ on macOS then we add a flag indicating that
    // we want libc++ (more filled out than libstdc++), ensuring that
    // LLVM/etc are all properly compiled.
    if matches!(c, CLang::Cxx) && target.contains("apple-darwin") {
        base.push("-stdlib=libc++".into());
    }

    // Work around an apparently bad MinGW / GCC optimization,
    // See: https://lists.llvm.org/pipermail/cfe-dev/2016-December/051980.html
    // See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78936
    if &*target.triple == "i686-pc-windows-gnu" {
        base.push("-fno-omit-frame-pointer".into());
    }

    // Remap the source directory in debuginfo, spelled in whatever form the
    // detected compiler accepts (clang-cl needs `-Xclang` to forward the flag).
    if let Some(map_to) = self.debuginfo_map_to(which, RemapScheme::NonCompiler) {
        let map = format!("{}={}", self.src.display(), map_to);
        let cc = self.cc(target);
        if cc.ends_with("clang") || cc.ends_with("gcc") {
            base.push(format!("-fdebug-prefix-map={map}"));
        } else if cc.ends_with("clang-cl.exe") {
            base.push("-Xclang".into());
            base.push(format!("-fdebug-prefix-map={map}"));
        }
    }
    base
}
| |
| /// Returns the path to the `ar` archive utility for the target specified. |
| fn ar(&self, target: TargetSelection) -> Option<PathBuf> { |
| if self.config.dry_run() { |
| return None; |
| } |
| self.ar.get(&target).cloned() |
| } |
| |
| /// Returns the path to the `ranlib` utility for the target specified. |
| fn ranlib(&self, target: TargetSelection) -> Option<PathBuf> { |
| if self.config.dry_run() { |
| return None; |
| } |
| self.ranlib.get(&target).cloned() |
| } |
| |
| /// Returns the path to the C++ compiler for the target specified. |
| fn cxx(&self, target: TargetSelection) -> Result<PathBuf, String> { |
| if self.config.dry_run() { |
| return Ok(PathBuf::new()); |
| } |
| match self.cxx.get(&target) { |
| Some(p) => Ok(p.path().into()), |
| None => Err(format!("target `{target}` is not configured as a host, only as a target")), |
| } |
| } |
| |
/// Returns the path to the linker for the given target if it needs to be overridden.
fn linker(&self, target: TargetSelection) -> Option<PathBuf> {
    if self.config.dry_run() {
        return Some(PathBuf::new());
    }
    // Explicit per-target configuration always wins.
    if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.clone())
    {
        Some(linker)
    } else if target.contains("vxworks") {
        // need to use CXX compiler as linker to resolve the exception functions
        // that are only existed in CXX libraries
        Some(self.cxx[&target].path().into())
    } else if !self.config.is_host_target(target)
        && helpers::use_host_linker(target)
        && !target.is_msvc()
    {
        // Cross-compiling to a target where the host's C compiler should drive linking.
        Some(self.cc(target))
    } else if self.config.bootstrap_override_lld.is_used()
        && self.is_lld_direct_linker(target)
        && self.host_target == target
    {
        // Bootstrap was asked to link with LLD; only applies where LLD is
        // invoked directly via `-Clinker` (currently MSVC targets).
        match self.config.bootstrap_override_lld {
            BootstrapOverrideLld::SelfContained => Some(self.initial_lld.clone()),
            BootstrapOverrideLld::External => Some("lld".into()),
            BootstrapOverrideLld::None => None,
        }
    } else {
        None
    }
}
| |
/// Is LLD configured directly through `-Clinker`?
/// Only MSVC targets use LLD directly at the moment.
fn is_lld_direct_linker(&self, target: TargetSelection) -> bool {
    target.is_msvc()
}
| |
| /// Returns if this target should statically link the C runtime, if specified |
| fn crt_static(&self, target: TargetSelection) -> Option<bool> { |
| if target.contains("pc-windows-msvc") { |
| Some(true) |
| } else { |
| self.config.target_config.get(&target).and_then(|t| t.crt_static) |
| } |
| } |
| |
| /// Returns the "musl root" for this `target`, if defined. |
| /// |
| /// If this is a native target (host is also musl) and no musl-root is given, |
| /// it falls back to the system toolchain in /usr. |
| fn musl_root(&self, target: TargetSelection) -> Option<&Path> { |
| let configured_root = self |
| .config |
| .target_config |
| .get(&target) |
| .and_then(|t| t.musl_root.as_ref()) |
| .or(self.config.musl_root.as_ref()) |
| .map(|p| &**p); |
| |
| if self.config.is_host_target(target) && configured_root.is_none() { |
| Some(Path::new("/usr")) |
| } else { |
| configured_root |
| } |
| } |
| |
| /// Returns the "musl libdir" for this `target`. |
| fn musl_libdir(&self, target: TargetSelection) -> Option<PathBuf> { |
| self.config |
| .target_config |
| .get(&target) |
| .and_then(|t| t.musl_libdir.clone()) |
| .or_else(|| self.musl_root(target).map(|root| root.join("lib"))) |
| } |
| |
| /// Returns the `lib` directory for the WASI target specified, if |
| /// configured. |
| /// |
| /// This first consults `wasi-root` as configured in per-target |
| /// configuration, and failing that it assumes that `$WASI_SDK_PATH` is |
| /// set in the environment, and failing that `None` is returned. |
| fn wasi_libdir(&self, target: TargetSelection) -> Option<PathBuf> { |
| let configured = |
| self.config.target_config.get(&target).and_then(|t| t.wasi_root.as_ref()).map(|p| &**p); |
| if let Some(path) = configured { |
| return Some(path.join("lib").join(target.to_string())); |
| } |
| let mut env_root = self.wasi_sdk_path.clone()?; |
| env_root.push("share"); |
| env_root.push("wasi-sysroot"); |
| env_root.push("lib"); |
| env_root.push(target.to_string()); |
| Some(env_root) |
| } |
| |
/// Returns whether this is a no-std `target`, if configured
/// (`None` when the target has no per-target configuration entry).
fn no_std(&self, target: TargetSelection) -> Option<bool> {
    self.config.target_config.get(&target).map(|t| t.no_std)
}
| |
| /// Returns `true` if the target will be tested using the `remote-test-client` |
| /// and `remote-test-server` binaries. |
| fn remote_tested(&self, target: TargetSelection) -> bool { |
| self.qemu_rootfs(target).is_some() |
| || target.contains("android") |
| || env::var_os("TEST_DEVICE_ADDR").is_some() |
| } |
| |
| /// Returns an optional "runner" to pass to `compiletest` when executing |
| /// test binaries. |
| /// |
| /// An example of this would be a WebAssembly runtime when testing the wasm |
| /// targets. |
| fn runner(&self, target: TargetSelection) -> Option<String> { |
| let configured_runner = |
| self.config.target_config.get(&target).and_then(|t| t.runner.as_ref()).map(|p| &**p); |
| if let Some(runner) = configured_runner { |
| return Some(runner.to_owned()); |
| } |
| |
| if target.starts_with("wasm") && target.contains("wasi") { |
| self.default_wasi_runner(target) |
| } else { |
| None |
| } |
| } |
| |
/// When a `runner` configuration is not provided and a WASI-looking target
/// is being tested this is consulted to probe the environment to see if
/// there's a runtime already lying around that seems reasonable to use.
fn default_wasi_runner(&self, target: TargetSelection) -> Option<String> {
    let mut finder = crate::core::sanity::Finder::new();

    // Look for Wasmtime, and for its default options be sure to disable
    // its caching system since we're executing quite a lot of tests and
    // ideally shouldn't pollute the cache too much.
    if let Some(path) = finder.maybe_have("wasmtime")
        && let Ok(mut path) = path.into_os_string().into_string()
    {
        path.push_str(" run -C cache=n --dir .");
        // Make sure that tests have access to RUSTC_BOOTSTRAP. This (for example) is
        // required for libtest to work on beta/stable channels.
        //
        // NB: with Wasmtime 20 this can change to `-S inherit-env` to
        // inherit the entire environment rather than just this single
        // environment variable.
        path.push_str(" --env RUSTC_BOOTSTRAP");

        // wasip2 tests additionally need networking enabled.
        if target.contains("wasip2") {
            path.push_str(" --wasi inherit-network --wasi allow-ip-name-lookup");
        }

        return Some(path);
    }

    // No known runtime found on PATH.
    None
}
| |
| /// Returns whether the specified tool is configured as part of this build. |
| /// |
| /// This requires that both the `extended` key is set and the `tools` key is |
| /// either unset or specifically contains the specified tool. |
| fn tool_enabled(&self, tool: &str) -> bool { |
| if !self.config.extended { |
| return false; |
| } |
| match &self.config.tools { |
| Some(set) => set.contains(tool), |
| None => true, |
| } |
| } |
| |
| /// Returns the root of the "rootfs" image that this target will be using, |
| /// if one was configured. |
| /// |
| /// If `Some` is returned then that means that tests for this target are |
| /// emulated with QEMU and binaries will need to be shipped to the emulator. |
| fn qemu_rootfs(&self, target: TargetSelection) -> Option<&Path> { |
| self.config.target_config.get(&target).and_then(|t| t.qemu_rootfs.as_ref()).map(|p| &**p) |
| } |
| |
| /// Temporary directory that extended error information is emitted to. |
| fn extended_error_dir(&self) -> PathBuf { |
| self.out.join("tmp/extended-error-metadata") |
| } |
| |
/// Tests whether a build for `target` at the given `stage` should be forced
/// to use a stage1 compiler instead.
///
/// Currently, by default, the build system does not perform a "full
/// bootstrap" by default where we compile the compiler three times.
/// Instead, we compile the compiler two times. The final stage (stage2)
/// just copies the libraries from the previous stage, which is what this
/// method detects.
///
/// Here we return `true` if:
///
/// * The build isn't performing a full bootstrap
/// * We are not using a downloaded, pre-built rustc
/// * The requested `stage` is 2 or higher
/// * We're not cross-compiling (the target is a configured host, or is the
///   build host itself), so the artifacts are already available in stage1
///
/// When all of these conditions are met the build will lift artifacts from
/// the previous stage forward.
fn force_use_stage1(&self, stage: u32, target: TargetSelection) -> bool {
    !self.config.full_bootstrap
        && !self.config.download_rustc()
        && stage >= 2
        && (self.hosts.contains(&target) || target == self.host_target)
}
| |
| /// Checks whether the `compiler` compiling for `target` should be forced to |
| /// use a stage2 compiler instead. |
| /// |
| /// When we download the pre-compiled version of rustc and compiler stage is >= 2, |
| /// it should be forced to use a stage2 compiler. |
| fn force_use_stage2(&self, stage: u32) -> bool { |
| self.config.download_rustc() && stage >= 2 |
| } |
| |
| /// Given `num` in the form "a.b.c" return a "release string" which |
| /// describes the release version number. |
| /// |
| /// For example on nightly this returns "a.b.c-nightly", on beta it returns |
| /// "a.b.c-beta.1" and on stable it just returns "a.b.c". |
| fn release(&self, num: &str) -> String { |
| match &self.config.channel[..] { |
| "stable" => num.to_string(), |
| "beta" => { |
| if !self.config.omit_git_hash { |
| format!("{}-beta.{}", num, self.beta_prerelease_version()) |
| } else { |
| format!("{num}-beta") |
| } |
| } |
| "nightly" => format!("{num}-nightly"), |
| _ => format!("{num}-dev"), |
| } |
| } |
| |
| fn beta_prerelease_version(&self) -> u32 { |
| fn extract_beta_rev_from_file<P: AsRef<Path>>(version_file: P) -> Option<String> { |
| let version = fs::read_to_string(version_file).ok()?; |
| |
| helpers::extract_beta_rev(&version) |
| } |
| |
| if let Some(s) = self.prerelease_version.get() { |
| return s; |
| } |
| |
| // First check if there is a version file available. |
| // If available, we read the beta revision from that file. |
| // This only happens when building from a source tarball when Git should not be used. |
| let count = extract_beta_rev_from_file(self.src.join("version")).unwrap_or_else(|| { |
| // Figure out how many merge commits happened since we branched off main. |
| // That's our beta number! |
| // (Note that we use a `..` range, not the `...` symmetric difference.) |
| helpers::git(Some(&self.src)) |
| .arg("rev-list") |
| .arg("--count") |
| .arg("--merges") |
| .arg(format!( |
| "refs/remotes/origin/{}..HEAD", |
| self.config.stage0_metadata.config.nightly_branch |
| )) |
| .run_in_dry_run() |
| .run_capture(self) |
| .stdout() |
| }); |
| let n = count.trim().parse().unwrap(); |
| self.prerelease_version.set(Some(n)); |
| n |
| } |
| |
| /// Returns the value of `release` above for Rust itself. |
| fn rust_release(&self) -> String { |
| self.release(&self.version) |
| } |
| |
| /// Returns the "package version" for a component given the `num` release |
| /// number. |
| /// |
| /// The package version is typically what shows up in the names of tarballs. |
| /// For channels like beta/nightly it's just the channel name, otherwise |
| /// it's the `num` provided. |
| fn package_vers(&self, num: &str) -> String { |
| match &self.config.channel[..] { |
| "stable" => num.to_string(), |
| "beta" => "beta".to_string(), |
| "nightly" => "nightly".to_string(), |
| _ => format!("{num}-dev"), |
| } |
| } |
| |
| /// Returns the value of `package_vers` above for Rust itself. |
| fn rust_package_vers(&self) -> String { |
| self.package_vers(&self.version) |
| } |
| |
| /// Returns the `version` string associated with this compiler for Rust |
| /// itself. |
| /// |
| /// Note that this is a descriptive string which includes the commit date, |
| /// sha, version, etc. |
| fn rust_version(&self) -> String { |
| let mut version = self.rust_info().version(self, &self.version); |
| if let Some(ref s) = self.config.description |
| && !s.is_empty() |
| { |
| version.push_str(" ("); |
| version.push_str(s); |
| version.push(')'); |
| } |
| version |
| } |
| |
/// Returns the full commit hash of the rustc sources, if known
/// (`None` when no git information is available).
fn rust_sha(&self) -> Option<&str> {
    self.rust_info().sha()
}
| |
| /// Returns the `a.b.c` version that the given package is at. |
| fn release_num(&self, package: &str) -> String { |
| let toml_file_name = self.src.join(format!("src/tools/{package}/Cargo.toml")); |
| let toml = t!(fs::read_to_string(toml_file_name)); |
| for line in toml.lines() { |
| if let Some(stripped) = |
| line.strip_prefix("version = \"").and_then(|s| s.strip_suffix('"')) |
| { |
| return stripped.to_owned(); |
| } |
| } |
| |
| panic!("failed to find version in {package}'s Cargo.toml") |
| } |
| |
| /// Returns `true` if unstable features should be enabled for the compiler |
| /// we're building. |
| fn unstable_features(&self) -> bool { |
| !matches!(&self.config.channel[..], "stable" | "beta") |
| } |
| |
/// Returns a Vec of all the dependencies of the given root crate,
/// including transitive dependencies and the root itself. Only includes
/// "local" crates (those in the local source tree, not from a registry).
fn in_tree_crates(&self, root: &str, target: Option<TargetSelection>) -> Vec<&Crate> {
    let mut ret = Vec::new();
    // Worklist traversal of the dependency graph starting from `root`;
    // `visited` guards against cycles and duplicates.
    let mut list = vec![root.to_owned()];
    let mut visited = HashSet::new();
    while let Some(krate) = list.pop() {
        let krate = self
            .crates
            .get(&krate)
            .unwrap_or_else(|| panic!("metadata missing for {krate}: {:?}", self.crates));
        ret.push(krate);
        for dep in &krate.deps {
            if !self.crates.contains_key(dep) {
                // Ignore non-workspace members.
                continue;
            }
            // Don't include optional deps if their features are not
            // enabled. Ideally this would be computed from `cargo
            // metadata --features …`, but that is somewhat slow. In
            // the future, we may want to consider just filtering all
            // build and dev dependencies in metadata::build.
            if visited.insert(dep)
                && (dep != "profiler_builtins"
                    || target
                        .map(|t| self.config.profiler_enabled(t))
                        .unwrap_or_else(|| self.config.any_profiler_enabled()))
                && (dep != "rustc_codegen_llvm"
                    || self.config.hosts.iter().any(|host| self.config.llvm_enabled(*host)))
            {
                list.push(dep.clone());
            }
        }
    }
    ret.sort_unstable_by_key(|krate| krate.name.clone()); // reproducible order needed for tests
    ret
}
| |
| fn read_stamp_file(&self, stamp: &BuildStamp) -> Vec<(PathBuf, DependencyType)> { |
| if self.config.dry_run() { |
| return Vec::new(); |
| } |
| |
| if !stamp.path().exists() { |
| eprintln!( |
| "ERROR: Unable to find the stamp file {}, did you try to keep a nonexistent build stage?", |
| stamp.path().display() |
| ); |
| crate::exit!(1); |
| } |
| |
| let mut paths = Vec::new(); |
| let contents = t!(fs::read(stamp.path()), stamp.path()); |
| // This is the method we use for extracting paths from the stamp file passed to us. See |
| // run_cargo for more information (in compile.rs). |
| for part in contents.split(|b| *b == 0) { |
| if part.is_empty() { |
| continue; |
| } |
| let dependency_type = match part[0] as char { |
| 'h' => DependencyType::Host, |
| 's' => DependencyType::TargetSelfContained, |
| 't' => DependencyType::Target, |
| _ => unreachable!(), |
| }; |
| let path = PathBuf::from(t!(str::from_utf8(&part[1..]))); |
| paths.push((path, dependency_type)); |
| } |
| paths |
| } |
| |
/// Copies a file from `src` to `dst`.
///
/// If `src` is a symlink, `src` will be resolved to the actual path
/// and copied to `dst` instead of the symlink itself.
/// (Delegates to `copy_link_internal` with symlink dereferencing enabled.)
#[track_caller]
pub fn resolve_symlink_and_copy(&self, src: &Path, dst: &Path) {
    self.copy_link_internal(src, dst, true);
}
| |
/// Links a file from `src` to `dst`.
/// Attempts to use hard links if possible, falling back to copying.
/// You can neither rely on this being a copy nor it being a link,
/// so do not write to dst.
#[track_caller]
pub fn copy_link(&self, src: &Path, dst: &Path, file_type: FileType) {
    self.copy_link_internal(src, dst, false);

    // Also bring along the split debuginfo file, when the file type can have
    // one and `split_debuginfo` finds it next to `src`.
    if file_type.could_have_split_debuginfo()
        && let Some(dbg_file) = split_debuginfo(src)
    {
        self.copy_link_internal(
            &dbg_file,
            &dst.with_extension(dbg_file.extension().unwrap()),
            false,
        );
    }
}
| |
/// Workhorse for [`Build::copy_link`] / [`Build::resolve_symlink_and_copy`]:
/// tries to hard-link `src` to `dst`, falling back to a real copy.
/// If `dereference_symlinks` is true, a symlink `src` is resolved to its
/// target first; otherwise the symlink itself is recreated at `dst`.
#[track_caller]
fn copy_link_internal(&self, src: &Path, dst: &Path, dereference_symlinks: bool) {
    if self.config.dry_run() {
        return;
    }
    // Source and destination are the same path: nothing to do.
    if src == dst {
        return;
    }

    #[cfg(feature = "tracing")]
    let _span = trace_io!("file-copy-link", ?src, ?dst);

    if let Err(e) = fs::remove_file(dst)
        && cfg!(windows)
        && e.kind() != io::ErrorKind::NotFound
    {
        // workaround for https://github.com/rust-lang/rust/issues/127126
        // if removing the file fails, attempt to rename it instead.
        let now = t!(SystemTime::now().duration_since(SystemTime::UNIX_EPOCH));
        let _ = fs::rename(dst, format!("{}-{}", dst.display(), now.as_nanos()));
    }
    // `symlink_metadata` so we can detect (rather than follow) a symlink `src`.
    let mut metadata = t!(src.symlink_metadata(), format!("src = {}", src.display()));
    let mut src = src.to_path_buf();
    if metadata.file_type().is_symlink() {
        if dereference_symlinks {
            src = t!(fs::canonicalize(src));
            metadata = t!(fs::metadata(&src), format!("target = {}", src.display()));
        } else {
            // Recreate the symlink at `dst` rather than copying its target.
            let link = t!(fs::read_link(src));
            t!(self.symlink_file(link, dst));
            return;
        }
    }
    if let Ok(()) = fs::hard_link(&src, dst) {
        // Attempt to "easy copy" by creating a hard link (symlinks are privileged on windows),
        // but if that fails just fall back to a slow `copy` operation.
    } else {
        if let Err(e) = fs::copy(&src, dst) {
            panic!("failed to copy `{}` to `{}`: {}", src.display(), dst.display(), e)
        }
        t!(fs::set_permissions(dst, metadata.permissions()));

        // Restore file times because changing permissions on e.g. Linux using `chmod` can cause
        // file access time to change.
        let file_times = fs::FileTimes::new()
            .set_accessed(t!(metadata.accessed()))
            .set_modified(t!(metadata.modified()));
        t!(set_file_times(dst, file_times));
    }
}
| |
| /// Links the `src` directory recursively to `dst`. Both are assumed to exist |
| /// when this function is called. |
| /// Will attempt to use hard links if possible and fall back to copying. |
| #[track_caller] |
| pub fn cp_link_r(&self, src: &Path, dst: &Path) { |
| if self.config.dry_run() { |
| return; |
| } |
| for f in self.read_dir(src) { |
| let path = f.path(); |
| let name = path.file_name().unwrap(); |
| let dst = dst.join(name); |
| if t!(f.file_type()).is_dir() { |
| t!(fs::create_dir_all(&dst)); |
| self.cp_link_r(&path, &dst); |
| } else { |
| self.copy_link(&path, &dst, FileType::Regular); |
| } |
| } |
| } |
| |
| /// Copies the `src` directory recursively to `dst`. Both are assumed to exist |
| /// when this function is called. |
| /// Will attempt to use hard links if possible and fall back to copying. |
| /// Unwanted files or directories can be skipped |
| /// by returning `false` from the filter function. |
| #[track_caller] |
| pub fn cp_link_filtered(&self, src: &Path, dst: &Path, filter: &dyn Fn(&Path) -> bool) { |
| // Immediately recurse with an empty relative path |
| self.cp_link_filtered_recurse(src, dst, Path::new(""), filter) |
| } |
| |
| // Inner function does the actual work |
| #[track_caller] |
| fn cp_link_filtered_recurse( |
| &self, |
| src: &Path, |
| dst: &Path, |
| relative: &Path, |
| filter: &dyn Fn(&Path) -> bool, |
| ) { |
| for f in self.read_dir(src) { |
| let path = f.path(); |
| let name = path.file_name().unwrap(); |
| let dst = dst.join(name); |
| let relative = relative.join(name); |
| // Only copy file or directory if the filter function returns true |
| if filter(&relative) { |
| if t!(f.file_type()).is_dir() { |
| let _ = fs::remove_dir_all(&dst); |
| self.create_dir(&dst); |
| self.cp_link_filtered_recurse(&path, &dst, &relative, filter); |
| } else { |
| self.copy_link(&path, &dst, FileType::Regular); |
| } |
| } |
| } |
| } |
| |
| fn copy_link_to_folder(&self, src: &Path, dest_folder: &Path) { |
| let file_name = src.file_name().unwrap(); |
| let dest = dest_folder.join(file_name); |
| self.copy_link(src, &dest, FileType::Regular); |
| } |
| |
| fn install(&self, src: &Path, dstdir: &Path, file_type: FileType) { |
| if self.config.dry_run() { |
| return; |
| } |
| let dst = dstdir.join(src.file_name().unwrap()); |
| |
| #[cfg(feature = "tracing")] |
| let _span = trace_io!("install", ?src, ?dst); |
| |
| t!(fs::create_dir_all(dstdir)); |
| if !src.exists() { |
| panic!("ERROR: File \"{}\" not found!", src.display()); |
| } |
| |
| self.copy_link_internal(src, &dst, true); |
| chmod(&dst, file_type.perms()); |
| |
| // If this file can have debuginfo, look for split debuginfo and install it too. |
| if file_type.could_have_split_debuginfo() |
| && let Some(dbg_file) = split_debuginfo(src) |
| { |
| self.install(&dbg_file, dstdir, FileType::Regular); |
| } |
| } |
| |
| fn read(&self, path: &Path) -> String { |
| if self.config.dry_run() { |
| return String::new(); |
| } |
| t!(fs::read_to_string(path)) |
| } |
| |
| #[track_caller] |
| fn create_dir(&self, dir: &Path) { |
| if self.config.dry_run() { |
| return; |
| } |
| |
| #[cfg(feature = "tracing")] |
| let _span = trace_io!("dir-create", ?dir); |
| |
| t!(fs::create_dir_all(dir)) |
| } |
| |
| fn remove_dir(&self, dir: &Path) { |
| if self.config.dry_run() { |
| return; |
| } |
| |
| #[cfg(feature = "tracing")] |
| let _span = trace_io!("dir-remove", ?dir); |
| |
| t!(fs::remove_dir_all(dir)) |
| } |
| |
| /// Make sure that `dir` will be an empty existing directory after this function ends. |
| /// If it existed before, it will be first deleted. |
| fn clear_dir(&self, dir: &Path) { |
| if self.config.dry_run() { |
| return; |
| } |
| |
| #[cfg(feature = "tracing")] |
| let _span = trace_io!("dir-clear", ?dir); |
| |
| let _ = std::fs::remove_dir_all(dir); |
| self.create_dir(dir); |
| } |
| |
| fn read_dir(&self, dir: &Path) -> impl Iterator<Item = fs::DirEntry> { |
| let iter = match fs::read_dir(dir) { |
| Ok(v) => v, |
| Err(_) if self.config.dry_run() => return vec![].into_iter(), |
| Err(err) => panic!("could not read dir {dir:?}: {err:?}"), |
| }; |
| iter.map(|e| t!(e)).collect::<Vec<_>>().into_iter() |
| } |
| |
| fn symlink_file<P: AsRef<Path>, Q: AsRef<Path>>(&self, src: P, link: Q) -> io::Result<()> { |
| #[cfg(unix)] |
| use std::os::unix::fs::symlink as symlink_file; |
| #[cfg(windows)] |
| use std::os::windows::fs::symlink_file; |
| if !self.config.dry_run() { symlink_file(src.as_ref(), link.as_ref()) } else { Ok(()) } |
| } |
| |
| /// Returns if config.ninja is enabled, and checks for ninja existence, |
| /// exiting with a nicer error message if not. |
| fn ninja(&self) -> bool { |
| let mut cmd_finder = crate::core::sanity::Finder::new(); |
| |
| if self.config.ninja_in_file { |
| // Some Linux distros rename `ninja` to `ninja-build`. |
| // CMake can work with either binary name. |
| if cmd_finder.maybe_have("ninja-build").is_none() |
| && cmd_finder.maybe_have("ninja").is_none() |
| { |
| eprintln!( |
| " |
| Couldn't find required command: ninja (or ninja-build) |
| |
| You should install ninja as described at |
| <https://github.com/ninja-build/ninja/wiki/Pre-built-Ninja-packages>, |
| or set `ninja = false` in the `[llvm]` section of `bootstrap.toml`. |
| Alternatively, set `download-ci-llvm = true` in that `[llvm]` section |
| to download LLVM rather than building it. |
| " |
| ); |
| exit!(1); |
| } |
| } |
| |
| // If ninja isn't enabled but we're building for MSVC then we try |
| // doubly hard to enable it. It was realized in #43767 that the msbuild |
| // CMake generator for MSVC doesn't respect configuration options like |
| // disabling LLVM assertions, which can often be quite important! |
| // |
| // In these cases we automatically enable Ninja if we find it in the |
| // environment. |
| if !self.config.ninja_in_file |
| && self.config.host_target.is_msvc() |
| && cmd_finder.maybe_have("ninja").is_some() |
| { |
| return true; |
| } |
| |
| self.config.ninja_in_file |
| } |
| |
    /// Runs `f` with a color-capable handle to stdout, honoring the configured
    /// color mode and whether stdout is a TTY.
    pub fn colored_stdout<R, F: FnOnce(&mut dyn WriteColor) -> R>(&self, f: F) -> R {
        self.colored_stream_inner(StandardStream::stdout, self.config.stdout_is_tty, f)
    }
| |
    /// Runs `f` with a color-capable handle to stderr, honoring the configured
    /// color mode and whether stderr is a TTY.
    pub fn colored_stderr<R, F: FnOnce(&mut dyn WriteColor) -> R>(&self, f: F) -> R {
        self.colored_stream_inner(StandardStream::stderr, self.config.stderr_is_tty, f)
    }
| |
| fn colored_stream_inner<R, F, C>(&self, constructor: C, is_tty: bool, f: F) -> R |
| where |
| C: Fn(ColorChoice) -> StandardStream, |
| F: FnOnce(&mut dyn WriteColor) -> R, |
| { |
| let choice = match self.config.color { |
| flags::Color::Always => ColorChoice::Always, |
| flags::Color::Never => ColorChoice::Never, |
| flags::Color::Auto if !is_tty => ColorChoice::Never, |
| flags::Color::Auto => ColorChoice::Auto, |
| }; |
| let mut stream = constructor(choice); |
| let result = f(&mut stream); |
| stream.reset().unwrap(); |
| result |
| } |
| |
    /// Returns the execution context used for spawning and tracking commands.
    pub fn exec_ctx(&self) -> &ExecutionContext {
        &self.config.exec_ctx
    }
| |
    /// Delegates to the command profiler to report a summary of executed
    /// commands since `start_time`. `path` is presumably where the summary is
    /// written — confirm in the profiler's `report_summary`.
    pub fn report_summary(&self, path: &Path, start_time: Instant) {
        self.config.exec_ctx.profiler().report_summary(path, start_time);
    }
| |
| #[cfg(feature = "tracing")] |
| pub fn report_step_graph(self, directory: &Path) { |
| self.step_graph.into_inner().store_to_dot_files(directory); |
| } |
| } |
| |
/// Lets a `&Build` be passed wherever an `&ExecutionContext` is accepted
/// (e.g. helpers bounded on `AsRef<ExecutionContext>`).
impl AsRef<ExecutionContext> for Build {
    fn as_ref(&self) -> &ExecutionContext {
        &self.config.exec_ctx
    }
}
| |
| #[cfg(unix)] |
| fn chmod(path: &Path, perms: u32) { |
| use std::os::unix::fs::*; |
| t!(fs::set_permissions(path, fs::Permissions::from_mode(perms))); |
| } |
| #[cfg(windows)] |
| fn chmod(_path: &Path, _perms: u32) {} |
| |
| impl Compiler { |
| pub fn new(stage: u32, host: TargetSelection) -> Self { |
| Self { stage, host, forced_compiler: false } |
| } |
| |
| pub fn forced_compiler(&mut self, forced_compiler: bool) { |
| self.forced_compiler = forced_compiler; |
| } |
| |
| /// Returns `true` if this is a snapshot compiler for `build`'s configuration |
| pub fn is_snapshot(&self, build: &Build) -> bool { |
| self.stage == 0 && self.host == build.host_target |
| } |
| |
| /// Indicates whether the compiler was forced to use a specific stage. |
| pub fn is_forced_compiler(&self) -> bool { |
| self.forced_compiler |
| } |
| } |
| |
/// Converts a string into an environment-variable-friendly form: every `-`
/// becomes `_` and all characters are upper-cased (e.g. `foo-bar` -> `FOO_BAR`).
fn envify(s: &str) -> String {
    s.chars()
        .map(|c| if c == '-' { '_' } else { c })
        .flat_map(char::to_uppercase)
        .collect()
}
| |
| /// Ensures that the behavior dump directory is properly initialized. |
| pub fn prepare_behaviour_dump_dir(build: &Build) { |
| static INITIALIZED: OnceLock<bool> = OnceLock::new(); |
| |
| let dump_path = build.out.join("bootstrap-shims-dump"); |
| |
| let initialized = INITIALIZED.get().unwrap_or(&false); |
| if !initialized { |
| // clear old dumps |
| if dump_path.exists() { |
| t!(fs::remove_dir_all(&dump_path)); |
| } |
| |
| t!(fs::create_dir_all(&dump_path)); |
| |
| t!(INITIALIZED.set(true)); |
| } |
| } |