Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 35 additions & 0 deletions crates/cargo-util/src/paths.rs
Original file line number Diff line number Diff line change
Expand Up @@ -869,6 +869,41 @@ fn exclude_from_time_machine(path: &Path) {
// doesn't prevent Cargo from working
}

/// Recursively mirrors a directory tree by hardlinking its files.
///
/// Directories are created at the destination and every non-directory
/// entry is hardlinked. A destination link that already exists is
/// silently accepted, so running this over a partially populated
/// destination does not fail.
pub fn hardlink_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
    let dst = dst.as_ref();
    fs::create_dir_all(dst)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let target = dst.join(entry.file_name());
        if entry.file_type()?.is_dir() {
            hardlink_dir_all(entry.path(), target)?;
        } else {
            match std::fs::hard_link(entry.path(), target) {
                Ok(()) => {}
                // Tolerate a link created by an earlier (or concurrent) run.
                Err(err) if matches!(err.kind(), io::ErrorKind::AlreadyExists) => {}
                Err(err) => return Err(err),
            }
        }
    }
    Ok(())
}

/// Checks if a directory contains at least one file, including in
/// nested subdirectories.
pub fn has_files(path: impl AsRef<Path>) -> Result<bool> {
    for entry in fs::read_dir(path)? {
        let entry = entry?;
        let ty = entry.file_type()?;
        if ty.is_dir() {
            // Propagate a hit from the recursion. The previous code dropped
            // this result, so files that only existed inside subdirectories
            // were never reported.
            if has_files(entry.path())? {
                return Ok(true);
            }
        } else {
            // Any non-directory entry (file, symlink, ...) counts as a file.
            return Ok(true);
        }
    }
    Ok(false)
}

#[cfg(test)]
mod tests {
use super::join_paths;
Expand Down
30 changes: 30 additions & 0 deletions src/cargo/core/compiler/build_runner/compilation_files.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ use lazycell::LazyCell;
use tracing::debug;

use super::{BuildContext, BuildRunner, CompileKind, FileFlavor, Layout};
use crate::core::compiler::layout::BuildUnitLockLocation;
use crate::core::compiler::{CompileMode, CompileTarget, CrateType, FileType, Unit};
use crate::core::{Target, TargetKind, Workspace};
use crate::util::{self, CargoResult, StableHasher};
Expand Down Expand Up @@ -258,6 +259,28 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> {
self.host.build_dir().root()
}

/// Returns the per-unit directory inside the build dir for this unit.
pub fn build_unit(&self, unit: &Unit) -> PathBuf {
    self.layout(unit.kind)
        .build_dir()
        .build_unit(&self.pkg_dir(unit))
}

/// Returns the per-unit directory inside the build cache for this unit.
pub fn build_unit_cache(&self, unit: &Unit) -> PathBuf {
    self.layout(unit.kind)
        .build_cache()
        .build_unit(&self.pkg_dir(unit))
}

/// Returns the path of the marker indicating that this unit's build
/// cache entry has been fully populated.
pub fn build_unit_cache_populated(&self, unit: &Unit) -> PathBuf {
    self.layout(unit.kind)
        .build_cache()
        .build_unit_populated(&self.pkg_dir(unit))
}

/// Returns the lock location guarding this unit's build cache entry.
pub fn build_unit_cache_lock(&self, unit: &Unit) -> BuildUnitLockLocation {
    self.layout(unit.kind)
        .build_cache()
        .build_unit_lock(&self.pkg_dir(unit))
}

/// Returns the host `deps` directory path.
pub fn host_deps(&self, unit: &Unit) -> PathBuf {
let dir = self.pkg_dir(unit);
Expand All @@ -277,6 +300,13 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> {
self.layout(unit.kind).build_dir().fingerprint(&dir)
}

/// Returns the location of the partial and full locks for a given build
/// unit when fine-grained locking is enabled.
pub fn build_unit_lock(&self, unit: &Unit) -> BuildUnitLockLocation {
    self.layout(unit.kind)
        .build_dir()
        .build_unit_lock(&self.pkg_dir(unit))
}

/// Directory where incremental output for the given unit should go.
pub fn incremental_dir(&self, unit: &Unit) -> &Path {
self.layout(unit.kind).build_dir().incremental()
Expand Down
43 changes: 41 additions & 2 deletions src/cargo/core/compiler/build_runner/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ use std::sync::{Arc, Mutex};

use crate::core::PackageId;
use crate::core::compiler::compilation::{self, UnitOutput};
use crate::core::compiler::locking::{BuildCacheLock, LockingStrategy};
use crate::core::compiler::{self, Unit, artifact};
use crate::util::cache_lock::CacheLockMode;
use crate::util::errors::CargoResult;
Expand All @@ -15,6 +16,7 @@ use cargo_util::paths;
use filetime::FileTime;
use itertools::Itertools;
use jobserver::Client;
use tracing::warn;

use super::build_plan::BuildPlan;
use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts};
Expand Down Expand Up @@ -89,6 +91,11 @@ pub struct BuildRunner<'a, 'gctx> {
/// because the target has a type error. This is in an Arc<Mutex<..>>
/// because it is continuously updated as the job progresses.
pub failed_scrape_units: Arc<Mutex<HashSet<UnitHash>>>,

/// By default we use fine-grained locking, but we disable locking on some
/// filesystems (such as NFS), or fall back to coarse-grained locking if we
/// detect that the current environment cannot support fine-grained locking.
pub locking_strategy: LockingStrategy,
}

impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
Expand All @@ -111,6 +118,8 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
}
};

let locking_strategy = LockingStrategy::determine_locking_strategy(&bcx.ws, Some(&bcx))?;

Ok(Self {
bcx,
compilation: Compilation::new(bcx)?,
Expand All @@ -128,6 +137,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
lto: HashMap::new(),
metadata_for_doc_units: HashMap::new(),
failed_scrape_units: Arc::new(Mutex::new(HashSet::new())),
locking_strategy,
})
}

Expand Down Expand Up @@ -310,9 +320,38 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
.insert(dir.clone().into_path_buf());
}
}

for (unit, _) in &self.bcx.unit_graph {
self.save_to_cache(unit)?;
}

Ok(self.compilation)
}

/// Publishes a built unit's output directory into the build cache by
/// hardlinking it, then drops a marker file so later runs (and other
/// processes) can see the entry is complete.
fn save_to_cache(&self, unit: &Unit) -> CargoResult<()> {
    let destination = self.files().build_unit_cache(unit);
    let destination_populated = self.files().build_unit_cache_populated(unit);
    let destination_lock = self.files().build_unit_lock(unit);
    let source = self.files().build_unit(unit);

    // Fast path: the unit is already cached, so skip taking the lock.
    if destination_populated.exists() {
        return Ok(());
    }

    let _lock = BuildCacheLock::write(&destination_lock)?;

    // Re-check under the lock: another cargo process may have populated
    // this entry between the unlocked check above and acquiring the lock.
    if destination_populated.exists() {
        return Ok(());
    }

    // If we ever try to save a non-existent build unit, it's probably a bug
    // in cargo. Use `debug_assert` as we don't want to waste time getting
    // the file metadata in normal operation.
    debug_assert!(source.exists(), "missing {:?}", source);

    paths::hardlink_dir_all(source, destination)?;

    // The (empty) marker file signals the cache entry is fully populated.
    std::fs::File::create(destination_populated)?;

    Ok(())
}

fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> {
for output in self.outputs(unit)?.iter() {
if matches!(
Expand Down Expand Up @@ -360,11 +399,11 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> {
#[tracing::instrument(skip_all)]
pub fn prepare_units(&mut self) -> CargoResult<()> {
let dest = self.bcx.profiles.get_dir_name();
let host_layout = Layout::new(self.bcx.ws, None, &dest)?;
let host_layout = Layout::new(self.bcx.ws, None, &dest, &self.locking_strategy)?;
let mut targets = HashMap::new();
for kind in self.bcx.all_kinds.iter() {
if let CompileKind::Target(target) = *kind {
let layout = Layout::new(self.bcx.ws, Some(target), &dest)?;
let layout = Layout::new(self.bcx.ws, Some(target), &dest, &self.locking_strategy)?;
targets.insert(target, layout);
}
}
Expand Down
13 changes: 13 additions & 0 deletions src/cargo/core/compiler/job_queue/job.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,14 @@ impl Job {
}
}

/// Creates a new job whose outputs come from a build cache hit.
pub fn new_cached(work: Work) -> Job {
    Job {
        fresh: Freshness::Cached,
        work,
    }
}

/// Consumes this job by running it, returning the result of the
/// computation.
pub fn run(self, state: &JobState<'_, '_>) -> CargoResult<()> {
Expand Down Expand Up @@ -100,6 +108,7 @@ impl fmt::Debug for Job {
/// Freshness of a [`Job`]: whether its work actually needs to run.
#[derive(Debug, Clone)]
pub enum Freshness {
    // Up to date; no work needs to run for this unit.
    Fresh,
    // The unit's output was taken from the build cache (a cache hit).
    Cached,
    // The unit must be built; carries the reason it is out of date.
    Dirty(DirtyReason),
}

Expand All @@ -111,4 +120,8 @@ impl Freshness {
/// Returns `true` only for [`Freshness::Fresh`]; note that a `Cached`
/// unit is not considered fresh.
pub fn is_fresh(&self) -> bool {
    match self {
        Freshness::Fresh => true,
        Freshness::Cached | Freshness::Dirty(_) => false,
    }
}

/// Returns `true` only for [`Freshness::Cached`].
pub fn is_cached(&self) -> bool {
    match self {
        Freshness::Cached => true,
        Freshness::Fresh | Freshness::Dirty(_) => false,
    }
}
}
5 changes: 4 additions & 1 deletion src/cargo/core/compiler/job_queue/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ use jobserver::{Acquired, HelperThread};
use semver::Version;
use tracing::{debug, trace};

pub use self::job::Freshness::{self, Dirty, Fresh};
pub use self::job::Freshness::{self, Cached, Dirty, Fresh};
pub use self::job::{Job, Work};
pub use self::job_state::JobState;
use super::build_runner::OutputFile;
Expand Down Expand Up @@ -1190,6 +1190,9 @@ impl<'gctx> DrainState<'gctx> {
gctx.shell().verbose(|c| c.status("Fresh", &unit.pkg))?;
}
}
Cached => {
gctx.shell().status("Cached", &unit.pkg)?;
}
}
Ok(())
}
Expand Down
Loading
Loading