fix(core): hashing fixes #29247

Merged
merged 1 commit into from
Dec 9, 2024
41 changes: 20 additions & 21 deletions packages/nx/src/hasher/native-task-hasher-impl.spec.ts
@@ -4,7 +4,6 @@ import { NxJsonConfiguration } from '../config/nx-json';
import { createTaskGraph } from '../tasks-runner/create-task-graph';
import { NativeTaskHasherImpl } from './native-task-hasher-impl';
import { ProjectGraphBuilder } from '../project-graph/project-graph-builder';
import { testOnlyTransferFileMap } from '../native';

describe('native task hasher', () => {
let tempFs: TempFs;
@@ -166,9 +165,9 @@ describe('native task hasher', () => {
"unrelated:ProjectConfiguration": "11133337791644294114",
"unrelated:TsConfig": "2264969541778889434",
"unrelated:{projectRoot}/**/*": "4127219831408253695",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "18099427347122160586",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "6993407921919898285",
},
"value": "391066910278240047",
"value": "15987635381237972716",
},
]
`);
@@ -232,9 +231,9 @@ describe('native task hasher', () => {
"parent:ProjectConfiguration": "8031122597231773116",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "17059468255294227635",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "18099427347122160586",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "6993407921919898285",
},
"value": "2068118780828544905",
"value": "10262178246623018030",
}
`);
});
@@ -312,9 +311,9 @@ describe('native task hasher', () => {
"parent:!{projectRoot}/**/*.spec.ts": "8911122541468969799",
"parent:ProjectConfiguration": "3608670998275221195",
"parent:TsConfig": "2264969541778889434",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "11114659294156087056",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "9567402949680805009",
},
"value": "7780216706447676384",
"value": "14320402761058545796",
}
`);
});
@@ -379,9 +378,9 @@ describe('native task hasher', () => {
"parent:!{projectRoot}/**/*.spec.ts": "8911122541468969799",
"parent:ProjectConfiguration": "16402137858974842465",
"parent:TsConfig": "2264969541778889434",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "11114659294156087056",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "9567402949680805009",
},
"value": "16063851723942996830",
"value": "2453961902871518313",
},
{
"details": {
@@ -390,9 +389,9 @@
"parent:ProjectConfiguration": "16402137858974842465",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "17059468255294227635",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "11114659294156087056",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "9567402949680805009",
},
"value": "1153029350223570014",
"value": "5894031627295207190",
},
]
`);
@@ -480,11 +479,11 @@ describe('native task hasher', () => {
"parent:ProjectConfiguration": "14398811678394411425",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "17059468255294227635",
"workspace:[{workspaceRoot}/global1]": "14542405497386871555",
"workspace:[{workspaceRoot}/global2]": "12932836274958677781",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "12076281115618125366",
"workspace:[{workspaceRoot}/global1]": "11580065831422255455",
"workspace:[{workspaceRoot}/global2]": "6389465682922235219",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "1359893257201181462",
},
"value": "11623032905580707496",
"value": "12394084267697729491",
},
]
`);
@@ -537,9 +536,9 @@ describe('native task hasher', () => {
"parent:ProjectConfiguration": "3608670998275221195",
"parent:TsConfig": "8661678577354855152",
"parent:{projectRoot}/**/*": "17059468255294227635",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "18099427347122160586",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "6993407921919898285",
},
"value": "15449891577656158381",
"value": "16657264716563422624",
}
`);
});
@@ -616,9 +615,9 @@ describe('native task hasher', () => {
"parent:ProjectConfiguration": "3608670998275221195",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "17059468255294227635",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "18099427347122160586",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "6993407921919898285",
},
"value": "7701541978018526456",
"value": "1325637283470296766",
}
`);

@@ -639,9 +638,9 @@ describe('native task hasher', () => {
"parent:ProjectConfiguration": "3608670998275221195",
"parent:TsConfig": "2264969541778889434",
"parent:{projectRoot}/**/*": "17059468255294227635",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "18099427347122160586",
"workspace:[{workspaceRoot}/nx.json,{workspaceRoot}/.gitignore,{workspaceRoot}/.nxignore]": "6993407921919898285",
},
"value": "7701541978018526456",
"value": "1325637283470296766",
}
`);
});
1 change: 1 addition & 0 deletions packages/nx/src/native/glob.rs
@@ -59,6 +59,7 @@ impl NxGlobSetBuilder {
}
}

#[derive(Debug)]
pub struct NxGlobSet {
included_globs: GlobSet,
excluded_globs: GlobSet,
69 changes: 51 additions & 18 deletions packages/nx/src/native/tasks/hashers/hash_workspace_files.rs
@@ -2,7 +2,7 @@ use std::sync::Arc;

use anyhow::*;
use dashmap::DashMap;
use tracing::{trace, warn};
use tracing::{debug, debug_span, trace, warn};

use crate::native::types::FileData;
use crate::native::{glob::build_glob_set, hasher::hash};
@@ -47,20 +47,21 @@ pub fn hash_workspace_files(
let glob = build_glob_set(&globs)?;

let mut hasher = xxhash_rust::xxh3::Xxh3::new();
let mut hashes: Vec<String> = Vec::new();
for file in all_workspace_files
.iter()
.filter(|file| glob.is_match(&file.file))
{
trace!("{:?} was found with glob {:?}", file.file, globs);
hashes.push(file.hash.clone());
hashes.push(file.file.clone());
}
hasher.update(hashes.join(",").as_bytes());
let hashed_value = hasher.digest().to_string();
debug_span!("Hashing workspace fileset", cache_key).in_scope(|| {
for file in all_workspace_files
.iter()
.filter(|file| glob.is_match(&file.file))
{
debug!("Adding {:?} ({:?}) to hash", file.hash, file.file);
hasher.update(file.file.clone().as_bytes());
hasher.update(file.hash.clone().as_bytes());
}
let hashed_value = hasher.digest().to_string();
debug!("Hash Value: {:?}", hashed_value);

cache.insert(cache_key.to_string(), hashed_value.clone());
Ok(hashed_value)
cache.insert(cache_key.to_string(), hashed_value.clone());
Ok(hashed_value)
})
}

#[cfg(test)]
@@ -111,9 +112,41 @@ mod test {
Arc::new(DashMap::new()),
)
.unwrap();
assert_eq!(result, hash([
gitignore_file.hash,
gitignore_file.file
].join(",").as_bytes()));
assert_eq!(result, "15841935230129999746");
}

#[test]
fn test_hash_workspace_files_is_deterministic() {
let gitignore_file = FileData {
file: ".gitignore".into(),
hash: "123".into(),
};
let nxignore_file = FileData {
file: ".nxignore".into(),
hash: "456".into(),
};
let package_json_file = FileData {
file: "package.json".into(),
hash: "789".into(),
};
let project_file = FileData {
file: "packages/project/project.json".into(),
hash: "abc".into(),
};

for i in 0..1000 {
let result = hash_workspace_files(
&["{workspaceRoot}/**/*".to_string()],
&[
gitignore_file.clone(),
nxignore_file.clone(),
package_json_file.clone(),
project_file.clone(),
],
Arc::new(DashMap::new()),
)
.unwrap();
assert_eq!(result, "13759877301064854697");
}
}
}
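
Not part of the PR's diff: the hunks above (and the context.rs hunk later in this diff) converge on the same pattern — stream each matched file's path and its content hash into one xxh3 hasher instead of joining strings — so that a pure rename changes the digest, which the new "should hash differently if a file is renamed" spec later in this diff relies on. Below is a minimal sketch of that difference, assuming only the xxhash_rust crate; the Entry struct and sample values are illustrative stand-ins, not Nx's FileData.

use xxhash_rust::xxh3::Xxh3;

// Illustrative (path, content-hash) pair; a stand-in for Nx's FileData.
struct Entry {
    file: String,
    hash: String,
}

// Content hashes only: renaming a file (same content, new path) does not
// change the result.
fn digest_hashes_only(entries: &[Entry]) -> String {
    let mut hasher = Xxh3::new();
    for e in entries {
        hasher.update(e.hash.as_bytes());
    }
    hasher.digest().to_string()
}

// Path and content hash per file: a rename changes the digest.
fn digest_paths_and_hashes(entries: &[Entry]) -> String {
    let mut hasher = Xxh3::new();
    for e in entries {
        hasher.update(e.file.as_bytes());
        hasher.update(e.hash.as_bytes());
    }
    hasher.digest().to_string()
}

fn main() {
    let original = vec![Entry { file: "file0.txt".into(), hash: "abc".into() }];
    let renamed = vec![Entry { file: "file00.txt".into(), hash: "abc".into() }];

    // Same content hash, different file name: only the path-aware digest differs.
    assert_eq!(digest_hashes_only(&original), digest_hashes_only(&renamed));
    assert_ne!(
        digest_paths_and_hashes(&original),
        digest_paths_and_hashes(&renamed)
    );
}
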
20 changes: 12 additions & 8 deletions packages/nx/src/native/tasks/task_hasher.rs
@@ -82,8 +82,8 @@ impl TaskHasher {
hash_plans: External<HashMap<String, Vec<HashInstruction>>>,
js_env: HashMap<String, String>,
) -> anyhow::Result<NapiDashMap<String, HashDetails>> {
debug!("hashing plans {:?}", hash_plans.as_ref());
trace!("plan length: {}", hash_plans.len());
trace!("hashing plans {:?}", hash_plans.as_ref());
debug!("plan length: {}", hash_plans.len());
trace!("all workspace files: {}", self.all_workspace_files.len());
trace!("project_file_map: {}", self.project_file_map.len());

@@ -136,18 +136,22 @@
})?;

hashes.iter_mut().for_each(|mut h| {
let hash_details = h.value_mut();
let (hash_id, hash_details) = h.pair_mut();
let mut keys = hash_details.details.keys().collect::<Vec<_>>();
keys.par_sort();
let mut hasher = xxhash_rust::xxh3::Xxh3::new();
for key in keys {
hasher.update(hash_details.details[key].as_bytes());
}
hash_details.value = hasher.digest().to_string();
trace_span!("Assembling hash", hash_id).in_scope(|| {
for key in keys {
trace!("Adding {} ({}) to hash", hash_details.details[key], key);
hasher.update(hash_details.details[key].as_bytes());
}
let hash = hasher.digest().to_string();
trace!("Hash Value: {}", hash);
hash_details.value = hash;
});
});

trace!("hashing took {:?}", hash_time.elapsed());
debug!(?hashes);
Ok(hashes)
}

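
Not part of the PR's diff: a short sketch of the assembly step visible in the task_hasher.rs hunk above. The per-task details come from map types whose iteration order is not stable, so the keys are sorted before the values are folded into a single xxh3 digest, keeping the final task hash deterministic across runs. Only std and the xxhash_rust crate are assumed; the plain HashMap and sample entries stand in for Nx's HashDetails.

use std::collections::HashMap;
use xxhash_rust::xxh3::Xxh3;

// Combine named hash details into one digest. Sorting the keys first makes
// the result independent of the map's (unstable) iteration order.
fn assemble_task_hash(details: &HashMap<String, String>) -> String {
    let mut keys: Vec<&String> = details.keys().collect();
    keys.sort();

    let mut hasher = Xxh3::new();
    for key in keys {
        hasher.update(details[key].as_bytes());
    }
    hasher.digest().to_string()
}

fn main() {
    // Illustrative detail entries; the values are placeholders, not real hashes.
    let mut a = HashMap::new();
    a.insert("parent:TsConfig".to_string(), "111".to_string());
    a.insert("env:NODE_ENV".to_string(), "222".to_string());

    // The same entries inserted in the opposite order hash identically.
    let mut b = HashMap::new();
    b.insert("env:NODE_ENV".to_string(), "222".to_string());
    b.insert("parent:TsConfig".to_string(), "111".to_string());

    assert_eq!(assemble_task_hash(&a), assemble_task_hash(&b));
}
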
45 changes: 44 additions & 1 deletion packages/nx/src/native/tests/workspace_files.spec.ts
@@ -5,7 +5,7 @@ import { dirname, join } from 'path';
import { readJsonFile } from '../../utils/fileutils';
import { cacheDirectoryForWorkspace } from '../../utils/cache-directory';

describe('workspace files', () => {
describe('Workspace Context', () => {
function createParseConfigurationsFunction(tempDir: string) {
return async (filenames: string[]) => {
const res = {};
@@ -187,6 +187,49 @@
`);
});

describe('hashing', () => {
let context: WorkspaceContext;
let fs: TempFs;

beforeEach(async () => {
fs = new TempFs('workspace-files');

const files = {};
for (let i = 0; i < 1000; i++) {
files[`file${i}.txt`] = i.toString();
}

await fs.createFiles(files);

context = new WorkspaceContext(
fs.tempDir,
cacheDirectoryForWorkspace(fs.tempDir)
);
});

it('should hash consistently when nothing changes', () => {
let hash = context.hashFilesMatchingGlob(['**/*.txt']);
for (let i = 0; i < 100; i++) {
const newContext = new WorkspaceContext(
fs.tempDir,
cacheDirectoryForWorkspace(fs.tempDir)
);
expect(newContext.hashFilesMatchingGlob(['**/*.txt'])).toEqual(hash);
}
});

it('should hash differently if a file is renamed', () => {
let hash1 = context.hashFilesMatchingGlob(['**/*.txt']);
const newContext = new WorkspaceContext(
fs.tempDir,
cacheDirectoryForWorkspace(fs.tempDir)
);
fs.renameFile('file0.txt', 'file00.txt');
let hash2 = newContext.hashFilesMatchingGlob(['**/*.txt']);
expect(hash1).not.toEqual(hash2);
});
});

describe('globbing', () => {
let context: WorkspaceContext;
let fs: TempFs;
24 changes: 13 additions & 11 deletions packages/nx/src/native/workspace/context.rs
@@ -3,10 +3,6 @@ use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::sync::Arc;

use napi::bindgen_prelude::External;
use rayon::prelude::*;
use tracing::{trace, warn};

use crate::native::hasher::hash;
use crate::native::logger::enable_logger;
use crate::native::project_graph::utils::{find_project_for_path, ProjectRootMappings};
@@ -18,6 +14,10 @@ use crate::native::workspace::types::{
FileMap, NxWorkspaceFilesExternals, ProjectFiles, UpdatedWorkspaceFiles,
};
use crate::native::workspace::{config_files, types::NxWorkspaceFiles, workspace_files};
use napi::bindgen_prelude::External;
use rayon::prelude::*;
use tracing::{trace, warn};
use xxhash_rust::xxh3;

#[napi]
pub struct WorkspaceContext {
@@ -238,13 +238,15 @@ impl WorkspaceContext {
exclude: Option<Vec<String>>,
) -> napi::Result<String> {
let files = &self.all_file_data();
let globbed_files = config_files::glob_files(files, globs, exclude)?;
Ok(hash(
&globbed_files
.map(|file| file.hash.as_bytes())
.collect::<Vec<_>>()
.concat(),
))
let globbed_files = config_files::glob_files(files, globs, exclude)?.collect::<Vec<_>>();

let mut hasher = xxh3::Xxh3::new();
for file in globbed_files {
hasher.update(file.file.as_bytes());
hasher.update(file.hash.as_bytes());
}

Ok(hasher.digest().to_string())
}

#[napi]