Makes patches optional #2543

Merged · 7 commits · Mar 29, 2021

Changes from 3 commits
212 changes: 111 additions & 101 deletions .pnp.cjs

Large diffs are not rendered by default.

Binary file not shown.
Binary file not shown.
Binary file not shown.
22 changes: 22 additions & 0 deletions .yarn/versions/36d85c5f.yml
@@ -0,0 +1,22 @@
releases:
"@yarnpkg/cli": minor
"@yarnpkg/plugin-compat": minor
"@yarnpkg/plugin-patch": minor

declined:
- "@yarnpkg/plugin-constraints"
- "@yarnpkg/plugin-dlx"
- "@yarnpkg/plugin-essentials"
- "@yarnpkg/plugin-init"
- "@yarnpkg/plugin-interactive-tools"
- "@yarnpkg/plugin-node-modules"
- "@yarnpkg/plugin-npm-cli"
- "@yarnpkg/plugin-pack"
- "@yarnpkg/plugin-pnp"
- "@yarnpkg/plugin-stage"
- "@yarnpkg/plugin-typescript"
- "@yarnpkg/plugin-version"
- "@yarnpkg/plugin-workspace-tools"
- "@yarnpkg/builder"
- "@yarnpkg/core"
- "@yarnpkg/doctor"
2 changes: 1 addition & 1 deletion package.json
@@ -6,7 +6,7 @@
"packages/*"
],
"dependencies": {
"@arcanis/sherlock": "^1.0.38",
"@arcanis/sherlock": "^2.0.2",
"@babel/cli": "^7.10.1",
"@babel/core": "^7.10.2",
"@babel/plugin-proposal-class-properties": "^7.10.1",
2 changes: 1 addition & 1 deletion packages/plugin-compat/sources/index.ts
@@ -39,7 +39,7 @@ const plugin: Plugin<CoreHooks & PatchHooks> = {
return structUtils.makeDescriptor(dependency, structUtils.makeRange({
protocol: `patch:`,
source: structUtils.stringifyDescriptor(dependency),
-selector: `builtin<compat/${structUtils.stringifyIdent(dependency)}>`,
+selector: `~builtin<compat/${structUtils.stringifyIdent(dependency)}>`,
params: null,
}));
},
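For reference, the `~` marker added to the selector is what the new patchUtils code (below) parses as "optional". In a lockfile this surfaces as a range roughly like the following — the package is a hypothetical example, and the exact URL-encoding of the source descriptor is simplified:

// Hypothetical range produced for a builtin compat patch on fsevents.
// The `~` before `builtin<...>` makes the patch optional: if its hunks no
// longer apply, Yarn reports a warning instead of failing the install.
const range = `patch:fsevents@npm%3A2.3.2#~builtin<compat/fsevents>`;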
109 changes: 70 additions & 39 deletions packages/plugin-patch/sources/PatchFetcher.ts
@@ -1,12 +1,12 @@
-import {Fetcher, FetchOptions, MinimalFetchOptions, ReportError, MessageName} from '@yarnpkg/core';
-import {Locator} from '@yarnpkg/core';
-import {miscUtils, structUtils} from '@yarnpkg/core';
-import {ppath, xfs, ZipFS, Filename, CwdFS, PortablePath} from '@yarnpkg/fslib';
-import {getLibzipPromise} from '@yarnpkg/libzip';
+import {Fetcher, FetchOptions, MinimalFetchOptions, ReportError, MessageName, Report} from '@yarnpkg/core';
+import {Locator} from '@yarnpkg/core';
+import {miscUtils, structUtils} from '@yarnpkg/core';
+import {ppath, xfs, ZipFS, Filename, CwdFS, PortablePath} from '@yarnpkg/fslib';
+import {getLibzipPromise} from '@yarnpkg/libzip';

-import * as patchUtils from './patchUtils';
-import {UnmatchedHunkError} from './tools/UnmatchedHunkError';
-import {reportHunk} from './tools/format';
+import * as patchUtils from './patchUtils';
+import {UnmatchedHunkError} from './tools/UnmatchedHunkError';
+import {reportHunk} from './tools/format';

export class PatchFetcher implements Fetcher {
supports(locator: Locator, opts: MinimalFetchOptions) {
@@ -44,56 +44,87 @@ export class PatchFetcher implements Fetcher {
const patchFiles = await patchUtils.loadPatchFiles(parentLocator, patchPaths, opts);

const tmpDir = await xfs.mktempPromise();
-const tmpFile = ppath.join(tmpDir, `patched.zip` as Filename);
+const currentFile = ppath.join(tmpDir, `current.zip` as Filename);

const sourceFetch = await opts.fetcher.fetch(sourceLocator, opts);
const prefixPath = structUtils.getIdentVendorPath(locator);

const libzip = await getLibzipPromise();

-const patchedPackage = new ZipFS(tmpFile, {
+// First we create a copy of the package that we'll be free to mutate
+const initialCopy = new ZipFS(currentFile, {
libzip,
create: true,
level: opts.project.configuration.get(`compressionLevel`),
});

-await patchedPackage.mkdirpPromise(prefixPath);
+await initialCopy.mkdirpPromise(prefixPath);

await miscUtils.releaseAfterUseAsync(async () => {
-await patchedPackage.copyPromise(prefixPath, sourceFetch.prefixPath, {baseFs: sourceFetch.packageFs, stableSort: true});
+await initialCopy.copyPromise(prefixPath, sourceFetch.prefixPath, {baseFs: sourceFetch.packageFs, stableSort: true});
}, sourceFetch.releaseFs);

-const patchFs = new CwdFS(ppath.resolve(PortablePath.root, prefixPath), {baseFs: patchedPackage});
-
-for (const patchFile of patchFiles) {
-if (patchFile !== null) {
-try {
-await patchUtils.applyPatchFile(patchUtils.parsePatchFile(patchFile), {
-baseFs: patchFs,
-version: sourceVersion,
-});
-} catch (err) {
-if (!(err instanceof UnmatchedHunkError))
-throw err;
-
-const enableInlineHunks = opts.project.configuration.get(`enableInlineHunks`);
-const suggestion = !enableInlineHunks
-? ` (set enableInlineHunks for details)`
-: ``;
-
-throw new ReportError(MessageName.PATCH_HUNK_FAILED, err.message + suggestion, report => {
-if (!enableInlineHunks)
-return;
-
-reportHunk(err.hunk, {
-configuration: opts.project.configuration,
-report,
-});
+initialCopy.saveAndClose();
+
+for (const {source, optional} of patchFiles) {
+if (source === null)
+continue;
+
+// Then for each patchfile, we open this copy anew, and try to apply the
+// changeset. We need to open it for each patchfile (rather than only a
+// single time) because it lets us easily rollback when hitting errors
+// on optional patches (we just need to call `discardAndClose`).
+const patchedPackage = new ZipFS(currentFile, {
[Inline review thread on the line above]

Member: Could you change this part and the initialCopy to be in-memory, then write the resulting buffer to disk for the returned ZipFS instance?

Member Author: I don't think I can, since we need to take snapshots in between each patch. If I were to closeAndSave an in-memory zipFs, we'd need to already have another one in memory representing the last snapshot; that would be difficult unless we cloned the in-memory zipFs somehow, which I don't know how to do efficiently.

Member Author: Oh, I forgot getBufferAndClose 🤔 Still, it would require keeping the whole archive in memory, twice per package. I'm a bit worried that would be heavy (by contrast, closing and reopening only needs the archive listing)... given that the general case is to apply a single patch, I'd tend to first try the fs approach and switch only if we see a noticeable impact.

Member: Can we at least profile both approaches and compare the time spent and the memory usage, to make sure we don't introduce a significant performance regression? Since compression is the slowest part of the fetch step, I'd prefer to avoid having to compress the archive for each patch file. I've also just realized that my changes still aren't optimal, because they don't prevent compression (I'd have to pass level: 0 for that to happen).

Member Author (arcanis, Mar 2, 2021): Sure, will check - however, I expect the compression level to affect new writes, not the existing ones. With that in mind we can't disable compression, since that would require going back and compressing the affected files after all the patches have been applied - far too complex for a fringe use case (multiple patches on the same package).

Member Author: For the record: 22s install time for the TS code cache, vs 15s before.
+libzip,
+level: opts.project.configuration.get(`compressionLevel`),
+});
+
+const patchFs = new CwdFS(ppath.resolve(PortablePath.root, prefixPath), {
+baseFs: patchedPackage,
+});
+
+try {
+await patchUtils.applyPatchFile(patchUtils.parsePatchFile(source), {
+baseFs: patchFs,
+version: sourceVersion,
+});
+} catch (err) {
+if (!(err instanceof UnmatchedHunkError))
+throw err;
+
+const enableInlineHunks = opts.project.configuration.get(`enableInlineHunks`);
+const suggestion = !enableInlineHunks && !optional
+? ` (set enableInlineHunks for details)`
+: ``;
+
+const message = `${structUtils.prettyLocator(opts.project.configuration, locator)}: ${err.message}${suggestion}`;
+const reportExtra = (report: Report) => {
+if (!enableInlineHunks)
+return;
+
+reportHunk(err.hunk, {
+configuration: opts.project.configuration,
+report,
+});
+};
+
+// By discarding the current changes, the next patch will start from
+// where we were.
+patchedPackage.discardAndClose();
+
+if (optional) {
+opts.report.reportWarningOnce(MessageName.PATCH_HUNK_FAILED, message, {reportExtra});
+continue;
+} else {
+throw new ReportError(MessageName.PATCH_HUNK_FAILED, message, reportExtra);
+}
+}
+
+patchedPackage.saveAndClose();
+}

-return patchedPackage;
+return new ZipFS(currentFile, {
+libzip,
+level: opts.project.configuration.get(`compressionLevel`),
+});
}
}
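Since the new control flow is spread across the diff above, here is a minimal, self-contained sketch of the snapshot-per-patch rollback strategy it implements. The Archive interface and helper names are illustrative stand-ins, not the actual ZipFS API:

interface Archive {
  applyPatch(source: string): void; // throws if a hunk doesn't match
  saveAndClose(): void;             // persist changes to the backing file
  discardAndClose(): void;          // drop changes; the file keeps the last save
}

type PatchSpec = {source: string | null, optional: boolean};

// The backing file always holds the last good snapshot: each patch reopens
// it, then either commits (saveAndClose) or rolls back (discardAndClose).
function applyPatches(open: () => Archive, patches: Array<PatchSpec>, warn: (message: string) => void) {
  for (const {source, optional} of patches) {
    if (source === null)
      continue;

    const archive = open(); // fresh view over the last saved snapshot
    try {
      archive.applyPatch(source);
    } catch (err) {
      archive.discardAndClose(); // roll back to the previous snapshot
      if (!optional)
        throw err; // required patches still abort the fetch
      warn(`Optional patch failed, skipping: ${err}`);
      continue;
    }
    archive.saveAndClose(); // the snapshot now includes this patch
  }
}

This mirrors the trade-off discussed in the review thread: reopening the on-disk archive for every patch only keeps the archive listing in memory, at the cost of recompressing on each save.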
2 changes: 1 addition & 1 deletion packages/plugin-patch/sources/PatchResolver.ts
@@ -56,7 +56,7 @@ export class PatchResolver implements Resolver {
if (typeof sourcePackage === `undefined`)
throw new Error(`Assertion failed: The dependency should have been resolved`);

-const patchHash = hashUtils.makeHash(`${CACHE_VERSION}`, ...patchFiles).slice(0, 6);
+const patchHash = hashUtils.makeHash(`${CACHE_VERSION}`, ...patchFiles.map(spec => JSON.stringify(spec))).slice(0, 6);

return [patchUtils.makeLocator(descriptor, {parentLocator, sourcePackage, patchPaths, patchHash})];
}
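The hash input changes because each loadPatchFiles entry is now a {source, optional} object rather than a raw string, so specs must be serialized — which also folds the optional flag into the cache key. A rough sketch of the effect, approximating hashUtils.makeHash with Node's crypto (the exact algorithm and concatenation are assumptions):

import {createHash} from 'crypto';

// Assumed stand-in for hashUtils.makeHash: hash the concatenated arguments.
const makeHash = (...args: Array<string>) =>
  createHash(`sha512`).update(args.join(``)).digest(`hex`);

const CACHE_VERSION = 2; // illustrative value only
const patchFiles = [
  {source: `--- a/index.js\n+++ b/index.js\n`, optional: true},
];

// JSON.stringify makes the optional flag part of the locator's patchHash,
// so flipping optionality alone invalidates the cached entry.
const patchHash = makeHash(`${CACHE_VERSION}`, ...patchFiles.map(spec => JSON.stringify(spec))).slice(0, 6);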
66 changes: 41 additions & 25 deletions packages/plugin-patch/sources/patchUtils.ts
@@ -80,6 +80,10 @@ type VisitPatchPathOptions<T> = {
};

function visitPatchPath<T>({onAbsolute, onRelative, onBuiltin}: VisitPatchPathOptions<T>, patchPath: PortablePath) {
+const optional = patchPath.startsWith(`~`);
+if (optional)
+patchPath = patchPath.slice(1) as PortablePath;

const builtinMatch = patchPath.match(BUILTIN_REGEXP);
if (builtinMatch !== null)
return onBuiltin(builtinMatch[1]);
Expand All @@ -91,6 +95,14 @@ function visitPatchPath<T>({onAbsolute, onRelative, onBuiltin}: VisitPatchPathOp
}
}

+export function extractPatchFlags(patchPath: PortablePath) {
+const optional = patchPath.startsWith(`~`);
+if (optional)
+patchPath = patchPath.slice(1) as PortablePath;
+
+return {optional};
+}

export function isParentRequired(patchPath: PortablePath) {
return visitPatchPath({
onAbsolute: () => false,
@@ -119,36 +131,40 @@ export async function loadPatchFiles(parentLocator: Locator | null, patchPaths:
// First we obtain the specification for all the patches that we'll have to
// apply to the original package.
const patchFiles = await miscUtils.releaseAfterUseAsync(async () => {
-return await Promise.all(patchPaths.map(async patchPath => visitPatchPath({
-onAbsolute: async () => {
-return await xfs.readFilePromise(patchPath, `utf8`);
-},
-
-onRelative: async () => {
-if (effectiveParentFetch === null)
-throw new Error(`Assertion failed: The parent locator should have been fetched`);
-
-return await effectiveParentFetch.packageFs.readFilePromise(ppath.join(effectiveParentFetch.prefixPath, patchPath), `utf8`);
-},
-
-onBuiltin: async name => {
-return await opts.project.configuration.firstHook((hooks: PatchHooks) => {
-return hooks.getBuiltinPatch;
-}, opts.project, name);
-},
-}, patchPath)));
+return await Promise.all(patchPaths.map(async patchPath => {
+const flags = extractPatchFlags(patchPath);
+
+const source = await visitPatchPath({
+onAbsolute: async () => {
+return await xfs.readFilePromise(patchPath, `utf8`);
+},
+
+onRelative: async () => {
+if (effectiveParentFetch === null)
+throw new Error(`Assertion failed: The parent locator should have been fetched`);
+
+return await effectiveParentFetch.packageFs.readFilePromise(ppath.join(effectiveParentFetch.prefixPath, patchPath), `utf8`);
+},
+
+onBuiltin: async name => {
+return await opts.project.configuration.firstHook((hooks: PatchHooks) => {
+return hooks.getBuiltinPatch;
+}, opts.project, name);
+},
+}, patchPath);
+
+return {...flags, source};
+}));
});

// Normalizes the line endings to prevent mismatches when cloning a
// repository on Windows systems (the default settings for Git are to
// convert newlines back and forth, which would mess with the checksum)
-return patchFiles.map(definition => {
-if (typeof definition === `string`) {
-return definition.replace(/\r\n?/g, `\n`);
-} else {
-return definition;
-}
-});
+for (const spec of patchFiles)
+if (typeof spec.source === `string`)
+spec.source = spec.source.replace(/\r\n?/g, `\n`);
+
+return patchFiles;
}

export async function extractPackageToDisk(locator: Locator, {cache, project}: {cache: Cache, project: Project}) {
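To make the new flag handling concrete, a small usage sketch (paths hypothetical; both extractPatchFlags and visitPatchPath strip the leading ~ before interpreting the rest of the path):

import {PortablePath} from '@yarnpkg/fslib';

import {extractPatchFlags} from './patchUtils';

// The `~` prefix is consumed before the path is classified, so it works
// the same for builtin, absolute, and relative patch paths:
extractPatchFlags(`~builtin<compat/fsevents>` as PortablePath); // => {optional: true}
extractPatchFlags(`./fixes/my-fix.patch` as PortablePath);      // => {optional: false}
extractPatchFlags(`~./fixes/my-fix.patch` as PortablePath);     // => {optional: true}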
8 changes: 6 additions & 2 deletions packages/yarnpkg-core/sources/Report.ts
@@ -93,21 +93,25 @@ export abstract class Report {
};
}

-reportInfoOnce(name: MessageName, text: string, opts?: {key?: any}) {
+reportInfoOnce(name: MessageName, text: string, opts?: {key?: any, reportExtra?: (report: Report) => void}) {
const key = opts && opts.key ? opts.key : text;

if (!this.reportedInfos.has(key)) {
this.reportedInfos.add(key);
this.reportInfo(name, text);

+opts?.reportExtra?.(this);
}
}

-reportWarningOnce(name: MessageName, text: string, opts?: {key?: any}) {
+reportWarningOnce(name: MessageName, text: string, opts?: {key?: any, reportExtra?: (report: Report) => void}) {
const key = opts && opts.key ? opts.key : text;

if (!this.reportedWarnings.has(key)) {
this.reportedWarnings.add(key);
this.reportWarning(name, text);

+opts?.reportExtra?.(this);
}
}

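A short usage sketch of the new hook (message texts illustrative): reportExtra runs only when the deduplicated message is actually emitted, so detailed output such as a failed hunk prints at most once per key.

import {MessageName, Report} from '@yarnpkg/core';

declare const report: Report;

// First call: the warning is printed, then reportExtra receives the report
// so it can attach detail (plugin-patch uses this to print the failed hunk).
report.reportWarningOnce(MessageName.PATCH_HUNK_FAILED, `patch failed to apply`, {
  reportExtra: r => r.reportInfo(MessageName.UNNAMED, `...hunk details...`),
});

// Second call with the same text: deduplicated — neither the warning nor
// reportExtra runs again.
report.reportWarningOnce(MessageName.PATCH_HUNK_FAILED, `patch failed to apply`, {
  reportExtra: r => r.reportInfo(MessageName.UNNAMED, `...hunk details...`),
});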