Makes patches optional (#2543)
* Tolerates failing patches; falls back on the original sources

* Adds support for optional patches

* Versions

* Fixes cache

* Removes comments
arcanis authored Mar 29, 2021
1 parent 208c0dc commit f09dc2f
Showing 22 changed files with 320 additions and 229 deletions.
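
As a quick illustration of the user-facing change (the package name, version, and patch path below are hypothetical), a patch path prefixed with ~ inside a patch: range is now treated as optional: when it fails to apply, Yarn reports a warning and keeps the unpatched sources instead of aborting the install.

// Sketch only — the exact range strings are assumptions based on the diff below.
const requiredPatch = `patch:lodash@1.2.3#./fixes/lodash.patch`;
const optionalPatch = `patch:lodash@1.2.3#~./fixes/lodash.patch`;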
284 changes: 147 additions & 137 deletions .pnp.cjs

Large diffs are not rendered by default.

3 binary files not shown.
22 changes: 22 additions & 0 deletions .yarn/versions/36d85c5f.yml
@@ -0,0 +1,22 @@
releases:
  "@yarnpkg/cli": minor
  "@yarnpkg/plugin-compat": minor
  "@yarnpkg/plugin-patch": minor

declined:
  - "@yarnpkg/plugin-constraints"
  - "@yarnpkg/plugin-dlx"
  - "@yarnpkg/plugin-essentials"
  - "@yarnpkg/plugin-init"
  - "@yarnpkg/plugin-interactive-tools"
  - "@yarnpkg/plugin-node-modules"
  - "@yarnpkg/plugin-npm-cli"
  - "@yarnpkg/plugin-pack"
  - "@yarnpkg/plugin-pnp"
  - "@yarnpkg/plugin-stage"
  - "@yarnpkg/plugin-typescript"
  - "@yarnpkg/plugin-version"
  - "@yarnpkg/plugin-workspace-tools"
  - "@yarnpkg/builder"
  - "@yarnpkg/core"
  - "@yarnpkg/doctor"
2 changes: 1 addition & 1 deletion package.json
@@ -6,7 +6,7 @@
"packages/*"
],
"dependencies": {
"@arcanis/sherlock": "^1.0.38",
"@arcanis/sherlock": "^2.0.2",
"@babel/cli": "^7.10.1",
"@babel/core": "^7.10.2",
"@babel/plugin-proposal-class-properties": "^7.10.1",
2 changes: 1 addition & 1 deletion packages/plugin-compat/sources/index.ts
@@ -39,7 +39,7 @@ const plugin: Plugin<CoreHooks & PatchHooks> = {
return structUtils.makeDescriptor(dependency, structUtils.makeRange({
protocol: `patch:`,
source: structUtils.stringifyDescriptor(dependency),
selector: `builtin<compat/${structUtils.stringifyIdent(dependency)}>`,
selector: `~builtin<compat/${structUtils.stringifyIdent(dependency)}>`,
params: null,
}));
},
109 changes: 70 additions & 39 deletions packages/plugin-patch/sources/PatchFetcher.ts
@@ -1,12 +1,12 @@
import {Fetcher, FetchOptions, MinimalFetchOptions, ReportError, MessageName} from '@yarnpkg/core';
import {Locator} from '@yarnpkg/core';
import {miscUtils, structUtils} from '@yarnpkg/core';
import {ppath, xfs, ZipFS, Filename, CwdFS, PortablePath} from '@yarnpkg/fslib';
import {getLibzipPromise} from '@yarnpkg/libzip';
import {Fetcher, FetchOptions, MinimalFetchOptions, ReportError, MessageName, Report} from '@yarnpkg/core';
import {Locator} from '@yarnpkg/core';
import {miscUtils, structUtils} from '@yarnpkg/core';
import {ppath, xfs, ZipFS, Filename, CwdFS, PortablePath} from '@yarnpkg/fslib';
import {getLibzipPromise} from '@yarnpkg/libzip';

import * as patchUtils from './patchUtils';
import {UnmatchedHunkError} from './tools/UnmatchedHunkError';
import {reportHunk} from './tools/format';
import * as patchUtils from './patchUtils';
import {UnmatchedHunkError} from './tools/UnmatchedHunkError';
import {reportHunk} from './tools/format';

export class PatchFetcher implements Fetcher {
supports(locator: Locator, opts: MinimalFetchOptions) {
@@ -44,56 +44,87 @@ export class PatchFetcher implements Fetcher {
const patchFiles = await patchUtils.loadPatchFiles(parentLocator, patchPaths, opts);

const tmpDir = await xfs.mktempPromise();
const tmpFile = ppath.join(tmpDir, `patched.zip` as Filename);
const currentFile = ppath.join(tmpDir, `current.zip` as Filename);

const sourceFetch = await opts.fetcher.fetch(sourceLocator, opts);
const prefixPath = structUtils.getIdentVendorPath(locator);

const libzip = await getLibzipPromise();

const patchedPackage = new ZipFS(tmpFile, {
// First we create a copy of the package that we'll be free to mutate
const initialCopy = new ZipFS(currentFile, {
libzip,
create: true,
level: opts.project.configuration.get(`compressionLevel`),
});

await patchedPackage.mkdirpPromise(prefixPath);
await initialCopy.mkdirpPromise(prefixPath);

await miscUtils.releaseAfterUseAsync(async () => {
await patchedPackage.copyPromise(prefixPath, sourceFetch.prefixPath, {baseFs: sourceFetch.packageFs, stableSort: true});
await initialCopy.copyPromise(prefixPath, sourceFetch.prefixPath, {baseFs: sourceFetch.packageFs, stableSort: true});
}, sourceFetch.releaseFs);

const patchFs = new CwdFS(ppath.resolve(PortablePath.root, prefixPath), {baseFs: patchedPackage});

for (const patchFile of patchFiles) {
if (patchFile !== null) {
try {
await patchUtils.applyPatchFile(patchUtils.parsePatchFile(patchFile), {
baseFs: patchFs,
version: sourceVersion,
});
} catch (err) {
if (!(err instanceof UnmatchedHunkError))
throw err;

const enableInlineHunks = opts.project.configuration.get(`enableInlineHunks`);
const suggestion = !enableInlineHunks
? ` (set enableInlineHunks for details)`
: ``;

throw new ReportError(MessageName.PATCH_HUNK_FAILED, err.message + suggestion, report => {
if (!enableInlineHunks)
return;

reportHunk(err.hunk, {
configuration: opts.project.configuration,
report,
});
initialCopy.saveAndClose();

for (const {source, optional} of patchFiles) {
if (source === null)
continue;

// Then for each patchfile, we open this copy anew, and try to apply the
// changeset. We need to open it for each patchfile (rather than only a
// single time) because it lets us easily rollback when hitting errors
// on optional patches (we just need to call `discardAndClose`).
const patchedPackage = new ZipFS(currentFile, {
libzip,
level: opts.project.configuration.get(`compressionLevel`),
});

const patchFs = new CwdFS(ppath.resolve(PortablePath.root, prefixPath), {
baseFs: patchedPackage,
});

try {
await patchUtils.applyPatchFile(patchUtils.parsePatchFile(source), {
baseFs: patchFs,
version: sourceVersion,
});
} catch (err) {
if (!(err instanceof UnmatchedHunkError))
throw err;

const enableInlineHunks = opts.project.configuration.get(`enableInlineHunks`);
const suggestion = !enableInlineHunks && !optional
? ` (set enableInlineHunks for details)`
: ``;

const message = `${structUtils.prettyLocator(opts.project.configuration, locator)}: ${err.message}${suggestion}`;
const reportExtra = (report: Report) => {
if (!enableInlineHunks)
return;

reportHunk(err.hunk, {
configuration: opts.project.configuration,
report,
});
};

// By discarding the current changes, the next patch will start from
// where we were.
patchedPackage.discardAndClose();

if (optional) {
opts.report.reportWarningOnce(MessageName.PATCH_HUNK_FAILED, message, {reportExtra});
} else {
throw new ReportError(MessageName.PATCH_HUNK_FAILED, message, reportExtra);
}
}

patchedPackage.saveAndClose();
}

return patchedPackage;
return new ZipFS(currentFile, {
libzip,
level: opts.project.configuration.get(`compressionLevel`),
});
}
}
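
The fetcher now writes the copied package to current.zip once, then reopens that archive for every patch so a failed optional patch can be rolled back with discardAndClose. Below is a minimal standalone sketch of that reopen/apply/rollback pattern; the tryMutate helper and its callback are hypothetical, not part of this commit.

import {ppath, xfs, ZipFS, Filename} from '@yarnpkg/fslib';
import {getLibzipPromise}            from '@yarnpkg/libzip';

export async function tryMutate(mutate: (zip: ZipFS) => Promise<void>) {
  const libzip = await getLibzipPromise();
  const tmpDir = await xfs.mktempPromise();
  const currentFile = ppath.join(tmpDir, `current.zip` as Filename);

  // Persist an initial (empty) archive that the mutation attempt will start from
  const initial = new ZipFS(currentFile, {libzip, create: true});
  initial.saveAndClose();

  // Reopen the same archive for the attempt; keep the changes on success,
  // drop them on failure so the on-disk state stays untouched
  const attempt = new ZipFS(currentFile, {libzip});
  try {
    await mutate(attempt);
    attempt.saveAndClose();
  } catch {
    attempt.discardAndClose();
  }
}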
2 changes: 1 addition & 1 deletion packages/plugin-patch/sources/PatchResolver.ts
@@ -56,7 +56,7 @@ export class PatchResolver implements Resolver {
if (typeof sourcePackage === `undefined`)
throw new Error(`Assertion failed: The dependency should have been resolved`);

const patchHash = hashUtils.makeHash(`${CACHE_VERSION}`, ...patchFiles).slice(0, 6);
const patchHash = hashUtils.makeHash(`${CACHE_VERSION}`, ...patchFiles.map(spec => JSON.stringify(spec))).slice(0, 6);

return [patchUtils.makeLocator(descriptor, {parentLocator, sourcePackage, patchPaths, patchHash})];
}
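
Since loadPatchFiles now returns {source, optional} objects instead of plain strings, each spec is stringified before hashing, so toggling a patch between optional and required changes the cache key. A rough sketch of the resulting computation follows; the CACHE_VERSION value and patch content are assumptions for illustration.

import {hashUtils} from '@yarnpkg/core';

const CACHE_VERSION = 2; // assumed value, for illustration only
const patchFiles = [
  {source: `diff --git a/index.js b/index.js\n...`, optional: true},
];

export const patchHash = hashUtils.makeHash(
  `${CACHE_VERSION}`,
  ...patchFiles.map(spec => JSON.stringify(spec)),
).slice(0, 6);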
66 changes: 41 additions & 25 deletions packages/plugin-patch/sources/patchUtils.ts
@@ -80,6 +80,10 @@ type VisitPatchPathOptions<T> = {
};

function visitPatchPath<T>({onAbsolute, onRelative, onBuiltin}: VisitPatchPathOptions<T>, patchPath: PortablePath) {
const optional = patchPath.startsWith(`~`);
if (optional)
patchPath = patchPath.slice(1) as PortablePath;

const builtinMatch = patchPath.match(BUILTIN_REGEXP);
if (builtinMatch !== null)
return onBuiltin(builtinMatch[1]);
@@ -91,6 +95,14 @@ function visitPatchPath<T>({onAbsolute, onRelative, onBuiltin}: VisitPatchPathOp
}
}

export function extractPatchFlags(patchPath: PortablePath) {
const optional = patchPath.startsWith(`~`);
if (optional)
patchPath = patchPath.slice(1) as PortablePath;

return {optional};
}

export function isParentRequired(patchPath: PortablePath) {
return visitPatchPath({
onAbsolute: () => false,
@@ -119,36 +131,40 @@ export async function loadPatchFiles(parentLocator: Locator | null, patchPaths:
// First we obtain the specification for all the patches that we'll have to
// apply to the original package.
const patchFiles = await miscUtils.releaseAfterUseAsync(async () => {
return await Promise.all(patchPaths.map(async patchPath => visitPatchPath({
onAbsolute: async () => {
return await xfs.readFilePromise(patchPath, `utf8`);
},

onRelative: async () => {
if (effectiveParentFetch === null)
throw new Error(`Assertion failed: The parent locator should have been fetched`);

return await effectiveParentFetch.packageFs.readFilePromise(ppath.join(effectiveParentFetch.prefixPath, patchPath), `utf8`);
},

onBuiltin: async name => {
return await opts.project.configuration.firstHook((hooks: PatchHooks) => {
return hooks.getBuiltinPatch;
}, opts.project, name);
},
}, patchPath)));
return await Promise.all(patchPaths.map(async patchPath => {
const flags = extractPatchFlags(patchPath);

const source = await visitPatchPath({
onAbsolute: async () => {
return await xfs.readFilePromise(patchPath, `utf8`);
},

onRelative: async () => {
if (effectiveParentFetch === null)
throw new Error(`Assertion failed: The parent locator should have been fetched`);

return await effectiveParentFetch.packageFs.readFilePromise(ppath.join(effectiveParentFetch.prefixPath, patchPath), `utf8`);
},

onBuiltin: async name => {
return await opts.project.configuration.firstHook((hooks: PatchHooks) => {
return hooks.getBuiltinPatch;
}, opts.project, name);
},
}, patchPath);

return {...flags, source};
}));
});

// Normalizes the line endings to prevent mismatches when cloning a
// repository on Windows systems (the default settings for Git are to
// convert newlines back and forth, which would mess with the checksum)
return patchFiles.map(definition => {
if (typeof definition === `string`) {
return definition.replace(/\r\n?/g, `\n`);
} else {
return definition;
}
});
for (const spec of patchFiles)
if (typeof spec.source === `string`)
spec.source = spec.source.replace(/\r\n?/g, `\n`);

return patchFiles;
}

export async function extractPackageToDisk(locator: Locator, {cache, project}: {cache: Cache, project: Project}) {
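
For reference, a small usage sketch of the new extractPatchFlags helper as it might be called from inside plugin-patch (the example paths are hypothetical): the ~ prefix only marks a patch as optional and is stripped before the path itself is interpreted by visitPatchPath.

import {PortablePath}      from '@yarnpkg/fslib';

import {extractPatchFlags} from './patchUtils';

export const required = extractPatchFlags(`./fixes/left-pad.patch` as PortablePath);
// -> {optional: false}

export const optional = extractPatchFlags(`~builtin<compat/fsevents>` as PortablePath);
// -> {optional: true}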
8 changes: 6 additions & 2 deletions packages/yarnpkg-core/sources/Report.ts
@@ -93,21 +93,25 @@ export abstract class Report {
};
}

reportInfoOnce(name: MessageName, text: string, opts?: {key?: any}) {
reportInfoOnce(name: MessageName, text: string, opts?: {key?: any, reportExtra?: (report: Report) => void}) {
const key = opts && opts.key ? opts.key : text;

if (!this.reportedInfos.has(key)) {
this.reportedInfos.add(key);
this.reportInfo(name, text);

opts?.reportExtra?.(this);
}
}

reportWarningOnce(name: MessageName, text: string, opts?: {key?: any}) {
reportWarningOnce(name: MessageName, text: string, opts?: {key?: any, reportExtra?: (report: Report) => void}) {
const key = opts && opts.key ? opts.key : text;

if (!this.reportedWarnings.has(key)) {
this.reportedWarnings.add(key);
this.reportWarning(name, text);

opts?.reportExtra?.(this);
}
}

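
The new reportExtra option gives callers such as PatchFetcher a way to attach structured output (here, the failed hunk) to a deduplicated info or warning line. A hedged usage sketch, where the message text and report instance are placeholders:

import {MessageName, Report} from '@yarnpkg/core';

declare const report: Report;

report.reportWarningOnce(MessageName.PATCH_HUNK_FAILED, `example: patch could not be applied`, {
  reportExtra: subReport => {
    // Only runs the first time this particular warning is emitted
    subReport.reportInfo(MessageName.UNNAMED, `set enableInlineHunks for details`);
  },
});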