diff --git a/.circleci/config.yml b/.circleci/config.yml index ef942c1f..9d0f8ea2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -10,6 +10,7 @@ workflows: jobs: - node/test: name: test-<< matrix.executor >>-<< matrix.node-version >> + override-ci-command: yarn install --frozen-lockfile --ignore-engines pre-steps: - when: condition: diff --git a/.gitattributes b/.gitattributes index b6fcc928..57140a26 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1 +1,2 @@ test/input/**/*.txt text eol=lf +* text=auto eol=lf diff --git a/.gitignore b/.gitignore index 91671227..2001b2ee 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,4 @@ .node-version npm-debug.log .idea +lib diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 00000000..e8680947 --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,15 @@ +{ + "trailingComma": "all", + "tabWidth": 2, + "singleQuote": true, + "printWidth": 100, + "parser": "typescript", + "overrides": [ + { + "files": ["*.json", "*.jsonc", "*.json5"], + "options": { + "parser": "json" + } + } + ] +} \ No newline at end of file diff --git a/lib/asar.js b/lib/asar.js deleted file mode 100644 index a0c45636..00000000 --- a/lib/asar.js +++ /dev/null @@ -1,229 +0,0 @@ -'use strict' - -const fs = require('./wrapped-fs') -const path = require('path') -const minimatch = require('minimatch') - -const Filesystem = require('./filesystem') -const disk = require('./disk') -const crawlFilesystem = require('./crawlfs') - -/** - * Whether a directory should be excluded from packing due to the `--unpack-dir" option. - * - * @param {string} dirPath - directory path to check - * @param {string} pattern - literal prefix [for backward compatibility] or glob pattern - * @param {array} unpackDirs - Array of directory paths previously marked as unpacked - */ -function isUnpackedDir (dirPath, pattern, unpackDirs) { - if (dirPath.startsWith(pattern) || minimatch(dirPath, pattern)) { - if (!unpackDirs.includes(dirPath)) { - unpackDirs.push(dirPath) - } - return true - } else { - return unpackDirs.some(unpackDir => dirPath.startsWith(unpackDir)) - } -} - -module.exports.createPackage = async function (src, dest) { - return module.exports.createPackageWithOptions(src, dest, {}) -} - -module.exports.createPackageWithOptions = async function (src, dest, options) { - const globOptions = options.globOptions ? options.globOptions : {} - globOptions.dot = options.dot === undefined ? true : options.dot - - const pattern = src + (options.pattern ? options.pattern : '/**/*') - - const [filenames, metadata] = await crawlFilesystem(pattern, globOptions) - return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options) -} - -/** - * Create an ASAR archive from a list of filenames. - * - * @param {string} src: Base path. All files are relative to this. - * @param {string} dest: Archive filename (& path). - * @param {array} filenames: List of filenames relative to src. - * @param {object} metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional) - * @param {object} options: Options passed to `createPackageWithOptions`. 
-*/ -module.exports.createPackageFromFiles = async function (src, dest, filenames, metadata, options) { - if (typeof metadata === 'undefined' || metadata === null) { metadata = {} } - if (typeof options === 'undefined' || options === null) { options = {} } - - src = path.normalize(src) - dest = path.normalize(dest) - filenames = filenames.map(function (filename) { return path.normalize(filename) }) - - const filesystem = new Filesystem(src) - const files = [] - const unpackDirs = [] - - let filenamesSorted = [] - if (options.ordering) { - const orderingFiles = (await fs.readFile(options.ordering)).toString().split('\n').map(line => { - if (line.includes(':')) { line = line.split(':').pop() } - line = line.trim() - if (line.startsWith('/')) { line = line.slice(1) } - return line - }) - - const ordering = [] - for (const file of orderingFiles) { - const pathComponents = file.split(path.sep) - let str = src - for (const pathComponent of pathComponents) { - str = path.join(str, pathComponent) - ordering.push(str) - } - } - - let missing = 0 - const total = filenames.length - - for (const file of ordering) { - if (!filenamesSorted.includes(file) && filenames.includes(file)) { - filenamesSorted.push(file) - } - } - - for (const file of filenames) { - if (!filenamesSorted.includes(file)) { - filenamesSorted.push(file) - missing += 1 - } - } - - console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`) - } else { - filenamesSorted = filenames - } - - const handleFile = async function (filename) { - if (!metadata[filename]) { - metadata[filename] = await crawlFilesystem.determineFileType(filename) - } - const file = metadata[filename] - - let shouldUnpack - switch (file.type) { - case 'directory': - if (options.unpackDir) { - shouldUnpack = isUnpackedDir(path.relative(src, filename), options.unpackDir, unpackDirs) - } else { - shouldUnpack = false - } - filesystem.insertDirectory(filename, shouldUnpack) - break - case 'file': - shouldUnpack = false - if (options.unpack) { - shouldUnpack = minimatch(filename, options.unpack, { matchBase: true }) - } - if (!shouldUnpack && options.unpackDir) { - const dirName = path.relative(src, path.dirname(filename)) - shouldUnpack = isUnpackedDir(dirName, options.unpackDir, unpackDirs) - } - files.push({ filename: filename, unpack: shouldUnpack }) - return filesystem.insertFile(filename, shouldUnpack, file, options) - case 'link': - filesystem.insertLink(filename) - break - } - return Promise.resolve() - } - - const insertsDone = async function () { - await fs.mkdirp(path.dirname(dest)) - return disk.writeFilesystem(dest, filesystem, files, metadata) - } - - const names = filenamesSorted.slice() - - const next = async function (name) { - if (!name) { return insertsDone() } - - await handleFile(name) - return next(names.shift()) - } - - return next(names.shift()) -} - -module.exports.statFile = function (archive, filename, followLinks) { - const filesystem = disk.readFilesystemSync(archive) - return filesystem.getFile(filename, followLinks) -} - -module.exports.getRawHeader = function (archive) { - return disk.readArchiveHeaderSync(archive) -} - -module.exports.listPackage = function (archive, options) { - return disk.readFilesystemSync(archive).listFiles(options) -} - -module.exports.extractFile = function (archive, filename) { - const filesystem = disk.readFilesystemSync(archive) - return disk.readFileSync(filesystem, filename, filesystem.getFile(filename)) -} - -module.exports.extractAll = function (archive, dest) { - const filesystem = 
disk.readFilesystemSync(archive) - const filenames = filesystem.listFiles() - - // under windows just extract links as regular files - const followLinks = process.platform === 'win32' - - // create destination directory - fs.mkdirpSync(dest) - - const extractionErrors = [] - for (const fullPath of filenames) { - // Remove leading slash - const filename = fullPath.substr(1) - const destFilename = path.join(dest, filename) - const file = filesystem.getFile(filename, followLinks) - if (file.files) { - // it's a directory, create it and continue with the next entry - fs.mkdirpSync(destFilename) - } else if (file.link) { - // it's a symlink, create a symlink - const linkSrcPath = path.dirname(path.join(dest, file.link)) - const linkDestPath = path.dirname(destFilename) - const relativePath = path.relative(linkDestPath, linkSrcPath) - // try to delete output file, because we can't overwrite a link - try { - fs.unlinkSync(destFilename) - } catch {} - const linkTo = path.join(relativePath, path.basename(file.link)) - fs.symlinkSync(linkTo, destFilename) - } else { - // it's a file, try to extract it - try { - const content = disk.readFileSync(filesystem, filename, file) - fs.writeFileSync(destFilename, content) - if (file.executable) { - fs.chmodSync(destFilename, '755') - } - } catch (e) { - extractionErrors.push(e) - } - } - } - if (extractionErrors.length) { - throw new Error( - 'Unable to extract some files:\n\n' + - extractionErrors.map(error => error.stack).join('\n\n')) - } -} - -module.exports.uncache = function (archive) { - return disk.uncacheFilesystem(archive) -} - -module.exports.uncacheAll = function () { - disk.uncacheAll() -} diff --git a/lib/crawlfs.js b/lib/crawlfs.js deleted file mode 100644 index a26c3eb8..00000000 --- a/lib/crawlfs.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict' - -const { promisify } = require('util') - -const fs = require('./wrapped-fs') -const glob = promisify(require('glob')) - -async function determineFileType (filename) { - const stat = await fs.lstat(filename) - if (stat.isFile()) { - return { type: 'file', stat } - } else if (stat.isDirectory()) { - return { type: 'directory', stat } - } else if (stat.isSymbolicLink()) { - return { type: 'link', stat } - } -} - -module.exports = async function (dir, options) { - const metadata = {} - const crawled = await glob(dir, options) - const results = await Promise.all(crawled.map(async filename => [filename, await determineFileType(filename)])) - const links = [] - const filenames = results.map(([filename, type]) => { - if (type) { - metadata[filename] = type - if (type.type === 'link') links.push(filename) - } - return filename - }).filter((filename) => { - // Newer glob can return files inside symlinked directories, to avoid - // those appearing in archives we need to manually exclude theme here - const exactLinkIndex = links.findIndex(link => filename === link) - return links.every((link, index) => { - if (index === exactLinkIndex) return true - return !filename.startsWith(link) - }) - }) - return [filenames, metadata] -} -module.exports.determineFileType = determineFileType diff --git a/lib/disk.js b/lib/disk.js deleted file mode 100644 index ad06182d..00000000 --- a/lib/disk.js +++ /dev/null @@ -1,123 +0,0 @@ -'use strict' - -const fs = require('./wrapped-fs') -const path = require('path') -const pickle = require('./pickle') - -const Filesystem = require('./filesystem') -let filesystemCache = {} - -async function copyFile (dest, src, filename) { - const srcFile = path.join(src, filename) - const targetFile 
= path.join(dest, filename) - - const [content, stats] = await Promise.all([fs.readFile(srcFile), fs.stat(srcFile), fs.mkdirp(path.dirname(targetFile))]) - return fs.writeFile(targetFile, content, { mode: stats.mode }) -} - -async function streamTransformedFile (originalFilename, outStream, transformed) { - return new Promise((resolve, reject) => { - const stream = fs.createReadStream(transformed ? transformed.path : originalFilename) - stream.pipe(outStream, { end: false }) - stream.on('error', reject) - stream.on('end', () => resolve()) - }) -} - -const writeFileListToStream = async function (dest, filesystem, out, list, metadata) { - for (const file of list) { - if (file.unpack) { // the file should not be packed into archive - const filename = path.relative(filesystem.src, file.filename) - await copyFile(`${dest}.unpacked`, filesystem.src, filename) - } else { - await streamTransformedFile(file.filename, out, metadata[file.filename].transformed) - } - } - return out.end() -} - -module.exports.writeFilesystem = async function (dest, filesystem, files, metadata) { - const headerPickle = pickle.createEmpty() - headerPickle.writeString(JSON.stringify(filesystem.header)) - const headerBuf = headerPickle.toBuffer() - - const sizePickle = pickle.createEmpty() - sizePickle.writeUInt32(headerBuf.length) - const sizeBuf = sizePickle.toBuffer() - - const out = fs.createWriteStream(dest) - await new Promise((resolve, reject) => { - out.on('error', reject) - out.write(sizeBuf) - return out.write(headerBuf, () => resolve()) - }) - return writeFileListToStream(dest, filesystem, out, files, metadata) -} - -module.exports.readArchiveHeaderSync = function (archive) { - const fd = fs.openSync(archive, 'r') - let size - let headerBuf - try { - const sizeBuf = Buffer.alloc(8) - if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) { - throw new Error('Unable to read header size') - } - - const sizePickle = pickle.createFromBuffer(sizeBuf) - size = sizePickle.createIterator().readUInt32() - headerBuf = Buffer.alloc(size) - if (fs.readSync(fd, headerBuf, 0, size, null) !== size) { - throw new Error('Unable to read header') - } - } finally { - fs.closeSync(fd) - } - - const headerPickle = pickle.createFromBuffer(headerBuf) - const header = headerPickle.createIterator().readString() - return { headerString: header, header: JSON.parse(header), headerSize: size } -} - -module.exports.readFilesystemSync = function (archive) { - if (!filesystemCache[archive]) { - const header = this.readArchiveHeaderSync(archive) - const filesystem = new Filesystem(archive) - filesystem.header = header.header - filesystem.headerSize = header.headerSize - filesystemCache[archive] = filesystem - } - return filesystemCache[archive] -} - -module.exports.uncacheFilesystem = function (archive) { - if (filesystemCache[archive]) { - filesystemCache[archive] = undefined - return true - } - return false -} - -module.exports.uncacheAll = function () { - filesystemCache = {} -} - -module.exports.readFileSync = function (filesystem, filename, info) { - let buffer = Buffer.alloc(info.size) - if (info.size <= 0) { return buffer } - if (info.unpacked) { - // it's an unpacked file, copy it. - buffer = fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename)) - } else { - // Node throws an exception when reading 0 bytes into a 0-size buffer, - // so we short-circuit the read in this case. 
- const fd = fs.openSync(filesystem.src, 'r') - try { - const offset = 8 + filesystem.headerSize + parseInt(info.offset) - fs.readSync(fd, buffer, 0, info.size, offset) - } finally { - fs.closeSync(fd) - } - } - return buffer -} diff --git a/lib/filesystem.js b/lib/filesystem.js deleted file mode 100644 index d921c506..00000000 --- a/lib/filesystem.js +++ /dev/null @@ -1,161 +0,0 @@ -'use strict' - -const fs = require('./wrapped-fs') -const os = require('os') -const path = require('path') -const { promisify } = require('util') -const stream = require('stream') -const getFileIntegrity = require('./integrity') - -const UINT32_MAX = 2 ** 32 - 1 - -const pipeline = promisify(stream.pipeline) - -class Filesystem { - constructor (src) { - this.src = path.resolve(src) - this.header = { files: Object.create(null) } - this.offset = BigInt(0) - } - - searchNodeFromDirectory (p) { - let json = this.header - const dirs = p.split(path.sep) - for (const dir of dirs) { - if (dir !== '.') { - if (!json.files[dir]) { - json.files[dir] = { files: Object.create(null) } - } - json = json.files[dir] - } - } - return json - } - - searchNodeFromPath (p) { - p = path.relative(this.src, p) - if (!p) { return this.header } - const name = path.basename(p) - const node = this.searchNodeFromDirectory(path.dirname(p)) - if (node.files == null) { - node.files = Object.create(null) - } - if (node.files[name] == null) { - node.files[name] = Object.create(null) - } - return node.files[name] - } - - insertDirectory (p, shouldUnpack) { - const node = this.searchNodeFromPath(p) - if (shouldUnpack) { - node.unpacked = shouldUnpack - } - node.files = node.files || Object.create(null) - return node.files - } - - async insertFile (p, shouldUnpack, file, options) { - const dirNode = this.searchNodeFromPath(path.dirname(p)) - const node = this.searchNodeFromPath(p) - if (shouldUnpack || dirNode.unpacked) { - node.size = file.stat.size - node.unpacked = true - node.integrity = await getFileIntegrity(p) - return Promise.resolve() - } - - let size - - const transformed = options.transform && options.transform(p) - if (transformed) { - const tmpdir = await fs.mkdtemp(path.join(os.tmpdir(), 'asar-')) - const tmpfile = path.join(tmpdir, path.basename(p)) - const out = fs.createWriteStream(tmpfile) - const readStream = fs.createReadStream(p) - - await pipeline(readStream, transformed, out) - file.transformed = { - path: tmpfile, - stat: await fs.lstat(tmpfile) - } - size = file.transformed.stat.size - } else { - size = file.stat.size - } - - // JavaScript cannot precisely present integers >= UINT32_MAX. 
- if (size > UINT32_MAX) { - throw new Error(`${p}: file size can not be larger than 4.2GB`) - } - - node.size = size - node.offset = this.offset.toString() - node.integrity = await getFileIntegrity(p) - if (process.platform !== 'win32' && (file.stat.mode & 0o100)) { - node.executable = true - } - this.offset += BigInt(size) - } - - insertLink (p) { - const symlink = fs.readlinkSync(p) - // /var => /private/var - const parentPath = fs.realpathSync(path.dirname(p)) - const link = path.relative(fs.realpathSync(this.src), path.join(parentPath, symlink)) - if (link.startsWith('..')) { - throw new Error(`${p}: file "${link}" links out of the package`) - } - const node = this.searchNodeFromPath(p) - node.link = link - return link - } - - listFiles (options) { - const files = [] - - const fillFilesFromMetadata = function (basePath, metadata) { - if (!metadata.files) { - return - } - - for (const [childPath, childMetadata] of Object.entries(metadata.files)) { - const fullPath = path.join(basePath, childPath) - const packState = childMetadata.unpacked ? 'unpack' : 'pack ' - files.push((options && options.isPack) ? `${packState} : ${fullPath}` : fullPath) - fillFilesFromMetadata(fullPath, childMetadata) - } - } - - fillFilesFromMetadata('/', this.header) - return files - } - - getNode (p) { - const node = this.searchNodeFromDirectory(path.dirname(p)) - const name = path.basename(p) - if (name) { - return node.files[name] - } else { - return node - } - } - - getFile (p, followLinks) { - followLinks = typeof followLinks === 'undefined' ? true : followLinks - const info = this.getNode(p) - - if (!info) { - throw new Error(`"${p}" was not found in this archive`) - } - - // if followLinks is false we don't resolve symlinks - if (info.link && followLinks) { - return this.getFile(info.link) - } else { - return info - } - } -} - -module.exports = Filesystem diff --git a/lib/index.d.ts b/lib/index.d.ts deleted file mode 100644 index b79528b7..00000000 --- a/lib/index.d.ts +++ /dev/null @@ -1,250 +0,0 @@ -import { Stats } from "fs"; - -interface IMinimatchOptions { - /** - * Dump a ton of stuff to stderr. - * - * @default false - */ - debug?: boolean | undefined; - - /** - * Do not expand `{a,b}` and `{1..3}` brace sets. - * - * @default false - */ - nobrace?: boolean | undefined; - - /** - * Disable `**` matching against multiple folder names. - * - * @default false - */ - noglobstar?: boolean | undefined; - - /** - * Allow patterns to match filenames starting with a period, - * even if the pattern does not explicitly have a period in that spot. - * - * Note that by default, `'a/**' + '/b'` will **not** match `a/.d/b`, unless `dot` is set. - * - * @default false - */ - dot?: boolean | undefined; - - /** - * Disable "extglob" style patterns like `+(a|b)`. - * - * @default false - */ - noext?: boolean | undefined; - - /** - * Perform a case-insensitive match. - * - * @default false - */ - nocase?: boolean | undefined; - - /** - * When a match is not found by `minimatch.match`, - * return a list containing the pattern itself if this option is set. - * Otherwise, an empty list is returned if there are no matches. - * - * @default false - */ - nonull?: boolean | undefined; - - /** - * If set, then patterns without slashes will be matched - * against the basename of the path if it contains slashes. For example, - * `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. 
- * - * @default false - */ - matchBase?: boolean | undefined; - - /** - * Suppress the behavior of treating `#` at the start of a pattern as a comment. - * - * @default false - */ - nocomment?: boolean | undefined; - - /** - * Suppress the behavior of treating a leading `!` character as negation. - * - * @default false - */ - nonegate?: boolean | undefined; - - /** - * Returns from negate expressions the same as if they were not negated. - * (Ie, true on a hit, false on a miss.) - * - * @default false - */ - flipNegate?: boolean | undefined; - - /** - * Compare a partial path to a pattern. As long as the parts of the path that - * are present are not contradicted by the pattern, it will be treated as a - * match. This is useful in applications where you're walking through a - * folder structure, and don't yet have the full path, but want to ensure that - * you do not walk down paths that can never be a match. - * - * @default false - * - * @example - * import minimatch = require("minimatch"); - * - * minimatch('/a/b', '/a/*' + '/c/d', { partial: true }) // true, might be /a/b/c/d - * minimatch('/a/b', '/**' + '/d', { partial: true }) // true, might be /a/b/.../d - * minimatch('/x/y/z', '/a/**' + '/z', { partial: true }) // false, because x !== a - */ - partial?: boolean; - - /** - * Use `\\` as a path separator _only_, and _never_ as an escape - * character. If set, all `\\` characters are replaced with `/` in - * the pattern. Note that this makes it **impossible** to match - * against paths containing literal glob pattern characters, but - * allows matching with patterns constructed using `path.join()` and - * `path.resolve()` on Windows platforms, mimicking the (buggy!) - * behavior of earlier versions on Windows. Please use with - * caution, and be mindful of the caveat about Windows paths - * - * For legacy reasons, this is also set if - * `options.allowWindowsEscape` is set to the exact value `false`. 
- *
- * @default false
- */
-  windowsPathsNoEscape?: boolean;
-}
-
-import fs = require("fs");
-interface IGlobOptions extends IMinimatchOptions {
-  cwd?: string | undefined;
-  root?: string | undefined;
-  dot?: boolean | undefined;
-  nomount?: boolean | undefined;
-  mark?: boolean | undefined;
-  nosort?: boolean | undefined;
-  stat?: boolean | undefined;
-  silent?: boolean | undefined;
-  strict?: boolean | undefined;
-  cache?:
-    | { [path: string]: boolean | "DIR" | "FILE" | ReadonlyArray<string> }
-    | undefined;
-  statCache?:
-    | { [path: string]: false | { isDirectory(): boolean } | undefined }
-    | undefined;
-  symlinks?: { [path: string]: boolean | undefined } | undefined;
-  realpathCache?: { [path: string]: string } | undefined;
-  sync?: boolean | undefined;
-  nounique?: boolean | undefined;
-  nonull?: boolean | undefined;
-  debug?: boolean | undefined;
-  nobrace?: boolean | undefined;
-  noglobstar?: boolean | undefined;
-  noext?: boolean | undefined;
-  nocase?: boolean | undefined;
-  matchBase?: any;
-  nodir?: boolean | undefined;
-  ignore?: string | ReadonlyArray<string> | undefined;
-  follow?: boolean | undefined;
-  realpath?: boolean | undefined;
-  nonegate?: boolean | undefined;
-  nocomment?: boolean | undefined;
-  absolute?: boolean | undefined;
-  allowWindowsEscape?: boolean | undefined;
-  fs?: typeof fs;
-}
-
-export type CreateOptions = {
-  dot?: boolean;
-  globOptions?: IGlobOptions;
-  ordering?: string;
-  pattern?: string;
-  transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
-  unpack?: string;
-  unpackDir?: string;
-};
-
-export type ListOptions = {
-  isPack: boolean;
-};
-
-export type EntryMetadata = {
-  unpacked: boolean;
-};
-
-export type DirectoryMetadata = EntryMetadata & {
-  files: { [property: string]: EntryMetadata };
-};
-
-export type FileMetadata = EntryMetadata & {
-  executable?: true;
-  offset?: number;
-  size?: number;
-};
-
-export type LinkMetadata = {
-  link: string;
-};
-
-export type Metadata = DirectoryMetadata | FileMetadata | LinkMetadata;
-
-export type InputMetadataType = 'directory' | 'file' | 'link';
-
-export type InputMetadata = {
-  [property: string]: {
-    type: InputMetadataType;
-    stat: Stats;
-  }
-};
-
-export type DirectoryRecord = {
-  files: Record<string, DirectoryRecord | FileRecord>;
-};
-
-export type FileRecord = {
-  offset: string;
-  size: number;
-  executable?: boolean;
-  integrity: {
-    hash: string;
-    algorithm: 'SHA256';
-    blocks: string[];
-    blockSize: number;
-  };
-}
-
-export type ArchiveHeader = {
-  // The JSON parsed header string
-  header: DirectoryRecord;
-  headerString: string;
-  headerSize: number;
-}
-
-export function createPackage(src: string, dest: string): Promise<void>;
-export function createPackageWithOptions(
-  src: string,
-  dest: string,
-  options: CreateOptions
-): Promise<void>;
-export function createPackageFromFiles(
-  src: string,
-  dest: string,
-  filenames: string[],
-  metadata?: InputMetadata,
-  options?: CreateOptions
-): Promise<void>;
-
-export function statFile(archive: string, filename: string, followLinks?: boolean): Metadata;
-export function getRawHeader(archive: string): ArchiveHeader;
-export function listPackage(archive: string, options?: ListOptions): string[];
-export function extractFile(archive: string, filename: string): Buffer;
-export function extractAll(archive: string, dest: string): void;
-export function uncache(archive: string): boolean;
-export function uncacheAll(): void;
diff --git a/lib/integrity.js b/lib/integrity.js
deleted file mode 100644
index 6fabee4f..00000000
--- a/lib/integrity.js
+++ /dev/null
@@ -1,62 +0,0 @@
-const crypto
= require('crypto') -const fs = require('fs') -const stream = require('stream') -const { promisify } = require('util') - -const ALGORITHM = 'SHA256' -// 4MB default block size -const BLOCK_SIZE = 4 * 1024 * 1024 - -const pipeline = promisify(stream.pipeline) - -function hashBlock (block) { - return crypto.createHash(ALGORITHM).update(block).digest('hex') -} - -async function getFileIntegrity (path) { - const fileHash = crypto.createHash(ALGORITHM) - - const blocks = [] - let currentBlockSize = 0 - let currentBlock = [] - - await pipeline( - fs.createReadStream(path), - new stream.PassThrough({ - decodeStrings: false, - transform (_chunk, encoding, callback) { - fileHash.update(_chunk) - - function handleChunk (chunk) { - const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength) - currentBlockSize += diffToSlice - currentBlock.push(chunk.slice(0, diffToSlice)) - if (currentBlockSize === BLOCK_SIZE) { - blocks.push(hashBlock(Buffer.concat(currentBlock))) - currentBlock = [] - currentBlockSize = 0 - } - if (diffToSlice < chunk.byteLength) { - handleChunk(chunk.slice(diffToSlice)) - } - } - handleChunk(_chunk) - callback() - }, - flush (callback) { - blocks.push(hashBlock(Buffer.concat(currentBlock))) - currentBlock = [] - callback() - } - }) - ) - - return { - algorithm: ALGORITHM, - hash: fileHash.digest('hex'), - blockSize: BLOCK_SIZE, - blocks: blocks - } -} - -module.exports = getFileIntegrity diff --git a/lib/pickle.js b/lib/pickle.js deleted file mode 100644 index 69436557..00000000 --- a/lib/pickle.js +++ /dev/null @@ -1,230 +0,0 @@ -// sizeof(T). -const SIZE_INT32 = 4 -const SIZE_UINT32 = 4 -const SIZE_INT64 = 8 -const SIZE_UINT64 = 8 -const SIZE_FLOAT = 4 -const SIZE_DOUBLE = 8 - -// The allocation granularity of the payload. -const PAYLOAD_UNIT = 64 - -// Largest JS number. -const CAPACITY_READ_ONLY = 9007199254740992 - -// Aligns 'i' by rounding it up to the next multiple of 'alignment'. -const alignInt = function (i, alignment) { - return i + (alignment - (i % alignment)) % alignment -} - -// PickleIterator reads data from a Pickle. The Pickle object must remain valid -// while the PickleIterator object is in use. 
-const PickleIterator = (function () { - function PickleIterator (pickle) { - this.payload = pickle.header - this.payloadOffset = pickle.headerSize - this.readIndex = 0 - this.endIndex = pickle.getPayloadSize() - } - - PickleIterator.prototype.readBool = function () { - return this.readInt() !== 0 - } - - PickleIterator.prototype.readInt = function () { - return this.readBytes(SIZE_INT32, Buffer.prototype.readInt32LE) - } - - PickleIterator.prototype.readUInt32 = function () { - return this.readBytes(SIZE_UINT32, Buffer.prototype.readUInt32LE) - } - - PickleIterator.prototype.readInt64 = function () { - return this.readBytes(SIZE_INT64, Buffer.prototype.readInt64LE) - } - - PickleIterator.prototype.readUInt64 = function () { - return this.readBytes(SIZE_UINT64, Buffer.prototype.readUInt64LE) - } - - PickleIterator.prototype.readFloat = function () { - return this.readBytes(SIZE_FLOAT, Buffer.prototype.readFloatLE) - } - - PickleIterator.prototype.readDouble = function () { - return this.readBytes(SIZE_DOUBLE, Buffer.prototype.readDoubleLE) - } - - PickleIterator.prototype.readString = function () { - return this.readBytes(this.readInt()).toString() - } - - PickleIterator.prototype.readBytes = function (length, method) { - const readPayloadOffset = this.getReadPayloadOffsetAndAdvance(length) - if (method != null) { - return method.call(this.payload, readPayloadOffset, length) - } else { - return this.payload.slice(readPayloadOffset, readPayloadOffset + length) - } - } - - PickleIterator.prototype.getReadPayloadOffsetAndAdvance = function (length) { - if (length > this.endIndex - this.readIndex) { - this.readIndex = this.endIndex - throw new Error('Failed to read data with length of ' + length) - } - const readPayloadOffset = this.payloadOffset + this.readIndex - this.advance(length) - return readPayloadOffset - } - - PickleIterator.prototype.advance = function (size) { - const alignedSize = alignInt(size, SIZE_UINT32) - if (this.endIndex - this.readIndex < alignedSize) { - this.readIndex = this.endIndex - } else { - this.readIndex += alignedSize - } - } - - return PickleIterator -})() - -// This class provides facilities for basic binary value packing and unpacking. -// -// The Pickle class supports appending primitive values (ints, strings, etc.) -// to a pickle instance. The Pickle instance grows its internal memory buffer -// dynamically to hold the sequence of primitive values. The internal memory -// buffer is exposed as the "data" of the Pickle. This "data" can be passed -// to a Pickle object to initialize it for reading. -// -// When reading from a Pickle object, it is important for the consumer to know -// what value types to read and in what order to read them as the Pickle does -// not keep track of the type of data written to it. -// -// The Pickle's data has a header which contains the size of the Pickle's -// payload. It can optionally support additional space in the header. That -// space is controlled by the header_size parameter passed to the Pickle -// constructor. 
-const Pickle = (function () { - function Pickle (buffer) { - if (buffer) { - this.initFromBuffer(buffer) - } else { - this.initEmpty() - } - } - - Pickle.prototype.initEmpty = function () { - this.header = Buffer.alloc(0) - this.headerSize = SIZE_UINT32 - this.capacityAfterHeader = 0 - this.writeOffset = 0 - this.resize(PAYLOAD_UNIT) - this.setPayloadSize(0) - } - - Pickle.prototype.initFromBuffer = function (buffer) { - this.header = buffer - this.headerSize = buffer.length - this.getPayloadSize() - this.capacityAfterHeader = CAPACITY_READ_ONLY - this.writeOffset = 0 - if (this.headerSize > buffer.length) { - this.headerSize = 0 - } - if (this.headerSize !== alignInt(this.headerSize, SIZE_UINT32)) { - this.headerSize = 0 - } - if (this.headerSize === 0) { - this.header = Buffer.alloc(0) - } - } - - Pickle.prototype.createIterator = function () { - return new PickleIterator(this) - } - - Pickle.prototype.toBuffer = function () { - return this.header.slice(0, this.headerSize + this.getPayloadSize()) - } - - Pickle.prototype.writeBool = function (value) { - return this.writeInt(value ? 1 : 0) - } - - Pickle.prototype.writeInt = function (value) { - return this.writeBytes(value, SIZE_INT32, Buffer.prototype.writeInt32LE) - } - - Pickle.prototype.writeUInt32 = function (value) { - return this.writeBytes(value, SIZE_UINT32, Buffer.prototype.writeUInt32LE) - } - - Pickle.prototype.writeInt64 = function (value) { - return this.writeBytes(value, SIZE_INT64, Buffer.prototype.writeInt64LE) - } - - Pickle.prototype.writeUInt64 = function (value) { - return this.writeBytes(value, SIZE_UINT64, Buffer.prototype.writeUInt64LE) - } - - Pickle.prototype.writeFloat = function (value) { - return this.writeBytes(value, SIZE_FLOAT, Buffer.prototype.writeFloatLE) - } - - Pickle.prototype.writeDouble = function (value) { - return this.writeBytes(value, SIZE_DOUBLE, Buffer.prototype.writeDoubleLE) - } - - Pickle.prototype.writeString = function (value) { - const length = Buffer.byteLength(value, 'utf8') - if (!this.writeInt(length)) { - return false - } - return this.writeBytes(value, length) - } - - Pickle.prototype.setPayloadSize = function (payloadSize) { - return this.header.writeUInt32LE(payloadSize, 0) - } - - Pickle.prototype.getPayloadSize = function () { - return this.header.readUInt32LE(0) - } - - Pickle.prototype.writeBytes = function (data, length, method) { - const dataLength = alignInt(length, SIZE_UINT32) - const newSize = this.writeOffset + dataLength - if (newSize > this.capacityAfterHeader) { - this.resize(Math.max(this.capacityAfterHeader * 2, newSize)) - } - if (method != null) { - method.call(this.header, data, this.headerSize + this.writeOffset) - } else { - this.header.write(data, this.headerSize + this.writeOffset, length) - } - const endOffset = this.headerSize + this.writeOffset + length - this.header.fill(0, endOffset, endOffset + dataLength - length) - this.setPayloadSize(newSize) - this.writeOffset = newSize - return true - } - - Pickle.prototype.resize = function (newCapacity) { - newCapacity = alignInt(newCapacity, PAYLOAD_UNIT) - this.header = Buffer.concat([this.header, Buffer.alloc(newCapacity)]) - this.capacityAfterHeader = newCapacity - } - - return Pickle -})() - -module.exports = { - createEmpty: function () { - return new Pickle() - }, - - createFromBuffer: function (buffer) { - return new Pickle(buffer) - } -} diff --git a/lib/wrapped-fs.js b/lib/wrapped-fs.js deleted file mode 100644 index 24f59d07..00000000 --- a/lib/wrapped-fs.js +++ /dev/null @@ -1,26 +0,0 @@ -'use 
strict'
-
-const fs = process.versions.electron ? require('original-fs') : require('fs')
-
-const promisifiedMethods = [
-  'lstat',
-  'mkdtemp',
-  'readFile',
-  'stat',
-  'writeFile'
-]
-
-const promisified = {}
-
-for (const method of Object.keys(fs)) {
-  if (promisifiedMethods.includes(method)) {
-    promisified[method] = fs.promises[method]
-  } else {
-    promisified[method] = fs[method]
-  }
-}
-// To make it more like fs-extra
-promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true })
-promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true })
-
-module.exports = promisified
diff --git a/package.json b/package.json
index b2460d88..e75e1a7d 100644
--- a/package.json
+++ b/package.json
@@ -3,14 +3,13 @@
   "description": "Creating Electron app packages",
   "version": "0.0.0-development",
   "main": "./lib/asar.js",
-  "types": "./lib/index.d.ts",
+  "types": "./lib/asar.d.ts",
   "bin": {
     "asar": "./bin/asar.js"
   },
   "files": [
     "bin",
-    "lib",
-    "lib/index.d.ts"
+    "lib"
   ],
   "engines": {
     "node": ">=10.12.0"
   },
@@ -25,36 +24,31 @@
     "url": "https://github.com/electron/asar/issues"
   },
   "scripts": {
+    "build": "tsc",
     "mocha": "xvfb-maybe electron-mocha --reporter spec && mocha --reporter spec",
-    "test": "npm run lint && npm run mocha",
-    "lint": "tsd && standard",
-    "standard": "standard",
-    "tsd": "tsd"
-  },
-  "standard": {
-    "env": {
-      "mocha": true
-    },
-    "globals": [
-      "BigInt"
-    ]
-  },
-  "tsd": {
-    "directory": "test"
+    "test": "yarn lint && yarn mocha",
+    "lint": "yarn prettier:check",
+    "prettier": "prettier \"src/**/*.ts\" \"test/**/*.ts\" \"test/**/*.js\"",
+    "prettier:check": "yarn prettier --check",
+    "prettier:write": "yarn prettier --write",
+    "prepare": "tsc"
   },
   "dependencies": {
+    "@types/glob": "^7.1.0",
     "commander": "^5.0.0",
     "glob": "^7.1.6",
     "minimatch": "^3.0.4"
   },
   "devDependencies": {
+    "@types/minimatch": "^3.0.5",
+    "@types/node": "^12.0.0",
     "electron": "^22.0.0",
    "electron-mocha": "^11.0.2",
    "lodash": "^4.17.15",
    "mocha": "^10.1.0",
+    "prettier": "^3.3.3",
    "rimraf": "^3.0.2",
-    "standard": "^14.3.3",
-    "tsd": "^0.25.0",
+    "typescript": "^5.5.4",
    "xvfb-maybe": "^0.2.1"
  }
}
diff --git a/snapcraft.yaml b/snapcraft.yaml
deleted file mode 100644
index b3bbd06a..00000000
--- a/snapcraft.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
-name: asar
-version: git
-summary: Manipulate asar archive files
-description: |
-  Asar is a simple extensive archive format, it works like tar that
-  concatenates all files together without compression, while having
-  random access support.
-
-confinement: classic
-
-parts:
-  asar:
-    plugin: nodejs
-    source: .
-
-apps:
-  asar:
-    command: lib/node_modules/asar/bin/asar.js
diff --git a/src/asar.ts b/src/asar.ts
new file mode 100644
index 00000000..f692385c
--- /dev/null
+++ b/src/asar.ts
@@ -0,0 +1,269 @@
+import * as path from 'path';
+import * as minimatch from 'minimatch';
+
+import fs from './wrapped-fs';
+import { Filesystem, FilesystemEntry } from './filesystem';
+import * as disk from './disk';
+import { CrawledFileType, crawl as crawlFilesystem, determineFileType } from './crawlfs';
+import { IOptions } from 'glob';
+import { Stats } from 'fs';
+
+/**
+ * Whether a directory should be excluded from packing due to the `--unpack-dir` option.
+ * + * @param {string} dirPath - directory path to check + * @param {string} pattern - literal prefix [for backward compatibility] or glob pattern + * @param {array} unpackDirs - Array of directory paths previously marked as unpacked + */ +function isUnpackedDir(dirPath: string, pattern: string, unpackDirs: string[]) { + if (dirPath.startsWith(pattern) || minimatch(dirPath, pattern)) { + if (!unpackDirs.includes(dirPath)) { + unpackDirs.push(dirPath); + } + return true; + } else { + return unpackDirs.some((unpackDir) => dirPath.startsWith(unpackDir)); + } +} + +export async function createPackage(src: string, dest: string) { + return createPackageWithOptions(src, dest, {}); +} + +export type CreateOptions = { + dot?: boolean; + globOptions?: IOptions; + ordering?: string; + pattern?: string; + transform?: (filePath: string) => NodeJS.ReadWriteStream | void; + unpack?: string; + unpackDir?: string; +}; + +export async function createPackageWithOptions(src: string, dest: string, options: CreateOptions) { + const globOptions = options.globOptions ? options.globOptions : {}; + globOptions.dot = options.dot === undefined ? true : options.dot; + + const pattern = src + (options.pattern ? options.pattern : '/**/*'); + + const [filenames, metadata] = await crawlFilesystem(pattern, globOptions); + return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options); +} + +/** + * Create an ASAR archive from a list of filenames. + * + * @param {string} src: Base path. All files are relative to this. + * @param {string} dest: Archive filename (& path). + * @param {array} filenames: List of filenames relative to src. + * @param {object} metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional) + * @param {object} options: Options passed to `createPackageWithOptions`. 
+ */ +export async function createPackageFromFiles( + src: string, + dest: string, + filenames: string[], + metadata: disk.InputMetadata, + options: CreateOptions, +) { + if (typeof metadata === 'undefined' || metadata === null) { + metadata = {}; + } + if (typeof options === 'undefined' || options === null) { + options = {}; + } + + src = path.normalize(src); + dest = path.normalize(dest); + filenames = filenames.map(function (filename) { + return path.normalize(filename); + }); + + const filesystem = new Filesystem(src); + const files: { filename: string; unpack: boolean }[] = []; + const unpackDirs: string[] = []; + + let filenamesSorted: string[] = []; + if (options.ordering) { + const orderingFiles = (await fs.readFile(options.ordering)) + .toString() + .split('\n') + .map((line) => { + if (line.includes(':')) { + line = line.split(':').pop()!; + } + line = line.trim(); + if (line.startsWith('/')) { + line = line.slice(1); + } + return line; + }); + + const ordering: string[] = []; + for (const file of orderingFiles) { + const pathComponents = file.split(path.sep); + let str = src; + for (const pathComponent of pathComponents) { + str = path.join(str, pathComponent); + ordering.push(str); + } + } + + let missing = 0; + const total = filenames.length; + + for (const file of ordering) { + if (!filenamesSorted.includes(file) && filenames.includes(file)) { + filenamesSorted.push(file); + } + } + + for (const file of filenames) { + if (!filenamesSorted.includes(file)) { + filenamesSorted.push(file); + missing += 1; + } + } + + console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`); + } else { + filenamesSorted = filenames; + } + + const handleFile = async function (filename: string) { + if (!metadata[filename]) { + const fileType = await determineFileType(filename); + if (!fileType) { + throw new Error('Unknown file type for file: ' + filename); + } + metadata[filename] = fileType; + } + const file = metadata[filename]; + + let shouldUnpack; + switch (file.type) { + case 'directory': + if (options.unpackDir) { + shouldUnpack = isUnpackedDir(path.relative(src, filename), options.unpackDir, unpackDirs); + } else { + shouldUnpack = false; + } + filesystem.insertDirectory(filename, shouldUnpack); + break; + case 'file': + shouldUnpack = false; + if (options.unpack) { + shouldUnpack = minimatch(filename, options.unpack, { matchBase: true }); + } + if (!shouldUnpack && options.unpackDir) { + const dirName = path.relative(src, path.dirname(filename)); + shouldUnpack = isUnpackedDir(dirName, options.unpackDir, unpackDirs); + } + files.push({ filename: filename, unpack: shouldUnpack }); + return filesystem.insertFile(filename, shouldUnpack, file, options); + case 'link': + filesystem.insertLink(filename); + break; + } + return Promise.resolve(); + }; + + const insertsDone = async function () { + await fs.mkdirp(path.dirname(dest)); + return disk.writeFilesystem(dest, filesystem, files, metadata); + }; + + const names = filenamesSorted.slice(); + + const next = async function (name?: string) { + if (!name) { + return insertsDone(); + } + + await handleFile(name); + return next(names.shift()); + }; + + return next(names.shift()); +} + +export function statFile(archivePath: string, filename: string, followLinks: boolean): FilesystemEntry { + const filesystem = disk.readFilesystemSync(archivePath); + return filesystem.getFile(filename, followLinks); +} + +export function getRawHeader(archivePath: string) { + return disk.readArchiveHeaderSync(archivePath); +} + +export function 
listPackage(archivePath: string, options: { isPack: boolean }) {
+  return disk.readFilesystemSync(archivePath).listFiles(options);
+}
+
+export function extractFile(archivePath: string, filename: string, followLinks: boolean) {
+  const filesystem = disk.readFilesystemSync(archivePath);
+  const fileInfo = filesystem.getFile(filename, followLinks);
+  if ('link' in fileInfo || 'files' in fileInfo) {
+    throw new Error('Expected to find file at: ' + filename + ' but found a directory or link');
+  }
+  return disk.readFileSync(filesystem, filename, fileInfo);
+}
+
+export function extractAll(archivePath: string, dest: string) {
+  const filesystem = disk.readFilesystemSync(archivePath);
+  const filenames = filesystem.listFiles();
+
+  // under windows just extract links as regular files
+  const followLinks = process.platform === 'win32';
+
+  // create destination directory
+  fs.mkdirpSync(dest);
+
+  const extractionErrors: Error[] = [];
+  for (const fullPath of filenames) {
+    // Remove leading slash
+    const filename = fullPath.substr(1);
+    const destFilename = path.join(dest, filename);
+    const file = filesystem.getFile(filename, followLinks);
+    if ('files' in file) {
+      // it's a directory, create it and continue with the next entry
+      fs.mkdirpSync(destFilename);
+    } else if ('link' in file) {
+      // it's a symlink, create a symlink
+      const linkSrcPath = path.dirname(path.join(dest, file.link));
+      const linkDestPath = path.dirname(destFilename);
+      const relativePath = path.relative(linkDestPath, linkSrcPath);
+      // try to delete output file, because we can't overwrite a link
+      try {
+        fs.unlinkSync(destFilename);
+      } catch {}
+      const linkTo = path.join(relativePath, path.basename(file.link));
+      fs.symlinkSync(linkTo, destFilename);
+    } else {
+      // it's a file, try to extract it
+      try {
+        const content = disk.readFileSync(filesystem, filename, file);
+        fs.writeFileSync(destFilename, content);
+        if (file.executable) {
+          fs.chmodSync(destFilename, '755');
+        }
+      } catch (e) {
+        extractionErrors.push(e as Error);
+      }
+    }
+  }
+  if (extractionErrors.length) {
+    throw new Error(
+      'Unable to extract some files:\n\n' +
+        extractionErrors.map((error) => error.stack).join('\n\n'),
+    );
+  }
+}
+
+export function uncache(archivePath: string) {
+  return disk.uncacheFilesystem(archivePath);
+}
+
+export function uncacheAll() {
+  disk.uncacheAll();
+}
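
For context, a minimal sketch of how the exports above could be consumed once compiled — the archive and directory paths are illustrative, not part of this change:

  import { createPackageWithOptions, listPackage, extractAll } from './asar';

  async function repack() {
    // Pack ./app into app.asar, keeping native modules outside the archive.
    await createPackageWithOptions('./app', './app.asar', { unpack: '*.node' });
    // List every entry, flagging which ones were left unpacked.
    for (const entry of listPackage('./app.asar', { isPack: true })) {
      console.log(entry);
    }
    // Unpack the archive back onto disk.
    extractAll('./app.asar', './app-extracted');
  }

  repack().catch(console.error);
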
diff --git a/src/crawlfs.ts b/src/crawlfs.ts
new file mode 100644
index 00000000..76dbc300
--- /dev/null
+++ b/src/crawlfs.ts
@@ -0,0 +1,55 @@
+import { promisify } from 'util';
+import { glob as _glob, IOptions } from 'glob';
+
+import fs from './wrapped-fs';
+import { Stats } from 'fs';
+
+const glob = promisify(_glob);
+
+export type CrawledFileType = {
+  type: 'file' | 'directory' | 'link';
+  stat: Stats;
+  transformed?: {
+    path: string;
+    stat: Stats;
+  };
+};
+
+export async function determineFileType(filename: string): Promise<CrawledFileType | null> {
+  const stat = await fs.lstat(filename);
+  if (stat.isFile()) {
+    return { type: 'file', stat };
+  } else if (stat.isDirectory()) {
+    return { type: 'directory', stat };
+  } else if (stat.isSymbolicLink()) {
+    return { type: 'link', stat };
+  }
+  return null;
+}
+
+export async function crawl(dir: string, options: IOptions) {
+  const metadata: Record<string, CrawledFileType> = {};
+  const crawled = await glob(dir, options);
+  const results = await Promise.all(
+    crawled.map(async (filename) => [filename, await determineFileType(filename)]),
+  );
+  const links: string[] = [];
+  const filenames = results
+    .map(([filename, type]) => {
+      if (type) {
+        metadata[filename] = type;
+        if (type.type === 'link') links.push(filename);
+      }
+      return filename;
+    })
+    .filter((filename) => {
+      // Newer glob can return files inside symlinked directories, to avoid
+      // those appearing in archives we need to manually exclude them here
+      const exactLinkIndex = links.findIndex((link) => filename === link);
+      return links.every((link, index) => {
+        if (index === exactLinkIndex) return true;
+        return !filename.startsWith(link);
+      });
+    });
+  return [filenames, metadata];
+}
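
A quick sketch of the crawler's metadata contract — the sample path is hypothetical:

  import { determineFileType } from './crawlfs';

  async function describe(p: string) {
    const info = await determineFileType(p);
    if (info === null) {
      console.log(`${p}: neither file, directory, nor symlink`);
    } else {
      // info.stat is the lstat result the packer later reuses.
      console.log(`${p}: ${info.type}, ${info.stat.size} bytes`);
    }
  }

  describe('./app/index.js').catch(console.error);
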
diff --git a/src/disk.ts b/src/disk.ts
new file mode 100644
index 00000000..18f34ffb
--- /dev/null
+++ b/src/disk.ts
@@ -0,0 +1,150 @@
+import * as path from 'path';
+import fs from './wrapped-fs';
+import { Pickle } from './pickle';
+import { Filesystem, FilesystemEntry, FilesystemFileEntry } from './filesystem';
+import { CrawledFileType } from './crawlfs';
+import { Stats } from 'fs';
+
+let filesystemCache: Record<string, Filesystem | undefined> = Object.create(null);
+
+async function copyFile(dest: string, src: string, filename: string) {
+  const srcFile = path.join(src, filename);
+  const targetFile = path.join(dest, filename);
+
+  const [content, stats] = await Promise.all([
+    fs.readFile(srcFile),
+    fs.stat(srcFile),
+    fs.mkdirp(path.dirname(targetFile)),
+  ]);
+  return fs.writeFile(targetFile, content, { mode: stats.mode });
+}
+
+async function streamTransformedFile(
+  originalFilename: string,
+  outStream: NodeJS.WritableStream,
+  transformed: CrawledFileType['transformed'],
+) {
+  return new Promise<void>((resolve, reject) => {
+    const stream = fs.createReadStream(transformed ? transformed.path : originalFilename);
+    stream.pipe(outStream, { end: false });
+    stream.on('error', reject);
+    stream.on('end', () => resolve());
+  });
+}
+
+export type InputMetadata = {
+  [property: string]: CrawledFileType;
+};
+
+export type BasicFilesArray = { filename: string; unpack: boolean }[];
+
+const writeFileListToStream = async function (
+  dest: string,
+  filesystem: Filesystem,
+  out: NodeJS.WritableStream,
+  fileList: BasicFilesArray,
+  metadata: InputMetadata,
+) {
+  for (const file of fileList) {
+    if (file.unpack) {
+      // the file should not be packed into archive
+      const filename = path.relative(filesystem.getRootPath(), file.filename);
+      await copyFile(`${dest}.unpacked`, filesystem.getRootPath(), filename);
+    } else {
+      await streamTransformedFile(file.filename, out, metadata[file.filename].transformed);
+    }
+  }
+  return out.end();
+};
+
+export async function writeFilesystem(
+  dest: string,
+  filesystem: Filesystem,
+  fileList: BasicFilesArray,
+  metadata: InputMetadata,
+) {
+  const headerPickle = Pickle.createEmpty();
+  headerPickle.writeString(JSON.stringify(filesystem.getHeader()));
+  const headerBuf = headerPickle.toBuffer();
+
+  const sizePickle = Pickle.createEmpty();
+  sizePickle.writeUInt32(headerBuf.length);
+  const sizeBuf = sizePickle.toBuffer();
+
+  const out = fs.createWriteStream(dest);
+  await new Promise<void>((resolve, reject) => {
+    out.on('error', reject);
+    out.write(sizeBuf);
+    return out.write(headerBuf, () => resolve());
+  });
+  return writeFileListToStream(dest, filesystem, out, fileList, metadata);
+}
+
+export function readArchiveHeaderSync(archivePath: string) {
+  const fd = fs.openSync(archivePath, 'r');
+  let size;
+  let headerBuf;
+  try {
+    const sizeBuf = Buffer.alloc(8);
+    if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
+      throw new Error('Unable to read header size');
+    }
+
+    const sizePickle = Pickle.createFromBuffer(sizeBuf);
+    size = sizePickle.createIterator().readUInt32();
+    headerBuf = Buffer.alloc(size);
+    if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
+      throw new Error('Unable to read header');
+    }
+  } finally {
+    fs.closeSync(fd);
+  }
+
+  const headerPickle = Pickle.createFromBuffer(headerBuf);
+  const header = headerPickle.createIterator().readString();
+  return { headerString: header, header: JSON.parse(header), headerSize: size };
+}
+
+export function readFilesystemSync(archivePath: string) {
+  if (!filesystemCache[archivePath]) {
+    const header = readArchiveHeaderSync(archivePath);
+    const filesystem = new Filesystem(archivePath);
+    filesystem.setHeader(header.header, header.headerSize);
+    filesystemCache[archivePath] = filesystem;
+  }
+  return filesystemCache[archivePath];
+}
+
+export function uncacheFilesystem(archivePath: string) {
+  if (filesystemCache[archivePath]) {
+    filesystemCache[archivePath] = undefined;
+    return true;
+  }
+  return false;
+}
+
+export function uncacheAll() {
+  filesystemCache = {};
+}
+
+export function readFileSync(filesystem: Filesystem, filename: string, info: FilesystemFileEntry) {
+  let buffer = Buffer.alloc(info.size);
+  if (info.size <= 0) {
+    return buffer;
+  }
+  if (info.unpacked) {
+    // it's an unpacked file, copy it.
+    buffer = fs.readFileSync(path.join(`${filesystem.getRootPath()}.unpacked`, filename));
+  } else {
+    // Node throws an exception when reading 0 bytes into a 0-size buffer,
+    // so we short-circuit the read in this case.
+    const fd = fs.openSync(filesystem.getRootPath(), 'r');
+    try {
+      const offset = 8 + filesystem.getHeaderSize() + parseInt(info.offset);
+      fs.readSync(fd, buffer, 0, info.size, offset);
+    } finally {
+      fs.closeSync(fd);
+    }
+  }
+  return buffer;
+}
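
As a rough illustration of the on-disk layout these helpers implement (an 8-byte pickled size, then the JSON header, then concatenated file contents), with a hypothetical archive path:

  import { readArchiveHeaderSync, readFilesystemSync, uncacheFilesystem } from './disk';

  const { header, headerSize } = readArchiveHeaderSync('./app.asar');
  console.log(`JSON index is ${headerSize} bytes`);
  console.log(Object.keys(header.files)); // top-level entries

  // Parsed filesystems are cached per archive path until evicted.
  const filesystem = readFilesystemSync('./app.asar');
  console.log(filesystem?.listFiles().length, 'entries');
  uncacheFilesystem('./app.asar');
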
diff --git a/src/filesystem.ts b/src/filesystem.ts
new file mode 100644
index 00000000..89a4a2bc
--- /dev/null
+++ b/src/filesystem.ts
@@ -0,0 +1,215 @@
+import * as os from 'os';
+import * as path from 'path';
+import { promisify } from 'util';
+import * as stream from 'stream';
+
+import { FileIntegrity, getFileIntegrity } from './integrity';
+import fs from './wrapped-fs';
+import { CrawledFileType } from './crawlfs';
+
+const UINT32_MAX = 2 ** 32 - 1;
+
+const pipeline = promisify(stream.pipeline);
+
+export type FilesystemDirectoryEntry = {
+  files: Record<string, FilesystemEntry>;
+  unpacked?: boolean;
+};
+
+export type FilesystemFileEntry = {
+  unpacked: boolean;
+  executable: boolean;
+  offset: string;
+  size: number;
+  integrity: FileIntegrity;
+};
+
+export type FilesystemLinkEntry = {
+  link: string;
+};
+
+export type FilesystemEntry = FilesystemDirectoryEntry | FilesystemFileEntry | FilesystemLinkEntry;
+
+export class Filesystem {
+  private src: string;
+  private header: FilesystemEntry;
+  private headerSize: number;
+  private offset: bigint;
+
+  constructor(src: string) {
+    this.src = path.resolve(src);
+    this.header = { files: Object.create(null) };
+    this.headerSize = 0;
+    this.offset = BigInt(0);
+  }
+
+  getRootPath() {
+    return this.src;
+  }
+
+  getHeader() {
+    return this.header;
+  }
+
+  getHeaderSize() {
+    return this.headerSize;
+  }
+
+  setHeader(header: FilesystemEntry, headerSize: number) {
+    this.header = header;
+    this.headerSize = headerSize;
+  }
+
+  searchNodeFromDirectory(p: string) {
+    let json = this.header;
+    const dirs = p.split(path.sep);
+    for (const dir of dirs) {
+      if (dir !== '.') {
+        if ('files' in json) {
+          if (!json.files[dir]) {
+            json.files[dir] = { files: Object.create(null) };
+          }
+          json = json.files[dir];
+        } else {
+          throw new Error('Unexpected directory state while traversing: ' + p);
+        }
+      }
+    }
+    return json;
+  }
+
+  searchNodeFromPath(p: string) {
+    p = path.relative(this.src, p);
+    if (!p) {
+      return this.header;
+    }
+    const name = path.basename(p);
+    const node = this.searchNodeFromDirectory(path.dirname(p)) as FilesystemDirectoryEntry;
+    if (!node.files) {
+      node.files = Object.create(null);
+    }
+    if (!node.files[name]) {
+      node.files[name] = Object.create(null);
+    }
+    return node.files[name];
+  }
+
+  insertDirectory(p: string, shouldUnpack: boolean) {
+    const node = this.searchNodeFromPath(p) as FilesystemDirectoryEntry;
+    if (shouldUnpack) {
+      node.unpacked = shouldUnpack;
+    }
+    node.files = node.files || Object.create(null);
+    return node.files;
+  }
+
+  async insertFile(
+    p: string,
+    shouldUnpack: boolean,
+    file: CrawledFileType,
+    options: {
+      transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
+    } = {},
+  ) {
+    const dirNode = this.searchNodeFromPath(path.dirname(p)) as FilesystemDirectoryEntry;
+    const node = this.searchNodeFromPath(p) as FilesystemFileEntry;
+    if (shouldUnpack || dirNode.unpacked) {
+      node.size = file.stat.size;
+      node.unpacked = true;
+      node.integrity = await getFileIntegrity(p);
+      return Promise.resolve();
+    }
+
+    let size;
+
+    const transformed = options.transform && options.transform(p);
+    if (transformed) {
+      const tmpdir = await fs.mkdtemp(path.join(os.tmpdir(), 'asar-'));
+      const tmpfile = path.join(tmpdir, path.basename(p));
+      const out = fs.createWriteStream(tmpfile);
+      const readStream = fs.createReadStream(p);
+
+      await pipeline(readStream, transformed, out);
+      file.transformed = {
+        path: tmpfile,
+        stat: await fs.lstat(tmpfile),
+      };
+      size = file.transformed.stat.size;
+    } else {
+      size = file.stat.size;
+    }
+
+    // JavaScript cannot precisely represent integers >= UINT32_MAX.
+    if (size > UINT32_MAX) {
+      throw new Error(`${p}: file size cannot be larger than 4.2GB`);
+    }
+
+    node.size = size;
+    node.offset = this.offset.toString();
+    node.integrity = await getFileIntegrity(p);
+    if (process.platform !== 'win32' && file.stat.mode & 0o100) {
+      node.executable = true;
+    }
+    this.offset += BigInt(size);
+  }
+
+  insertLink(p: string) {
+    const symlink = fs.readlinkSync(p);
+    // /var => /private/var
+    const parentPath = fs.realpathSync(path.dirname(p));
+    const link = path.relative(fs.realpathSync(this.src), path.join(parentPath, symlink));
+    if (link.startsWith('..')) {
+      throw new Error(`${p}: file "${link}" links out of the package`);
+    }
+    const node = this.searchNodeFromPath(p) as FilesystemLinkEntry;
+    node.link = link;
+    return link;
+  }
+
+  listFiles(options?: { isPack: boolean }) {
+    const files: string[] = [];
+
+    const fillFilesFromMetadata = function (basePath: string, metadata: FilesystemEntry) {
+      if (!('files' in metadata)) {
+        return;
+      }
+
+      for (const [childPath, childMetadata] of Object.entries(metadata.files)) {
+        const fullPath = path.join(basePath, childPath);
+        const packState =
+          'unpacked' in childMetadata && childMetadata.unpacked ? 'unpack' : 'pack ';
+        files.push(options && options.isPack ? `${packState} : ${fullPath}` : fullPath);
+        fillFilesFromMetadata(fullPath, childMetadata);
+      }
+    };
+
+    fillFilesFromMetadata('/', this.header);
+    return files;
+  }
+
+  getNode(p: string) {
+    const node = this.searchNodeFromDirectory(path.dirname(p));
+    const name = path.basename(p);
+    if (name) {
+      return (node as FilesystemDirectoryEntry).files[name];
+    } else {
+      return node;
+    }
+  }
+
+  getFile(p: string, followLinks: boolean): FilesystemEntry {
+    followLinks = typeof followLinks === 'undefined' ? true : followLinks;
+    const info = this.getNode(p);
+
+    if (!info) {
+      throw new Error(`"${p}" was not found in this archive`);
+    }
+
+    // if followLinks is false we don't resolve symlinks
+    if ('link' in info && followLinks) {
+      return this.getFile(info.link, followLinks);
+    } else {
+      return info;
+    }
+  }
+}
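
A short sketch of walking the header tree that Filesystem maintains, reusing the same 'files' membership test the extraction code relies on (the archive path is hypothetical):

  import { FilesystemEntry } from './filesystem';
  import { readFilesystemSync } from './disk';

  function walk(entry: FilesystemEntry, indent = ''): void {
    if (!('files' in entry)) return;
    for (const [name, child] of Object.entries(entry.files)) {
      console.log(indent + name);
      walk(child, indent + '  ');
    }
  }

  const filesystem = readFilesystemSync('./app.asar');
  if (filesystem) walk(filesystem.getHeader());
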
diff --git a/src/integrity.ts b/src/integrity.ts
new file mode 100644
index 00000000..abc07990
--- /dev/null
+++ b/src/integrity.ts
@@ -0,0 +1,67 @@
+import * as crypto from 'crypto';
+import * as fs from 'fs';
+import * as stream from 'stream';
+import { promisify } from 'util';
+
+const ALGORITHM = 'SHA256';
+// 4MB default block size
+const BLOCK_SIZE = 4 * 1024 * 1024;
+
+const pipeline = promisify(stream.pipeline);
+
+function hashBlock(block: Buffer) {
+  return crypto.createHash(ALGORITHM).update(block).digest('hex');
+}
+
+export type FileIntegrity = {
+  algorithm: 'SHA256';
+  hash: string;
+  blockSize: number;
+  blocks: string[];
+};
+
+export async function getFileIntegrity(path: string): Promise<FileIntegrity> {
+  const fileHash = crypto.createHash(ALGORITHM);
+
+  const blockHashes: string[] = [];
+  let currentBlockSize = 0;
+  let currentBlock: Buffer[] = [];
+
+  await pipeline(
+    fs.createReadStream(path),
+    new stream.PassThrough({
+      decodeStrings: false,
+      transform(_chunk, encoding, callback) {
+        fileHash.update(_chunk);
+
+        function handleChunk(chunk: Buffer) {
+          const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength);
+          currentBlockSize += diffToSlice;
+          currentBlock.push(chunk.slice(0, diffToSlice));
+          if (currentBlockSize === BLOCK_SIZE) {
+            blockHashes.push(hashBlock(Buffer.concat(currentBlock)));
+            currentBlock = [];
+            currentBlockSize = 0;
+          }
+          if (diffToSlice < chunk.byteLength) {
+            handleChunk(chunk.slice(diffToSlice));
+          }
+        }
+        handleChunk(_chunk);
+        callback();
+      },
+      flush(callback) {
+        blockHashes.push(hashBlock(Buffer.concat(currentBlock)));
+        currentBlock = [];
+        callback();
+      },
+    }),
+  );
+
+  return {
+    algorithm: ALGORITHM,
+    hash: fileHash.digest('hex'),
+    blockSize: BLOCK_SIZE,
+    blocks: blockHashes,
+  };
+}
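
A small sketch of how a consumer could verify a stored integrity record against a file on disk (the expected record is a placeholder):

  import { getFileIntegrity, FileIntegrity } from './integrity';

  async function matches(path: string, expected: FileIntegrity): Promise<boolean> {
    const actual = await getFileIntegrity(path);
    return (
      actual.algorithm === expected.algorithm &&
      actual.hash === expected.hash &&
      actual.blocks.length === expected.blocks.length &&
      actual.blocks.every((block, i) => block === expected.blocks[i])
    );
  }
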
+class PickleIterator {
+  private payload: Buffer;
+  private payloadOffset: number;
+  private readIndex: number;
+  private endIndex: number;
+
+  constructor(pickle: Pickle) {
+    this.payload = pickle.getHeader();
+    this.payloadOffset = pickle.getHeaderSize();
+    this.readIndex = 0;
+    this.endIndex = pickle.getPayloadSize();
+  }
+
+  readBool(): boolean {
+    return this.readInt() !== 0;
+  }
+
+  readInt(): number {
+    return this.readBytes(SIZE_INT32, Buffer.prototype.readInt32LE);
+  }
+
+  readUInt32(): number {
+    return this.readBytes(SIZE_UINT32, Buffer.prototype.readUInt32LE);
+  }
+
+  readInt64(): bigint {
+    return this.readBytes(SIZE_INT64, Buffer.prototype.readBigInt64LE);
+  }
+
+  readUInt64(): bigint {
+    return this.readBytes(SIZE_UINT64, Buffer.prototype.readBigUInt64LE);
+  }
+
+  readFloat(): number {
+    return this.readBytes(SIZE_FLOAT, Buffer.prototype.readFloatLE);
+  }
+
+  readDouble(): number {
+    return this.readBytes(SIZE_DOUBLE, Buffer.prototype.readDoubleLE);
+  }
+
+  readString(): string {
+    return this.readBytes(this.readInt()).toString();
+  }
+
+  readBytes(length: number): Buffer;
+  readBytes<R, F extends (this: Buffer, offset: number) => R>(length: number, method: F): R;
+  readBytes<R, F extends (this: Buffer, offset: number) => R>(length: number, method?: F): R | Buffer {
+    const readPayloadOffset = this.getReadPayloadOffsetAndAdvance(length);
+    if (method != null) {
+      return method.call(this.payload, readPayloadOffset, length);
+    } else {
+      return this.payload.slice(readPayloadOffset, readPayloadOffset + length);
+    }
+  }
+
+  getReadPayloadOffsetAndAdvance(length: number) {
+    if (length > this.endIndex - this.readIndex) {
+      this.readIndex = this.endIndex;
+      throw new Error('Failed to read data with length of ' + length);
+    }
+    const readPayloadOffset = this.payloadOffset + this.readIndex;
+    this.advance(length);
+    return readPayloadOffset;
+  }
+
+  advance(size: number) {
+    const alignedSize = alignInt(size, SIZE_UINT32);
+    if (this.endIndex - this.readIndex < alignedSize) {
+      this.readIndex = this.endIndex;
+    } else {
+      this.readIndex += alignedSize;
+    }
+  }
+}
+
+// This class provides facilities for basic binary value packing and unpacking.
+//
+// The Pickle class supports appending primitive values (ints, strings, etc.)
+// to a pickle instance. The Pickle instance grows its internal memory buffer
+// dynamically to hold the sequence of primitive values. The internal memory
+// buffer is exposed as the "data" of the Pickle. This "data" can be passed
+// to a Pickle object to initialize it for reading.
+//
+// When reading from a Pickle object, it is important for the consumer to know
+// what value types to read and in what order to read them as the Pickle does
+// not keep track of the type of data written to it.
+//
+// The Pickle's data has a header which contains the size of the Pickle's
+// payload. It can optionally support additional space in the header. That
+// space is controlled by the header_size parameter passed to the Pickle
+// constructor.
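+//
+// A write/read round-trip sketch (illustrative; it mirrors what
+// test/pickle-spec.js below exercises rather than adding new API):
+//
+//   const write = Pickle.createEmpty();
+//   write.writeString('女の子.txt');
+//   const read = Pickle.createFromBuffer(write.toBuffer());
+//   read.createIterator().readString(); // => '女の子.txt'
+//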
+export class Pickle { + private header: Buffer; + private headerSize: number; + private capacityAfterHeader: number; + private writeOffset: number; + + private constructor(buffer?: Buffer) { + if (buffer) { + this.header = buffer; + this.headerSize = buffer.length - this.getPayloadSize(); + this.capacityAfterHeader = CAPACITY_READ_ONLY; + this.writeOffset = 0; + if (this.headerSize > buffer.length) { + this.headerSize = 0; + } + if (this.headerSize !== alignInt(this.headerSize, SIZE_UINT32)) { + this.headerSize = 0; + } + if (this.headerSize === 0) { + this.header = Buffer.alloc(0); + } + } else { + this.header = Buffer.alloc(0); + this.headerSize = SIZE_UINT32; + this.capacityAfterHeader = 0; + this.writeOffset = 0; + this.resize(PAYLOAD_UNIT); + this.setPayloadSize(0); + } + } + + static createEmpty() { + return new Pickle(); + } + + static createFromBuffer(buffer: Buffer) { + return new Pickle(buffer); + } + + getHeader() { + return this.header; + } + + getHeaderSize() { + return this.headerSize; + } + + createIterator() { + return new PickleIterator(this); + } + + toBuffer() { + return this.header.slice(0, this.headerSize + this.getPayloadSize()); + } + + writeBool(value: boolean) { + return this.writeInt(value ? 1 : 0); + } + + writeInt(value: number) { + return this.writeBytes(value, SIZE_INT32, Buffer.prototype.writeInt32LE); + } + + writeUInt32(value: number) { + return this.writeBytes(value, SIZE_UINT32, Buffer.prototype.writeUInt32LE); + } + + writeInt64(value: number) { + return this.writeBytes(BigInt(value), SIZE_INT64, Buffer.prototype.writeBigInt64LE); + } + + writeUInt64(value: number) { + return this.writeBytes(BigInt(value), SIZE_UINT64, Buffer.prototype.writeBigUInt64LE); + } + + writeFloat(value: number) { + return this.writeBytes(value, SIZE_FLOAT, Buffer.prototype.writeFloatLE); + } + + writeDouble(value: number) { + return this.writeBytes(value, SIZE_DOUBLE, Buffer.prototype.writeDoubleLE); + } + + writeString(value: string) { + const length = Buffer.byteLength(value, 'utf8'); + if (!this.writeInt(length)) { + return false; + } + return this.writeBytes(value, length); + } + + setPayloadSize(payloadSize: number) { + return this.header.writeUInt32LE(payloadSize, 0); + } + + getPayloadSize() { + return this.header.readUInt32LE(0); + } + + writeBytes(data: string, length: number, method?: undefined): boolean; + writeBytes(data: number | BigInt, length: number, method: Function): boolean; + writeBytes(data: string | number | BigInt, length: number, method?: Function): boolean { + const dataLength = alignInt(length, SIZE_UINT32); + const newSize = this.writeOffset + dataLength; + if (newSize > this.capacityAfterHeader) { + this.resize(Math.max(this.capacityAfterHeader * 2, newSize)); + } + if (method) { + method.call(this.header, data, this.headerSize + this.writeOffset); + } else { + this.header.write(data as string, this.headerSize + this.writeOffset, length); + } + const endOffset = this.headerSize + this.writeOffset + length; + this.header.fill(0, endOffset, endOffset + dataLength - length); + this.setPayloadSize(newSize); + this.writeOffset = newSize; + return true; + } + + resize(newCapacity: number) { + newCapacity = alignInt(newCapacity, PAYLOAD_UNIT); + this.header = Buffer.concat([this.header, Buffer.alloc(newCapacity)]); + this.capacityAfterHeader = newCapacity; + } +} diff --git a/src/wrapped-fs.ts b/src/wrapped-fs.ts new file mode 100644 index 00000000..95c4ef71 --- /dev/null +++ b/src/wrapped-fs.ts @@ -0,0 +1,28 @@ +const fs = 'electron' in process.versions ? 
require('original-fs') : require('fs');
+
+const promisifiedMethods = ['lstat', 'mkdtemp', 'readFile', 'stat', 'writeFile'];
+
+type AsarFS = typeof import('fs') & {
+  mkdirp(dir: string): Promise<void>;
+  mkdirpSync(dir: string): void;
+  lstat: (typeof import('fs'))['promises']['lstat'];
+  mkdtemp: (typeof import('fs'))['promises']['mkdtemp'];
+  readFile: (typeof import('fs'))['promises']['readFile'];
+  stat: (typeof import('fs'))['promises']['stat'];
+  writeFile: (typeof import('fs'))['promises']['writeFile'];
+};
+
+const promisified: AsarFS = {} as any;
+
+for (const method of Object.keys(fs)) {
+  if (promisifiedMethods.includes(method)) {
+    (promisified as any)[method] = fs.promises[method];
+  } else {
+    (promisified as any)[method] = fs[method];
+  }
+}
+// To make it more like fs-extra
+promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true });
+promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true });
+
+export default promisified;
diff --git a/test/api-spec.js b/test/api-spec.js
index 2239d1ac..9b227b29 100644
--- a/test/api-spec.js
+++ b/test/api-spec.js
@@ -1,110 +1,158 @@
-'use strict'
+'use strict';

-const assert = require('assert')
-const fs = require('../lib/wrapped-fs')
-const os = require('os')
-const path = require('path')
-const rimraf = require('rimraf')
+const assert = require('assert');
+const fs = require('../lib/wrapped-fs').default;
+const os = require('os');
+const path = require('path');
+const rimraf = require('rimraf');

-const asar = require('..')
-const compDirs = require('./util/compareDirectories')
-const compFileLists = require('./util/compareFileLists')
-const compFiles = require('./util/compareFiles')
-const transform = require('./util/transformStream')
+const asar = require('..');
+const compDirs = require('./util/compareDirectories');
+const compFileLists = require('./util/compareFileLists');
+const compFiles = require('./util/compareFiles');
+const transform = require('./util/transformStream');

-async function assertPackageListEquals (actualList, expectedFilename) {
-  const expected = await fs.readFile(expectedFilename, 'utf8')
-  return compFileLists(actualList.join('\n'), expected)
+async function assertPackageListEquals(actualList, expectedFilename) {
+  const expected = await fs.readFile(expectedFilename, 'utf8');
+  return compFileLists(actualList.join('\n'), expected);
 }

 describe('api', function () {
-  beforeEach(() => { rimraf.sync(path.join(__dirname, '..', 'tmp'), fs) })
+  beforeEach(() => {
+    rimraf.sync(path.join(__dirname, '..', 'tmp'), fs);
+  });
   it('should create archive from directory', async () => {
-    await asar.createPackage('test/input/packthis/', 'tmp/packthis-api.asar')
-    return compFiles('tmp/packthis-api.asar', 'test/expected/packthis.asar')
-  })
+    await asar.createPackage('test/input/packthis/', 'tmp/packthis-api.asar');
+    return compFiles('tmp/packthis-api.asar', 'test/expected/packthis.asar');
+  });
   if (os.platform() === 'win32') {
     it('should create archive with windows-style path separators', async () => {
-      await asar.createPackage('test\\input\\packthis\\', 'tmp\\packthis-api.asar')
-      return compFiles('tmp/packthis-api.asar', 'test/expected/packthis.asar')
-    })
+      await asar.createPackage('test\\input\\packthis\\', 'tmp\\packthis-api.asar');
+      return compFiles('tmp/packthis-api.asar', 'test/expected/packthis.asar');
+    });
   }
   it('should create archive from directory (without hidden files)', async () => {
-    await asar.createPackageWithOptions('test/input/packthis/', 'tmp/packthis-without-hidden-api.asar', {
dot: false }) - return compFiles('tmp/packthis-without-hidden-api.asar', 'test/expected/packthis-without-hidden.asar') - }) + await asar.createPackageWithOptions( + 'test/input/packthis/', + 'tmp/packthis-without-hidden-api.asar', + { dot: false }, + ); + return compFiles( + 'tmp/packthis-without-hidden-api.asar', + 'test/expected/packthis-without-hidden.asar', + ); + }); it('should create archive from directory (with transformed files)', async () => { - await asar.createPackageWithOptions('test/input/packthis/', 'tmp/packthis-api-transformed.asar', { transform }) - return compFiles('tmp/packthis-api-transformed.asar', 'test/expected/packthis-transformed.asar') - }) + await asar.createPackageWithOptions( + 'test/input/packthis/', + 'tmp/packthis-api-transformed.asar', + { transform }, + ); + return compFiles( + 'tmp/packthis-api-transformed.asar', + 'test/expected/packthis-transformed.asar', + ); + }); it('should create archive from directory (with nothing packed)', async () => { - await asar.createPackageWithOptions('test/input/packthis/', 'tmp/packthis-api-unpacked.asar', { unpackDir: '**' }) - await compFiles('tmp/packthis-api-unpacked.asar', 'test/expected/packthis-all-unpacked.asar') - return compDirs('tmp/packthis-api-unpacked.asar.unpacked', 'test/expected/extractthis') - }) + await asar.createPackageWithOptions('test/input/packthis/', 'tmp/packthis-api-unpacked.asar', { + unpackDir: '**', + }); + await compFiles('tmp/packthis-api-unpacked.asar', 'test/expected/packthis-all-unpacked.asar'); + return compDirs('tmp/packthis-api-unpacked.asar.unpacked', 'test/expected/extractthis'); + }); it('should list files/dirs in archive', async () => { - return assertPackageListEquals(asar.listPackage('test/input/extractthis.asar'), 'test/expected/extractthis-filelist.txt') - }) + return assertPackageListEquals( + asar.listPackage('test/input/extractthis.asar'), + 'test/expected/extractthis-filelist.txt', + ); + }); it('should list files/dirs in archive with option', async () => { - return assertPackageListEquals(asar.listPackage('test/input/extractthis-unpack-dir.asar', { isPack: true }), 'test/expected/extractthis-filelist-with-option.txt') - }) + return assertPackageListEquals( + asar.listPackage('test/input/extractthis-unpack-dir.asar', { isPack: true }), + 'test/expected/extractthis-filelist-with-option.txt', + ); + }); it('should extract a text file from archive', async () => { - const actual = asar.extractFile('test/input/extractthis.asar', 'dir1/file1.txt').toString('utf8') - const expected = await fs.readFile('test/expected/extractthis/dir1/file1.txt', 'utf8') - return compFileLists(actual, expected) - }) + const actual = asar + .extractFile('test/input/extractthis.asar', 'dir1/file1.txt') + .toString('utf8'); + const expected = await fs.readFile('test/expected/extractthis/dir1/file1.txt', 'utf8'); + return compFileLists(actual, expected); + }); it('should extract a binary file from archive', async () => { - const actual = asar.extractFile('test/input/extractthis.asar', 'dir2/file2.png') - const expected = await fs.readFile('test/expected/extractthis/dir2/file2.png') - return assert.strictEqual(actual.toString(), expected.toString()) - }) + const actual = asar.extractFile('test/input/extractthis.asar', 'dir2/file2.png'); + const expected = await fs.readFile('test/expected/extractthis/dir2/file2.png'); + return assert.strictEqual(actual.toString(), expected.toString()); + }); it('should extract a binary file from archive with unpacked files', async () => { - const actual = 
asar.extractFile('test/input/extractthis-unpack.asar', 'dir2/file2.png') - const expected = await fs.readFile('test/expected/extractthis/dir2/file2.png') - return assert.strictEqual(actual.toString(), expected.toString()) - }) + const actual = asar.extractFile('test/input/extractthis-unpack.asar', 'dir2/file2.png'); + const expected = await fs.readFile('test/expected/extractthis/dir2/file2.png'); + return assert.strictEqual(actual.toString(), expected.toString()); + }); it('should extract an archive', async () => { - asar.extractAll('test/input/extractthis.asar', 'tmp/extractthis-api/') - return compDirs('tmp/extractthis-api/', 'test/expected/extractthis') - }) + asar.extractAll('test/input/extractthis.asar', 'tmp/extractthis-api/'); + return compDirs('tmp/extractthis-api/', 'test/expected/extractthis'); + }); it('should extract an archive with unpacked files', async () => { - asar.extractAll('test/input/extractthis-unpack.asar', 'tmp/extractthis-unpack-api/') - return compDirs('tmp/extractthis-unpack-api/', 'test/expected/extractthis') - }) + asar.extractAll('test/input/extractthis-unpack.asar', 'tmp/extractthis-unpack-api/'); + return compDirs('tmp/extractthis-unpack-api/', 'test/expected/extractthis'); + }); it('should extract a binary file from archive with unpacked files', async () => { - const actual = asar.extractFile('test/input/extractthis-unpack-dir.asar', 'dir1/file1.txt') - const expected = await fs.readFile('test/expected/extractthis/dir1/file1.txt') - assert.strictEqual(actual.toString(), expected.toString()) - }) + const actual = asar.extractFile('test/input/extractthis-unpack-dir.asar', 'dir1/file1.txt'); + const expected = await fs.readFile('test/expected/extractthis/dir1/file1.txt'); + assert.strictEqual(actual.toString(), expected.toString()); + }); it('should extract an archive with unpacked dirs', async () => { - asar.extractAll('test/input/extractthis-unpack-dir.asar', 'tmp/extractthis-unpack-dir-api/') - return compDirs('tmp/extractthis-unpack-dir-api/', 'test/expected/extractthis') - }) + asar.extractAll('test/input/extractthis-unpack-dir.asar', 'tmp/extractthis-unpack-dir-api/'); + return compDirs('tmp/extractthis-unpack-dir-api/', 'test/expected/extractthis'); + }); it('should extract an archive with symlink', async () => { - await asar.createPackageWithOptions('test/input/packthis-with-symlink/', 'tmp/packthis-with-symlink.asar', { dot: false }) - asar.extractAll('tmp/packthis-with-symlink.asar', 'tmp/packthis-with-symlink/') - return compFiles('tmp/packthis-with-symlink/real.txt', 'test/input/packthis-with-symlink/real.txt') - }) + await asar.createPackageWithOptions( + 'test/input/packthis-with-symlink/', + 'tmp/packthis-with-symlink.asar', + { dot: false }, + ); + asar.extractAll('tmp/packthis-with-symlink.asar', 'tmp/packthis-with-symlink/'); + return compFiles( + 'tmp/packthis-with-symlink/real.txt', + 'test/input/packthis-with-symlink/real.txt', + ); + }); it('should handle multibyte characters in paths', async () => { - await asar.createPackageWithOptions('test/input/packthis-unicode-path/', 'tmp/packthis-unicode-path.asar', { - globOptions: { - nosort: true - } - }) - return compFiles('tmp/packthis-unicode-path.asar', 'test/expected/packthis-unicode-path.asar') - }) + await asar.createPackageWithOptions( + 'test/input/packthis-unicode-path/', + 'tmp/packthis-unicode-path.asar', + { + globOptions: { + nosort: true, + }, + }, + ); + return compFiles('tmp/packthis-unicode-path.asar', 'test/expected/packthis-unicode-path.asar'); + }); it('should extract a 
text file from archive with multibyte characters in path', async () => { - const actual = asar.extractFile('test/expected/packthis-unicode-path.asar', 'dir1/女の子.txt').toString('utf8') - const expected = await fs.readFile('test/input/packthis-unicode-path/dir1/女の子.txt', 'utf8') - return compFileLists(actual, expected) - }) + const actual = asar + .extractFile('test/expected/packthis-unicode-path.asar', 'dir1/女の子.txt') + .toString('utf8'); + const expected = await fs.readFile('test/input/packthis-unicode-path/dir1/女の子.txt', 'utf8'); + return compFileLists(actual, expected); + }); it('should create files/directories whose names are properties of Object.prototype', async () => { - await asar.createPackage('test/input/packthis-object-prototype/', 'tmp/packthis-object-prototype.asar') - return compFiles('tmp/packthis-object-prototype.asar', 'test/expected/packthis-object-prototype.asar') - }) + await asar.createPackage( + 'test/input/packthis-object-prototype/', + 'tmp/packthis-object-prototype.asar', + ); + return compFiles( + 'tmp/packthis-object-prototype.asar', + 'test/expected/packthis-object-prototype.asar', + ); + }); it('should extract files/directories whose names are properties of Object.prototype', () => { - asar.extractAll('test/expected/packthis-object-prototype.asar', 'tmp/packthis-object-prototype/') - return compDirs('test/input/packthis-object-prototype/', 'tmp/packthis-object-prototype') - }) -}) + asar.extractAll( + 'test/expected/packthis-object-prototype.asar', + 'tmp/packthis-object-prototype/', + ); + return compDirs('test/input/packthis-object-prototype/', 'tmp/packthis-object-prototype'); + }); +}); diff --git a/test/cli-spec.js b/test/cli-spec.js index 44952c5c..b91d27ab 100644 --- a/test/cli-spec.js +++ b/test/cli-spec.js @@ -1,59 +1,78 @@ -'use strict' +'use strict'; -const assert = require('assert') -const childProcess = require('child_process') -const fs = require('../lib/wrapped-fs') -const os = require('os') -const path = require('path') -const { promisify } = require('util') -const rimraf = require('rimraf') +const assert = require('assert'); +const childProcess = require('child_process'); +const fs = require('../lib/wrapped-fs').default; +const os = require('os'); +const path = require('path'); +const { promisify } = require('util'); +const rimraf = require('rimraf'); -const compDirs = require('./util/compareDirectories') -const compFileLists = require('./util/compareFileLists') -const compFiles = require('./util/compareFiles') +const compDirs = require('./util/compareDirectories'); +const compFileLists = require('./util/compareFileLists'); +const compFiles = require('./util/compareFiles'); -const exec = promisify(childProcess.exec) +const exec = promisify(childProcess.exec); -async function execAsar (args) { - return exec(`node bin/asar ${args}`) +async function execAsar(args) { + return exec(`node bin/asar ${args}`); } -async function assertAsarOutputMatches (args, expectedFilename) { - const [{ stdout }, expectedContents] = await Promise.all([execAsar(args), fs.readFile(expectedFilename, 'utf8')]) - return compFileLists(stdout, `${expectedContents}\n`) +async function assertAsarOutputMatches(args, expectedFilename) { + const [{ stdout }, expectedContents] = await Promise.all([ + execAsar(args), + fs.readFile(expectedFilename, 'utf8'), + ]); + return compFileLists(stdout, `${expectedContents}\n`); } describe('command line interface', function () { - beforeEach(() => { rimraf.sync(path.join(__dirname, '..', 'tmp'), fs) }) + beforeEach(() => { + 
rimraf.sync(path.join(__dirname, '..', 'tmp'), fs); + }); it('should create archive from directory', async () => { - await execAsar('p test/input/packthis/ tmp/packthis-cli.asar') - await compFiles('tmp/packthis-cli.asar', 'test/expected/packthis.asar') - }) + await execAsar('p test/input/packthis/ tmp/packthis-cli.asar'); + await compFiles('tmp/packthis-cli.asar', 'test/expected/packthis.asar'); + }); if (os.platform() === 'win32') { it('should create archive from directory with windows-style path separators', async () => { - await execAsar('p test\\input\\packthis\\ tmp\\packthis-cli.asar') - await compFiles('tmp/packthis-cli.asar', 'test/expected/packthis.asar') - }) + await execAsar('p test\\input\\packthis\\ tmp\\packthis-cli.asar'); + await compFiles('tmp/packthis-cli.asar', 'test/expected/packthis.asar'); + }); } it('should create archive from directory without hidden files', async () => { - await execAsar('p test/input/packthis/ tmp/packthis-without-hidden-cli.asar --exclude-hidden') - await compFiles('tmp/packthis-without-hidden-cli.asar', 'test/expected/packthis-without-hidden.asar') - }) + await execAsar('p test/input/packthis/ tmp/packthis-without-hidden-cli.asar --exclude-hidden'); + await compFiles( + 'tmp/packthis-without-hidden-cli.asar', + 'test/expected/packthis-without-hidden.asar', + ); + }); it('should create archive from directory with unpacked files', async () => { - await execAsar('p test/input/packthis/ tmp/packthis-unpack-cli.asar --unpack *.png --exclude-hidden') - assert.ok(fs.existsSync('tmp/packthis-unpack-cli.asar.unpacked/dir2/file2.png')) - await compFiles('tmp/packthis-unpack-cli.asar', 'test/expected/packthis-unpack.asar') - }) + await execAsar( + 'p test/input/packthis/ tmp/packthis-unpack-cli.asar --unpack *.png --exclude-hidden', + ); + assert.ok(fs.existsSync('tmp/packthis-unpack-cli.asar.unpacked/dir2/file2.png')); + await compFiles('tmp/packthis-unpack-cli.asar', 'test/expected/packthis-unpack.asar'); + }); it('should list files/dirs in archive', async () => { - return assertAsarOutputMatches('l test/input/extractthis.asar', 'test/expected/extractthis-filelist.txt') - }) + return assertAsarOutputMatches( + 'l test/input/extractthis.asar', + 'test/expected/extractthis-filelist.txt', + ); + }); it('should list files/dirs in archive with unpacked files', async () => { - return assertAsarOutputMatches('l test/input/extractthis-unpack.asar', 'test/expected/extractthis-filelist.txt') - }) + return assertAsarOutputMatches( + 'l test/input/extractthis-unpack.asar', + 'test/expected/extractthis-filelist.txt', + ); + }); it('should list files/dirs with multibyte characters in path', async () => { - return assertAsarOutputMatches('l test/expected/packthis-unicode-path.asar', 'test/expected/packthis-unicode-path-filelist.txt') - }) + return assertAsarOutputMatches( + 'l test/expected/packthis-unicode-path.asar', + 'test/expected/packthis-unicode-path-filelist.txt', + ); + }); // we need a way to set a path to extract to first, otherwise we pollute our project dir // or we fake it by setting our cwd, but I don't like that /* @@ -76,68 +95,92 @@ describe('command line interface', function () { }) */ it('should extract an archive', async () => { - await execAsar('e test/input/extractthis.asar tmp/extractthis-cli/') - return compDirs('tmp/extractthis-cli/', 'test/expected/extractthis') - }) + await execAsar('e test/input/extractthis.asar tmp/extractthis-cli/'); + return compDirs('tmp/extractthis-cli/', 'test/expected/extractthis'); + }); it('should extract an 
archive with unpacked files', async () => { - await execAsar('e test/input/extractthis-unpack.asar tmp/extractthis-unpack-cli/') - return compDirs('tmp/extractthis-unpack-cli/', 'test/expected/extractthis') - }) - it('should throw an error when trying to extract a file that doesn\'t exist in the archive', async () => { - await assert.rejects(execAsar('ef test/input/extractthis.asar this-file-doesnt-exist.404'), /"(.*?)" was not found in this archive/) - }) + await execAsar('e test/input/extractthis-unpack.asar tmp/extractthis-unpack-cli/'); + return compDirs('tmp/extractthis-unpack-cli/', 'test/expected/extractthis'); + }); + it("should throw an error when trying to extract a file that doesn't exist in the archive", async () => { + await assert.rejects( + execAsar('ef test/input/extractthis.asar this-file-doesnt-exist.404'), + /"(.*?)" was not found in this archive/, + ); + }); it('should create archive from directory with unpacked dirs', async () => { - await execAsar('p test/input/packthis/ tmp/packthis-unpack-dir-cli.asar --unpack-dir dir2 --exclude-hidden') - assert.ok(fs.existsSync('tmp/packthis-unpack-dir-cli.asar.unpacked/dir2/file2.png')) - assert.ok(fs.existsSync('tmp/packthis-unpack-dir-cli.asar.unpacked/dir2/file3.txt')) - return compFiles('tmp/packthis-unpack-dir-cli.asar', 'test/expected/packthis-unpack-dir.asar') - }) + await execAsar( + 'p test/input/packthis/ tmp/packthis-unpack-dir-cli.asar --unpack-dir dir2 --exclude-hidden', + ); + assert.ok(fs.existsSync('tmp/packthis-unpack-dir-cli.asar.unpacked/dir2/file2.png')); + assert.ok(fs.existsSync('tmp/packthis-unpack-dir-cli.asar.unpacked/dir2/file3.txt')); + return compFiles('tmp/packthis-unpack-dir-cli.asar', 'test/expected/packthis-unpack-dir.asar'); + }); it('should create archive from directory with unpacked dirs specified by glob pattern', async () => { - const tmpFile = 'tmp/packthis-unpack-dir-glob-cli.asar' - const tmpUnpacked = 'tmp/packthis-unpack-dir-glob-cli.asar.unpacked' - await execAsar(`p test/input/packthis-glob/ ${tmpFile} --unpack-dir "{x1,x2}" --exclude-hidden`) - assert.ok(fs.existsSync(tmpUnpacked + '/x1/file1.txt')) - assert.ok(fs.existsSync(tmpUnpacked + '/x2/file2.txt')) - return compFiles(tmpFile, 'test/expected/packthis-unpack-dir-glob.asar') - }) + const tmpFile = 'tmp/packthis-unpack-dir-glob-cli.asar'; + const tmpUnpacked = 'tmp/packthis-unpack-dir-glob-cli.asar.unpacked'; + await execAsar( + `p test/input/packthis-glob/ ${tmpFile} --unpack-dir "{x1,x2}" --exclude-hidden`, + ); + assert.ok(fs.existsSync(tmpUnpacked + '/x1/file1.txt')); + assert.ok(fs.existsSync(tmpUnpacked + '/x2/file2.txt')); + return compFiles(tmpFile, 'test/expected/packthis-unpack-dir-glob.asar'); + }); it('should create archive from directory with unpacked dirs specified by globstar pattern', async () => { - const tmpFile = 'tmp/packthis-unpack-dir-globstar-cli.asar' - const tmpUnpacked = 'tmp/packthis-unpack-dir-globstar-cli.asar.unpacked' - await execAsar(`p test/input/packthis-glob/ ${tmpFile} --unpack-dir "**/{x1,x2}" --exclude-hidden`) - assert.ok(fs.existsSync(tmpUnpacked + '/x1/file1.txt')) - assert.ok(fs.existsSync(tmpUnpacked + '/x2/file2.txt')) - assert.ok(fs.existsSync(tmpUnpacked + '/y3/x1/file4.txt')) - assert.ok(fs.existsSync(tmpUnpacked + '/y3/z1/x2/file5.txt')) - return compFiles(tmpFile, 'test/expected/packthis-unpack-dir-globstar.asar') - }) + const tmpFile = 'tmp/packthis-unpack-dir-globstar-cli.asar'; + const tmpUnpacked = 'tmp/packthis-unpack-dir-globstar-cli.asar.unpacked'; + await execAsar( + `p 
test/input/packthis-glob/ ${tmpFile} --unpack-dir "**/{x1,x2}" --exclude-hidden`, + ); + assert.ok(fs.existsSync(tmpUnpacked + '/x1/file1.txt')); + assert.ok(fs.existsSync(tmpUnpacked + '/x2/file2.txt')); + assert.ok(fs.existsSync(tmpUnpacked + '/y3/x1/file4.txt')); + assert.ok(fs.existsSync(tmpUnpacked + '/y3/z1/x2/file5.txt')); + return compFiles(tmpFile, 'test/expected/packthis-unpack-dir-globstar.asar'); + }); it('should create archive from directory with unpacked dirs specified by foo/{bar,baz} style pattern', async () => { - const tmpFile = 'tmp/packthis-unpack-dir-globstar-cli.asar' - const tmpUnpacked = 'tmp/packthis-unpack-dir-globstar-cli.asar.unpacked' - await execAsar(`p test/input/packthis-glob/ ${tmpFile} --unpack-dir "y3/{x1,z1}" --exclude-hidden`) - assert.ok(fs.existsSync(path.join(tmpUnpacked, 'y3/x1/file4.txt'))) - assert.ok(fs.existsSync(path.join(tmpUnpacked, 'y3/z1/x2/file5.txt'))) - }) + const tmpFile = 'tmp/packthis-unpack-dir-globstar-cli.asar'; + const tmpUnpacked = 'tmp/packthis-unpack-dir-globstar-cli.asar.unpacked'; + await execAsar( + `p test/input/packthis-glob/ ${tmpFile} --unpack-dir "y3/{x1,z1}" --exclude-hidden`, + ); + assert.ok(fs.existsSync(path.join(tmpUnpacked, 'y3/x1/file4.txt'))); + assert.ok(fs.existsSync(path.join(tmpUnpacked, 'y3/z1/x2/file5.txt'))); + }); it('should list files/dirs in archive with unpacked dirs', async () => { - return assertAsarOutputMatches('l test/expected/packthis-unpack-dir.asar', 'test/expected/extractthis-filelist.txt') - }) + return assertAsarOutputMatches( + 'l test/expected/packthis-unpack-dir.asar', + 'test/expected/extractthis-filelist.txt', + ); + }); it('should list files/dirs in archive with unpacked dirs & is-pack option', async () => { - return assertAsarOutputMatches('l --is-pack test/expected/packthis-unpack-dir.asar', 'test/expected/extractthis-filelist-with-option.txt') - }) + return assertAsarOutputMatches( + 'l --is-pack test/expected/packthis-unpack-dir.asar', + 'test/expected/extractthis-filelist-with-option.txt', + ); + }); it('should extract an archive with unpacked dirs', async () => { - await execAsar('e test/input/extractthis-unpack-dir.asar tmp/extractthis-unpack-dir/') - return compDirs('tmp/extractthis-unpack-dir/', 'test/expected/extractthis') - }) + await execAsar('e test/input/extractthis-unpack-dir.asar tmp/extractthis-unpack-dir/'); + return compDirs('tmp/extractthis-unpack-dir/', 'test/expected/extractthis'); + }); it('should create archive from directory with unpacked dirs and files', async () => { - await execAsar('p test/input/packthis/ tmp/packthis-unpack-dir-file-cli.asar --unpack *.png --unpack-dir dir2 --exclude-hidden') - assert.ok(fs.existsSync('tmp/packthis-unpack-dir-file-cli.asar.unpacked/dir2/file2.png')) - assert.ok(fs.existsSync('tmp/packthis-unpack-dir-file-cli.asar.unpacked/dir2/file3.txt')) - return compFiles('tmp/packthis-unpack-dir-file-cli.asar', 'test/expected/packthis-unpack-dir.asar') - }) + await execAsar( + 'p test/input/packthis/ tmp/packthis-unpack-dir-file-cli.asar --unpack *.png --unpack-dir dir2 --exclude-hidden', + ); + assert.ok(fs.existsSync('tmp/packthis-unpack-dir-file-cli.asar.unpacked/dir2/file2.png')); + assert.ok(fs.existsSync('tmp/packthis-unpack-dir-file-cli.asar.unpacked/dir2/file3.txt')); + return compFiles( + 'tmp/packthis-unpack-dir-file-cli.asar', + 'test/expected/packthis-unpack-dir.asar', + ); + }); it('should create archive from directory with unpacked subdirs and files', async () => { - await execAsar('p test/input/packthis-subdir/ 
tmp/packthis-unpack-subdir-cli.asar --unpack *.txt --unpack-dir dir2/subdir --exclude-hidden') - assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/file0.txt')) - assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir1/file1.txt')) - assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir2/subdir/file2.png')) - assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir2/subdir/file3.txt')) - }) -}) + await execAsar( + 'p test/input/packthis-subdir/ tmp/packthis-unpack-subdir-cli.asar --unpack *.txt --unpack-dir dir2/subdir --exclude-hidden', + ); + assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/file0.txt')); + assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir1/file1.txt')); + assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir2/subdir/file2.png')); + assert.ok(fs.existsSync('tmp/packthis-unpack-subdir-cli.asar.unpacked/dir2/subdir/file3.txt')); + }); +}); diff --git a/test/filesystem-spec.js b/test/filesystem-spec.js index 8ac357de..cb33cf55 100644 --- a/test/filesystem-spec.js +++ b/test/filesystem-spec.js @@ -1,14 +1,16 @@ -'use strict' +'use strict'; -const assert = require('assert') -const fs = require('../lib/wrapped-fs') -const path = require('path') -const rimraf = require('rimraf') +const assert = require('assert'); +const fs = require('../lib/wrapped-fs').default; +const path = require('path'); +const rimraf = require('rimraf'); -const Filesystem = require('../lib/filesystem') +const Filesystem = require('../lib/filesystem').Filesystem; describe('filesystem', function () { - beforeEach(() => { rimraf.sync(path.join(__dirname, '..', 'tmp'), fs) }) + beforeEach(() => { + rimraf.sync(path.join(__dirname, '..', 'tmp'), fs); + }); it('should does not throw an error when the src path includes a symbol link', async () => { /** @@ -21,21 +23,21 @@ describe('filesystem', function () { * │ └── file.txt * └── var -> private/var */ - const tmpPath = path.join(__dirname, '..', 'tmp') - const privateVarPath = path.join(tmpPath, 'private', 'var') - const varPath = path.join(tmpPath, 'var') - fs.mkdirSync(privateVarPath, { recursive: true }) - fs.symlinkSync(path.relative(tmpPath, privateVarPath), varPath) + const tmpPath = path.join(__dirname, '..', 'tmp'); + const privateVarPath = path.join(tmpPath, 'private', 'var'); + const varPath = path.join(tmpPath, 'var'); + fs.mkdirSync(privateVarPath, { recursive: true }); + fs.symlinkSync(path.relative(tmpPath, privateVarPath), varPath); - const originFilePath = path.join(varPath, 'file.txt') - fs.writeFileSync(originFilePath, 'hello world') - const appPath = path.join(varPath, 'app') - fs.mkdirpSync(appPath) - fs.symlinkSync('../file.txt', path.join(appPath, 'file.txt')) + const originFilePath = path.join(varPath, 'file.txt'); + fs.writeFileSync(originFilePath, 'hello world'); + const appPath = path.join(varPath, 'app'); + fs.mkdirpSync(appPath); + fs.symlinkSync('../file.txt', path.join(appPath, 'file.txt')); - const filesystem = new Filesystem(varPath) + const filesystem = new Filesystem(varPath); assert.doesNotThrow(() => { - filesystem.insertLink(path.join(appPath, 'file.txt')) - }) - }) -}) + filesystem.insertLink(path.join(appPath, 'file.txt')); + }); + }); +}); diff --git a/test/index.test-d.ts b/test/index.test-d.ts index 504d05b3..66465344 100644 --- a/test/index.test-d.ts +++ b/test/index.test-d.ts @@ -1,6 +1,6 @@ import * as asar from '..'; import * as fs from 'fs'; -import * as crypto from 'crypto' 
+import * as crypto from 'crypto'; import { expectType } from 'tsd'; await asar.createPackage('bin', 'tmp/foo.asar'); @@ -11,9 +11,12 @@ await asar.createPackageWithOptions('bin', 'tmp/foo.asar', { }, transform: (filePath: string) => { if (process.env.TRANSFORM_ASAR) { - return crypto.createCipheriv('aes-256-cbc', crypto.randomBytes(32), crypto.randomBytes(16)).setAutoPadding(true).setEncoding('base64') + return crypto + .createCipheriv('aes-256-cbc', crypto.randomBytes(32), crypto.randomBytes(16)) + .setAutoPadding(true) + .setEncoding('base64'); } - } + }, }); await asar.createPackageFromFiles('bin', 'tmp/foo.asar', ['bin/asar.js']); const stat = fs.statSync('bin/asar.js'); diff --git a/test/pickle-spec.js b/test/pickle-spec.js index 46965640..2bf1f04a 100644 --- a/test/pickle-spec.js +++ b/test/pickle-spec.js @@ -1,12 +1,12 @@ -const assert = require('assert') -const Pickle = require('../lib/pickle') +const assert = require('assert'); +const { Pickle } = require('../lib/pickle'); describe('Pickle', function () { it('supports multi-byte characters', function () { - const write = Pickle.createEmpty() - write.writeString('女の子.txt') + const write = Pickle.createEmpty(); + write.writeString('女の子.txt'); - const read = Pickle.createFromBuffer(write.toBuffer()) - assert.strictEqual(read.createIterator().readString(), '女の子.txt') - }) -}) + const read = Pickle.createFromBuffer(write.toBuffer()); + assert.strictEqual(read.createIterator().readString(), '女の子.txt'); + }); +}); diff --git a/test/util/compareDirectories.js b/test/util/compareDirectories.js index 19a98edd..84fa1702 100644 --- a/test/util/compareDirectories.js +++ b/test/util/compareDirectories.js @@ -1,49 +1,60 @@ -'use strict' +'use strict'; -const _ = require('lodash') -const fs = require('../../lib/wrapped-fs') -const path = require('path') -const crawlFilesystem = require('../../lib/crawlfs') +const _ = require('lodash'); +const fs = require('../../lib/wrapped-fs').default; +const path = require('path'); +const crawlFilesystem = require('../../lib/crawlfs').crawl; module.exports = async function (dirA, dirB) { - const [[pathsA, metadataA], [pathsB, metadataB]] = await Promise.all([crawlFilesystem(dirA, null), crawlFilesystem(dirB, null)]) - const relativeA = _.map(pathsA, pathAItem => path.relative(dirA, pathAItem)) - const relativeB = _.map(pathsB, pathBItem => path.relative(dirB, pathBItem)) - const onlyInA = _.difference(relativeA, relativeB) - const onlyInB = _.difference(relativeB, relativeA) - const inBoth = _.intersection(pathsA, pathsB) - const differentFiles = [] - const errorMsgBuilder = [] + const [[pathsA, metadataA], [pathsB, metadataB]] = await Promise.all([ + crawlFilesystem(dirA, null), + crawlFilesystem(dirB, null), + ]); + const relativeA = _.map(pathsA, (pathAItem) => path.relative(dirA, pathAItem)); + const relativeB = _.map(pathsB, (pathBItem) => path.relative(dirB, pathBItem)); + const onlyInA = _.difference(relativeA, relativeB); + const onlyInB = _.difference(relativeB, relativeA); + const inBoth = _.intersection(pathsA, pathsB); + const differentFiles = []; + const errorMsgBuilder = []; for (const filename of inBoth) { - const typeA = metadataA[filename].type - const typeB = metadataB[filename].type + const typeA = metadataA[filename].type; + const typeB = metadataB[filename].type; // skip if both are directories if (typeA === 'directory' && typeB === 'directory') { - continue + continue; } // something is wrong if the types don't match up if (typeA !== typeB) { - differentFiles.push(filename) - continue + 
differentFiles.push(filename);
+      continue;
+    }
-    const [fileContentA, fileContentB] = Promise.all([dirA, dirB].map(dir => fs.readFile(path.join(dir, filename), 'utf8')))
+    const [fileContentA, fileContentB] = await Promise.all(
+      [dirA, dirB].map((dir) => fs.readFile(path.join(dir, filename), 'utf8')),
+    );
     if (fileContentA !== fileContentB) {
-      differentFiles.push(filename)
+      differentFiles.push(filename);
     }
   }
   if (onlyInA.length) {
-    errorMsgBuilder.push(`\tEntries only in '${dirA}':`)
-    for (const file of onlyInA) { errorMsgBuilder.push(`\t ${file}`) }
+    errorMsgBuilder.push(`\tEntries only in '${dirA}':`);
+    for (const file of onlyInA) {
+      errorMsgBuilder.push(`\t ${file}`);
+    }
   }
   if (onlyInB.length) {
-    errorMsgBuilder.push(`\tEntries only in '${dirB}':`)
-    for (const file of onlyInB) { errorMsgBuilder.push(`\t ${file}`) }
+    errorMsgBuilder.push(`\tEntries only in '${dirB}':`);
+    for (const file of onlyInB) {
+      errorMsgBuilder.push(`\t ${file}`);
+    }
   }
   if (differentFiles.length) {
-    errorMsgBuilder.push('\tDifferent file content:')
-    for (const file of differentFiles) { errorMsgBuilder.push(`\t ${file}`) }
+    errorMsgBuilder.push('\tDifferent file content:');
+    for (const file of differentFiles) {
+      errorMsgBuilder.push(`\t ${file}`);
+    }
   }
   if (errorMsgBuilder.length) {
-    throw new Error('\n' + errorMsgBuilder.join('\n'))
+    throw new Error('\n' + errorMsgBuilder.join('\n'));
   }
-}
+};
diff --git a/test/util/compareFileLists.js b/test/util/compareFileLists.js
index 7b35eadb..95ea6542 100644
--- a/test/util/compareFileLists.js
+++ b/test/util/compareFileLists.js
@@ -1,12 +1,12 @@
-'use strict'
+'use strict';

-const assert = require('assert')
-const os = require('os')
+const assert = require('assert');
+const os = require('os');

-module.exports = function compareFileLists (actual, expected) {
+module.exports = function compareFileLists(actual, expected) {
   // on windows replace slashes with backslashes and crlf with lf
   if (os.platform() === 'win32') {
-    expected = expected.replace(/\//g, '\\').replace(/\r\n/g, '\n')
+    expected = expected.replace(/\//g, '\\').replace(/\r\n/g, '\n');
   }
-  assert.strictEqual(actual, expected)
-}
+  assert.strictEqual(actual, expected);
+};
diff --git a/test/util/compareFiles.js b/test/util/compareFiles.js
index d80d9aae..82bfb2d3 100644
--- a/test/util/compareFiles.js
+++ b/test/util/compareFiles.js
@@ -1,25 +1,34 @@
-'use strict'
+'use strict';

-const assert = require('assert')
-const fs = require('../../lib/wrapped-fs')
+const assert = require('assert');
+const fs = require('../../lib/wrapped-fs').default;

 module.exports = async function (actualFilePath, expectedFilePath) {
   if (process.env.ELECTRON_ASAR_SPEC_UPDATE) {
-    await fs.writeFile(expectedFilePath, await fs.readFile(actualFilePath))
+    await fs.writeFile(expectedFilePath, await fs.readFile(actualFilePath));
   }
-  const [actualFileContent, expectedFileContent] = await Promise.all([fs.readFile(actualFilePath, 'utf8'), fs.readFile(expectedFilePath, 'utf8')])
-  assert.strictEqual(actualFileContent, expectedFileContent)
+  const [actualFileContent, expectedFileContent] = await Promise.all([
+    fs.readFile(actualFilePath, 'utf8'),
+    fs.readFile(expectedFilePath, 'utf8'),
+  ]);
+  assert.strictEqual(actualFileContent, expectedFileContent);

-  const [actualIsSymlink, expectedIsSymlink] = [isSymbolicLinkSync(actualFilePath), isSymbolicLinkSync(expectedFilePath)]
-  assert.strictEqual(actualIsSymlink, expectedIsSymlink)
+  const [actualIsSymlink, expectedIsSymlink] = [
+    isSymbolicLinkSync(actualFilePath),
+
isSymbolicLinkSync(expectedFilePath), + ]; + assert.strictEqual(actualIsSymlink, expectedIsSymlink); if (actualIsSymlink && expectedIsSymlink) { - const [actualSymlinkPointer, expectedSymlinkPointer] = [fs.readlinkSync(actualFilePath), fs.readlinkSync(expectedFilePath)] - assert.strictEqual(actualSymlinkPointer, expectedSymlinkPointer) + const [actualSymlinkPointer, expectedSymlinkPointer] = [ + fs.readlinkSync(actualFilePath), + fs.readlinkSync(expectedFilePath), + ]; + assert.strictEqual(actualSymlinkPointer, expectedSymlinkPointer); } -} +}; -function isSymbolicLinkSync (path) { - const stats = fs.lstatSync(path) - return stats.isSymbolicLink() +function isSymbolicLinkSync(path) { + const stats = fs.lstatSync(path); + return stats.isSymbolicLink(); } diff --git a/test/util/transformStream.js b/test/util/transformStream.js index f125b194..03896a2b 100644 --- a/test/util/transformStream.js +++ b/test/util/transformStream.js @@ -1,27 +1,27 @@ -'use strict' -const Transform = require('stream').Transform -const basename = require('path').basename +'use strict'; +const Transform = require('stream').Transform; +const basename = require('path').basename; class Reverser extends Transform { - constructor () { - super() - this._data = '' + constructor() { + super(); + this._data = ''; } - _transform (buf, enc, cb) { - this._data += buf - return cb() + _transform(buf, enc, cb) { + this._data += buf; + return cb(); } - _flush (cb) { - const txt = this._data.toString().split('').reverse().join('') - this.push(txt) - return cb() + _flush(cb) { + const txt = this._data.toString().split('').reverse().join(''); + this.push(txt); + return cb(); } } module.exports = function (filename) { if (basename(filename) === 'file0.txt') { - return new Reverser() + return new Reverser(); } -} +}; diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..0833e888 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "es2017", + "lib": [ + "es2017" + ], + "sourceMap": true, + "strict": true, + "outDir": "lib", + "types": [ + "node" + ], + "allowSyntheticDefaultImports": true, + "moduleResolution": "node", + "declaration": true, + "noImplicitAny": true, + "strictNullChecks": true + }, + "include": [ + "src" + ] +} diff --git a/yarn.lock b/yarn.lock index 4af55b37..764c9147 100644 --- a/yarn.lock +++ b/yarn.lock @@ -99,6 +99,14 @@ resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.0.tgz#5fb2e536c1ae9bf35366eed879e827fa59ca41c2" integrity sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ== +"@types/glob@^7.1.0": + version "7.2.0" + resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.2.0.tgz#bc1b5bf3aa92f25bd5dd39f35c57361bdce5b2eb" + integrity sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA== + dependencies: + "@types/minimatch" "*" + "@types/node" "*" + "@types/http-cache-semantics@*": version "4.0.1" resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz#0ea7b61496902b95890dc4c3a116b60cb8dae812" @@ -116,6 +124,16 @@ dependencies: "@types/node" "*" +"@types/minimatch@*": + version "5.1.2" + resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" + integrity sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA== + +"@types/minimatch@^3.0.5": + version "3.0.5" + resolved 
"https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.5.tgz#1001cc5e6a3704b83c236027e77f2f58ea010f40" + integrity sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ== + "@types/minimist@^1.2.0": version "1.2.2" resolved "https://registry.yarnpkg.com/@types/minimist/-/minimist-1.2.2.tgz#ee771e2ba4b3dc5b372935d549fd9617bf345b8c" @@ -126,6 +144,11 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-18.11.10.tgz#4c64759f3c2343b7e6c4b9caf761c7a3a05cee34" integrity sha512-juG3RWMBOqcOuXC643OAdSA525V44cVgGV6dUDuiFtss+8Fk5x1hI93Rsld43VeJVIeqlP9I7Fn9/qaVqoEAuQ== +"@types/node@^12.0.0": + version "12.20.55" + resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.55.tgz#c329cbd434c42164f846b909bd6f85b5537f6240" + integrity sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ== + "@types/node@^16.11.26": version "16.18.4" resolved "https://registry.yarnpkg.com/@types/node/-/node-16.18.4.tgz#712ba61b4caf091fc6490301b1888356638c17bd" @@ -2239,6 +2262,11 @@ prelude-ls@~1.1.2: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== +prettier@^3.3.3: + version "3.3.3" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-3.3.3.tgz#30c54fe0be0d8d12e6ae61dbb10109ea00d53105" + integrity sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew== + progress@^2.0.0, progress@^2.0.3: version "2.0.3" resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" @@ -2809,6 +2837,11 @@ type-fest@^0.8.1: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== +typescript@^5.5.4: + version "5.5.4" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.4.tgz#d9852d6c82bad2d2eda4fd74a5762a8f5909e9ba" + integrity sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q== + unbox-primitive@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e"