Commit: Read only the first 65536 bytes of a file or a chunk (#9)
bjornstar authored Jul 6, 2024
1 parent 066350f commit 182915f
Showing 2 changed files with 40 additions and 14 deletions.
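The change in a sentence: every detection path now inspects at most the first 65,536 bytes of input (the new MAX_BUFFER constant) before deciding a JPEG is not progressive, matching the updated comment below that the metadata section maxes out at 65536 bytes. A minimal usage sketch of the public API exercised by this diff (the import specifier is assumed from the package name, and the file name is a placeholder):

import fs from 'node:fs';
import isProgressive from 'is-progressive';

// Synchronous buffer check: scans the given bytes for SOF2 (0xFF 0xC2).
console.log(isProgressive.buffer(fs.readFileSync('photo.jpg')));

// Stream check: after this commit, it gives up once 65,536 bytes have been seen.
console.log(await isProgressive.stream(fs.createReadStream('photo.jpg')));

// File checks: both now read at most the first 65,536 bytes of the file.
console.log(await isProgressive.file('photo.jpg'));
console.log(isProgressive.fileSync('photo.jpg'));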
44 changes: 30 additions & 14 deletions index.js
@@ -5,6 +5,7 @@ import {includes} from 'uint8array-extras';
 // https://en.wikipedia.org/wiki/JPEG
 // SOF2 [0xFF, 0xC2] = Start Of Frame (Progressive DCT)
 const SOF2 = new Uint8Array([0xFF, 0xC2]);
+const MAX_BUFFER = 65_536;

 const fromBuffer = buffer => includes(buffer, SOF2);

@@ -14,47 +15,62 @@ isProgressive.buffer = fromBuffer;

 isProgressive.stream = readableStream => new Promise((resolve, reject) => {
 	// The first byte is for the previous last byte if we have multiple data events.
-	const buffer = new Uint8Array(1 + readableStream.readableHighWaterMark);
+	const buffer = new Uint8Array(1 + MAX_BUFFER);
+	let bytesRead = 0;

-	const end = () => {
+	function end() {
 		resolve(false);
-	};
+	}

+	function cleanup(value) {
+		resolve(value);
+		readableStream.removeListener('data', onData);
+		readableStream.removeListener('end', end);
+		readableStream.removeListener('error', reject);
+	}
+
+	function onData(data) {
+		if (bytesRead >= MAX_BUFFER) {
+			return cleanup(false);
+		}
+
-	readableStream.on('data', data => {
-		buffer.set(data, 1);
+		buffer.set(data.subarray(0, MAX_BUFFER), 1);

 		if (fromBuffer(buffer)) {
-			resolve(true);
-			readableStream.removeListener('end', end);
+			return cleanup(true);
 		}

+		bytesRead += data.byteLength;
 		buffer.set(data.at(-1));
-	});
+	}

-	readableStream.on('error', reject);
+	readableStream.on('data', onData);
 	readableStream.on('end', end);
+	readableStream.on('error', reject);
 });

-// The metadata section has a maximum size of 65535 bytes
-isProgressive.file = async filePath => fromBuffer(await readChunk(filePath, {length: 65_535}));
+// The metadata section has a maximum size of 65536 bytes
+isProgressive.file = async filePath => fromBuffer(await readChunk(filePath, {length: MAX_BUFFER}));

 isProgressive.fileSync = filepath => {
-	// We read two bytes at a time here as it usually appears early in the file and reading 65535 would be wasteful
+	// We read two bytes at a time here as it usually appears early in the file and reading 65536 would be wasteful
 	const BUFFER_LENGTH = 2;
 	const buffer = new Uint8Array(1 + BUFFER_LENGTH);
 	const read = fs.openSync(filepath, 'r');
 	let bytesRead = BUFFER_LENGTH;
 	let isProgressive = false;
+	let position = 0;

-	while (bytesRead !== 0) {
-		bytesRead = fs.readSync(read, buffer, 1, BUFFER_LENGTH);
+	while (bytesRead !== 0 && position < MAX_BUFFER) {
+		bytesRead = fs.readSync(read, buffer, 1, BUFFER_LENGTH, position);

 		isProgressive = fromBuffer(buffer);

 		if (isProgressive) {
 			break;
 		}

+		position += bytesRead;
 		buffer.set(buffer.at(-1), 0);
 	}

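A minimal sketch of the behavior this buys, assuming the new index.js above (the generator and chunk sizes are illustrative, not from the commit): .stream() now resolves false once MAX_BUFFER bytes have been inspected, instead of scanning however much data the stream delivers:

import {Readable} from 'node:stream';
import isProgressive from './index.js';

// 64 MiB of zero bytes: definitely not a progressive JPEG.
function * zeroChunks() {
	for (let index = 0; index < 1024; index++) {
		yield new Uint8Array(64 * 1024); // 64 KiB per chunk, all zeros
	}
}

// Resolves false on the second chunk: bytesRead reaches MAX_BUFFER after the
// first 64 KiB, so onData short-circuits via cleanup(false) and the remaining
// data is never inspected.
console.log(await isProgressive.stream(Readable.from(zeroChunks())));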
10 changes: 10 additions & 0 deletions test.js
@@ -1,4 +1,5 @@
 import fs from 'node:fs';
+import {Readable} from 'node:stream';
 import test from 'ava';
 import {readChunkSync} from 'read-chunk';
 import isProgressive from './index.js';
@@ -21,6 +22,15 @@ test('.stream()', async t => {
 	t.true(await isProgressive.stream(fs.createReadStream(getPath('kitten-progressive'))));
 });

+// Discovered in the tests for is-progressive-cli
+test('.stream() - the whole file', async t => {
+	t.true(await isProgressive.stream(Readable.from(fs.readFileSync(getPath('progressive')))));
+	t.true(await isProgressive.stream(Readable.from(fs.readFileSync(getPath('curious-exif')))));
+	t.false(await isProgressive.stream(Readable.from(fs.readFileSync(getPath('baseline')))));
+	t.false(await isProgressive.stream(Readable.from(fs.readFileSync(getPath('kitten')))));
+	t.true(await isProgressive.stream(Readable.from(fs.readFileSync(getPath('kitten-progressive')))));
+});
+
 test('.file()', async t => {
 	t.true(await isProgressive.file(getPath('progressive')));
 	t.true(await isProgressive.file(getPath('curious-exif')));
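Why the whole-file stream tests matter: Readable.from(buffer) does not iterate a Buffer; it emits the entire thing as a single 'data' event (a documented Node.js behavior). Before this commit, the scratch Uint8Array was sized from readableStream.readableHighWaterMark, so a single oversized chunk could make buffer.set throw a RangeError; the new code caps the copy with data.subarray(0, MAX_BUFFER) instead. A minimal sketch of the single-chunk behavior (the byte count is illustrative):

import {Readable} from 'node:stream';

// One Buffer in, exactly one 'data' event out: the chunk is not split up.
Readable.from(Buffer.alloc(1_000_000))
	.on('data', chunk => console.log(chunk.length)); // 1000000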
