Fix arrow-parens eslint errors #261

Merged: 5 commits, Mar 19, 2018
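This PR enforces ESLint's arrow-parens rule, which, with the "always" option, requires parentheses around arrow-function parameters even when there is only one, so `t => {}` becomes `(t) => {}`. A minimal sketch of the relevant rule configuration, assuming a typical .eslintrc.js; the repository's actual ESLint config is not part of this diff:

```js
// .eslintrc.js -- illustrative sketch only; not the repository's actual config
module.exports = {
  rules: {
    // Require parentheses around arrow-function arguments: `(t) => {}` rather than `t => {}`
    'arrow-parens': ['error', 'always']
  }
};
```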
Changes from all commits
2 changes: 1 addition & 1 deletion .eslint-ratchet-high-water-mark
@@ -1 +1 @@
- 1874
+ 1675
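The ratchet high-water mark drops from 1874 to 1675, so 199 fewer outstanding lint problems are tolerated once this PR lands. The project's actual ratchet script is not part of this diff; a hypothetical sketch of how such a check might compare the current ESLint problem count against the stored mark:

```js
// ratchet-check.js -- hypothetical sketch; the project's real ratchet script is not shown here
const fs = require('fs');

const markFile = '.eslint-ratchet-high-water-mark';
const highWaterMark = parseInt(fs.readFileSync(markFile, 'utf8'), 10);
const currentCount = Number(process.argv[2]); // total problems reported by an ESLint run

if (currentCount > highWaterMark) {
  // More lint problems than the recorded ceiling: fail the build
  console.error(`Lint problems increased: ${currentCount} > ${highWaterMark}`);
  process.exit(1);
} else if (currentCount < highWaterMark) {
  // Fewer problems: ratchet the ceiling down so the improvement sticks
  fs.writeFileSync(markFile, `${currentCount}\n`);
}
```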
@@ -3,6 +3,6 @@ const test = require('ava');
const filter = require('../index');

// Nothing really to test here - just a placeholder for future
- test('Dummy test', t => {
+ test('Dummy test', (t) => {
t.is(1, 1);
});
2 changes: 1 addition & 1 deletion cumulus/tasks/delete-pdr-ftp/test/delete-pdr-ftp-spec.js
@@ -1,6 +1,6 @@
'use strict';
const test = require('ava');

- test('TODO - add test', t => {
+ test('TODO - add test', (t) => {
t.is(1, 1);
});
2 changes: 1 addition & 1 deletion cumulus/tasks/discover-cmr-granules/index.js
@@ -62,7 +62,7 @@ module.exports = class DiscoverCmrGranulesTask extends Task {
const filtered = this.excludeFiltered(messages, this.config.filtered_granule_keys);

// Write the messages to a DynamoDB table so we can track ingest failures
- const messagePromises = filtered.map(msg => {
+ const messagePromises = filtered.map((msg) => {
const { granuleId, version, collection } = msg.meta;
const params = {
TableName: this.config.ingest_tracking_table,
4 changes: 2 additions & 2 deletions cumulus/tasks/discover-http-tiles/index.js
@@ -48,12 +48,12 @@ module.exports = class DiscoverHttpTilesTask extends Task {
if (granuleFilter) {
if (granuleFilter.filtered_granule_keys) {
const keySet = new Set(granuleFilter.filtered_granule_keys);
- filterFn = msg => keySet.has(msg.meta.key);
+ filterFn = (msg) => keySet.has(msg.meta.key);
}
else if (granuleFilter.filtered_granule_key_start) {
const start = granuleFilter.filtered_granule_key_start;
const end = granuleFilter.filtered_granule_key_end;
- filterFn = msg => msg.meta.key >= start && msg.meta.key <= end;
+ filterFn = (msg) => msg.meta.key >= start && msg.meta.key <= end;
}
}
return messages.filter(filterFn);
2 changes: 1 addition & 1 deletion cumulus/tasks/discover-pdr/index.js
@@ -53,7 +53,7 @@ module.exports = class DiscoverPdr extends Task {
// Get the list of PDRs
const pdrList = await pdrMod.getPdrList(client, folder, bucket, keyPrefix);

- const S3UploadPromises = pdrList.map(async pdrEntry => {
+ const S3UploadPromises = pdrList.map(async (pdrEntry) => {
const fileName = pdrEntry.name;
log.info(`FILE: ${fileName}`);
// Get the file contents
2 changes: 1 addition & 1 deletion cumulus/tasks/discover-pdr/pdr.js
@@ -17,7 +17,7 @@ exports.getPdrList = async (client, folder, bucket, keyPrefix) => {
const pdrs = await listSync(folder);

// Check to see which files we already have in S3
- const fileExistsPromises = pdrs.map(async pdr => {
+ const fileExistsPromises = pdrs.map(async (pdr) => {
const fileName = pdr.name;
return S3.fileExists(bucket, `${keyPrefix}/${fileName}`);
});
2 changes: 1 addition & 1 deletion cumulus/tasks/discover-pdr/test/discover-pdr-spec.js
@@ -1,6 +1,6 @@
'use strict';
const test = require('ava');

- test('TODO - add test', t => {
+ test('TODO - add test', (t) => {
t.is(1, 1);
});
@@ -1,6 +1,6 @@
'use strict';
const test = require('ava');

- test('TODO - add test', t => {
+ test('TODO - add test', (t) => {
t.is(1, 1);
});
2 changes: 1 addition & 1 deletion cumulus/tasks/filter-payload/test/filter-payload-spec.js
@@ -3,6 +3,6 @@ const test = require('ava');
const filter = require('../index');

// Nothing really to test here - just a placeholder for future
- test('Dummy test', t => {
+ test('Dummy test', (t) => {
t.is(1, 1);
});
2 changes: 1 addition & 1 deletion cumulus/tasks/generate-pan/pan.js
@@ -22,7 +22,7 @@ exports.generatePan = (files, timeStamp) => {

pan += `NO_OF_FILES = ${files.length};\n`;

- files.forEach(file => {
+ files.forEach((file) => {
const fileName = file.source.url.substring(file.source.url.lastIndexOf('/') + 1);
const filePath = file.source.url.substring(file.source.url.lastIndexOf(':') + 3);
const fileDirectory = path.dirname(filePath);
10 changes: 5 additions & 5 deletions cumulus/tasks/generate-pan/test/generate-pan-spec.js
@@ -11,15 +11,15 @@ const shortPan = (dateTime) =>
DISPOSITION = "SUCCESSFUL";
TIME_STAMP = ${timeStamp(dateTime)};`;

- test('generates a short PAN if all files succeed', t => {
+ test('generates a short PAN if all files succeed', (t) => {
const input = allSuccessFixture.input;
const now = new Date();
const timeStampStr = timeStamp(now);
const result = pan.generatePan(input, timeStampStr);
t.is(result, shortPan(now));
});

- test('generates a long pan with an entry for the number of files (NO_OF_FILES)', t => {
+ test('generates a long pan with an entry for the number of files (NO_OF_FILES)', (t) => {
const input = missingFileFixture.input;
const now = new Date();
const timeStampStr = timeStamp(now);
@@ -28,7 +28,7 @@ test('generates a long pan with an entry for the number of files (NO_OF_FILES)',
t.is(parseInt(numFilesEntry, 10), input.length);
});

- test('generates a disposition message for each file in a long PAN', t => {
+ test('generates a disposition message for each file in a long PAN', (t) => {
const input = missingFileFixture.input;
const now = new Date();
const timeStampStr = timeStamp(now);
@@ -37,7 +37,7 @@ test('generates a disposition message for each file in a long PAN', t => {
t.is(dispositions.length, 2);
});

- test('generates a timestamp for each file entry', t => {
+ test('generates a timestamp for each file entry', (t) => {
const input = missingFileFixture.input;
const now = new Date();
const timeStampStr = timeStamp(now);
@@ -48,7 +48,7 @@ test('generates a timestamp for each file entry', t => {
t.is(timeStampCount, input.length);
});

- test('generates an error message for each missing file', t => {
+ test('generates an error message for each missing file', (t) => {
const input = missingFileFixture.input;
const now = new Date();
const timeStampStr = timeStamp(now);
2 changes: 1 addition & 1 deletion cumulus/tasks/generate-pdr-file-list/pdr.js
@@ -11,7 +11,7 @@ const fileSpecFields =
* @return {PVLRoot} An object representing a PDR
* @throws {Error} Throws an Error if parsing fails
*/
- exports.parsePdr = pdr => pvl.pvlToJS(pdr);
+ exports.parsePdr = (pdr) => pvl.pvlToJS(pdr);

/**
* Convert a PVL FILE_SPEC entry into an object with enough information to download the
@@ -4,12 +4,12 @@ const pdr = require('../pdr');

const goodFileFixture = require('./fixtures/good-pdr-fixture');

- test('pdrToFileList() - generates an entry for each file', t => {
+ test('pdrToFileList() - generates an entry for each file', (t) => {
const files = pdr.pdrToFileList(goodFileFixture.input, 'localhost', 21);
t.is(files.length, 3);
});

- test('fileSpecToFileEntry() - generates proper fields', t => {
+ test('fileSpecToFileEntry() - generates proper fields', (t) => {
const pdrObj = pdr.parsePdr(goodFileFixture.input);
const fileGroups = pdrObj.objects('FILE_GROUP');
const host = 'localhost';
2 changes: 1 addition & 1 deletion cumulus/tasks/generate-pdrd/pdrd.js
@@ -12,7 +12,7 @@ ${topLevelErrors[0]}`;
pdrd = 'MESSAGE_TYPE = LONGPDRD;\n';
pdrd += `NO_FILE_GRPS = ${fileGroupErrors.length}\n`;

- fileGroupErrors.forEach(errors => {
+ fileGroupErrors.forEach((errors) => {
if (errors.length > 0) {
pdrd += errors[0];
}
8 changes: 4 additions & 4 deletions cumulus/tasks/generate-pdrd/test/generate-pdrd-spec.js
@@ -7,7 +7,7 @@ const badFileEntryFixture = require('./fixtures/bad-file-entry-fixture');
const invalidPvlFixture = require('./fixtures/invalid-pvl-fixture');
const missingFieldsFixture = require('./fixtures/missing-fields-fixture');

- test('generatePdrd() - invalid PVL gets a short PDRD', t => {
+ test('generatePdrd() - invalid PVL gets a short PDRD', (t) => {
const pdrdStr = pdrd.generatePdrd(
invalidPvlFixture.topLevelErrors,
invalidPvlFixture.fileGroupErrors
@@ -20,7 +20,7 @@ test('generatePdrd() - invalid PVL gets a short PDRD', t => {
t.is(errMsg, 'INVALID PVL STATEMENT');
});

- test('generatePdrd() - missing TOTAL_FILE_COUNT gets a short PDRD', t => {
+ test('generatePdrd() - missing TOTAL_FILE_COUNT gets a short PDRD', (t) => {
const pdrdStr = pdrd.generatePdrd(
missingFieldsFixture.invalidFileCount.input.topLevelErrors,
missingFieldsFixture.invalidFileCount.input.fileGroupErrors
@@ -52,7 +52,7 @@ const testMacro = (t, fixture) => {
t.is(errMsg, fixture.error);
};

- test('generatePdrd() - missing file fields gets a long PDRD', t => {
- badFileEntryFixture.fixtures.forEach(fixture => testMacro(t, fixture));
+ test('generatePdrd() - missing file fields gets a long PDRD', (t) => {
+ badFileEntryFixture.fixtures.forEach((fixture) => testMacro(t, fixture));
});

@@ -3,7 +3,7 @@ const test = require('ava');
const helpers = require('@cumulus/common/test-helpers');
const TriggerProcessPdrs = require('../index');

- test('trigger process PDRs', async t => {
+ test('trigger process PDRs', async (t) => {
const payload = [
{ s3_key: '123' },
{ s3_key: 'ABC' }
4 changes: 2 additions & 2 deletions cumulus/tasks/validate-archives/archive-validations.js
@@ -39,14 +39,14 @@ exports.validateArchiveContents = (archiveDirPath) => {
// out here.
const unarchivedFiles = fs
.readdirSync(archiveDirPath)
- .filter(fileName => !fileName.startsWith('._'));
+ .filter((fileName) => !fileName.startsWith('._'));

log.debug(`UNARCHIVED FILES: ${JSON.stringify(unarchivedFiles)}`);

let hasImage = false;
let hasWorldFile = false;
let hasMetadata = false;
- unarchivedFiles.forEach(filePath => {
+ unarchivedFiles.forEach((filePath) => {
log.debug(filePath);
const ext = path.extname(filePath).toUpperCase();
if (ext === '.JPG' || ext === '.PNG') hasImage = true;
14 changes: 7 additions & 7 deletions cumulus/tasks/validate-archives/index.js
@@ -20,7 +20,7 @@ const decompress = promisify(tarGz.decompress);
* @param {string} archiveFilePath
* @return {string} The un-archive directory
*/
- const archiveDir = archiveFilePath => {
+ const archiveDir = (archiveFilePath) => {
// archive files must be .tgz or .tar.gz files
const segments = archiveFilePath.match(/(.*?)(\.tar\.gz|\.tgz)/i);
return segments[1];
@@ -33,7 +33,7 @@ const archiveDir = archiveFilePath => {
* @param {Object} fileAttrs An object that contains attributes about the archive file
*/
const uploadArchiveFilesToS3 = async (unarchivedFiles, archiveDirPath, fileAttrs) => {
- const fullFilePaths = unarchivedFiles.map(fileName => path.join(archiveDirPath, fileName));
+ const fullFilePaths = unarchivedFiles.map((fileName) => path.join(archiveDirPath, fileName));
const s3DirKey = archiveDir(fileAttrs.target.key);
return aws.uploadS3Files(fullFilePaths, fileAttrs.target.bucket, s3DirKey);
};
@@ -59,7 +59,7 @@ const extractArchive = async (tmpDir, archiveFilePath) => {
* @param {string} archiveDirPath The path where the files were extracted
*/
const deleteExpandedFiles = async (unarchivedFiles, archiveDirPath) => {
- unarchivedFiles.forEach(fileName => {
+ unarchivedFiles.forEach((fileName) => {
const fullPath = path.join(archiveDirPath, fileName);
fs.unlinkSync(fullPath);
});
@@ -88,8 +88,8 @@ module.exports = class ValidateArchives extends Task {

// Only files that were successfully downloaded by the provider gateway will be processed
const archiveFiles = files
- .filter(file => file.success)
- .map(file => [file.target.bucket, file.target.key]);
+ .filter((file) => file.success)
+ .map((file) => [file.target.bucket, file.target.key]);

const downloadRequest = archiveFiles.map(([s3Bucket, s3Key]) => ({
Bucket: s3Bucket,
@@ -107,7 +107,7 @@ module.exports = class ValidateArchives extends Task {

// Compute the dispositions (status) for each file downloaded successfully by
// the provider gateway
- const dispositionPromises = files.map(async fileAttrs => {
+ const dispositionPromises = files.map(async (fileAttrs) => {
// Only process archives that were downloaded successfully by the provider gateway
if (fileAttrs.success) {
const archiveFileName = path.basename(fileAttrs.target.key);
@@ -152,7 +152,7 @@ module.exports = class ValidateArchives extends Task {
log.info('S3 FILES:');
log.info(JSON.stringify(s3Files));

- const imgFiles = s3Files.map(s3File => ({ Bucket: s3File.bucket, Key: s3File.key }));
+ const imgFiles = s3Files.map((s3File) => ({ Bucket: s3File.bucket, Key: s3File.key }));

if (imgFiles.length > 0) {
imageSources.push({ archive: archiveFileName, images: imgFiles });
2 changes: 1 addition & 1 deletion cumulus/tasks/validate-pdr/index.js
@@ -14,7 +14,7 @@ function fetchPdr(bucket, key) {
}

function isPdrValid(topLevelErrors, fileGroupErrors) {
- return topLevelErrors.length > 0 || fileGroupErrors.some(errors => errors.length > 0);
+ return topLevelErrors.length > 0 || fileGroupErrors.some((errors) => errors.length > 0);
}

async function handler(event, context, callback) {
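Changes of this shape are mechanical, and arrow-parens is an autofixable rule, so edits like the ones above can usually be generated with `eslint --fix` rather than typed by hand. A sketch using ESLint's Node API (the `ESLint` class from newer ESLint releases; the glob pattern is only an example, not taken from this repository):

```js
// fix-arrow-parens.js -- sketch of running ESLint's autofixer programmatically
const { ESLint } = require('eslint');

(async () => {
  const eslint = new ESLint({ fix: true });
  const results = await eslint.lintFiles(['cumulus/**/*.js']); // example glob
  await ESLint.outputFixes(results); // write the fixed sources back to disk
})();
```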