diff --git a/package-lock.json b/package-lock.json
index 4c1136a..a7c3ed8 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -22,7 +22,7 @@
         "valid-url": "1.0.9"
       },
       "devDependencies": {
-        "@sasjs/core": "3.10.0",
+        "@sasjs/core": "4.41.0",
         "@types/cli-table": "0.3.0",
         "@types/find": "0.2.1",
         "@types/jest": "27.4.0",
@@ -1006,15 +1006,76 @@
         "url": "https://github.com/chalk/chalk?sponsor=1"
       }
     },
-    "node_modules/@sasjs/core": {
-      "version": "3.10.0",
-      "resolved": "https://registry.npmjs.org/@sasjs/core/-/core-3.10.0.tgz",
-      "integrity": "sha512-lgLxDYpIvwSrXFaUaTFCR0KXHQEc5QIOL4DU87TvBHEUUAWNQHzuVQWkavLtW5hbvLGnPXnyvspzoSzmBojXzg==",
+    "node_modules/@jridgewell/gen-mapping": {
+      "version": "0.3.2",
+      "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz",
+      "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==",
+      "dev": true,
+      "peer": true,
+      "dependencies": {
+        "@jridgewell/set-array": "^1.0.1",
+        "@jridgewell/sourcemap-codec": "^1.4.10",
+        "@jridgewell/trace-mapping": "^0.3.9"
+      },
+      "engines": {
+        "node": ">=6.0.0"
+      }
+    },
+    "node_modules/@jridgewell/resolve-uri": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz",
+      "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==",
+      "dev": true,
+      "peer": true,
+      "engines": {
+        "node": ">=6.0.0"
+      }
+    },
+    "node_modules/@jridgewell/set-array": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz",
+      "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==",
+      "dev": true,
+      "peer": true,
+      "engines": {
+        "node": ">=6.0.0"
+      }
+    },
+    "node_modules/@jridgewell/source-map": {
+      "version": "0.3.2",
+      "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz",
+      "integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==",
+      "dev": true,
+      "peer": true,
+      "dependencies": {
+        "@jridgewell/gen-mapping": "^0.3.0",
+        "@jridgewell/trace-mapping": "^0.3.9"
+      }
+    },
+    "node_modules/@jridgewell/sourcemap-codec": {
+      "version": "1.4.14",
+      "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz",
+      "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==",
+      "dev": true,
+      "peer": true
+    },
+    "node_modules/@jridgewell/trace-mapping": {
+      "version": "0.3.17",
+      "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz",
+      "integrity": "sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==",
       "dev": true,
+      "peer": true,
       "dependencies": {
-        "ts-loader": "^9.2.6"
+        "@jridgewell/resolve-uri": "3.1.0",
+        "@jridgewell/sourcemap-codec": "1.4.14"
       }
     },
+    "node_modules/@sasjs/core": {
+      "version": "4.41.0",
+      "resolved": "https://registry.npmjs.org/@sasjs/core/-/core-4.41.0.tgz",
+      "integrity": "sha512-Cw9doOvCk363j9mDhp6uZMngZLHYnMcYTBBR9lJzAz9RgDYSosz0vbL0zV7TztVJv+38YPAfwZDHPYcZtjjywQ==",
+      "dev": true
+    },
     "node_modules/@sinonjs/commons": {
       "version": "1.8.3",
       "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz",
@@ -4113,9 +4174,9 @@
       }
     },
     "node_modules/minimatch": {
-      "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
-      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -4762,14 +4823,15 @@
       }
     },
     "node_modules/terser": {
-      "version": "5.10.0",
-      "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz",
-      "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==",
+      "version": "5.15.1",
+      "resolved": "https://registry.npmjs.org/terser/-/terser-5.15.1.tgz",
+      "integrity": "sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw==",
       "dev": true,
       "peer": true,
       "dependencies": {
+        "@jridgewell/source-map": "^0.3.2",
+        "acorn": "^8.5.0",
         "commander": "^2.20.0",
-        "source-map": "~0.7.2",
         "source-map-support": "~0.5.20"
       },
       "bin": {
@@ -4777,14 +4839,6 @@
       },
       "engines": {
         "node": ">=10"
-      },
-      "peerDependencies": {
-        "acorn": "^8.5.0"
-      },
-      "peerDependenciesMeta": {
-        "acorn": {
-          "optional": true
-        }
       }
     },
     "node_modules/terser-webpack-plugin": {
@@ -4832,16 +4886,6 @@
         "node": ">=0.10.0"
       }
     },
-    "node_modules/terser/node_modules/source-map": {
-      "version": "0.7.3",
-      "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
-      "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
-      "dev": true,
-      "peer": true,
-      "engines": {
-        "node": ">= 8"
-      }
-    },
     "node_modules/test-exclude": {
       "version": "6.0.0",
       "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
@@ -6156,15 +6200,67 @@
         }
       }
     },
-    "@sasjs/core": {
-      "version": "3.10.0",
-      "resolved": "https://registry.npmjs.org/@sasjs/core/-/core-3.10.0.tgz",
-      "integrity": "sha512-lgLxDYpIvwSrXFaUaTFCR0KXHQEc5QIOL4DU87TvBHEUUAWNQHzuVQWkavLtW5hbvLGnPXnyvspzoSzmBojXzg==",
+    "@jridgewell/gen-mapping": {
+      "version": "0.3.2",
+      "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz",
+      "integrity": "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A==",
+      "dev": true,
+      "peer": true,
+      "requires": {
+        "@jridgewell/set-array": "^1.0.1",
+        "@jridgewell/sourcemap-codec": "^1.4.10",
+        "@jridgewell/trace-mapping": "^0.3.9"
+      }
+    },
+    "@jridgewell/resolve-uri": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz",
+      "integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==",
+      "dev": true,
+      "peer": true
+    },
+    "@jridgewell/set-array": {
+      "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz",
+      "integrity": "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw==",
+      "dev": true,
+      "peer": true
+    },
+    "@jridgewell/source-map": {
+      "version": "0.3.2",
+      "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz",
+      "integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==",
+      "dev": true,
+      "peer": true,
+      "requires": {
+        "@jridgewell/gen-mapping": "^0.3.0",
+        "@jridgewell/trace-mapping": "^0.3.9"
+      }
+    },
+    "@jridgewell/sourcemap-codec": {
+      "version": "1.4.14",
+      "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz",
+      "integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==",
       "dev": true,
+      "peer": true
+    },
+    "@jridgewell/trace-mapping": {
+      "version": "0.3.17",
+      "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz",
+      "integrity": "sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g==",
+      "dev": true,
+      "peer": true,
       "requires": {
-        "ts-loader": "^9.2.6"
+        "@jridgewell/resolve-uri": "3.1.0",
+        "@jridgewell/sourcemap-codec": "1.4.14"
       }
     },
+    "@sasjs/core": {
+      "version": "4.41.0",
+      "resolved": "https://registry.npmjs.org/@sasjs/core/-/core-4.41.0.tgz",
+      "integrity": "sha512-Cw9doOvCk363j9mDhp6uZMngZLHYnMcYTBBR9lJzAz9RgDYSosz0vbL0zV7TztVJv+38YPAfwZDHPYcZtjjywQ==",
+      "dev": true
+    },
     "@sinonjs/commons": {
       "version": "1.8.3",
       "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz",
@@ -8636,9 +8732,9 @@
       "dev": true
     },
     "minimatch": {
-      "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
-      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "requires": {
         "brace-expansion": "^1.1.7"
       }
@@ -9129,24 +9225,16 @@
       }
     },
     "terser": {
-      "version": "5.10.0",
-      "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz",
-      "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==",
+      "version": "5.15.1",
+      "resolved": "https://registry.npmjs.org/terser/-/terser-5.15.1.tgz",
+      "integrity": "sha512-K1faMUvpm/FBxjBXud0LWVAGxmvoPbZbfTCYbSgaaYQaIXI3/TdI7a7ZGA73Zrou6Q8Zmz3oeUTsp/dj+ag2Xw==",
       "dev": true,
       "peer": true,
       "requires": {
+        "@jridgewell/source-map": "^0.3.2",
+        "acorn": "^8.5.0",
         "commander": "^2.20.0",
-        "source-map": "~0.7.2",
         "source-map-support": "~0.5.20"
-      },
-      "dependencies": {
-        "source-map": {
-          "version": "0.7.3",
-          "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz",
-          "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==",
-          "dev": true,
-          "peer": true
-        }
       }
     },
     "terser-webpack-plugin": {
diff --git a/package.json b/package.json
index 82dd399..92c1c75 100644
--- a/package.json
+++ b/package.json
@@ -34,7 +34,7 @@
   },
   "homepage": "https://github.com/sasjs/utils#readme",
   "devDependencies": {
-    "@sasjs/core": "3.10.0",
+    "@sasjs/core": "4.41.0",
    "@types/cli-table": "0.3.0",
    "@types/find": "0.2.1",
    "@types/jest": "27.4.0",
diff --git a/src/file/spec/getAbsolutePath.spec.ts b/src/file/spec/getAbsolutePath.spec.ts
new file mode 100644
index 0000000..cb614b5
--- /dev/null
+++ b/src/file/spec/getAbsolutePath.spec.ts
@@ -0,0 +1,14 @@
+import { getAbsolutePath } from '../getAbsolutePath'
+import path from 'path'
+
+describe('getAbsolutePath', () => {
+  it('should return absolute path in normalized form', () => {
+    expect(getAbsolutePath('~/..', '')).toBeTruthy()
+  })
+
+  it('should return joined path when provided path is not absolute path', () => {
+    expect(getAbsolutePath('utils', 'sasjs')).toEqual(
+      path.join('sasjs', 'utils')
+    )
+  })
+})
diff --git a/src/fs/generateCompileProgram.ts b/src/fs/generateCompileProgram.ts
new file mode 100644
index 0000000..9c46030
--- /dev/null
+++ b/src/fs/generateCompileProgram.ts
@@ -0,0 +1,57 @@
+import path from 'path'
+import { isFolder, listFilesInFolder, listSubFoldersInFolder } from '../file'
+import {
+  getInitialCode,
+  getCompiledMacrosCode,
+  generateCodeForFileCreation
+} from './internal/helper'
+
+export const generateCompileProgram = async (folderPath: string) => {
+  const compiledMacrosCode = await getCompiledMacrosCode(['mf_mkdir.sas'])
+
+  const initialProgramContent = getInitialCode()
+
+  const folderCreationCode = await fileAndDirectoryCreationCode(folderPath)
+
+  return compiledMacrosCode + initialProgramContent + folderCreationCode
+}
+
+const fileAndDirectoryCreationCode = async (
+  resourcePath: string,
+  pathRelativeTo: string = resourcePath,
+  resultCode: string = ''
+) => {
+  if (!(await isFolder(resourcePath))) {
+    resultCode += await generateCodeForFileCreation(
+      resourcePath,
+      pathRelativeTo
+    )
+    return resultCode
+  }
+
+  const files = await listFilesInFolder(resourcePath)
+  for (const file of files) {
+    resultCode = await fileAndDirectoryCreationCode(
+      path.join(resourcePath, file),
+      pathRelativeTo,
+      resultCode
+    )
+  }
+
+  const subFolders = await listSubFoldersInFolder(resourcePath)
+  for (const folder of subFolders) {
+    const folderPath = path.join(resourcePath, folder)
+
+    resultCode = `${resultCode}
+%mf_mkdir(&fsTarget${folderPath.replace(pathRelativeTo, '')})
+`
+
+    resultCode = await fileAndDirectoryCreationCode(
+      folderPath,
+      pathRelativeTo,
+      resultCode
+    )
+  }
+
+  return resultCode
+}
diff --git a/src/fs/hash.ts b/src/fs/hash.ts
new file mode 100644
index 0000000..3277c54
--- /dev/null
+++ b/src/fs/hash.ts
@@ -0,0 +1,136 @@
+import fs from 'fs-extra'
+import path from 'path'
+import { createHash } from 'crypto'
+import { HashedFolder } from '../types'
+import {
+  isFolder,
+  getRelativePath,
+  listFilesAndSubFoldersInFolder
+} from '../file'
+/**
+ * Hashes each file in each directory, and then hashes the hashes to create a hash for each directory also.
+ *
+ * Whilst files are hashed in their entirety, the logic for creating a folder hash is as follows:
+ *
+ * Sort the files and subfolders by name (case sensitive, uppercase then lower)
+ * Take the first 100 hashes, concatenate and hash
+ * Concatenate this hash with another 100 hashes and hash again
+ * Continue until the end of the folder. This is the folder hash
+ * If a folder contains other folders, start from the bottom of the tree - the folder hashes cascade upwards so you know immediately if there is a change in a sub/sub directory
+ * If the folder has no content (empty) then it is ignored. No hash created.
+ *
+ * @param {string} folderPath - absolute folder path
+ */
+
+export const getHash = async (folderPath: string): Promise<HashedFolder> => {
+  return await hashFolder(folderPath)
+}
+
+/**
+ * It returns a hashed folder tree that contains the local directory resources that are not synced with remote
+ */
+export function compareHashes(
+  localHash: HashedFolder,
+  remoteHashMap: { [key: string]: string }
+) {
+  return hashDifference(localHash, remoteHashMap)
+}
+
+const hashDifference = (
+  localHash: HashedFolder,
+  remoteHashMap: { [key: string]: string },
+  hashedDiff: HashedFolder = {
+    hash: localHash.hash,
+    absolutePath: localHash.absolutePath,
+    relativePath: localHash.relativePath,
+    isFile: false,
+    members: []
+  }
+) => {
+  for (const member of localHash.members) {
+    if (remoteHashMap[member.relativePath] !== member.hash) {
+      if (member.isFile) {
+        hashedDiff.members.push(member)
+      } else {
+        const diff = hashDifference(member as HashedFolder, remoteHashMap, {
+          hash: member.hash,
+          absolutePath: member.absolutePath,
+          relativePath: member.relativePath,
+          isFile: false,
+          members: []
+        })
+        hashedDiff.members.push(diff)
+      }
+    }
+  }
+
+  return hashedDiff
+}
+
+const hashFile = async (filePath: string) => {
+  const fileContent = fs.readFileSync(filePath)
+
+  if (fileContent.length) {
+    const hash = createHash('md5')
+    hash.update(fileContent)
+    return hash.digest('hex').toUpperCase()
+  }
+}
+
+const hashFolder = async (
+  folderPath: string,
+  pathRelativeTo: string = folderPath,
+  hashedFolder: HashedFolder = {
+    hash: '',
+    absolutePath: folderPath,
+    relativePath: getRelativePath(pathRelativeTo, folderPath),
+    isFile: false,
+    members: []
+  }
+) => {
+  const filesAndFolders = await listFilesAndSubFoldersInFolder(
+    folderPath,
+    false
+  )
+  filesAndFolders.sort()
+
+  let concatenatedHash = ''
+
+  const chunkSize = 100
+  for (let i = 0; i < filesAndFolders.length; i += chunkSize) {
+    // Take the first 100 hashes, concatenate and hash
+    const resources = filesAndFolders.slice(i, i + chunkSize)
+    for (const resource of resources) {
+      const resourcePath = path.join(folderPath, resource)
+      if (await isFolder(resourcePath)) {
+        const hashedSubFolder = await hashFolder(resourcePath, pathRelativeTo)
+        hashedFolder.members.push(hashedSubFolder)
+
+        concatenatedHash += hashedSubFolder.hash
+      } else {
+        const hash = await hashFile(resourcePath)
+
+        if (!hash) continue
+
+        hashedFolder.members.push({
+          hash,
+          absolutePath: resourcePath,
+          relativePath: getRelativePath(pathRelativeTo, resourcePath),
+          isFile: true
+        })
+
+        concatenatedHash += hash
+      }
+    }
+
+    if (concatenatedHash !== '') {
+      const hash = createHash('md5')
+      hash.update(concatenatedHash)
+      concatenatedHash = hash.digest('hex').toUpperCase()
+    }
+  }
+
+  hashedFolder.hash = concatenatedHash
+
+  return hashedFolder
+}
diff --git a/src/fs/index.ts b/src/fs/index.ts
new file mode 100644
index 0000000..0bee460
--- /dev/null
+++ b/src/fs/index.ts
@@ -0,0 +1,7 @@
+export { getHash, compareHashes } from './hash'
+export { generateCompileProgram } from './generateCompileProgram'
+export {
+  generateProgramToGetRemoteHash,
+  generateProgramToSyncHashDiff,
+  findResourcesNotPresentLocally
+} from './sync'
diff --git a/src/fs/internal/helper.spec.ts b/src/fs/internal/helper.spec.ts
new file mode 100644
index 0000000..66a8a12
--- /dev/null
+++ b/src/fs/internal/helper.spec.ts
@@ -0,0 +1,39 @@
+import * as ChunkModule from '../../utils/chunk'
+import * as GetNodeModulePath from '../../utils/getNodeModulePath'
+import { chunkFileContent, getCompiledMacrosCode } from './helper'
+
+describe('chunkFileContent', () => {
+  it('should return single line if fileContent does not exceed maxLength', () => {
+    const result = chunkFileContent('It is a single line content')
+    const expected = ` put 'It is a single line content';\n`
+    expect(result).toEqual(expected)
+  })
+
+  it('should return multiline if fileContent exceeds maxLength', () => {
+    jest
+      .spyOn(ChunkModule, 'chunk')
+      .mockImplementationOnce((text: string) => text.split(' '))
+
+    const result = chunkFileContent('It is a multi line content')
+    const expected = ` put 'It'@;
+ put 'is'@;
+ put 'a'@;
+ put 'multi'@;
+ put 'line'@;
+ put 'content';
+`
+    expect(result).toEqual(expected)
+  })
+})
+
+describe('getCompiledMacrosCode', () => {
+  it('should throw error when @sasjs/core module is not found', async () => {
+    jest
+      .spyOn(GetNodeModulePath, 'getNodeModulePath')
+      .mockImplementationOnce(() => Promise.resolve(''))
+
+    await expect(getCompiledMacrosCode([] as string[])).rejects.toThrowError(
+      '@sasjs/core could not be found'
+    )
+  })
+})
diff --git a/src/fs/internal/helper.ts b/src/fs/internal/helper.ts
new file mode 100644
index 0000000..4ee2b7a
--- /dev/null
+++ b/src/fs/internal/helper.ts
@@ -0,0 +1,87 @@
+import path from 'path'
+import { getNodeModulePath } from '../../utils/getNodeModulePath'
+import { chunk } from '../../utils/chunk'
+import { readFile, base64EncodeFile } from '../../file'
+
+export const generateCodeForFileCreation = async (
+  filePath: string,
+  pathRelativeTo: string
+) => {
+  const base64EncodedFileContent = await base64EncodeFile(filePath)
+  const chunkedFileContent = chunkFileContent(base64EncodedFileContent)
+  return `
+filename _in64 temp lrecl=99999999;
+data _null_;
+file _in64;
+${chunkedFileContent}
+run;
+
+filename _out64 "&fsTarget${filePath.replace(pathRelativeTo, '')}";
+
+/* convert from base64 */
+data _null_;
+length filein 8 fileout 8;
+filein = fopen("_in64",'I',4,'B');
+fileout = fopen("_out64",'O',3,'B');
+char= '20'x;
+do while(fread(filein)=0);
+ length raw $4 ;
+ do i=1 to 4;
+ rc=fget(filein,char,1);
+ substr(raw,i,1)=char;
+ end;
+ rc = fput(fileout, input(raw,$base64X4.));
+ rc =fwrite(fileout);
+end;
+rc = fclose(filein);
+rc = fclose(fileout);
+run;
+
+filename _in64 clear;
+filename _out64 clear;
+`
+}
+
+export const chunkFileContent = (fileContent: string) => {
+  const chunkedLines = chunk(fileContent)
+
+  if (chunkedLines.length === 1) {
+    return ` put '${chunkedLines[0].split("'").join("''")}';\n`
+  }
+
+  let combinedLines = ''
+
+  chunkedLines.forEach((chunkedLine, index) => {
+    const text = ` put '${chunkedLine.split("'").join("''")}'${
+      index !== chunkedLines.length - 1 ? '@;\n' : ';\n'
+    }`
+
+    combinedLines += text
+  })
+
+  return combinedLines
+}
+
+export const getInitialCode = () => `%global fsTarget;
+%let compiled_fsTarget=%sysfunc(pathname(work));
+%let fsTarget=%sysfunc(coalescec(&fsTarget,&compiled_fsTarget));
+options nobomfile;
+
+%mf_mkdir(&fsTarget)
+`
+
+export const getCompiledMacrosCode = async (macros: string[]) => {
+  const sasjsCorePath = await getNodeModulePath('@sasjs/core')
+  if (!sasjsCorePath) throw new Error('@sasjs/core could not be found')
+
+  let compiledCode = ''
+
+  for (const macro of macros) {
+    const macroPath = path.join(sasjsCorePath, 'base', macro)
+    const macroContent = await readFile(macroPath)
+
+    compiledCode += macroContent + '\n'
+  }
+
+  return compiledCode
+}
diff --git a/src/fs/spec/createFSCompileProgram.spec.ts b/src/fs/spec/createFSCompileProgram.spec.ts
new file mode 100644
index 0000000..12b1486
--- /dev/null
+++ b/src/fs/spec/createFSCompileProgram.spec.ts
@@ -0,0 +1,29 @@
+import path from 'path'
+import { generateCompileProgram } from '../generateCompileProgram'
+import { createFile, createFolder, deleteFolder, readFile } from '../../file'
+
+describe('createFSCompileProgram', () => {
+  const timestamp = new Date().valueOf()
+  const folderName = `test-create-folder-${timestamp}`
+  const folderPath = path.join(__dirname, folderName)
+  const subFolderPath = path.join(folderPath, 'subFolder')
+  const filePath = path.join(subFolderPath, 'file.txt')
+
+  beforeAll(async () => {
+    await createFolder(folderPath)
+    await createFolder(subFolderPath)
+    await createFile(filePath, 'this is dummy file content')
+  })
+
+  afterAll(async () => {
+    await deleteFolder(folderPath)
+  })
+
+  it('should return a sas program ', async () => {
+    const program = await generateCompileProgram(folderPath)
+
+    expect(program).toContain('%macro mf_mkdir')
+    expect(program).toContain('%mf_mkdir(&fsTarget)')
+    expect(program).toContain(`%mf_mkdir(&fsTarget${path.sep}subFolder)`)
+  })
+})
diff --git a/src/fs/spec/hash.spec.ts b/src/fs/spec/hash.spec.ts
new file mode 100644
index 0000000..20f7c03
--- /dev/null
+++ b/src/fs/spec/hash.spec.ts
@@ -0,0 +1,19 @@
+import path from 'path'
+import { getHash, compareHashes } from '../hash'
+
+describe('getHash', () => {
+  it('should return the hash of provided directory', async () => {
+    const hashedFolder = await getHash(path.join(__dirname, 'hashFolder'))
+    const expectedHash = '74FFEA8EA05C42341754D6A4B01E90E5'
+    const receivedHash = hashedFolder.hash
+    expect(receivedHash).toEqual(expectedHash)
+  })
+})
+
+describe('compareHashes', () => {
+  it('should return hashed folder tree with nodes either not existing in remoteHashMap or their hashes mismatch', async () => {
+    const hashedFolder = await getHash(path.join(__dirname, 'hashFolder'))
+    const hashedDiff = compareHashes(hashedFolder, {})
+    expect(hashedDiff).toEqual(hashedFolder)
+  })
+})
diff --git a/src/fs/spec/hashFolder/emptyFile.txt b/src/fs/spec/hashFolder/emptyFile.txt
new file mode 100644
index 0000000..e69de29
diff --git a/src/fs/spec/hashFolder/file1.txt b/src/fs/spec/hashFolder/file1.txt
new file mode 100644
index 0000000..78dbaa7
--- /dev/null
+++ b/src/fs/spec/hashFolder/file1.txt
@@ -0,0 +1 @@
+This is a file1.txt in hashFolder
\ No newline at end of file
diff --git a/src/fs/spec/hashFolder/file2.txt b/src/fs/spec/hashFolder/file2.txt
new file mode 100644
index 0000000..9ae36c0
--- /dev/null
+++ b/src/fs/spec/hashFolder/file2.txt
@@ -0,0 +1 @@
+This is a file2.txt in hashFolder
\ No newline at end of file
diff --git a/src/fs/spec/hashFolder/subFolder2/file1.txt b/src/fs/spec/hashFolder/subFolder2/file1.txt
new file mode 100644
index 0000000..b9e4d06
--- /dev/null
+++ b/src/fs/spec/hashFolder/subFolder2/file1.txt
@@ -0,0 +1 @@
+This is a file1.txt in subfolder2
\ No newline at end of file
diff --git a/src/fs/spec/hashFolder/subFolder2/file2.txt b/src/fs/spec/hashFolder/subFolder2/file2.txt
new file mode 100644
index 0000000..d6bad4b
--- /dev/null
+++ b/src/fs/spec/hashFolder/subFolder2/file2.txt
@@ -0,0 +1 @@
+This is a file2.txt in subfolder2
\ No newline at end of file
diff --git a/src/fs/spec/hashFolder/subfolder1/file1.txt b/src/fs/spec/hashFolder/subfolder1/file1.txt
new file mode 100644
index 0000000..9f1337c
--- /dev/null
+++ b/src/fs/spec/hashFolder/subfolder1/file1.txt
@@ -0,0 +1 @@
+This is a file1.txt in subfolder1
\ No newline at end of file
diff --git a/src/fs/spec/hashFolder/subfolder1/file2.txt b/src/fs/spec/hashFolder/subfolder1/file2.txt
new file mode 100644
index 0000000..0ede37e
--- /dev/null
+++ b/src/fs/spec/hashFolder/subfolder1/file2.txt
@@ -0,0 +1 @@
+This is a file2.txt in subfolder1
\ No newline at end of file
diff --git a/src/fs/spec/sync.spec.ts b/src/fs/spec/sync.spec.ts
new file mode 100644
index 0000000..a7ae481
--- /dev/null
+++ b/src/fs/spec/sync.spec.ts
@@ -0,0 +1,44 @@
+import path from 'path'
+import {
+  generateProgramToGetRemoteHash,
+  generateProgramToSyncHashDiff,
+  findResourcesNotPresentLocally
+} from '../sync'
+import { getHash } from '../hash'
+
+describe('generateProgramToGetRemoteHash', () => {
+  it('should return a sas program to get hashes from remote server', async () => {
+    const program = await generateProgramToGetRemoteHash('/tmp/remote/path')
+
+    expect(program).toContain('%let fsTarget=/tmp/remote/path;')
+    expect(program).toContain('%macro mp_hashdirectory')
+    expect(program).toContain('%macro mp_jsonout')
+  })
+})
+
+describe('generateProgramToSyncHashDiff', () => {
+  it('should return a sas program that syncs hash differences to remote server', async () => {
+    const hashedFolder = await getHash(path.join(__dirname, 'hashFolder'))
+    const program = await generateProgramToSyncHashDiff(
+      hashedFolder,
+      '/tmp/remote/path'
+    )
+
+    expect(program).toContain('%let fsTarget=/tmp/remote/path;')
+    expect(program).toContain('%macro mp_hashdirectory')
+    expect(program).toContain('%macro mp_jsonout')
+    expect(program).toContain('%macro mf_mkdir')
+  })
+})
+
+describe('findResourcesNotPresentLocally', () => {
+  it('should return a sas program that syncs hash differences to remote server', async () => {
+    const hashedFolder = await getHash(path.join(__dirname, 'hashFolder'))
+    const resourcesNotPresentLocally = findResourcesNotPresentLocally(
+      hashedFolder,
+      { './file/not/exists': 'HashString' }
+    )
+
+    expect(resourcesNotPresentLocally).toEqual(['./file/not/exists'])
+  })
+})
diff --git a/src/fs/sync.ts b/src/fs/sync.ts
new file mode 100644
index 0000000..fffce58
--- /dev/null
+++ b/src/fs/sync.ts
@@ -0,0 +1,134 @@
+import path from 'path'
+import {
+  getInitialCode,
+  getCompiledMacrosCode,
+  generateCodeForFileCreation
+} from './internal/helper'
+import { HashedFolder } from '../types'
+
+export const generateProgramToGetRemoteHash = async (remotePath: string) => {
+  const compiledMacrosCode = await getCompiledMacrosCode([
+    'mp_hashdirectory.sas',
+    'mp_jsonout.sas'
+  ])
+
+  const codeForHashCreation = getCodeForHashCreation()
+
+  const code = compiledMacrosCode + codeForHashCreation
+
+  return setTargetAtStart(code, remotePath)
+}
+
+export const generateProgramToSyncHashDiff = async (
+  hashedFolder: HashedFolder,
+  remotePath: string
+) => {
+  const compiledMacrosCode = await getCompiledMacrosCode([
+    'mp_hashdirectory.sas',
+    'mp_jsonout.sas',
+    'mf_mkdir.sas'
+  ])
+
+  const initialProgramContent = getInitialCode()
+
+  const pathRelativeTo = hashedFolder.absolutePath.endsWith(path.sep)
+    ? hashedFolder.absolutePath.slice(0, -1)
+    : hashedFolder.absolutePath
+
+  const folderCreationCode = await generateCodeForFolderCreation(
+    hashedFolder,
+    pathRelativeTo
+  )
+
+  const codeForHashCreation = getCodeForHashCreation()
+
+  const code =
+    compiledMacrosCode +
+    initialProgramContent +
+    folderCreationCode +
+    codeForHashCreation
+  return setTargetAtStart(code, remotePath)
+}
+
+export const findResourcesNotPresentLocally = (
+  localHash: HashedFolder,
+  remoteHashMap: { [key: string]: string }
+) => {
+  const localHashedArray = convertHashFolderTreeToArray(localHash)
+  const remoteHashedArray = Object.keys(remoteHashMap)
+
+  return remoteHashedArray.filter((item) => !localHashedArray.includes(item))
+}
+
+const generateCodeForFolderCreation = async (
+  hashedFolder: HashedFolder,
+  pathRelativeTo: string,
+  resultCode: string = ''
+) => {
+  for (const member of hashedFolder.members) {
+    if (member.isFile) {
+      resultCode += await generateCodeForFileCreation(
+        member.absolutePath,
+        pathRelativeTo
+      )
+    } else {
+      resultCode += `%mf_mkdir(&fsTarget${member.absolutePath.replace(
+        pathRelativeTo,
+        ''
+      )})\n`
+      resultCode = await generateCodeForFolderCreation(
+        member as HashedFolder,
+        pathRelativeTo,
+        resultCode
+      )
+    }
+  }
+
+  return resultCode
+}
+
+const getCodeForHashCreation = () => {
+  return `/* Get Hashes */
+%mp_hashdirectory(&fsTarget,maxDepth=MAX,outds=work.hashes)
+
+/* Prepare Response JSON */
+filename tmp temp;
+%mp_jsonout(OPEN,jref=tmp)
+%mp_jsonout(OBJ,hashes,fmt=N,jref=tmp)
+%mp_jsonout(CLOSE,jref=tmp)
+
+/* Print to Log */
+data _null_;
+ retain eof;
+ infile tmp end=eof lrecl=10000;
+ if _n_=1 then putlog '>>weboutBEGIN<<';
+ input;
+ putlog _infile_;
+ if eof then putlog '>>weboutEND<<';
+run;
+`
+}
+
+const setTargetAtStart = (code: string, target: string) => {
+  return `%let fsTarget=${target};\n${code}`
+}
+
+/**
+ * convert hash folder tree to an array of relative paths,
+ * the returned array will be used to check the resources that are present on remote but not local
+ */
+
+const convertHashFolderTreeToArray = (
+  hashedFolder: HashedFolder,
+  array: string[] = []
+) => {
+  if (hashedFolder.isFile) return [...array, hashedFolder.relativePath]
+
+  for (const member of hashedFolder.members) {
+    array = convertHashFolderTreeToArray(member as HashedFolder, array)
+  }
+
+  array.push(hashedFolder.relativePath)
+
+  return array
+}
diff --git a/src/index.ts b/src/index.ts
index 7d70825..c3c20d1 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -8,3 +8,4 @@ export * from './time'
 export * from './types'
 export * from './utils'
 export * from './compileTree'
+export * from './fs'
diff --git a/src/types/hash.ts b/src/types/hash.ts
new file mode 100644
index 0000000..9639447
--- /dev/null
+++ b/src/types/hash.ts
@@ -0,0 +1,10 @@
+export interface HashedFile {
+  hash: string
+  absolutePath: string
+  relativePath: string
+  isFile: boolean
+}
+
+export interface HashedFolder extends HashedFile {
+  members: (HashedFile | HashedFolder)[]
+}
diff --git a/src/types/index.ts b/src/types/index.ts
index 399cd4b..4c322c6 100644
--- a/src/types/index.ts
+++ b/src/types/index.ts
@@ -3,6 +3,7 @@ export * from './configuration'
 export * from './decodedToken'
 export * from './extraResponseAttributes'
 export * from './fileTree'
+export * from './hash'
 export * from './httpsAgentOptions'
 export * from './macro'
 export * from './sasAuthResponse'
diff --git a/src/utils/getNodeModulePath.ts b/src/utils/getNodeModulePath.ts
new file mode 100644
index 0000000..6cbf4cf
--- /dev/null
+++ b/src/utils/getNodeModulePath.ts
@@ -0,0 +1,36 @@
+import path from 'path'
+import { execSync } from 'child_process'
+import { folderExists } from '../file'
+
+export const getNodeModulePath = async (module: string): Promise<string> => {
+  // Check if module is present in project's dependencies
+  const projectPath = path.join(process.cwd(), 'node_modules', module)
+
+  if (await folderExists(projectPath)) return projectPath
+
+  // Check if module is present in @sasjs/utils located in project's dependencies
+  const utilsDepsPath = path.join('@sasjs', 'utils', 'node_modules')
+  const utilsLocalPath = path.join(
+    process.cwd(),
+    'node_modules',
+    utilsDepsPath,
+    module
+  )
+
+  if (await folderExists(utilsLocalPath)) return utilsLocalPath
+
+  // Check if module is present in global @sasjs/utils
+  const utilsGlobalPath = path.join(
+    getGlobalNodeModulesPath(),
+    utilsDepsPath,
+    module
+  )
+
+  if (await folderExists(utilsGlobalPath)) return utilsGlobalPath
+
+  // Return default value
+  return ''
+}
+
+export const getGlobalNodeModulesPath = () =>
+  execSync(`npm root -g`).toString().replace(/\n/, '')
diff --git a/src/utils/index.ts b/src/utils/index.ts
index 35fa7eb..1bd2109 100644
--- a/src/utils/index.ts
+++ b/src/utils/index.ts
@@ -17,3 +17,4 @@ export { getExecutorPath } from './executor'
 export { bytesToSize } from './bytesToSize'
 export { diff } from './diff'
 export { chunk } from './chunk'
+export { getNodeModulePath } from './getNodeModulePath'
diff --git a/src/utils/base64.spec.ts b/src/utils/spec/base64.spec.ts
similarity index 93%
rename from src/utils/base64.spec.ts
rename to src/utils/spec/base64.spec.ts
index aab4741..7657870 100644
--- a/src/utils/base64.spec.ts
+++ b/src/utils/spec/base64.spec.ts
@@ -1,4 +1,4 @@
-import { encodeToBase64, decodeFromBase64 } from './base64'
+import { encodeToBase64, decodeFromBase64 } from '../base64'
 
 describe('base64', () => {
   const originalPassword = 'hello-world'
diff --git a/src/utils/bytesToSize.spec.ts b/src/utils/spec/bytesToSize.spec.ts
similarity index 92%
rename from src/utils/bytesToSize.spec.ts
rename to src/utils/spec/bytesToSize.spec.ts
index e1156e3..9301612 100644
--- a/src/utils/bytesToSize.spec.ts
+++ b/src/utils/spec/bytesToSize.spec.ts
@@ -1,4 +1,4 @@
-import { bytesToSize } from './bytesToSize'
+import { bytesToSize } from '../bytesToSize'
 
 describe('bytesToSize', () => {
   it(`should Convert '1024' bytes to '1.0 KB'`, () => {
diff --git a/src/utils/spec/diff.spec.ts b/src/utils/spec/diff.spec.ts
new file mode 100644
index 0000000..28be96b
--- /dev/null
+++ b/src/utils/spec/diff.spec.ts
@@ -0,0 +1,11 @@
+import { diff } from '../diff'
+
+describe('diff', () => {
+  it('should return the difference of two arrays', () => {
+    expect(diff(['a', 'b', 'c'], ['c', 'd', 'e'])).toEqual(['a', 'b', 'd', 'e'])
+    expect(diff([{ a: 1 }, { b: 2 }], [{ b: 2 }, { c: 3 }])).toEqual([
+      { a: 1 },
+      { c: 3 }
+    ])
+  })
+})
diff --git a/src/utils/executor.spec.ts b/src/utils/spec/executor.spec.ts
similarity index 94%
rename from src/utils/executor.spec.ts
rename to src/utils/spec/executor.spec.ts
index 1368544..7e451c5 100644
--- a/src/utils/executor.spec.ts
+++ b/src/utils/spec/executor.spec.ts
@@ -1,4 +1,4 @@
-import { getExecutorPath } from './executor'
+import { getExecutorPath } from '../executor'
 
 // Add more pair options to be tested here
 const serverTypePathMap: { serverType: string; executorPath: string }[] = [
diff --git a/src/utils/fileTree.spec.ts b/src/utils/spec/fileTree.spec.ts
similarity index 96%
rename from src/utils/fileTree.spec.ts
rename to src/utils/spec/fileTree.spec.ts
index afa305a..8800f5c 100644
--- a/src/utils/fileTree.spec.ts
+++ b/src/utils/spec/fileTree.spec.ts
@@ -1,4 +1,4 @@
-import { getTreeExample, isFileTree } from './fileTree'
+import { getTreeExample, isFileTree } from '../fileTree'
 
 describe('isFileTree', () => {
   it('should return true for valid file tree', () => {
diff --git a/src/utils/spec/getNodeModulePath.spec.ts b/src/utils/spec/getNodeModulePath.spec.ts
new file mode 100644
index 0000000..33ea0ec
--- /dev/null
+++ b/src/utils/spec/getNodeModulePath.spec.ts
@@ -0,0 +1,98 @@
+import path from 'path'
+import {
+  getNodeModulePath,
+  getGlobalNodeModulesPath
+} from '../getNodeModulePath'
+import ChildProcess from 'child_process'
+import * as FileModule from '../../file'
+import * as getNodeModulePathModule from '../getNodeModulePath'
+
+describe('getNodeModulePath', () => {
+  it('should return module path from main node_modules folder', async () => {
+    jest
+      .spyOn(FileModule, 'folderExists')
+      .mockImplementationOnce(() => Promise.resolve(true))
+
+    const expected = path.join(process.cwd(), 'node_modules', 'test')
+    const received = await getNodeModulePath('test')
+
+    expect(received).toEqual(expected)
+  })
+
+  it('should return module path from nested @sasjs/utils node_modules folder', async () => {
+    jest
+      .spyOn(FileModule, 'folderExists')
+      .mockImplementationOnce(() => Promise.resolve(false))
+
+    jest
+      .spyOn(FileModule, 'folderExists')
+      .mockImplementationOnce(() => Promise.resolve(true))
+
+    const utilsDepsPath = path.join('@sasjs', 'utils', 'node_modules')
+    const expected = path.join(
+      process.cwd(),
+      'node_modules',
+      utilsDepsPath,
+      'test'
+    )
+    const received = await getNodeModulePath('test')
+
+    expect(received).toEqual(expected)
+  })
+
+  it('should return module path from global @sasjs/utils node_modules folder', async () => {
+    jest
+      .spyOn(FileModule, 'folderExists')
+      .mockImplementationOnce(() => Promise.resolve(false))
+
+    jest
+      .spyOn(FileModule, 'folderExists')
+      .mockImplementationOnce(() => Promise.resolve(false))
+
+    jest
+      .spyOn(FileModule, 'folderExists')
+      .mockImplementationOnce(() => Promise.resolve(true))
+
+    jest
+      .spyOn(getNodeModulePathModule, 'getGlobalNodeModulesPath')
+      .mockImplementationOnce(() => 'global_path')
+
+    const utilsDepsPath = path.join('@sasjs', 'utils', 'node_modules')
+    const expected = path.join('global_path', utilsDepsPath, 'test')
+    const received = await getNodeModulePath('test')
+
+    expect(received).toEqual(expected)
+  })
+
+  it('should return blank string when module is not found anywhere', async () => {
+    jest
+      .spyOn(FileModule, 'folderExists')
+      .mockImplementationOnce(() => Promise.resolve(false))
+
+    jest
+      .spyOn(FileModule, 'folderExists')
+      .mockImplementationOnce(() => Promise.resolve(false))
+
+    jest
+      .spyOn(FileModule, 'folderExists')
+      .mockImplementationOnce(() => Promise.resolve(false))
+
+    jest
+      .spyOn(getNodeModulePathModule, 'getGlobalNodeModulesPath')
+      .mockImplementationOnce(() => 'global_path')
+
+    const expected = ''
+    const received = await getNodeModulePath('test')
+
+    expect(received).toEqual(expected)
+  })
+})
+
+describe('getGlobalNodeModulesPath', () => {
+  it('should return global path of node_modules', () => {
+    jest.spyOn(ChildProcess, 'execSync')
+
+    expect(getGlobalNodeModulesPath()).toContain('node_modules')
+    expect(ChildProcess.execSync).toHaveBeenCalledWith('npm root -g')
+  })
+})
diff --git a/src/utils/url.spec.ts b/src/utils/spec/url.spec.ts
similarity index 93%
rename from src/utils/url.spec.ts
rename to src/utils/spec/url.spec.ts
index e502fbf..e7b80a5 100644
--- a/src/utils/url.spec.ts
+++ b/src/utils/spec/url.spec.ts
@@ -1,4 +1,4 @@
-import { urlOrigin } from './url'
+import { urlOrigin } from '../url'
 
 describe('urlOrigin', () => {
   it('should return an empty string if empty string was provided', () => {
diff --git a/src/utils/utils.spec.ts b/src/utils/spec/utils.spec.ts
similarity index 96%
rename from src/utils/utils.spec.ts
rename to src/utils/spec/utils.spec.ts
index 3e937f0..4f94f75 100644
--- a/src/utils/utils.spec.ts
+++ b/src/utils/spec/utils.spec.ts
@@ -4,8 +4,8 @@ import {
   isWindows,
   isLinux,
   escapeWinSlashes
-} from './utils'
-import * as utilsModule from './utils'
+} from '../utils'
+import * as utilsModule from '../utils'
 
 describe('uuidv4', () => {
   it('should generate 10000 uniq UUID', () => {
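Usage sketch (not part of the patch): the snippet below shows how the new `fs` exports introduced by this changeset could fit together. It assumes the package is consumed as `@sasjs/utils`, and that `remoteHashMap` — an object keyed by relative path, the shape `compareHashes` expects — has already been produced by running the program from `generateProgramToGetRemoteHash` on the server and parsing its log, a step outside this changeset. The helper name `buildSyncPrograms` and its parameters are illustrative only.

import {
  getHash,
  compareHashes,
  generateProgramToGetRemoteHash,
  generateProgramToSyncHashDiff,
  findResourcesNotPresentLocally
} from '@sasjs/utils'

// Builds the SAS programs needed to sync a local folder to a remote path.
const buildSyncPrograms = async (
  localPath: string,
  remotePath: string,
  // Hashes reported by the server, keyed by relative path (obtained elsewhere).
  remoteHashMap: { [key: string]: string }
) => {
  // Program that, when run remotely, prints the remote directory hashes
  // between the >>weboutBEGIN<< and >>weboutEND<< markers.
  const remoteHashProgram = await generateProgramToGetRemoteHash(remotePath)

  // Hash the local tree, then keep only members whose hashes are missing
  // from or different to the remote hash map.
  const localHash = await getHash(localPath)
  const hashedDiff = compareHashes(localHash, remoteHashMap)

  // Program that recreates the out-of-sync files and folders under remotePath.
  const syncProgram = await generateProgramToSyncHashDiff(hashedDiff, remotePath)

  // Relative paths that exist remotely but not locally (deletion candidates).
  const remoteOnly = findResourcesNotPresentLocally(localHash, remoteHashMap)

  return { remoteHashProgram, syncProgram, remoteOnly }
}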