diff --git a/.gitignore b/.gitignore
index 3c3629e..22fbdbe 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,3 @@
 node_modules
+yarn.lock
+package-lock.json
diff --git a/compileerror.js b/compileerror.js
index 9cbca08..32ecef7 100644
--- a/compileerror.js
+++ b/compileerror.js
@@ -6,7 +6,7 @@ inherits(CompileError, TruffleError);
 
 function CompileError(message) {
   // Note we trim() because solc likes to add extra whitespace.
-  var fancy_message = message.trim() + "\n" + colors.red("Compiliation failed. See above.");
+  var fancy_message = message.trim() + "\n" + colors.red("Compilation failed. See above.");
   var normal_message = message.trim();
 
   CompileError.super_.call(this, normal_message);
diff --git a/index.js b/index.js
index 3c56a8d..3925e5f 100644
--- a/index.js
+++ b/index.js
@@ -1,14 +1,5 @@
 var Profiler = require("./profiler");
 var OS = require("os");
-var solc = require("solc");
-
-// Clean up after solc.
-var listeners = process.listeners("uncaughtException");
-var solc_listener = listeners[listeners.length - 1];
-
-if (solc_listener) {
-  process.removeListener("uncaughtException", solc_listener);
-}
 
 var path = require("path");
 var fs = require("fs");
@@ -18,6 +9,7 @@ var CompileError = require("./compileerror");
 var expect = require("truffle-expect");
 var find_contracts = require("truffle-contract-sources");
 var Config = require("truffle-config");
+var debug = require("debug")("compile");
 
 // Most basic of the compile commands. Takes a hash of sources, where
 // the keys are file or module paths and the values are the bodies of
@@ -44,9 +36,21 @@ var compile = function(sources, options, callback) {
     "solc"
   ]);
 
+  // Load solc module only when compilation is actually required.
+  var solc = require("solc");
+  // Clean up after solc.
+  var listeners = process.listeners("uncaughtException");
+  var solc_listener = listeners[listeners.length - 1];
+
+  if (solc_listener) {
+    process.removeListener("uncaughtException", solc_listener);
+  }
+
+
   // Ensure sources have operating system independent paths
   // i.e., convert backslashes to forward slashes; things like C: are left intact.
   var operatingSystemIndependentSources = {};
+  var originalPathMappings = {};
 
   Object.keys(sources).forEach(function(source) {
     // Turn all backslashes into forward slashes
@@ -60,6 +64,9 @@ var compile = function(sources, options, callback) {
 
     // Save the result
     operatingSystemIndependentSources[replacement] = sources[source];
+
+    // Map the replacement back to the original source path.
+    originalPathMappings[replacement] = source;
   });
 
   var solcStandardInput = {
@@ -137,15 +144,19 @@ var compile = function(sources, options, callback) {
 
       var contract_definition = {
         contract_name: contract_name,
-        sourcePath: source_path,
+        sourcePath: originalPathMappings[source_path], // Save original source path, not modified ones
         source: operatingSystemIndependentSources[source_path],
         sourceMap: contract.evm.bytecode.sourceMap,
-        runtimeSourceMap: contract.evm.deployedBytecode.sourceMap,
+        deployedSourceMap: contract.evm.deployedBytecode.sourceMap,
         ast: standardOutput.sources[source_path].legacyAST,
         abi: contract.abi,
         bytecode: "0x" + contract.evm.bytecode.object,
-        runtimeBytecode: "0x" + contract.evm.deployedBytecode.object,
-        unlinked_binary: "0x" + contract.evm.bytecode.object // deprecated
+        deployedBytecode: "0x" + contract.evm.deployedBytecode.object,
+        unlinked_binary: "0x" + contract.evm.bytecode.object, // deprecated
+        compiler: {
+          "name": "solc",
+          "version": solc.version()
+        }
       }
 
       // Go through the link references and replace them with older-style
@@ -162,14 +173,14 @@ var compile = function(sources, options, callback) {
         });
       });
 
-      // Now for the runtime bytecode
+      // Now for the deployed bytecode
       Object.keys(contract.evm.deployedBytecode.linkReferences).forEach(function(file_name) {
         var fileLinks = contract.evm.deployedBytecode.linkReferences[file_name];
 
         Object.keys(fileLinks).forEach(function(library_name) {
           var linkReferences = fileLinks[library_name] || [];
 
-          contract_definition.runtimeBytecode = replaceLinkReferences(contract_definition.runtimeBytecode, linkReferences, library_name);
+          contract_definition.deployedBytecode = replaceLinkReferences(contract_definition.deployedBytecode, linkReferences, library_name);
         });
       });
 
diff --git a/package.json b/package.json
index 9cdfb2c..4629522 100644
--- a/package.json
+++ b/package.json
@@ -1,20 +1,23 @@
 {
   "name": "truffle-compile",
-  "version": "2.0.7",
+  "version": "3.0.1",
   "description": "Compiler helper and artifact manager",
   "main": "index.js",
   "dependencies": {
     "async": "^2.1.4",
     "colors": "^1.1.2",
+    "debug": "^3.1.0",
     "graphlib": "^2.1.1",
-    "solc": "0.4.15",
-    "solidity-parser": "^0.3.0",
+    "solc": "0.4.18",
     "truffle-config": "^1.0.2",
     "truffle-contract-sources": "^0.0.1",
     "truffle-error": "^0.0.2",
     "truffle-expect": "^0.0.3"
   },
-  "devDependencies": {},
+  "devDependencies": {
+    "mocha": "^3.5.3",
+    "truffle-resolver": "2.0.0"
+  },
   "scripts": {
     "test": "mocha"
   },
diff --git a/parser.js b/parser.js
new file mode 100644
index 0000000..f0516e5
--- /dev/null
+++ b/parser.js
@@ -0,0 +1,124 @@
+var CompileError = require("./compileerror");
+var solc = require("solc");
+
+// Clean up after solc.
+var listeners = process.listeners("uncaughtException");
+var solc_listener = listeners[listeners.length - 1];
+
+if (solc_listener) {
+  process.removeListener("uncaughtException", solc_listener);
+}
+
+module.exports = {
+  parse: function(body, fileName) {
+    // Here, we want a valid AST even if imports don't exist. The way to
+    // get around that is to tell the compiler, as they happen, that we
+    // have source for them (an empty file).
+
+    var fileName = fileName || "ParsedContract.sol";
+
+    var solcStandardInput = {
+      language: "Solidity",
+      sources: {
+        [fileName]: {
+          content: body
+        }
+      },
+      settings: {
+        outputSelection: {
+          [fileName]: {
+            "*": ["ast"]
+          }
+        }
+      }
+    };
+
+    var output = solc.compileStandard(JSON.stringify(solcStandardInput), function(file_path) {
+      // Tell the compiler we have source code for the dependency
+      return {contents: "pragma solidity ^0.4.0;"};
+    });
+
+    output = JSON.parse(output);
+
+    if (output.errors) {
+      throw new CompileError(output.errors[0].formattedMessage);
+    }
+
+    return {
+      contracts: Object.keys(output.contracts[fileName]),
+      ast: output.sources[fileName].ast
+    };
+  },
+
+  // This needs to be fast! It is fast (as of this writing). Keep it fast!
+  parseImports: function(body) {
+    var self = this;
+
+    // WARNING: Kind of a hack (an expedient one).
+
+    // So we don't have to maintain a separate parser, we'll get all the imports
+    // in a file by sending the file to solc and evaluating the error messages
+    // to see what import statements couldn't be resolved. To prevent full-on
+    // compilation when a file has no import statements, we inject an import
+    // statement right on the end; just to ensure it will error and we can parse
+    // the imports speedily without doing extra work.
+
+    // Helper to detect import errors with an easy regex.
+    var importErrorKey = "TRUFFLE_IMPORT";
+
+    // Inject failing import.
+    var failingImportFileName = "__Truffle__NotFound.sol";
+
+    body = body + "\n\nimport '" + failingImportFileName + "';\n";
+
+    var solcStandardInput = {
+      language: "Solidity",
+      sources: {
+        "ParsedContract.sol": {
+          content: body
+        }
+      },
+      settings: {
+        outputSelection: {
+          "ParsedContract.sol": {
+            "*": [] // We don't need any output.
+          }
+        }
+      }
+    };
+
+    var output = solc.compileStandard(JSON.stringify(solcStandardInput), function() {
+      // The existence of this function ensures we get a parsable error message.
+      // Without this, we'll get an error message we *can* detect, but the key will make it easier.
+      // Note: This is not a normal callback. See docs here: https://github.com/ethereum/solc-js#from-version-021
+      return {error: importErrorKey};
+    });
+
+    output = JSON.parse(output);
+
+    var nonImportErrors = output.errors.filter(function(solidity_error) {
+      // If the import error key is not found, we must not have an import error.
+      // This means we have a *different* parsing error which we should show to the user.
+      // Note: solc can return multiple parsing errors at once.
+      return solidity_error.formattedMessage.indexOf(importErrorKey) < 0;
+    });
+
+    // Should we try to throw more than one? (aside; we didn't before)
+    if (nonImportErrors.length > 0) {
+      throw new CompileError(nonImportErrors[0].formattedMessage);
+    }
+
+    // Now, all errors must be import errors.
+    // Filter out our forced import, then get the import paths of the rest.
+    var imports = output.errors.filter(function(solidity_error) {
+      return solidity_error.message.indexOf(failingImportFileName) < 0;
+    }).map(function(solidity_error) {
+      var matches = solidity_error.formattedMessage.match(/import[^'"]+("|')([^'"]+)("|');/);
+
+      // Return the item between the quotes.
+      return matches[2];
+    });
+
+    return imports;
+  }
+}
diff --git a/profiler.js b/profiler.js
index 1e3f8e0..0093c23 100644
--- a/profiler.js
+++ b/profiler.js
@@ -4,9 +4,9 @@
 var path = require("path");
 var async = require("async");
 var fs = require("fs");
-var SolidityParser = require("solidity-parser");
 var Graph = require("graphlib").Graph;
 var isAcyclic = require("graphlib/lib/alg").isAcyclic;
+var Parser = require("./parser");
 var CompileError = require("./compileerror");
 var expect = require("truffle-expect");
 var find_contracts = require("truffle-contract-sources");
@@ -30,43 +30,131 @@ module.exports = {
       }
     }
 
-    getFiles(function(err, files) {
-      async.map(files, fs.stat, function(err, file_stats) {
-        if (err) return callback(err);
-
-        var contracts = files.map(function(expected_source_file) {
-          var resolved = null;
-          try {
-            resolved = options.resolver.require(expected_source_file);
-          } catch (e) {
-            // do nothing; warning, this could squelch real errors
-            if (e.message.indexOf("Could not find artifacts for") != 0) throw e;
-          }
-          return resolved;
-        });
+    var sourceFilesArtifacts = {};
+    var sourceFilesArtifactsUpdatedTimes = {};
 
-        var updated = [];
+    var updatedFiles = [];
 
-        for (var i = 0; i < contracts.length; i++) {
-          var file_stat = file_stats[i];
-          var contract = contracts[i];
+    async.series([
+      // Get all the source files and create an object out of them.
+      function(c) {
+        getFiles(function(err, files) {
+          if (err) return c(err);
+
+          // Use an object for O(1) access.
+          files.forEach(function(sourceFile) {
+            sourceFilesArtifacts[sourceFile] = [];
+          });
 
-          if (contract == null) {
-            updated.push(files[i]);
-            continue;
+          c();
+        })
+      },
+      // Get all the artifact files, and read them, parsing them as JSON
+      function(c) {
+        fs.readdir(build_directory, function(err, build_files) {
+          if (err) {
+            // The build directory may not always exist.
+            if (err.message.indexOf("ENOENT: no such file or directory") >= 0) {
+              // Ignore it.
+              build_files = [];
+            } else {
+              return c(err);
+            }
           }
 
-          var modified_time = (file_stat.mtime || file_stat.ctime).getTime();
+          build_files = build_files.filter(function(build_file) {
+            return path.extname(build_file) == ".json";
+          });
+
+          async.map(build_files, function(buildFile, finished) {
+            fs.readFile(path.join(build_directory, buildFile), "utf8", function(err, body) {
+              if (err) return finished(err);
+              finished(null, body);
+            });
+          }, function(err, jsonData) {
+            if (err) return c(err);
 
-          var built_time = contract.updated_at || 0;
+            try {
+              for (var i = 0; i < jsonData.length; i++) {
+                var data = JSON.parse(jsonData[i]);
+
+                // In case there are artifacts from other source locations.
+                if (sourceFilesArtifacts[data.sourcePath] == null) {
+                  sourceFilesArtifacts[data.sourcePath] = [];
+                }
+
+                sourceFilesArtifacts[data.sourcePath].push(data);
+              }
+            } catch (e) {
+              return c(e);
+            }
+
+            c();
+          });
+        });
+      },
+      function(c) {
+        // Get the minimum updated time for all of a source file's artifacts
+        // (note: one source file might have multiple artifacts).
+        Object.keys(sourceFilesArtifacts).forEach(function(sourceFile) {
+          var artifacts = sourceFilesArtifacts[sourceFile];
+
+          sourceFilesArtifactsUpdatedTimes[sourceFile] = artifacts.reduce(function(minimum, current) {
+            var updatedAt = new Date(current.updatedAt).getTime();
+
+            if (updatedAt < minimum) {
+              return updatedAt;
+            }
+            return minimum;
+          }, Number.MAX_SAFE_INTEGER);
 
-          if (modified_time > built_time) {
-            updated.push(files[i]);
+          // Empty array?
+          if (sourceFilesArtifactsUpdatedTimes[sourceFile] == Number.MAX_SAFE_INTEGER) {
+            sourceFilesArtifactsUpdatedTimes[sourceFile] = 0;
           }
-        }
+        });
 
-        callback(null, updated);
-      });
+        c();
+      },
+      // Stat all the source files, getting their updated times, and comparing them to
+      // the artifact updated times.
+      function(c) {
+        var sourceFiles = Object.keys(sourceFilesArtifacts);
+
+        async.map(sourceFiles, function(sourceFile, finished) {
+          fs.stat(sourceFile, function(err, stat) {
+            if (err) {
+              // Ignore it. This means the source file was removed
+              // but the artifact file possibly exists. Return null
+              // to signify that we should ignore it.
+              stat = null;
+            }
+            finished(null, stat);
+          });
+        }, function(err, sourceFileStats) {
+          if (err) return callback(err);
+
+          sourceFiles.forEach(function(sourceFile, index) {
+            var sourceFileStat = sourceFileStats[index];
+
+            // Ignore updating artifacts if source file has been removed.
+            if (sourceFileStat == null) {
+              return;
+            }
+
+            var artifactsUpdatedTime = sourceFilesArtifactsUpdatedTimes[sourceFile] || 0;
+            var sourceFileUpdatedTime = (sourceFileStat.mtime || sourceFileStat.ctime).getTime();
+
+            if (sourceFileUpdatedTime > artifactsUpdatedTime) {
+              updatedFiles.push(sourceFile);
+            }
+          });
+
+          c();
+        });
+      }
+    ], function(err) {
+      callback(err, updatedFiles);
     });
   },
 
@@ -197,7 +285,7 @@
         var imports;
         try {
-          imports = SolidityParser.parse(resolved_body, "imports");
+          imports = Parser.parseImports(resolved_body);
         } catch (e) {
           e.message = "Error parsing " + import_path + ": " + e.message;
           return finished(e);
         }
@@ -253,24 +341,18 @@
       fs.readFile(file, "utf8", function(err, body) {
         if (err) return reject(err);
 
-        var ast;
+        var output;
 
         try {
-          ast = SolidityParser.parse(body);
+          output = Parser.parse(body);
         } catch (e) {
           e.message = "Error parsing " + file + ": " + e.message;
           return reject(e);
         }
 
-        accept(ast);
+        accept(output.contracts);
       });
-    }).then(function(ast) {
-      var contract_names = ast.body.filter(function(toplevel_item) {
-        return toplevel_item.type == "ContractStatement" || toplevel_item.type == "LibraryStatement";
-      }).map(function(contract_statement) {
-        return contract_statement.name;
-      });
-
+    }).then(function(contract_names) {
       var returnVal = {};
 
       contract_names.forEach(function(contract_name) {
diff --git a/test/MyContract.sol b/test/MyContract.sol
new file mode 100644
index 0000000..05a5f91
--- /dev/null
+++ b/test/MyContract.sol
@@ -0,0 +1,18 @@
+pragma solidity ^0.4.15;
+
+import "./Dependency.sol";
+import "./path/to/AnotherDep.sol";
+import "../../../path/to/AnotherDep.sol";
+import "ethpmpackage/Contract.sol";
+
+contract MyContract {
+
+}
+
+library SomeLibrary {
+
+}
+
+interface SomeInterface {
+
+}
\ No newline at end of file
diff --git a/test/ShouldError.sol b/test/ShouldError.sol
new file mode 100644
index 0000000..a7257e4
--- /dev/null
+++ b/test/ShouldError.sol
@@ -0,0 +1,5 @@
+paragma solidity ^0.4.0;
+
+contract Error {
+
+}
\ No newline at end of file
diff --git a/test/test_parser.js b/test/test_parser.js
new file mode 100644
index 0000000..fe18440
--- /dev/null
+++ b/test/test_parser.js
@@ -0,0 +1,72 @@
+var fs = require("fs");
+var path = require("path");
+var Parser = require("../parser");
+var assert = require("assert");
+
+describe("Parser", function() {
+  var source = null;
+  var erroneousSource = null;
+
+  before("get code", function() {
+    source = fs.readFileSync(path.join(__dirname, "MyContract.sol"), "utf-8");
+    erroneousSource = fs.readFileSync(path.join(__dirname, "ShouldError.sol"), "utf-8");
+  });
+
+  it("should return correct imports", function() {
+    var imports = Parser.parseImports(source);
+
+    // Note that this test is important because certain parts of the solidity
+    // output cuts off path prefixes like "./" and "../../../". If we get the
+    // imports list incorrectly, we'll have collisions.
+    var expected = [
+      './Dependency.sol',
+      './path/to/AnotherDep.sol',
+      '../../../path/to/AnotherDep.sol',
+      'ethpmpackage/Contract.sol'
+    ];
+
+    assert.deepEqual(imports, expected)
+  });
+
+  it("should throw an error when parsing imports if there's an actual parse error", function() {
+    var error = null;
+    try {
+      Parser.parseImports(erroneousSource);
+    } catch(e) {
+      error = e;
+    }
+
+    if (!error) {
+      throw new Error("Expected a parse error but didn't get one!");
+    }
+
+    assert(error.message.indexOf("Expected pragma, import directive or contract") >= 0);
+  });
+
+  it("should return a full AST when parsed, even when dependencies don't exist", function() {
+    this.timeout(4000);
+
+    var output = Parser.parse(source);
+
+    assert.deepEqual(output.contracts, ["MyContract", "SomeInterface", "SomeLibrary"]);
+    assert(output.ast.nodes.length > 0);
+
+    // The above assert means we at least got some kind of AST.
+    // Is there something we specifically need here?
+  });
+
+  it("should throw an error when parsing completely if there's an actual parse error", function() {
+    var error = null;
+    try {
+      Parser.parse(erroneousSource);
+    } catch(e) {
+      error = e;
+    }
+
+    if (!error) {
+      throw new Error("Expected a parse error but didn't get one!");
+    }
+
+    assert(error.message.indexOf("Expected pragma, import directive or contract") >= 0);
+  });
+});
\ No newline at end of file