diff --git a/.circleci/config.yml b/.circleci/config.yml index c2e6caef..a5265445 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -2,33 +2,180 @@ workflows: version: 2.1 node-multi-build: jobs: - - node-v4 - - node-v6 - - node-v8 + - check-coding-style - node-v10 - - node-v12: + - node-v12 + - node-v14 + - node-v16: run_coveralls: true + - node-v18 + - hardhat-core-default-solc + - hardhat-core-latest-solc + - hardhat-sample-project + - truffle-sample-project + - cli-smoke-test + - solidity-solcjs-ext-test version: 2.1 + +orbs: + shellcheck: circleci/shellcheck@volatile + +commands: + show-npm-version: + steps: + - run: + name: Versions + command: npm version + + update-npm: + steps: + - run: + name: Update globally available npm to the latest version + # Note: We need npm >= 8.3 which supports 'overrides' in package.json + command: npm install npm --global + + install-dependencies: + parameters: + cache-id: + type: string + path: + type: string + default: . + package-manager: + type: string + default: npm + dependency-file: + type: string + default: package.json + steps: + - restore_cache: + key: <>-dependency-cache-v2-{{ .Environment.CIRCLE_JOB }}-{{ checksum "<>/<>" }} + - run: + name: "<> install in <>" + command: | + cd "<>" + [[ -e node_modules/ ]] || <> install + - save_cache: + key: <>-dependency-cache-v2-{{ .Environment.CIRCLE_JOB }}-{{ checksum "<>/<>" }} + paths: + - "<>/node_modules/" + + install-truffle-dependencies: + steps: + - run: + name: Store current Truffle commit ID in a variable + command: | + cd truffle/ + echo "export _TRUFFLE_COMMIT_ID=$(git rev-parse --verify HEAD)" >> $BASH_ENV + - restore_cache: + key: truffle-dependency-cache-{{ checksum "truffle/yarn.lock" }}-{{ .Environment._TRUFFLE_COMMIT_ID }} + - run: + name: yarn install in truffle + command: | + cd truffle/ + [[ -e node_modules/ ]] || yarn install + - save_cache: + key: truffle-dependency-cache-{{ checksum "truffle/yarn.lock" }}-{{ .Environment._TRUFFLE_COMMIT_ID }} + paths: + - truffle/ + + inject-solc-js-tarball: + description: "Recursively finds and replaces all instances of solc-js module installed in node_modules/ with the one from a tarball." + parameters: + path: + type: string + default: . + tarball-path: + type: string + default: solc-js.tgz + package-manager: + type: enum + enum: ["npm", "yarn"] + default: npm + steps: + - run: + name: "Sanity check: tarball exists and the target dir contains a JS project" + command: | + [[ -f "<>" ]] + [[ -f "<>/package.json" ]] + - run: + name: Inject solc-js from the tarball into dependencies at <> + command: | + absolute_tarball_path=$(realpath "<>") + cd "<>" + mv package.json original-package.json + # NOTE: The 'overrides' feature requires npm >= 8.3. Yarn requires `resolutions` instead. + jq ". + {overrides: {solc: \"${absolute_tarball_path}\"}} + {resolutions: {solc: \"${absolute_tarball_path}\"}}" original-package.json > package.json + "<>" install + - run: + name: "Sanity check: all transitive dependencies successfully replaced with the tarball" + command: | + solc_version=$(jq --raw-output .version solc-js/package.json) + cd "<>" + if "<>" list --pattern solc | grep 'solc@' | grep -v "solc@${solc_version}"; then + echo "Another version of solc-js is still present in the dependency tree." + exit 1 + fi + + provision-and-package-solcjs: + description: "Creates a package out of latest solc-js to test its installation as a dependency." 
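The `inject-solc-js-tarball` command above does its work through npm's `overrides` field (npm >= 8.3) and yarn's `resolutions` field, both pointed at the freshly packed tarball so that every transitive `solc` dependency resolves to it. As a rough illustration of the `package.json` rewrite that the `jq` step performs, here is a minimal Node.js sketch; the function name and the example arguments are illustrative, not part of the CI config.

```javascript
// Sketch of the package.json rewrite done by `inject-solc-js-tarball`: set both
// npm's `overrides` (npm >= 8.3) and yarn's `resolutions` to a local solc-js tarball.
const fs = require('fs');
const path = require('path');

function injectSolcTarball (projectDir, tarballPath) {
  const manifestPath = path.join(projectDir, 'package.json');
  const manifest = JSON.parse(fs.readFileSync(manifestPath, 'utf8'));
  const absoluteTarball = path.resolve(tarballPath);

  // Mirrors the `jq ". + {overrides: ...} + {resolutions: ...}"` expression:
  // both fields are set wholesale, replacing any previous value.
  manifest.overrides = { solc: absoluteTarball };   // honoured by npm >= 8.3
  manifest.resolutions = { solc: absoluteTarball }; // honoured by yarn

  fs.writeFileSync(manifestPath, JSON.stringify(manifest, null, 2) + '\n');
}

// Example: force a Hardhat checkout to use the locally packed compiler.
injectSolcTarball('hardhat', 'solc-js.tgz');
```

After the rewrite a plain `npm install` or `yarn install` picks up the tarball, which is what the follow-up sanity check verifies by listing the installed `solc` versions.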
+ steps: + - checkout: + path: solc-js/ + - install-dependencies: + cache-id: solc-js + path: solc-js + - run: + name: Package solc-js + command: | + cd solc-js/ + npm run build:tarball + mv "$(npm run --silent tarballName)" ../solc-js.tgz + + provision-hardhat-with-packaged-solcjs: + description: "Clones Hardhat repository and configures it to use a local clone of solc-js." + steps: + - run: git clone --depth 1 "https://github.com/nomiclabs/hardhat" hardhat/ + - install-dependencies: + cache-id: hardhat + path: hardhat + package-manager: yarn + dependency-file: yarn.lock + - inject-solc-js-tarball: + path: hardhat/ + package-manager: yarn + + provision-truffle-with-packaged-solcjs: + description: "Clones Truffle repository and configures it to use a local clone of solc-js." + steps: + - run: git clone --depth 1 "https://github.com/trufflesuite/truffle" truffle/ + - install-truffle-dependencies + - inject-solc-js-tarball: + path: truffle/ + package-manager: yarn + jobs: node-base: &node-base working_directory: ~/solc-js docker: - - image: circleci/node + - image: cimg/node:current parameters: run_coveralls: type: boolean default: false steps: - - run: - name: Versions - command: npm version + # We want the default npm here. Older one might not work with older node.js + - show-npm-version - checkout - - restore_cache: - key: dependency-cache-{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }} + - install-dependencies: + cache-id: solc-js - run: name: install-npm command: npm install + - run: + name: updateBinary + command: npm run updateBinary - run: name: test command: npm run test @@ -38,28 +185,213 @@ jobs: - run: name: coveralls command: npm run coveralls - - save_cache: - key: dependency-cache-{{ .Environment.CIRCLE_JOB }}-{{ checksum "package.json" }} - paths: - - ./node_modules - node-v4: + check-coding-style: + docker: + - image: cimg/node:current + steps: + - show-npm-version + - checkout + - shellcheck/install + - install-dependencies: + cache-id: solc-js + - run: + name: Check for javascript/typescript coding style + command: npm run lint + - shellcheck/check: + ignore-dirs: | + ./.git + ./node_modules + ./dist + + hardhat-core-default-solc: + # Runs out of memory on 'medium'. + resource_class: medium+ + docker: + - image: cimg/node:16.15 + steps: + - show-npm-version + - provision-and-package-solcjs + - provision-hardhat-with-packaged-solcjs + - run: + name: Restore the default solc binary expected by Hardhat + command: | + # Hardhat downloader tests are hard-coded to expect the version that comes with the solc-js. + # We forced latest solc-js but we still want the default binary with it. + hardhat_default_solc_version=$(jq --raw-output '.dependencies.solc' hardhat/packages/hardhat-core/package.json) + mkdir hardhat-default-solc/ + pushd hardhat-default-solc/ + npm install "solc@${hardhat_default_solc_version}" + popd + ln -sf ../../../hardhat-default-solc/node_modules/solc/soljson.js hardhat/node_modules/solc/soljson.js + - run: + name: Run hardhat-core test suite with its default solc binary + command: | + cd hardhat/packages/hardhat-core + # TODO: yarn build should not be needed to run these tests. Remove it. + # See https://github.com/NomicFoundation/hardhat/issues/2486 for details. 
+ yarn build + yarn test + + hardhat-core-latest-solc: + docker: + - image: cimg/node:16.15 + steps: + - show-npm-version + - provision-and-package-solcjs + - provision-hardhat-with-packaged-solcjs + - run: + name: Run hardhat-core test suite with latest solc + command: | + HARDHAT_TESTS_SOLC_PATH="${PWD}/solc-js/soljson.js" + HARDHAT_TESTS_SOLC_VERSION=$(jq --raw-output .version solc-js/package.json) + export HARDHAT_TESTS_SOLC_PATH HARDHAT_TESTS_SOLC_VERSION + + cd hardhat/packages/hardhat-core + yarn test + + hardhat-sample-project: + docker: + - image: cimg/node:16.15 + steps: + - show-npm-version + - provision-and-package-solcjs + - run: git clone --depth 1 "https://github.com/nomiclabs/hardhat-hackathon-boilerplate" boilerplate/ + - run: + # Leaving package-lock.json causes a weird error in arborist when npm is used again after + # `npm install`: 'The "from" argument must be of type string. Received undefined' + name: Neutralize package-lock.json + command: rm boilerplate/package-lock.json + - install-dependencies: + cache-id: hardhat-hackathon-boilerplate + path: boilerplate + - run: + name: Update to the latest Hardhat release + command: | + # We can just use a release here because injection does not require rebuilding it. + cd boilerplate/ + npm update hardhat + - inject-solc-js-tarball: + path: boilerplate/ + - run: + name: Configure the boilerplate project to force Hardhat not to use a native binary + command: | + solc_version=$(jq --raw-output .version solc-js/package.json) + + cd boilerplate/ + + sed -i 's|pragma solidity [^;]\+;|pragma solidity *;|g' contracts/Token.sol + + { + echo "const {TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD} = require('hardhat/builtin-tasks/task-names');" + echo "const assert = require('assert');" + echo + echo "subtask(TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD, async (args, hre, runSuper) => {" + echo " assert(args.solcVersion == '${solc_version}', 'Unexpected solc version: ' + args.solcVersion);" + echo " return {" + echo " compilerPath: '$(realpath "../solc-js/soljson.js")'," + echo " isSolcJs: true," + echo " version: args.solcVersion," + echo " longVersion: args.solcVersion" + echo " };" + echo "})" + echo "module.exports = {solidity: '${solc_version}'};" + } >> hardhat.config.js + - run: + name: Build and test the boilerplate project with local Hardhat + command: | + cd boilerplate/ + npm run test + + truffle-sample-project: + docker: + - image: cimg/node:16.15 + steps: + - update-npm + - show-npm-version + - provision-and-package-solcjs + - run: sudo apt update + - run: sudo apt install python3 python-is-python3 --assume-yes --no-install-recommends + - provision-truffle-with-packaged-solcjs + - run: + name: Unbox MetaCoin + command: | + mkdir metacoin/ + cd metacoin/ + node ../truffle/node_modules/.bin/truffle unbox metacoin + - run: + name: Strip version pragmas + command: sed -i 's|pragma solidity [^;]\+;|pragma solidity *;|g' $(find metacoin/{contracts,test}/ -name "*.sol") + - run: + name: Build and test the sample project with local Truffle and its default solc + command: | + cd metacoin/ + node ../truffle/node_modules/.bin/truffle test + - run: + name: Build and test the sample project with local Truffle and latest solc + command: | + cd metacoin/ + # `truffle test` compiles the project but artifacts go into /tmp/ + ! 
[[ -e build/ ]] + echo "module.exports['compilers'] = {solc: {version: '$(realpath ../truffle/node_modules/solc/)'}}" >> truffle-config.js + node ../truffle/node_modules/.bin/truffle test + + cli-smoke-test: + docker: + - image: cimg/node:current + steps: + - show-npm-version + - provision-and-package-solcjs + - run: + name: "CLI smoke test (repository)" + command: | + cd solc-js + dist/solc.js --version + + echo "contract C {}" > C.sol + dist/solc.js C.sol --bin + [[ -f C_sol_C.bin ]] + - run: + name: "CLI smoke test (package)" + command: | + mkdir package/ + cd package/ + npm install ../solc-js.tgz + + npx solcjs --version + + echo "contract C {}" > C.sol + npx solcjs C.sol --bin + [[ -f C_sol_C.bin ]] + + solidity-solcjs-ext-test: + docker: + - image: circleci/node + steps: + - show-npm-version + - checkout: + path: solc-js/ + - run: git clone --depth 1 "https://github.com/ethereum/solidity" solidity/ + - run: cd solidity/ && curl "https://binaries.soliditylang.org/bin/soljson-nightly.js" --location --output soljson.js + - run: cd solidity/ && test/externalTests/solc-js/solc-js.sh "$(realpath soljson.js)" "$(scripts/get_version.sh)" "$(realpath ../solc-js/)" + + node-v10: <<: *node-base docker: - - image: circleci/node:4 - node-v6: + - image: cimg/node:10.24 + node-v12: <<: *node-base docker: - - image: circleci/node:6 - node-v8: + - image: cimg/node:12.22 + node-v14: <<: *node-base docker: - - image: circleci/node:8 - node-v10: + - image: cimg/node:14.19 + node-v16: <<: *node-base docker: - - image: circleci/node:10 - node-v12: + - image: cimg/node:16.15 + node-v18: <<: *node-base docker: - - image: circleci/node:12 + - image: cimg/node:18.3 diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 00000000..f8e27e4e --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,22 @@ +module.exports = { + env: { + browser: true, + es2021: true, + node: true + }, + extends: [ + 'standard' + ], + parser: '@typescript-eslint/parser', + plugins: [ + '@typescript-eslint' + ], + parserOptions: { + ecmaVersion: 12, + sourceType: 'module' + }, + rules: { + semi: ['error', 'always'] + }, + ignorePatterns: ['dist', 'soljson.js'] +}; diff --git a/.gitignore b/.gitignore index 08761698..416b19a5 100644 --- a/.gitignore +++ b/.gitignore @@ -39,3 +39,7 @@ bin out *.bin *.abi + +dist/** + +.nyc_output diff --git a/.nycrc b/.nycrc new file mode 100644 index 00000000..2edd3399 --- /dev/null +++ b/.nycrc @@ -0,0 +1,17 @@ +{ + "exclude": [ + "coverage", + "dist/soljson.js", + "**/test/**" + ], + "extensions": [ + ".js" + ], + "report-dir": "./coverage", + "reporter": [ + "lcov", + "html", + "text-summary" + ], + "temp-directory": "./coverage/.nyc_output" +} diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 9f749089..00000000 --- a/.travis.yml +++ /dev/null @@ -1,29 +0,0 @@ -language: node_js - -branches: - # We need to whitelist the branches which we want to have "push" automation. - # Pull request automation is not constrained to this set of branches. 
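The `hardhat-sample-project` job above assembles its `hardhat.config.js` from `echo` statements. Written out as a file, the configuration it generates looks roughly like the sketch below; the version string and the `soljson.js` path stand in for the values the job computes at runtime.

```javascript
// Approximate hardhat.config.js generated by the hardhat-sample-project job: it
// overrides the TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD subtask so that Hardhat
// compiles with the locally built soljson.js instead of downloading a binary.
// (`subtask` is made available globally by Hardhat when it loads the config.)
const { TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD } = require('hardhat/builtin-tasks/task-names');
const assert = require('assert');

const solcVersion = '0.8.17';                      // placeholder: version of the packaged solc-js
const soljsonPath = '/path/to/solc-js/soljson.js'; // placeholder: realpath of the local soljson.js

subtask(TASK_COMPILE_SOLIDITY_GET_SOLC_BUILD, async (args, hre, runSuper) => {
  assert(args.solcVersion === solcVersion, 'Unexpected solc version: ' + args.solcVersion);
  return {
    compilerPath: soljsonPath, // use the local compiler module
    isSolcJs: true,            // this is solc-js, not a native binary
    version: args.solcVersion,
    longVersion: args.solcVersion
  };
});

module.exports = { solidity: solcVersion };
```

The `truffle-sample-project` job achieves the same effect by appending a `compilers.solc.version` entry to `truffle-config.js` that points at the local `solc` module directory.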
- only: - - master -matrix: - fast_finish: true - include: - - os: linux - node_js: '4' - env: CXX=g++-4.8 TEST_SUITE=test - - os: linux - node_js: '6' - env: CXX=g++-4.8 TEST_SUITE=test - - os: linux - node_js: '8' - env: CXX=g++-4.8 TEST_SUITE=test - - os: linux - node_js: '10' - env: CXX=g++-4.8 TEST_SUITE=test - - os: linux - node_js: '12' - env: CXX=g++-4.8 TEST_SUITE=test - - os: linux - node_js: '12' - env: CXX=g++-4.8 TEST_SUITE=coveralls -script: npm run $TEST_SUITE diff --git a/README.md b/README.md index eddc2c2a..b1378a57 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,3 @@ -[![Build Status](https://img.shields.io/travis/ethereum/solc-js.svg?branch=master&style=flat-square)](https://travis-ci.org/ethereum/solc-js) [![CircleCI](https://img.shields.io/circleci/project/github/ethereum/solc-js/master.svg?style=flat-square)](https://circleci.com/gh/ethereum/solc-js/tree/master) [![Coverage Status](https://img.shields.io/coveralls/ethereum/solc-js.svg?style=flat-square)](https://coveralls.io/r/ethereum/solc-js) @@ -26,12 +25,24 @@ To see all the supported features, execute: solcjs --help ``` +To compile a contract that imports other contracts via relative paths: +```bash +solcjs --bin --include-path node_modules/ --base-path . MainContract.sol +``` +Use the ``--base-path`` and ``--include-path`` options to describe the layout of your project. +``--base-path`` represents the root of your own source tree while ``--include-path`` allows you to +specify extra locations containing external code (e.g. libraries installed with a package manager). + +Note: ensure that all the files you specify on the command line are located inside the base path or +one of the include paths. +The compiler refers to files from outside of these directories using absolute paths. +Having absolute paths in contract metadata will result in your bytecode being reproducible only +when it's placed in these exact absolute locations. + Note: this commandline interface is not compatible with `solc` provided by the Solidity compiler package and thus cannot be used in combination with an Ethereum client via the `eth.compile.solidity()` RPC method. Please refer to the [Solidity compiler documentation](https://solidity.readthedocs.io/) for instructions to install `solc`. Furthermore, the commandline interface to solc-js provides fewer features than the binary release. -One of the missing features is automatic loading of files from the filesystem if they are not explicitly -mentioned on the command line. ### Usage in Projects @@ -44,13 +55,14 @@ There are two ways to use `solc`: The high-level API consists of a single method, `compile`, which expects the [Compiler Standard Input and Output JSON](https://solidity.readthedocs.io/en/v0.5.0/using-the-compiler.html#compiler-input-and-output-json-description). -It also accepts an optional callback function to resolve unmet dependencies. This callback receives a path and must synchronously return either an error or the content of the dependency as a string. -It cannot be used together with callback-based, asynchronous, filesystem access. A workaround is to collect the names of dependencies, return an error, and keep re-running the compiler until all -of them are resolved. +It also accepts an optional set of callback functions, which include the ``import`` and the ``smtSolver`` callbacks. +Starting 0.6.0 it only accepts an object in place of the callback to supply the callbacks. 
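The re-run workaround described in this section (collect the unresolved imports, then invoke the compiler again) can be sketched as follows for callers whose file access is asynchronous. The error-message pattern used to spot missing sources and the `loadSourceAsync` helper are assumptions made for the example, not part of the solc-js API.

```javascript
// Sketch of the "collect and re-run" workaround: compile without an import callback,
// gather the sources the compiler reports as missing, load them asynchronously,
// and compile again until nothing is missing.
const solc = require('solc');

// Hypothetical asynchronous loader supplied by the caller (network, database, ...).
async function loadSourceAsync (importPath) {
  return 'library L { function f() internal pure returns (uint) { return 7; } }';
}

async function compileWithAsyncImports (input) {
  const sources = { ...input.sources };

  for (;;) {
    const output = JSON.parse(solc.compile(JSON.stringify({ ...input, sources })));

    // Detect unresolved imports. The message format ('Source "x" not found: ...')
    // is an assumption about the compiler's current error text.
    const missing = [];
    for (const error of output.errors || []) {
      const match = /Source "([^"]+)" not found/.exec(error.message || '');
      if (match && !(match[1] in sources)) missing.push(match[1]);
    }

    if (missing.length === 0) return output;

    for (const importPath of missing) {
      sources[importPath] = { content: await loadSourceAsync(importPath) };
    }
  }
}
```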
-Starting 0.5.12 it also accepts an object in place of the callback to supply different kind of callbacks, however only file imports are supported. - -_Note_: as an intermittent backwards compatibility feature, between versions 0.5.0 and 0.5.2, `compileStandard` and `compileStandardWrapper` also exists and behave like `compile` does. +The ``import`` callback function is used to resolve unmet dependencies. +This callback receives a path and must synchronously return either an error or the content of the dependency +as a string. It cannot be used together with callback-based, asynchronous, +filesystem access. A workaround is to collect the names of dependencies, return +an error, and keep re-running the compiler until all of them are resolved. #### Example usage without the import callback @@ -117,10 +129,7 @@ function findImports(path) { else return { error: 'File not found' }; } -// Current syntax -var output = JSON.parse(solc.compile(JSON.stringify(input), findImports)); - -// New syntax (supported from 0.5.12) +// New syntax (supported from 0.5.12, mandatory from 0.6.0) var output = JSON.parse( solc.compile(JSON.stringify(input), { import: findImports }) ); @@ -135,6 +144,55 @@ for (var contractName in output.contracts['test.sol']) { } ``` +Since version 0.5.1, the ``smtSolver`` callback function is used to solve SMT queries generated by +Solidity's SMTChecker. If you have an SMT solver installed locally, it can +be used to solve the given queries, where the callback must synchronously +return either an error or the result from the solver. A default +``smtSolver`` callback is included in this package via the module +``smtchecker.js`` which exports the ``smtCallback`` function that takes 1) a +function that takes queries and returns the solving result, and 2) a solver +configuration object. The module ``smtsolver.js`` has a few predefined solver +configurations, and relies on Z3, Eldarica or CVC4 being installed locally. It +exports the list of locally found solvers and a function that invokes a given +solver. + +The API of the SMT callback is **experimental** and can change at any time. +The last change was in version 0.8.11. + +#### Example usage with smtSolver callback + +```javascript +var solc = require('solc'); +const smtchecker = require('solc/smtchecker'); +const smtsolver = require('solc/smtsolver'); +// Note that this example only works via node and not in the browser. + +var input = { + language: 'Solidity', + sources: { + 'test.sol': { + content: 'contract C { function f(uint x) public { assert(x > 0); } }' + } + }, + settings: { + modelChecker: { + engine: "chc", + solvers: [ "smtlib2" ] + } + } +}; + +var output = JSON.parse( + solc.compile( + JSON.stringify(input), + { smtSolver: smtchecker.smtCallback(smtsolver.smtSolver, smtsolver.availableSolvers[0]) } + ) +); + +``` +The assertion is clearly false, and an ``assertion failure`` warning +should be returned, together with a counterexample. + #### Low-level API The low-level API is as follows: @@ -250,11 +308,11 @@ Add the version of `solc` you want to use into `index.html`: ```html ``` -(Alternatively use `https://solc-bin.ethereum.org/bin/soljson-latest.js` to get the latests version.) +(Alternatively use `https://binaries.soliditylang.org/bin/soljson-latest.js` to get the latests version.) This will load `solc` into the global variable `window.Module`. 
Then use this inside Javascript as: @@ -275,7 +333,7 @@ Alternatively, to iterate the releases, one can load `list.js` from `solc-bin`: ```html ``` diff --git a/abi.js b/abi.ts similarity index 79% rename from abi.js rename to abi.ts index d0ca6a4c..ebe6a8ce 100644 --- a/abi.js +++ b/abi.ts @@ -1,11 +1,11 @@ -var semver = require('semver'); +import * as semver from 'semver'; function update (compilerVersion, abi) { - var hasConstructor = false; - var hasFallback = false; + let hasConstructor = false; + let hasFallback = false; - for (var i = 0; i < abi.length; i++) { - var item = abi[i]; + for (let i = 0; i < abi.length; i++) { + const item = abi[i]; if (item.type === 'constructor') { hasConstructor = true; @@ -19,8 +19,8 @@ function update (compilerVersion, abi) { } if (item.type !== 'event') { - // add 'payable' to everything - if (semver.lt(compilerVersion, '0.4.0')) { + // add 'payable' to everything, except constant functions + if (!item.constant && semver.lt(compilerVersion, '0.4.0')) { item.payable = true; } @@ -58,6 +58,6 @@ function update (compilerVersion, abi) { return abi; } -module.exports = { - update: update +export = { + update }; diff --git a/bindings/compile.ts b/bindings/compile.ts new file mode 100644 index 00000000..5f3a2d54 --- /dev/null +++ b/bindings/compile.ts @@ -0,0 +1,232 @@ +import assert from 'assert'; + +import { isNil } from '../common/helpers'; +import { bindSolcMethod } from './helpers'; + +export function setupCompile (solJson, core) { + return { + compileJson: bindCompileJson(solJson), + compileJsonCallback: bindCompileJsonCallback(solJson, core), + compileJsonMulti: bindCompileJsonMulti(solJson), + compileStandard: bindCompileStandard(solJson, core) + }; +} + +/********************** + * COMPILE + **********************/ + +/** + * Returns a binding to the solidity compileJSON method. + * input (text), optimize (bool) -> output (jsontext) + * + * @param solJson The Emscripten compiled Solidity object. + */ +function bindCompileJson (solJson) { + return bindSolcMethod( + solJson, + 'compileJSON', + 'string', + ['string', 'number'], + null + ); +} + +/** + * Returns a binding to the solidity compileJSONMulti method. + * input (jsontext), optimize (bool) -> output (jsontext) + * + * @param solJson The Emscripten compiled Solidity object. + */ +function bindCompileJsonMulti (solJson) { + return bindSolcMethod( + solJson, + 'compileJSONMulti', + 'string', + ['string', 'number'], + null + ); +} + +/** + * Returns a binding to the solidity compileJSONCallback method. + * input (jsontext), optimize (bool), callback (ptr) -> output (jsontext) + * + * @param solJson The Emscripten compiled Solidity object. + * @param coreBindings The core bound Solidity methods. + */ +function bindCompileJsonCallback (solJson, coreBindings) { + const compileInternal = bindSolcMethod( + solJson, + 'compileJSONCallback', + 'string', + ['string', 'number', 'number'], + null + ); + + if (isNil(compileInternal)) return null; + + return function (input, optimize, readCallback) { + return runWithCallbacks(solJson, coreBindings, readCallback, compileInternal, [input, optimize]); + }; +} + +/** + * Returns a binding to the solidity solidity_compile method with a fallback to + * compileStandard. + * input (jsontext), callback (optional >= v6 only - ptr) -> output (jsontext) + * + * @param solJson The Emscripten compiled Solidity object. + * @param coreBindings The core bound Solidity methods. 
+ */ +function bindCompileStandard (solJson, coreBindings) { + let boundFunctionStandard: any = null; + let boundFunctionSolidity: any = null; + + // input (jsontext), callback (ptr) -> output (jsontext) + const compileInternal = bindSolcMethod( + solJson, + 'compileStandard', + 'string', + ['string', 'number'], + null + ); + + if (coreBindings.isVersion6OrNewer) { + // input (jsontext), callback (ptr), callback_context (ptr) -> output (jsontext) + boundFunctionSolidity = bindSolcMethod( + solJson, + 'solidity_compile', + 'string', + ['string', 'number', 'number'], + null + ); + } else { + // input (jsontext), callback (ptr) -> output (jsontext) + boundFunctionSolidity = bindSolcMethod( + solJson, + 'solidity_compile', + 'string', + ['string', 'number'], + null + ); + } + + if (!isNil(compileInternal)) { + boundFunctionStandard = function (input, readCallback) { + return runWithCallbacks(solJson, coreBindings, readCallback, compileInternal, [input]); + }; + } + + if (!isNil(boundFunctionSolidity)) { + boundFunctionStandard = function (input, callbacks) { + return runWithCallbacks(solJson, coreBindings, callbacks, boundFunctionSolidity, [input]); + }; + } + + return boundFunctionStandard; +} + +/********************** + * CALL BACKS + **********************/ + +function wrapCallback (coreBindings, callback) { + assert(typeof callback === 'function', 'Invalid callback specified.'); + + return function (data, contents, error) { + const result = callback(coreBindings.copyFromCString(data)); + if (typeof result.contents === 'string') { + coreBindings.copyToCString(result.contents, contents); + } + if (typeof result.error === 'string') { + coreBindings.copyToCString(result.error, error); + } + }; +} + +function wrapCallbackWithKind (coreBindings, callback) { + assert(typeof callback === 'function', 'Invalid callback specified.'); + + return function (context, kind, data, contents, error) { + // Must be a null pointer. + assert(context === 0, 'Callback context must be null.'); + const result = callback(coreBindings.copyFromCString(kind), coreBindings.copyFromCString(data)); + if (typeof result.contents === 'string') { + coreBindings.copyToCString(result.contents, contents); + } + if (typeof result.error === 'string') { + coreBindings.copyToCString(result.error, error); + } + }; +} + +// calls compile() with args || cb +function runWithCallbacks (solJson, coreBindings, callbacks, compile, args) { + if (callbacks) { + assert(typeof callbacks === 'object', 'Invalid callback object specified.'); + } else { + callbacks = {}; + } + + let readCallback = callbacks.import; + if (readCallback === undefined) { + readCallback = function (data) { + return { + error: 'File import callback not supported' + }; + }; + } + + let singleCallback; + if (coreBindings.isVersion6OrNewer) { + // After 0.6.x multiple kind of callbacks are supported. + let smtSolverCallback = callbacks.smtSolver; + if (smtSolverCallback === undefined) { + smtSolverCallback = function (data) { + return { + error: 'SMT solver callback not supported' + }; + }; + } + + singleCallback = function (kind, data) { + if (kind === 'source') { + return readCallback(data); + } else if (kind === 'smt-query') { + return smtSolverCallback(data); + } else { + assert(false, 'Invalid callback kind specified.'); + } + }; + + singleCallback = wrapCallbackWithKind(coreBindings, singleCallback); + } else { + // Old Solidity version only supported imports. 
+ singleCallback = wrapCallback(coreBindings, readCallback); + } + + const cb = coreBindings.addFunction(singleCallback, 'viiiii'); + let output; + try { + args.push(cb); + if (coreBindings.isVersion6OrNewer) { + // Callback context. + args.push(null); + } + + output = compile(...args); + } finally { + coreBindings.removeFunction(cb); + } + + if (coreBindings.reset) { + // Explicitly free memory. + // + // NOTE: cwrap() of "compile" will copy the returned pointer into a + // Javascript string and it is not possible to call free() on it. + // reset() however will clear up all allocations. + coreBindings.reset(); + } + return output; +} diff --git a/bindings/core.ts b/bindings/core.ts new file mode 100644 index 00000000..674fb20e --- /dev/null +++ b/bindings/core.ts @@ -0,0 +1,161 @@ +import { bindSolcMethod, bindSolcMethodWithFallbackFunc } from './helpers'; +import translate from '../translate'; +import * as semver from 'semver'; +import { isNil } from '../common/helpers'; + +export function setupCore (solJson) { + const core = { + alloc: bindAlloc(solJson), + license: bindLicense(solJson), + version: bindVersion(solJson), + reset: bindReset(solJson) + }; + + const helpers = { + addFunction: unboundAddFunction.bind(this, solJson), + removeFunction: unboundRemoveFunction.bind(this, solJson), + + copyFromCString: unboundCopyFromCString.bind(this, solJson), + copyToCString: unboundCopyToCString.bind(this, solJson, core.alloc), + + // @ts-ignore + versionToSemver: versionToSemver(core.version()) + }; + + return { + ...core, + ...helpers, + + isVersion6OrNewer: semver.gt(helpers.versionToSemver(), '0.5.99') + }; +} + +/********************** + * Core Functions + **********************/ + +/** + * Returns a binding to the solidity_alloc function. + * + * @param solJson The Emscripten compiled Solidity object. + */ +function bindAlloc (solJson) { + const allocBinding = bindSolcMethod( + solJson, + 'solidity_alloc', + 'number', + ['number'], + null + ); + + // the fallback malloc is not a cwrap function and should just be returned + // directly in-case the alloc binding could not happen. + if (isNil(allocBinding)) { + return solJson._malloc; + } + + return allocBinding; +} + +/** + * Returns a binding to the solidity_version method. + * + * @param solJson The Emscripten compiled Solidity object. + */ +function bindVersion (solJson) { + return bindSolcMethodWithFallbackFunc( + solJson, + 'solidity_version', + 'string', + [], + 'version' + ); +} + +function versionToSemver (version) { + return translate.versionToSemver.bind(this, version); +} + +/** + * Returns a binding to the solidity_license method. + * + * If the current solJson version < 0.4.14 then this will bind an empty function. + * + * @param solJson The Emscripten compiled Solidity object. + */ +function bindLicense (solJson) { + return bindSolcMethodWithFallbackFunc( + solJson, + 'solidity_license', + 'string', + [], + 'license', + () => { + } + ); +} + +/** + * Returns a binding to the solidity_reset method. + * + * @param solJson The Emscripten compiled Solidity object. + */ +function bindReset (solJson) { + return bindSolcMethod( + solJson, + 'solidity_reset', + null, + [], + null + ); +} + +/********************** + * Helpers Functions + **********************/ + +/** + * Copy to a C string. + * + * Allocates memory using solc's allocator. + * + * Before 0.6.0: + * Assuming copyToCString is only used in the context of wrapCallback, solc will free these pointers. 
+ * See https://github.com/ethereum/solidity/blob/v0.5.13/libsolc/libsolc.h#L37-L40 + * + * After 0.6.0: + * The duty is on solc-js to free these pointers. We accomplish that by calling `reset` at the end. + * + * @param solJson The Emscripten compiled Solidity object. + * @param alloc The memory allocation function. + * @param str The source string being copied to a C string. + * @param ptr The pointer location where the C string will be set. + */ +function unboundCopyToCString (solJson, alloc, str, ptr) { + const length = solJson.lengthBytesUTF8(str); + + const buffer = alloc(length + 1); + + solJson.stringToUTF8(str, buffer, length + 1); + solJson.setValue(ptr, buffer, '*'); +} + +/** + * Wrapper over Emscripten's C String copying function (which can be different + * on different versions). + * + * @param solJson The Emscripten compiled Solidity object. + * @param ptr The pointer location where the C string will be referenced. + */ +function unboundCopyFromCString (solJson, ptr) { + const copyFromCString = solJson.UTF8ToString || solJson.Pointer_stringify; + return copyFromCString(ptr); +} + +function unboundAddFunction (solJson, func, signature?) { + return (solJson.addFunction || solJson.Runtime.addFunction)(func, signature); +} + +function unboundRemoveFunction (solJson, ptr) { + return (solJson.removeFunction || solJson.Runtime.removeFunction)(ptr); +} diff --git a/bindings/helpers.ts b/bindings/helpers.ts new file mode 100644 index 00000000..b3e6ade9 --- /dev/null +++ b/bindings/helpers.ts @@ -0,0 +1,36 @@ +import { isNil } from '../common/helpers'; + +export function bindSolcMethod (solJson, method, returnType, args, defaultValue) { + if (isNil(solJson[`_${method}`]) && defaultValue !== undefined) { + return defaultValue; + } + + return solJson.cwrap(method, returnType, args); +} + +export function bindSolcMethodWithFallbackFunc (solJson, method, returnType, args, fallbackMethod, finalFallback = undefined) { + const methodFunc = bindSolcMethod(solJson, method, returnType, args, null); + + if (!isNil(methodFunc)) { + return methodFunc; + } + + return bindSolcMethod(solJson, fallbackMethod, returnType, args, finalFallback); +} + +export function getSupportedMethods (solJson) { + return { + licenseSupported: anyMethodExists(solJson, 'solidity_license'), + versionSupported: anyMethodExists(solJson, 'solidity_version'), + allocSupported: anyMethodExists(solJson, 'solidity_alloc'), + resetSupported: anyMethodExists(solJson, 'solidity_reset'), + compileJsonSupported: anyMethodExists(solJson, 'compileJSON'), + compileJsonMultiSupported: anyMethodExists(solJson, 'compileJSONMulti'), + compileJsonCallbackSuppported: anyMethodExists(solJson, 'compileJSONCallback'), + compileJsonStandardSupported: anyMethodExists(solJson, 'compileStandard', 'solidity_compile') + }; +} + +function anyMethodExists (solJson, ...names) { + return names.some(name => !isNil(solJson[`_${name}`])); +} diff --git a/bindings/index.ts b/bindings/index.ts new file mode 100644 index 00000000..63d4c074 --- /dev/null +++ b/bindings/index.ts @@ -0,0 +1,15 @@ +import { setupCore } from './core'; +import { getSupportedMethods } from './helpers'; +import { setupCompile } from './compile'; + +export default function setupBindings (solJson) { + const coreBindings = setupCore(solJson); + const compileBindings = setupCompile(solJson, coreBindings); + const methodFlags = getSupportedMethods(solJson); + + return { + methodFlags, + coreBindings, + compileBindings + }; +} diff --git a/build/clean.js b/build/clean.js new file mode 
100644 index 00000000..84e91fa4 --- /dev/null +++ b/build/clean.js @@ -0,0 +1,8 @@ +const fs = require('fs'); +const path = require('path'); + +const distFolder = path.join(__dirname, 'dist'); + +if (fs.existsSync(distFolder)) { + fs.rmdirSync(distFolder); +} diff --git a/build/pack-publish-block.js b/build/pack-publish-block.js new file mode 100644 index 00000000..8ce0f6e4 --- /dev/null +++ b/build/pack-publish-block.js @@ -0,0 +1,9 @@ +// This is meant to run in a hook before npm pack. +// Reporting an error from the hook interrupts the command. +if (process.env.BYPASS_SAFETY_CHECK === 'false' || process.env.BYPASS_SAFETY_CHECK === undefined) { + console.error('Run `npm run build:tarball` or `npm run publish:tarball` to pack or publish the package'); + process.exit(1); +} else if (process.env.BYPASS_SAFETY_CHECK !== 'true') { + console.error('Invalid value of the BYPASS_SAFETY_CHECK variable. Must be "true", "false" or unset.'); + process.exit(1); +} diff --git a/build/postbuild.js b/build/postbuild.js new file mode 100644 index 00000000..7ea03dd3 --- /dev/null +++ b/build/postbuild.js @@ -0,0 +1,4 @@ +const fs = require('fs'); +const path = require('path'); + +fs.chmodSync(path.join(__dirname, '../dist', 'solc.js'), '755'); diff --git a/common/helpers.ts b/common/helpers.ts new file mode 100644 index 00000000..c21a91c6 --- /dev/null +++ b/common/helpers.ts @@ -0,0 +1,20 @@ +/** + * Returns true if and only if the value is null or undefined. + * + * @param value + */ +export function isNil (value: any): boolean { + // Uses == over === which compares both null and undefined. + return value == null; +} + +/** + * Returns true if and only if the value is an object and not an array. + * + * @param value + */ +export function isObject (value: any): boolean { + // typeof [] will result in an 'object' so this additionally uses Array.isArray + // to confirm it's just an object. + return typeof value === 'object' && !Array.isArray(value); +} diff --git a/common/types.ts b/common/types.ts new file mode 100644 index 00000000..254484a1 --- /dev/null +++ b/common/types.ts @@ -0,0 +1,26 @@ +/** + * A mapping between libraries and the addresses to which they were deployed. + * + * Containing support for two level configuration, These two level + * configurations can be seen below. + * + * { + * "lib.sol:L1": "0x...", + * "lib.sol:L2": "0x...", + * "lib.sol": {"L3": "0x..."} + * } + */ +export interface LibraryAddresses { + [qualifiedNameOrSourceUnit: string]: string | { [unqualifiedLibraryName: string]: string }; +} + +/** + * A mapping between libraries and lists of placeholder instances present in their hex-encoded bytecode. + * For each placeholder its length and the position of the first character is stored. + * + * Each start and length entry will always directly refer to the position in + * binary and not hex-encoded bytecode. + */ +export interface LinkReferences { + [libraryLabel: string]: Array<{ start: number, length: number }>; +} diff --git a/downloadCurrentVersion.js b/downloadCurrentVersion.ts similarity index 64% rename from downloadCurrentVersion.js rename to downloadCurrentVersion.ts index ee49b4f8..6b524a49 100755 --- a/downloadCurrentVersion.js +++ b/downloadCurrentVersion.ts @@ -3,17 +3,17 @@ // This is used to download the correct binary version // as part of the prepublish step. 
-var pkg = require('./package.json'); -var fs = require('fs'); -var https = require('https'); -var MemoryStream = require('memorystream'); -var keccak256 = require('js-sha3').keccak256; +import * as fs from 'fs'; +import { https } from 'follow-redirects'; +import MemoryStream from 'memorystream'; +import { keccak256 } from 'js-sha3'; +const pkg = require('./package.json'); function getVersionList (cb) { console.log('Retrieving available version list...'); - var mem = new MemoryStream(null, { readable: false }); - https.get('https://ethereum.github.io/solc-bin/bin/list.json', function (response) { + const mem = new MemoryStream(null, { readable: false }); + https.get('https://binaries.soliditylang.org/bin/list.json', function (response) { if (response.statusCode !== 200) { console.log('Error downloading file: ' + response.statusCode); process.exit(1); @@ -39,8 +39,8 @@ function downloadBinary (outputName, version, expectedHash) { process.exit(1); }); - var file = fs.createWriteStream(outputName, { encoding: 'binary' }); - https.get('https://ethereum.github.io/solc-bin/bin/' + version, function (response) { + const file = fs.createWriteStream(outputName, { encoding: 'binary' }); + https.get('https://binaries.soliditylang.org/bin/' + version, function (response) { if (response.statusCode !== 200) { console.log('Error downloading file: ' + response.statusCode); process.exit(1); @@ -48,7 +48,7 @@ function downloadBinary (outputName, version, expectedHash) { response.pipe(file); file.on('finish', function () { file.close(function () { - var hash = '0x' + keccak256(fs.readFileSync(outputName, { encoding: 'binary' })); + const hash = '0x' + keccak256(fs.readFileSync(outputName, { encoding: 'binary' })); if (expectedHash !== hash) { console.log('Hash mismatch: ' + expectedHash + ' vs ' + hash); process.exit(1); @@ -63,13 +63,13 @@ console.log('Downloading correct solidity binary...'); getVersionList(function (list) { list = JSON.parse(list); - var wanted = pkg.version.match(/^(\d+\.\d+\.\d+)$/)[1]; - var releaseFileName = list.releases[wanted]; - var expectedFile = list.builds.filter(function (entry) { return entry.path === releaseFileName; })[0]; + const wanted = pkg.version.match(/^(\d+\.\d+\.\d+)$/)[1]; + const releaseFileName = list.releases[wanted]; + const expectedFile = list.builds.filter(function (entry) { return entry.path === releaseFileName; })[0]; if (!expectedFile) { console.log('Version list is invalid or corrupted?'); process.exit(1); } - var expectedHash = expectedFile.keccak256; + const expectedHash = expectedFile.keccak256; downloadBinary('soljson.js', releaseFileName, expectedHash); }); diff --git a/formatters.ts b/formatters.ts new file mode 100644 index 00000000..4ad82028 --- /dev/null +++ b/formatters.ts @@ -0,0 +1,13 @@ +export function formatFatalError (message) { + return JSON.stringify({ + errors: [ + { + type: 'JSONError', + component: 'solcjs', + severity: 'error', + message: message, + formattedMessage: 'Error: ' + message + } + ] + }); +} diff --git a/index.js b/index.js deleted file mode 100644 index 5925da69..00000000 --- a/index.js +++ /dev/null @@ -1,3 +0,0 @@ -var wrapper = require('./wrapper.js'); - -module.exports = wrapper(require('./soljson.js')); diff --git a/index.ts b/index.ts new file mode 100644 index 00000000..80eb230a --- /dev/null +++ b/index.ts @@ -0,0 +1,4 @@ +import wrapper from './wrapper'; + +const soljson = require('./soljson.js'); +export = wrapper(soljson); diff --git a/linker.js b/linker.js deleted file mode 100644 index 249c76c9..00000000 --- 
a/linker.js +++ /dev/null @@ -1,109 +0,0 @@ -var keccak256 = require('js-sha3').keccak256; - -function libraryHashPlaceholder (input) { - return '$' + keccak256(input).slice(0, 34) + '$'; -} - -var linkBytecode = function (bytecode, libraries, linkReferences) { - if (typeof linkReferences === typeof '' || linkReferences === null || linkReferences === undefined) { - linkReferences = findLinkReferences(bytecode); - } - - // NOTE: for backwards compatibility support old compiler which didn't use file names - var librariesComplete = {}; - for (var libraryName in libraries) { - if (typeof libraries[libraryName] === 'object') { - // API compatible with the standard JSON i/o - for (var lib in libraries[libraryName]) { - librariesComplete[lib] = libraries[libraryName][lib]; - librariesComplete[libraryName + ':' + lib] = libraries[libraryName][lib]; - } - } else { - // backwards compatible API for early solc-js versions - var parsed = libraryName.match(/^([^:]+):(.+)$/); - if (parsed) { - librariesComplete[parsed[2]] = libraries[libraryName]; - } - librariesComplete[libraryName] = libraries[libraryName]; - } - } - - for (libraryName in librariesComplete) { - var hexAddress = librariesComplete[libraryName]; - if (hexAddress.slice(0, 2) !== '0x' || hexAddress.length > 42) { - throw new Error('Invalid address specified for ' + libraryName); - } - // remove 0x prefix - hexAddress = hexAddress.slice(2); - hexAddress = Array(40 - hexAddress.length + 1).join('0') + hexAddress; - - // Support old (library name) and new (hash of library name) - // placeholders. - var findAndReplace = function (name) { - // truncate to 37 characters - var truncatedName = name.slice(0, 36); - var libLabel = '__' + truncatedName + Array(37 - truncatedName.length).join('_') + '__'; - while (bytecode.indexOf(libLabel) >= 0) { - bytecode = bytecode.replace(libLabel, hexAddress); - } - }; - - var replace = function (name) { - // truncate to 37 characters - var truncatedName = name.slice(0, 36); - if (linkReferences && linkReferences[truncatedName]) { - linkReferences[truncatedName].forEach(function (reference) { - var start = reference.start * 2; - var end = (reference.start + reference.length) * 2; - bytecode = bytecode.slice(0, start) + hexAddress + bytecode.slice(end); - }); - } else { - // manually find and replace if link reference is not present - findAndReplace(name); - } - }; - - replace(libraryName); - replace(libraryHashPlaceholder(libraryName)); - } - - return bytecode; -}; - -var findLinkReferences = function (bytecode) { - // find 40 bytes in the pattern of __...<36 digits>...__ - // e.g. 
__Lib.sol:L_____________________________ - var linkReferences = {}; - var offset = 0; - while (true) { - var found = bytecode.match(/__(.{36})__/); - if (!found) { - break; - } - - var start = found.index; - // trim trailing underscores - // NOTE: this has no way of knowing if the trailing underscore was part of the name - var libraryName = found[1].replace(/_+$/gm, ''); - - if (!linkReferences[libraryName]) { - linkReferences[libraryName] = []; - } - - linkReferences[libraryName].push({ - // offsets are in bytes in binary representation (and not hex) - start: (offset + start) / 2, - length: 20 - }); - - offset += start + 20; - - bytecode = bytecode.slice(start + 20); - } - return linkReferences; -}; - -module.exports = { - linkBytecode: linkBytecode, - findLinkReferences: findLinkReferences -}; diff --git a/linker.ts b/linker.ts new file mode 100644 index 00000000..619cae57 --- /dev/null +++ b/linker.ts @@ -0,0 +1,181 @@ +import assert from 'assert'; +import { keccak256 } from 'js-sha3'; +import { isNil, isObject } from './common/helpers'; +import { LibraryAddresses, LinkReferences } from './common/types'; + +/** + * Generates a new-style library placeholder from a fully-qualified library name. + * + * Newer versions of the compiler use hashed names instead of just truncating the name + * before putting it in a placeholder. + * + * @param fullyQualifiedLibraryName Fully qualified library name. + */ +function libraryHashPlaceholder (fullyQualifiedLibraryName) { + return `$${keccak256(fullyQualifiedLibraryName).slice(0, 34)}$`; +} + +/** + * Finds all placeholders corresponding to the specified library label and replaces them + * with a concrete address. Works with both hex-encoded and binary bytecode as long as + * the address is in the same format. + * + * @param bytecode Bytecode string. + * + * @param label Library label, either old- or new-style. Must exactly match the part between `__` markers in the + * placeholders. Will be padded with `_` characters if too short or truncated if too long. + * + * @param address Address to replace placeholders with. Must be the right length. + * It will **not** be padded with zeros if too short. + */ +function replacePlaceholder (bytecode, label, address) { + // truncate to 36 characters + const truncatedName = label.slice(0, 36); + const libLabel = `__${truncatedName.padEnd(36, '_')}__`; + + while (bytecode.indexOf(libLabel) >= 0) { + bytecode = bytecode.replace(libLabel, address); + } + + return bytecode; +} + +/** + * Finds and all library placeholders in the provided bytecode and replaces them with actual addresses. + * Supports both old- and new-style placeholders (even both in the same file). + * See [Library Linking](https://docs.soliditylang.org/en/latest/using-the-compiler.html#library-linking) + * for a full explanation of the linking process. + * + * Example of a legacy placeholder: `__lib.sol:L_____________________________` + * Example of a new-style placeholder: `__$cb901161e812ceb78cfe30ca65050c4337$__` + * + * @param bytecode Hex-encoded bytecode string. All 40-byte substrings starting and ending with + * `__` will be interpreted as placeholders. + * + * @param libraries Mapping between fully qualified library names and the hex-encoded + * addresses they should be replaced with. Addresses shorter than 40 characters are automatically padded with zeros. + * + * @returns bytecode Hex-encoded bytecode string with placeholders replaced with addresses. 
+ * Note that some placeholders may remain in the bytecode if `libraries` does not provide addresses for all of them. + */ +function linkBytecode (bytecode: string, libraries: LibraryAddresses): string { + assert(typeof bytecode === 'string'); + assert(typeof libraries === 'object'); + + // NOTE: for backwards compatibility support old compiler which didn't use file names + const librariesComplete: { [fullyQualifiedLibraryName: string]: string } = {}; + + for (const [fullyQualifiedLibraryName, libraryObjectOrAddress] of Object.entries(libraries)) { + if (isNil(libraryObjectOrAddress)) { + throw new Error(`No address provided for library ${fullyQualifiedLibraryName}`); + } + + // API compatible with the standard JSON i/o + // {"lib.sol": {"L": "0x..."}} + if (isObject(libraryObjectOrAddress)) { + for (const [unqualifiedLibraryName, address] of Object.entries(libraryObjectOrAddress)) { + librariesComplete[unqualifiedLibraryName] = address; + librariesComplete[`${fullyQualifiedLibraryName}:${unqualifiedLibraryName}`] = address; + } + + continue; + } + + // backwards compatible API for early solc-js versions + const parsed = fullyQualifiedLibraryName.match(/^(?[^:]+):(?.+)$/); + const libraryAddress = libraryObjectOrAddress as string; + + if (!isNil(parsed)) { + const { unqualifiedLibraryName } = parsed.groups; + librariesComplete[unqualifiedLibraryName] = libraryAddress; + } + + librariesComplete[fullyQualifiedLibraryName] = libraryAddress; + } + + for (const libraryName in librariesComplete) { + let hexAddress = librariesComplete[libraryName]; + + if (!hexAddress.startsWith('0x') || hexAddress.length > 42) { + throw new Error(`Invalid address specified for ${libraryName}`); + } + + // remove 0x prefix + hexAddress = hexAddress.slice(2).padStart(40, '0'); + + bytecode = replacePlaceholder(bytecode, libraryName, hexAddress); + bytecode = replacePlaceholder(bytecode, libraryHashPlaceholder(libraryName), hexAddress); + } + + return bytecode; +} + +/** + * Finds locations of all library address placeholders in the hex-encoded bytecode. + * Returns information in a format matching `evm.bytecode.linkReferences` output + * in Standard JSON. + * + * See [Library Linking](https://docs.soliditylang.org/en/latest/using-the-compiler.html#library-linking) + * for a full explanation of library placeholders and linking process. + * + * WARNING: The output matches `evm.bytecode.linkReferences` exactly only in + * case of old-style placeholders created from fully qualified library names + * of no more than 36 characters, and even then only if the name does not start + * or end with an underscore. This is different from + * `evm.bytecode.linkReferences`, which uses fully qualified library names. + * This is a limitation of the placeholder format - the fully qualified names + * are not preserved in the compiled bytecode and cannot be reconstructed + * without external information. + * + * @param bytecode Hex-encoded bytecode string. + * + * @returns linkReferences A mapping between library labels and their locations + * in the bytecode. In case of old-style placeholders the label is a fully + * qualified library name truncated to 36 characters. For new-style placeholders + * it's the first 34 characters of the hex-encoded hash of the fully qualified + * library name, with a leading and trailing $ character added. Note that the + * offsets and lengths refer to the *binary* (not hex-encoded) bytecode, just + * like in `evm.bytecode.linkReferences`. 
+ */ +function findLinkReferences (bytecode: string): LinkReferences { + assert(typeof bytecode === 'string'); + + // find 40 bytes in the pattern of __...<36 digits>...__ + // e.g. __Lib.sol:L_____________________________ + const linkReferences: LinkReferences = {}; + + let offset = 0; + + while (true) { + const found = bytecode.match(/__(.{36})__/); + if (!found) { + break; + } + + const start = found.index; + + // trim trailing underscores + // NOTE: this has no way of knowing if the trailing underscore was part of the name + const libraryName = found[1].replace(/_+$/gm, ''); + + if (!linkReferences[libraryName]) { + linkReferences[libraryName] = []; + } + + // offsets are in bytes in binary representation (and not hex) + linkReferences[libraryName].push({ + start: (offset + start) / 2, + length: 20 + }); + + offset += start + 20; + bytecode = bytecode.slice(start + 20); + } + + return linkReferences; +} + +export = { + linkBytecode, + findLinkReferences +}; diff --git a/package.json b/package.json index 3d79502e..8a85ef3f 100644 --- a/package.json +++ b/package.json @@ -1,18 +1,26 @@ { "name": "solc", - "version": "0.5.13", + "version": "0.8.17", "description": "Solidity compiler", "main": "index.js", "bin": { - "solcjs": "solcjs" + "solcjs": "solc.js" }, "scripts": { - "lint": "node ./node_modules/semistandard/bin/cmd.js", - "prepublish": "node downloadCurrentVersion.js && node verifyVersion.js", - "pretest": "npm run lint", - "test": "tape ./test/index.js", - "coverage": "node ./node_modules/nyc/bin/nyc.js --reporter=lcov --reporter=text-summary ./node_modules/tape/bin/tape ./test/index.js", - "coveralls": "npm run coverage && node ./node_modules/coveralls/bin/coveralls.js =10.0.0" + }, "files": [ - "abi.js", - "index.js", - "linker.js", - "smtchecker.js", - "smtsolver.js", - "solcjs", - "soljson.js", - "translate.js", - "wrapper.js" + "common/*.js", + "bindings/*.js", + "*.js" ], "author": "chriseth", "license": "MIT", @@ -42,29 +47,36 @@ "homepage": "https://github.com/ethereum/solc-js#readme", "dependencies": { "command-exists": "^1.2.8", - "commander": "3.0.2", - "fs-extra": "^0.30.0", + "commander": "^8.1.0", + "follow-redirects": "^1.12.1", "js-sha3": "0.8.0", "memorystream": "^0.3.1", - "require-from-string": "^2.0.0", "semver": "^5.5.0", "tmp": "0.0.33" }, "devDependencies": { + "@types/node": "^16.11.7", + "@types/semver": "^7.3.9", + "@types/tape": "^4.13.2", + "@types/tmp": "^0.2.3", + "@typescript-eslint/eslint-plugin": "^5.8.0", + "@typescript-eslint/parser": "^5.8.0", "coveralls": "^3.0.0", - "nyc": "^14.1.0", - "semistandard": "^12.0.0", + "eslint": "^7.32.0", + "eslint-config-standard": "^16.0.3", + "eslint-plugin-import": "^2.25.3", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.1", + "nyc": "^15.1.0", "tape": "^4.11.0", - "tape-spawn": "^1.4.2" - }, - "semistandard": { - "ignore": [ - "soljson.js" - ] + "tape-spawn": "^1.4.2", + "ts-node": "^10.4.0", + "typescript": "^4.5.4" }, "nyc": { "exclude": [ - "soljson.js" + "soljson.js", + "dist" ] } } diff --git a/smtchecker.js b/smtchecker.ts similarity index 50% rename from smtchecker.js rename to smtchecker.ts index 2e279f6d..78e64248 100644 --- a/smtchecker.js +++ b/smtchecker.ts @@ -3,20 +3,20 @@ // The function runs an SMT solver on each query and adjusts the input for // another run. // Returns null if no solving is requested. 
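For reference, a short usage sketch of the linker API from the `linker.ts` module earlier in this patch, assuming it is imported as `require('solc/linker')` like the package's other helper modules; the bytecode and address below are fabricated.

```javascript
// Usage sketch for the linker: fill a library placeholder in unlinked bytecode and
// inspect where placeholders occur. The bytecode and address are made up.
const linker = require('solc/linker');

// Old-style placeholder: '__' + fully qualified name padded with '_' to 36 chars + '__'.
const placeholder = '__' + 'lib.sol:L'.padEnd(36, '_') + '__';
const unlinked = '6060604052' + placeholder + '602060405250';

// Both accepted shapes of the LibraryAddresses mapping resolve the same placeholder.
const flat = { 'lib.sol:L': '0x1234567890123456789012345678901234567890' };
const nested = { 'lib.sol': { L: '0x1234567890123456789012345678901234567890' } };
console.log(linker.linkBytecode(unlinked, flat) === linker.linkBytecode(unlinked, nested)); // true

// findLinkReferences reports offsets into the *binary* (not hex-encoded) bytecode,
// keyed by the label found between the '__' markers (trailing '_' trimmed).
console.log(linker.findLinkReferences(unlinked));
// { 'lib.sol:L': [ { start: 5, length: 20 } ] }
```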
-function handleSMTQueries (inputJSON, outputJSON, solver) { - var auxInputReq = outputJSON.auxiliaryInputRequested; +function handleSMTQueries (inputJSON: any, outputJSON: any, solverFunction: any, solver?: any) { + const auxInputReq = outputJSON.auxiliaryInputRequested; if (!auxInputReq) { return null; } - var queries = auxInputReq.smtlib2queries; + const queries = auxInputReq.smtlib2queries; if (!queries || Object.keys(queries).length === 0) { return null; } - var responses = {}; - for (var query in queries) { - responses[query] = solver(queries[query]); + const responses = {}; + for (const query in queries) { + responses[query] = solverFunction(queries[query], solver); } // Note: all existing solved queries are replaced. @@ -25,6 +25,18 @@ function handleSMTQueries (inputJSON, outputJSON, solver) { return inputJSON; } -module.exports = { - handleSMTQueries: handleSMTQueries +function smtCallback (solverFunction, solver?: any) { + return function (query) { + try { + const result = solverFunction(query, solver); + return { contents: result }; + } catch (err) { + return { error: err }; + } + }; +} + +export = { + handleSMTQueries, + smtCallback }; diff --git a/smtsolver.js b/smtsolver.js deleted file mode 100644 index a544dd0a..00000000 --- a/smtsolver.js +++ /dev/null @@ -1,59 +0,0 @@ -var commandExistsSync = require('command-exists').sync; -var execSync = require('child_process').execSync; -var fs = require('fs'); -var tmp = require('tmp'); - -const timeout = 10000; - -var potentialSolvers = [ - { - name: 'z3', - params: '-smt2 -t:' + timeout - }, - { - name: 'cvc4', - params: '--lang=smt2 --tlimit=' + timeout - } -]; -var solvers = potentialSolvers.filter(solver => commandExistsSync(solver.name)); - -function solve (query) { - if (solvers.length === 0) { - throw new Error('No SMT solver available. Assertion checking will not be performed.'); - } - - var tmpFile = tmp.fileSync({ postfix: '.smt2' }); - fs.writeFileSync(tmpFile.name, query); - // TODO For now only the first SMT solver found is used. - // At some point a computation similar to the one done in - // SMTPortfolio::check should be performed, where the results - // given by different solvers are compared and an error is - // reported if solvers disagree (i.e. SAT vs UNSAT). - var solverOutput; - try { - solverOutput = execSync( - solvers[0].name + ' ' + solvers[0].params + ' ' + tmpFile.name, { - timeout: 10000 - } - ).toString(); - } catch (e) { - // execSync throws if the process times out or returns != 0. - // The latter might happen with z3 if the query asks for a model - // for an UNSAT formula. We can still use stdout. - solverOutput = e.stdout.toString(); - if ( - !solverOutput.startsWith('sat') && - !solverOutput.startsWith('unsat') && - !solverOutput.startsWith('unknown') - ) { - throw new Error('Failed solve SMT query. ' + e.toString()); - } - } - // Trigger early manual cleanup - tmpFile.removeCallback(); - return solverOutput; -} - -module.exports = { - smtSolver: solve -}; diff --git a/smtsolver.ts b/smtsolver.ts new file mode 100644 index 00000000..9bd73b36 --- /dev/null +++ b/smtsolver.ts @@ -0,0 +1,73 @@ +import { sync as commandExistsSync } from 'command-exists'; +import { execSync } from 'child_process'; +import * as fs from 'fs'; +import * as tmp from 'tmp'; + +// Timeout in ms. 
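Besides wiring a solver into `solc.compile` through `smtchecker.smtCallback` (as in the README example added in this patch), the solver module can be exercised directly: `availableSolvers` lists only solvers that are actually installed, and `smtSolver` runs a raw SMT-LIB query through one of them. A small sketch with an illustrative query:

```javascript
// Sketch: pick an installed solver, if any, and run a raw SMT-LIB query through it.
const smtsolver = require('solc/smtsolver');

if (smtsolver.availableSolvers.length === 0) {
  console.log('No SMT solver installed; assertion checking would be skipped.');
} else {
  const solver = smtsolver.availableSolvers[0]; // z3, Eldarica or cvc4, whichever is found first
  const query = '(set-logic QF_UF) (declare-const p Bool) (assert (and p (not p))) (check-sat)';
  // z3 and cvc4 answer 'unsat' for this contradiction; Eldarica expects Horn clauses,
  // so the exact output depends on which solver is installed.
  console.log(solver.name, '=>', smtsolver.smtSolver(query, solver));
}
```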
+const timeout = 10000; + +const potentialSolvers = [ + { + name: 'z3', + command: 'z3', + params: '-smt2 rlimit=20000000 rewriter.pull_cheap_ite=true fp.spacer.q3.use_qgen=true fp.spacer.mbqi=false fp.spacer.ground_pobs=false' + }, + { + name: 'Eldarica', + command: 'eld', + params: '-horn -t:' + (timeout / 1000) // Eldarica takes timeout in seconds. + }, + { + name: 'cvc4', + command: 'cvc4', + params: '--lang=smt2 --tlimit=' + timeout + } +]; + +const solvers = potentialSolvers.filter(solver => commandExistsSync(solver.command)); + +function solve (query, solver) { + if (solver === undefined) { + if (solvers.length === 0) { + throw new Error('No SMT solver available. Assertion checking will not be performed.'); + } else { + solver = solvers[0]; + } + } + + const tmpFile = tmp.fileSync({ postfix: '.smt2' }); + fs.writeFileSync(tmpFile.name, query); + let solverOutput; + try { + solverOutput = execSync( + solver.command + ' ' + solver.params + ' ' + tmpFile.name, { + encoding: 'utf8', + maxBuffer: 1024 * 1024 * 1024, + stdio: 'pipe', + timeout: timeout // Enforce timeout on the process, since solvers can sometimes go around it. + } + ).toString(); + } catch (e) { + // execSync throws if the process times out or returns != 0. + // The latter might happen with z3 if the query asks for a model + // for an UNSAT formula. We can still use stdout. + solverOutput = e.stdout.toString(); + if ( + !solverOutput.startsWith('sat') && + !solverOutput.startsWith('unsat') && + !solverOutput.startsWith('unknown') && + !solverOutput.startsWith('(error') && // Eldarica reports errors in an sexpr, for example: '(error "Failed to reconstruct array model")' + !solverOutput.startsWith('error') + ) { + throw new Error('Failed to solve SMT query. ' + e.toString()); + } + } + // Trigger early manual cleanup + tmpFile.removeCallback(); + return solverOutput; +} + +export = { + smtSolver: solve, + availableSolvers: solvers +}; diff --git a/solc.ts b/solc.ts new file mode 100755 index 00000000..9f565883 --- /dev/null +++ b/solc.ts @@ -0,0 +1,258 @@ +#!/usr/bin/env node + +import * as commander from 'commander'; +import * as fs from 'fs'; +import * as os from 'os'; +import * as path from 'path'; +import solc from './index'; +import smtchecker from './smtchecker'; +import smtsolver from './smtsolver'; + +// hold on to any exception handlers that existed prior to this script running, we'll be adding them back at the end +const originalUncaughtExceptionListeners = process.listeners('uncaughtException'); +// FIXME: remove annoying exception catcher of Emscripten +// see https://github.com/chriseth/browser-solidity/issues/167 +process.removeAllListeners('uncaughtException'); + +const program: any = new commander.Command(); +const commanderParseInt = function (value) { + const parsedValue = parseInt(value, 10); + if (isNaN(parsedValue)) { + throw new commander.InvalidArgumentError('Not a valid integer.'); + } + return parsedValue; +}; + +program.name('solcjs'); +program.version(solc.version()); +program + .option('--version', 'Show version and exit.') + .option('--optimize', 'Enable bytecode optimizer.', false) + .option( + '--optimize-runs ', + 'The number of runs specifies roughly how often each opcode of the deployed code will be executed across the lifetime of the contract. 
' + + 'Lower values will optimize more for initial deployment cost, higher values will optimize more for high-frequency usage.', + commanderParseInt + ) + .option('--bin', 'Binary of the contracts in hex.') + .option('--abi', 'ABI of the contracts.') + .option('--standard-json', 'Turn on Standard JSON Input / Output mode.') + .option('--base-path ', 'Root of the project source tree. ' + + 'The import callback will attempt to interpret all import paths as relative to this directory.' + ) + .option('--include-path ', 'Extra source directories available to the import callback. ' + + 'When using a package manager to install libraries, use this option to specify directories where packages are installed. ' + + 'Can be used multiple times to provide multiple locations.' + ) + .option('-o, --output-dir ', 'Output directory for the contracts.') + .option('-p, --pretty-json', 'Pretty-print all JSON output.', false) + .option('-v, --verbose', 'More detailed console output.', false); + +program.parse(process.argv); +const options = program.opts(); + +const files = program.args; +const destination = options.outputDir || '.'; + +function abort (msg) { + console.error(msg || 'Error occured'); + process.exit(1); +} + +function readFileCallback (sourcePath) { + const prefixes = [options.basePath ? options.basePath : ''].concat( + options.includePath ? options.includePath : [] + ); + for (const prefix of prefixes) { + const prefixedSourcePath = (prefix ? prefix + '/' : '') + sourcePath; + + if (fs.existsSync(prefixedSourcePath)) { + try { + return { contents: fs.readFileSync(prefixedSourcePath).toString('utf8') }; + } catch (e) { + return { error: 'Error reading ' + prefixedSourcePath + ': ' + e }; + } + } + } + return { error: 'File not found inside the base path or any of the include paths.' }; +} + +function withUnixPathSeparators (filePath) { + // On UNIX-like systems forward slashes in paths are just a part of the file name. + if (os.platform() !== 'win32') { + return filePath; + } + + return filePath.replace(/\\/g, '/'); +} + +function makeSourcePathRelativeIfPossible (sourcePath) { + const absoluteBasePath = (options.basePath ? path.resolve(options.basePath) : path.resolve('.')); + const absoluteIncludePaths = ( + options.includePath + ? options.includePath.map((prefix) => { return path.resolve(prefix); }) + : [] + ); + + // Compared to base path stripping logic in solc this is much simpler because path.resolve() + // handles symlinks correctly (does not resolve them except in work dir) and strips .. segments + // from paths going beyond root (e.g. `/../../a/b/c` -> `/a/b/c/`). It's simpler also because it + // ignores less important corner cases: drive letters are not stripped from absolute paths on + // Windows and UNC paths are not handled in a special way (at least on Linux). Finally, it has + // very little test coverage so there might be more differences that we are just not aware of. + const absoluteSourcePath = path.resolve(sourcePath); + + for (const absolutePrefix of [absoluteBasePath].concat(absoluteIncludePaths)) { + const relativeSourcePath = path.relative(absolutePrefix, absoluteSourcePath); + + if (!relativeSourcePath.startsWith('../')) { return withUnixPathSeparators(relativeSourcePath); } + } + + // File is not located inside base path or include paths so use its absolute path. + return withUnixPathSeparators(absoluteSourcePath); +} + +function toFormattedJson (input) { + return JSON.stringify(input, null, program.prettyJson ? 
4 : 0); +} + +function reformatJsonIfRequested (inputJson) { + return (program.prettyJson ? toFormattedJson(JSON.parse(inputJson)) : inputJson); +} + +let callbacks; +if (options.basePath || !options.standardJson) { callbacks = { import: readFileCallback }; } + +if (options.standardJson) { + const input = fs.readFileSync(process.stdin.fd).toString('utf8'); + if (program.verbose) { console.log('>>> Compiling:\n' + reformatJsonIfRequested(input) + '\n'); } + let output = reformatJsonIfRequested(solc.compile(input, callbacks)); + + try { + if (smtsolver.availableSolvers.length === 0) { + console.log('>>> Cannot retry compilation with SMT because there are no SMT solvers available.'); + } else { + const inputJSON = smtchecker.handleSMTQueries(JSON.parse(input), JSON.parse(output), smtsolver.smtSolver, smtsolver.availableSolvers[0]); + if (inputJSON) { + if (program.verbose) { console.log('>>> Retrying compilation with SMT:\n' + toFormattedJson(inputJSON) + '\n'); } + output = reformatJsonIfRequested(solc.compile(JSON.stringify(inputJSON), callbacks)); + } + } + } catch (e) { + const addError = { + component: 'general', + formattedMessage: e.toString(), + message: e.toString(), + type: 'Warning' + }; + + const outputJSON = JSON.parse(output); + if (!outputJSON.errors) { + outputJSON.errors = []; + } + outputJSON.errors.push(addError); + output = toFormattedJson(outputJSON); + } + + if (program.verbose) { console.log('>>> Compilation result:'); } + console.log(output); + process.exit(0); +} else if (files.length === 0) { + console.error('Must provide a file'); + process.exit(1); +} + +if (!(options.bin || options.abi)) { + abort('Invalid option selected, must specify either --bin or --abi'); +} + +if (!options.basePath && options.includePath && options.includePath.length > 0) { + abort('--include-path option requires a non-empty base path.'); +} + +if (options.includePath) { + for (const includePath of options.includePath) { + if (!includePath) { abort('Empty values are not allowed in --include-path.'); } + } +} + +const sources = {}; + +for (let i = 0; i < files.length; i++) { + try { + sources[makeSourcePathRelativeIfPossible(files[i])] = { + content: fs.readFileSync(files[i]).toString() + }; + } catch (e) { + abort('Error reading ' + files[i] + ': ' + e); + } +} + +const cliInput = { + language: 'Solidity', + settings: { + optimizer: { + enabled: options.optimize, + runs: options.optimizeRuns + }, + outputSelection: { + '*': { + '*': ['abi', 'evm.bytecode'] + } + } + }, + sources: sources +}; +if (program.verbose) { console.log('>>> Compiling:\n' + toFormattedJson(cliInput) + '\n'); } +const output = JSON.parse(solc.compile(JSON.stringify(cliInput), callbacks)); + +let hasError = false; + +if (!output) { + abort('No output from compiler'); +} else if (output.errors) { + for (const error in output.errors) { + const message = output.errors[error]; + if (message.severity === 'warning') { + console.log(message.formattedMessage); + } else { + console.error(message.formattedMessage); + hasError = true; + } + } +} + +fs.mkdirSync(destination, { recursive: true }); + +function writeFile (file, content) { + file = path.join(destination, file); + fs.writeFile(file, content, function (err) { + if (err) { + console.error('Failed to write ' + file + ': ' + err); + } + }); +} + +for (const fileName in output.contracts) { + for (const contractName in output.contracts[fileName]) { + let contractFileName = fileName + ':' + contractName; + contractFileName = contractFileName.replace(/[:./\\]/g, '_'); + + if 
(options.bin) { + writeFile(contractFileName + '.bin', output.contracts[fileName][contractName].evm.bytecode.object); + } + + if (options.abi) { + writeFile(contractFileName + '.abi', toFormattedJson(output.contracts[fileName][contractName].abi)); + } + } +} + +// Put back original exception handlers. +originalUncaughtExceptionListeners.forEach(function (listener) { + process.addListener('uncaughtException', listener); +}); + +if (hasError) { + process.exit(1); +} diff --git a/solcjs b/solcjs deleted file mode 100755 index a09ac370..00000000 --- a/solcjs +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/bin/env node - -// hold on to any exception handlers that existed prior to this script running, we'll be adding them back at the end -var originalUncaughtExceptionListeners = process.listeners("uncaughtException"); - -var fs = require('fs-extra'); -var path = require('path'); -var solc = require('./index.js'); -var smtchecker = require('./smtchecker.js'); -var smtsolver = require('./smtsolver.js'); -// FIXME: remove annoying exception catcher of Emscripten -// see https://github.com/chriseth/browser-solidity/issues/167 -process.removeAllListeners('uncaughtException'); -var commander = require('commander'); - -var program = new commander.Command(); -program.name('solcjs'); -program.version(solc.version()); -program - .option('--version', 'Show version and exit.') - .option('--optimize', 'Enable bytecode optimizer.') - .option('--bin', 'Binary of the contracts in hex.') - .option('--abi', 'ABI of the contracts.') - .option('--standard-json', 'Turn on Standard JSON Input / Output mode.') - .option('-o, --output-dir ', 'Output directory for the contracts.'); -program.parse(process.argv); - -var files = program.args; -var destination = program.outputDir || '.' - -function abort (msg) { - console.error(msg || 'Error occured'); - process.exit(1); -} - -if (program.standardJson) { - var input = fs.readFileSync(process.stdin.fd).toString('utf8'); - var output = solc.compile(input); - - try { - var inputJSON = smtchecker.handleSMTQueries(JSON.parse(input), JSON.parse(output), smtsolver.smtSolver); - if (inputJSON) { - output = solc.compile(JSON.stringify(inputJSON)); - } - } - catch (e) { - var addError = { - component: "general", - formattedMessage: e.toString(), - message: e.toString(), - type: "Warning" - }; - - var outputJSON = JSON.parse(output); - if (!outputJSON.errors) { - outputJSON.errors = [] - } - outputJSON.errors.push(addError); - output = JSON.stringify(outputJSON); - } - - console.log(output); - process.exit(0); -} else if (files.length === 0) { - console.error('Must provide a file'); - process.exit(1); -} - -if (!(program.bin || program.abi)) { - abort('Invalid option selected, must specify either --bin or --abi'); -} - -var sources = {}; - -for (var i = 0; i < files.length; i++) { - try { - sources[ files[i] ] = { content: fs.readFileSync(files[i]).toString() }; - } catch (e) { - abort('Error reading ' + files[i] + ': ' + e); - } -} - -var output = JSON.parse(solc.compile(JSON.stringify({ - language: 'Solidity', - settings: { - optimizer: { - enabled: program.optimize - }, - outputSelection: { - '*': { - '*': [ 'abi', 'evm.bytecode' ] - } - } - }, - sources: sources -}))); - -let hasError = false; - -if (!output) { - abort('No output from compiler'); -} else if (output['errors']) { - for (var error in output['errors']) { - var message = output['errors'][error] - if (message.severity === 'warning') { - console.log(message.formattedMessage) - } else { - console.error(message.formattedMessage) - 
hasError = true - } - } -} - -fs.ensureDirSync (destination); - -function writeFile (file, content) { - file = path.join(destination, file); - fs.writeFile(file, content, function (err) { - if (err) { - console.error('Failed to write ' + file + ': ' + err); - } - }); -} - -for (var fileName in output.contracts) { - for (var contractName in output.contracts[fileName]) { - var contractFileName = fileName + ':' + contractName; - contractFileName = contractFileName.replace(/[:./\\]/g, '_'); - - if (program.bin) { - writeFile(contractFileName + '.bin', output.contracts[fileName][contractName].evm.bytecode.object); - } - - if (program.abi) { - writeFile(contractFileName + '.abi', JSON.stringify(output.contracts[fileName][contractName].abi)); - } - } -} - -// Put back original exception handlers. -originalUncaughtExceptionListeners.forEach(function (listener) { - process.addListener('uncaughtException', listener); -}); - -if (hasError) { - process.exit(1); -} \ No newline at end of file diff --git a/test/abi.js b/test/abi.js deleted file mode 100644 index 90c05c95..00000000 --- a/test/abi.js +++ /dev/null @@ -1,58 +0,0 @@ -const tape = require('tape'); -const abi = require('../abi.js'); - -tape('ABI translator', function (t) { - t.test('Empty ABI', function (st) { - st.deepEqual(abi.update('0.4.0', []), []); - st.end(); - }); - t.test('0.1.1 (no constructor)', function (st) { - st.deepEqual(abi.update('0.1.1', []), [ { inputs: [], payable: true, stateMutability: 'payable', type: 'constructor' }, { payable: true, stateMutability: 'payable', type: 'fallback' } ]); - st.end(); - }); - t.test('0.3.6 (constructor)', function (st) { - var input = [ { inputs: [], type: 'constructor' } ]; - st.deepEqual(abi.update('0.3.6', input), [ { inputs: [], payable: true, stateMutability: 'payable', type: 'constructor' }, { payable: true, stateMutability: 'payable', type: 'fallback' } ]); - st.end(); - }); - t.test('0.3.6 (function)', function (st) { - var input = [ { inputs: [], type: 'function' } ]; - st.deepEqual(abi.update('0.3.6', input), [ { inputs: [], payable: true, stateMutability: 'payable', type: 'function' }, { payable: true, stateMutability: 'payable', type: 'fallback' } ]); - st.end(); - }); - t.test('0.3.6 (event)', function (st) { - var input = [ { inputs: [], type: 'event' } ]; - st.deepEqual(abi.update('0.3.6', input), [ { inputs: [], type: 'event' }, { payable: true, stateMutability: 'payable', type: 'fallback' } ]); - st.end(); - }); - t.test('0.3.6 (has no fallback)', function (st) { - var input = [ { inputs: [], type: 'constructor' } ]; - st.deepEqual(abi.update('0.3.6', input), [ { inputs: [], type: 'constructor', payable: true, stateMutability: 'payable' }, { type: 'fallback', payable: true, stateMutability: 'payable' } ]); - st.end(); - }); - t.test('0.4.0 (has fallback)', function (st) { - var input = [ { inputs: [], type: 'constructor' }, { type: 'fallback' } ]; - st.deepEqual(abi.update('0.4.0', input), [ { inputs: [], type: 'constructor', payable: true, stateMutability: 'payable' }, { type: 'fallback', stateMutability: 'nonpayable' } ]); - st.end(); - }); - t.test('0.4.0 (constant function)', function (st) { - var input = [ { inputs: [], type: 'function', constant: true } ]; - st.deepEqual(abi.update('0.4.0', input), [ { inputs: [], constant: true, stateMutability: 'view', type: 'function' } ]); - st.end(); - }); - t.test('0.4.1 (constructor not payable)', function (st) { - var input = [ { inputs: [], payable: false, type: 'constructor' } ]; - st.deepEqual(abi.update('0.4.1', input), [ 
{ inputs: [], payable: true, stateMutability: 'payable', type: 'constructor' } ]); - st.end(); - }); - t.test('0.4.5 (constructor payable)', function (st) { - var input = [ { inputs: [], payable: false, type: 'constructor' } ]; - st.deepEqual(abi.update('0.4.5', input), [ { inputs: [], payable: false, stateMutability: 'nonpayable', type: 'constructor' } ]); - st.end(); - }); - t.test('0.4.16 (statemutability)', function (st) { - var input = [ { inputs: [], payable: false, stateMutability: 'pure', type: 'function' } ]; - st.deepEqual(abi.update('0.4.16', input), [ { inputs: [], payable: false, stateMutability: 'pure', type: 'function' } ]); - st.end(); - }); -}); diff --git a/test/abi.ts b/test/abi.ts new file mode 100644 index 00000000..6328c904 --- /dev/null +++ b/test/abi.ts @@ -0,0 +1,73 @@ +import tape from 'tape'; +import abi from '../abi'; + +tape('ABI translator', function (t) { + t.test('Empty ABI', function (st) { + st.deepEqual(abi.update('0.4.0', []), []); + st.end(); + }); + t.test('0.1.1 (no constructor)', function (st) { + st.deepEqual(abi.update('0.1.1', []), [{ inputs: [], payable: true, stateMutability: 'payable', type: 'constructor' }, { payable: true, stateMutability: 'payable', type: 'fallback' }]); + st.end(); + }); + t.test('0.3.6 (constructor)', function (st) { + const input = [{ inputs: [], type: 'constructor' }]; + st.deepEqual(abi.update('0.3.6', input), [{ inputs: [], payable: true, stateMutability: 'payable', type: 'constructor' }, { payable: true, stateMutability: 'payable', type: 'fallback' }]); + st.end(); + }); + t.test('0.3.6 (non-constant function)', function (st) { + const input = [{ inputs: [], type: 'function' }]; + st.deepEqual(abi.update('0.3.6', input), [{ inputs: [], payable: true, stateMutability: 'payable', type: 'function' }, { payable: true, stateMutability: 'payable', type: 'fallback' }]); + st.end(); + }); + t.test('0.3.6 (constant function)', function (st) { + const input = [{ inputs: [], type: 'function', constant: true }]; + st.deepEqual(abi.update('0.3.6', input), [{ inputs: [], constant: true, stateMutability: 'view', type: 'function' }, { payable: true, stateMutability: 'payable', type: 'fallback' }]); + st.end(); + }); + t.test('0.3.6 (event)', function (st) { + const input = [{ inputs: [], type: 'event' }]; + st.deepEqual(abi.update('0.3.6', input), [{ inputs: [], type: 'event' }, { payable: true, stateMutability: 'payable', type: 'fallback' }]); + st.end(); + }); + t.test('0.3.6 (has no fallback)', function (st) { + const input = [{ inputs: [], type: 'constructor' }]; + st.deepEqual(abi.update('0.3.6', input), [{ inputs: [], type: 'constructor', payable: true, stateMutability: 'payable' }, { type: 'fallback', payable: true, stateMutability: 'payable' }]); + st.end(); + }); + t.test('0.4.0 (has fallback)', function (st) { + const input = [{ inputs: [], type: 'constructor' }, { type: 'fallback' }]; + st.deepEqual(abi.update('0.4.0', input), [{ inputs: [], type: 'constructor', payable: true, stateMutability: 'payable' }, { type: 'fallback', stateMutability: 'nonpayable' }]); + st.end(); + }); + t.test('0.4.0 (non-constant function)', function (st) { + const input = [{ inputs: [], type: 'function' }]; + st.deepEqual(abi.update('0.4.0', input), [{ inputs: [], stateMutability: 'nonpayable', type: 'function' }]); + st.end(); + }); + t.test('0.4.0 (constant function)', function (st) { + const input = [{ inputs: [], type: 'function', constant: true }]; + st.deepEqual(abi.update('0.4.0', input), [{ inputs: [], constant: true, stateMutability: 
'view', type: 'function' }]); + st.end(); + }); + t.test('0.4.0 (payable function)', function (st) { + const input = [{ inputs: [], payable: true, type: 'function' }]; + st.deepEqual(abi.update('0.4.0', input), [{ inputs: [], payable: true, stateMutability: 'payable', type: 'function' }]); + st.end(); + }); + t.test('0.4.1 (constructor not payable)', function (st) { + const input = [{ inputs: [], payable: false, type: 'constructor' }]; + st.deepEqual(abi.update('0.4.1', input), [{ inputs: [], payable: true, stateMutability: 'payable', type: 'constructor' }]); + st.end(); + }); + t.test('0.4.5 (constructor payable)', function (st) { + const input = [{ inputs: [], payable: false, type: 'constructor' }]; + st.deepEqual(abi.update('0.4.5', input), [{ inputs: [], payable: false, stateMutability: 'nonpayable', type: 'constructor' }]); + st.end(); + }); + t.test('0.4.16 (statemutability)', function (st) { + const input = [{ inputs: [], payable: false, stateMutability: 'pure', type: 'function' }]; + st.deepEqual(abi.update('0.4.16', input), [{ inputs: [], payable: false, stateMutability: 'pure', type: 'function' }]); + st.end(); + }); +}); diff --git a/test/cli.js b/test/cli.js deleted file mode 100644 index fb0fbd67..00000000 --- a/test/cli.js +++ /dev/null @@ -1,93 +0,0 @@ -const tape = require('tape'); -const spawn = require('tape-spawn'); -const pkg = require('../package.json'); - -tape('CLI', function (t) { - t.test('--version', function (st) { - var spt = spawn(st, './solcjs --version'); - spt.stdout.match(RegExp(pkg.version + '(-[^a-zA-A0-9.+]+)?(\\+[^a-zA-Z0-9.-]+)?')); - spt.stderr.empty(); - spt.end(); - }); - - t.test('no parameters', function (st) { - var spt = spawn(st, './solcjs'); - spt.stderr.match(/^Must provide a file/); - spt.end(); - }); - - t.test('no mode specified', function (st) { - var spt = spawn(st, './solcjs test/resources/fixtureSmoke.sol'); - spt.stderr.match(/^Invalid option selected/); - spt.end(); - }); - - t.test('--bin', function (st) { - var spt = spawn(st, './solcjs --bin test/resources/fixtureSmoke.sol'); - spt.stderr.empty(); - spt.succeeds(); - spt.end(); - }); - - t.test('--bin --optimize', function (st) { - var spt = spawn(st, './solcjs --bin --optimize test/resources/fixtureSmoke.sol'); - spt.stderr.empty(); - spt.succeeds(); - spt.end(); - }); - - t.test('invalid file specified', function (st) { - var spt = spawn(st, './solcjs --bin test/fileNotFound.sol'); - spt.stderr.match(/^Error reading /); - spt.end(); - }); - - t.test('incorrect source source', function (st) { - var spt = spawn(st, './solcjs --bin test/resources/fixtureIncorrectSource.sol'); - spt.stderr.match(/^test\/resources\/fixtureIncorrectSource.sol:1:1: SyntaxError: Invalid pragma "contract"/); - spt.end(); - }); - - t.test('--abi', function (st) { - var spt = spawn(st, './solcjs --abi test/resources/fixtureSmoke.sol'); - spt.stderr.empty(); - spt.succeeds(); - spt.end(); - }); - - t.test('--bin --abi', function (st) { - var spt = spawn(st, './solcjs --bin --abi test/resources/fixtureSmoke.sol'); - spt.stderr.empty(); - spt.succeeds(); - spt.end(); - }); - - t.test('standard json', function (st) { - var input = { - 'language': 'Solidity', - 'settings': { - 'outputSelection': { - '*': { - '*': [ 'evm.bytecode', 'userdoc' ] - } - } - }, - 'sources': { - 'Contract.sol': { - 'content': 'pragma solidity >=0.5.0; contract Contract { function f() pure public {} }' - } - } - }; - var spt = spawn(st, './solcjs --standard-json'); - spt.stdin.setEncoding('utf-8'); - 
spt.stdin.write(JSON.stringify(input)); - spt.stdin.end(); - spt.stdin.on('finish', function () { - spt.stderr.empty(); - spt.stdout.match(/Contract.sol/); - spt.stdout.match(/userdoc/); - spt.succeeds(); - spt.end(); - }); - }); -}); diff --git a/test/cli.ts b/test/cli.ts new file mode 100644 index 00000000..ab86e3f5 --- /dev/null +++ b/test/cli.ts @@ -0,0 +1,263 @@ +import tape from 'tape'; +import spawn from 'tape-spawn'; +import * as path from 'path'; +import solc from '../'; + +tape('CLI', function (t) { + t.test('--version', function (st) { + const spt = spawn(st, './solc.js --version'); + spt.stdout.match(solc.version() + '\n'); + spt.stdout.match(/^\s*[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9.]+)?\+commit\.[0-9a-f]+([a-zA-Z0-9.-]+)?\s*$/); + spt.stderr.empty(); + spt.end(); + }); + + t.test('no parameters', function (st) { + const spt = spawn(st, './solc.js'); + spt.stderr.match(/^Must provide a file/); + spt.end(); + }); + + t.test('no mode specified', function (st) { + const spt = spawn(st, './solc.js test/resources/fixtureSmoke.sol'); + spt.stderr.match(/^Invalid option selected/); + spt.end(); + }); + + t.test('--bin', function (st) { + const spt = spawn(st, './solc.js --bin test/resources/fixtureSmoke.sol'); + spt.stderr.empty(); + spt.succeeds(); + spt.end(); + }); + + t.test('--bin --optimize', function (st) { + const spt = spawn(st, './solc.js --bin --optimize test/resources/fixtureSmoke.sol'); + spt.stderr.empty(); + spt.succeeds(); + spt.end(); + }); + + t.test('--bin --optimize-runs 666', function (st) { + const spt = spawn(st, './solc.js --bin --optimize-runs 666 test/resources/fixtureSmoke.sol'); + spt.stderr.empty(); + spt.succeeds(); + spt.end(); + }); + + t.test('--bin --optimize-runs not-a-number', function (st) { + const spt = spawn(st, './solc.js --bin --optimize-runs not-a-number test/resources/fixtureSmoke.sol'); + spt.stderr.match(/^error: option '--optimize-runs ' argument 'not-a-number' is invalid/); + spt.end(); + }); + + t.test('invalid file specified', function (st) { + const spt = spawn(st, './solc.js --bin test/fileNotFound.sol'); + spt.stderr.match(/^Error reading /); + spt.end(); + }); + + t.test('incorrect source source', function (st) { + const spt = spawn(st, './solc.js --bin test/resources/fixtureIncorrectSource.sol'); + spt.stderr.match(/SyntaxError: Invalid pragma "contract"/); + spt.end(); + }); + + t.test('--abi', function (st) { + const spt = spawn(st, './solc.js --abi test/resources/fixtureSmoke.sol'); + spt.stderr.empty(); + spt.succeeds(); + spt.end(); + }); + + t.test('--bin --abi', function (st) { + const spt = spawn(st, './solc.js --bin --abi test/resources/fixtureSmoke.sol'); + spt.stderr.empty(); + spt.succeeds(); + spt.end(); + }); + + t.test('no base path', function (st) { + const spt = spawn( + st, + './solc.js --bin ' + + 'test/resources/importA.sol ' + + './test/resources//importA.sol ' + + path.resolve('test/resources/importA.sol') + ); + spt.stderr.empty(); + spt.succeeds(); + spt.end(); + }); + + t.test('relative base path', function (st) { + // NOTE: This and other base path tests rely on the relative ./importB.sol import in importA.sol. + // If base path is not stripped correctly from all source paths below, they will not be found + // by the import callback when it appends the base path back. 
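Aside, not part of the diff: the NOTE above concerns the base-path stripping that solc.ts performs before handing source names to the compiler. The sketch below condenses makeSourcePathRelativeIfPossible to its core so it is visible why all three spellings of importA.sol in the spawn call that follows end up as the same source name; the helper name and the simplifications (no include paths, separators always normalized) are mine.

import * as path from 'path';

// Minimal version of the stripping logic: resolve both paths, take the relative path,
// and fall back to the absolute path when the file lies outside the base path.
function stripBasePath (sourcePath: string, basePath: string): string {
  const relative = path.relative(path.resolve(basePath), path.resolve(sourcePath));
  return relative.startsWith('../') ? path.resolve(sourcePath) : relative.replace(/\\/g, '/');
}

stripBasePath('test/resources/importA.sol', 'test/resources');               // 'importA.sol'
stripBasePath('./test/resources//importA.sol', 'test/resources');            // 'importA.sol'
stripBasePath(path.resolve('test/resources/importA.sol'), 'test/resources'); // 'importA.sol'
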
+ const spt = spawn( + st, + './solc.js --bin --base-path test/resources ' + + 'test/resources/importA.sol ' + + './test/resources//importA.sol ' + + path.resolve('test/resources/importA.sol') + ); + spt.stderr.empty(); + spt.succeeds(); + spt.end(); + }); + + t.test('relative non canonical base path', function (st) { + const spt = spawn( + st, + './solc.js --bin --base-path ./test/resources ' + + 'test/resources/importA.sol ' + + './test/resources//importA.sol ' + + path.resolve('test/resources/importA.sol') + ); + spt.stderr.empty(); + spt.succeeds(); + spt.end(); + }); + + t.test('absolute base path', function (st) { + const spt = spawn( + st, + './solc.js --bin --base-path ' + path.resolve('test/resources') + ' ' + + 'test/resources/importA.sol ' + + './test/resources//importA.sol ' + + path.resolve('test/resources/importA.sol') + ); + spt.stderr.empty(); + spt.succeeds(); + spt.end(); + }); + + t.test('include paths', function (st) { + const spt = spawn( + st, + './solc.js --bin ' + + 'test/resources/importCallback/base/contractB.sol ' + + 'test/resources/importCallback/includeA/libY.sol ' + + './test/resources/importCallback/includeA//libY.sol ' + + path.resolve('test/resources/importCallback/includeA/libY.sol') + ' ' + + '--base-path test/resources/importCallback/base ' + + '--include-path test/resources/importCallback/includeA ' + + '--include-path ' + path.resolve('test/resources/importCallback/includeB/') + ); + spt.stderr.empty(); + spt.succeeds(); + spt.end(); + }); + + t.test('include paths without base path', function (st) { + const spt = spawn( + st, + './solc.js --bin ' + + 'test/resources/importCallback/contractC.sol ' + + '--include-path test/resources/importCallback/includeA' + ); + spt.stderr.match(/--include-path option requires a non-empty base path\./); + spt.fails(); + spt.end(); + }); + + t.test('empty include paths', function (st) { + const spt = spawn( + st, + './solc.js --bin ' + + 'test/resources/importCallback/contractC.sol ' + + '--base-path test/resources/importCallback/base ' + + '--include-path=' + ); + spt.stderr.match(/Empty values are not allowed in --include-path\./); + spt.fails(); + spt.end(); + }); + + t.test('standard json', function (st) { + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode', 'userdoc'] + } + } + }, + sources: { + 'Contract.sol': { + content: 'pragma solidity >=0.5.0; contract Contract { function f() pure public {} }' + } + } + }; + const spt = spawn(st, './solc.js --standard-json'); + spt.stdin.setEncoding('utf-8'); + spt.stdin.write(JSON.stringify(input)); + spt.stdin.end(); + spt.stdin.on('finish', function () { + spt.stderr.empty(); + spt.stdout.match(/Contract.sol/); + spt.stdout.match(/userdoc/); + spt.succeeds(); + spt.end(); + }); + }); + + t.test('standard json base path', function (st) { + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['metadata'] + } + } + }, + sources: { + 'importA.sol': { + content: 'import "./importB.sol";' + } + } + }; + const spt = spawn(st, './solc.js --standard-json --base-path test/resources'); + spt.stdin.setEncoding('utf-8'); + spt.stdin.write(JSON.stringify(input)); + spt.stdin.end(); + spt.stdin.on('finish', function () { + spt.stderr.empty(); + spt.stdout.match(/{"contracts":{"importB.sol":{"D":{"metadata":/); + spt.succeeds(); + spt.end(); + }); + }); + + t.test('standard json include paths', function (st) { + const input = { + language: 'Solidity', + sources: { + 'contractB.sol': { + 
content: + '// SPDX-License-Identifier: GPL-3.0\n' + + 'pragma solidity >=0.0;\n' + + 'import "./contractA.sol";\n' + } + } + }; + const spt = spawn( + st, + './solc.js --standard-json ' + + '--base-path test/resources/importCallback/base ' + + '--include-path test/resources/importCallback/includeA ' + + '--include-path ' + path.resolve('test/resources/importCallback/includeB/') + ); + spt.stdin.setEncoding('utf-8'); + spt.stdin.write(JSON.stringify(input)); + spt.stdin.end(); + spt.stdin.on('finish', function () { + spt.stderr.empty(); + spt.stdout.match(/"sources":{"contractA.sol":{"id":0},"contractB.sol":{"id":1},"libX.sol":{"id":2},"libY.sol":{"id":3},"libZ.sol":{"id":4},"utils.sol":{"id":5}}}/); + spt.succeeds(); + spt.end(); + }); + }); +}); diff --git a/test/compiler.js b/test/compiler.js deleted file mode 100644 index 5acb2c3e..00000000 --- a/test/compiler.js +++ /dev/null @@ -1,656 +0,0 @@ -const tape = require('tape'); -const semver = require('semver'); -const solc = require('../index.js'); -const linker = require('../linker.js'); -const execSync = require('child_process').execSync; - -function runTests (solc, versionText) { - console.log(`Running tests with ${versionText} ${solc.version()}`); - - function getBytecode (output, fileName, contractName) { - try { - var outputContract; - if (semver.lt(solc.semver(), '0.4.9')) { - outputContract = output.contracts[contractName]; - } else { - outputContract = output.contracts[fileName + ':' + contractName]; - } - return outputContract['bytecode']; - } catch (e) { - return ''; - } - } - - function getBytecodeStandard (output, fileName, contractName) { - try { - var outputFile; - if (semver.lt(solc.semver(), '0.4.9')) { - outputFile = output.contracts['']; - } else { - outputFile = output.contracts[fileName]; - } - return outputFile[contractName]['evm']['bytecode']['object']; - } catch (e) { - return ''; - } - } - - function expectError (output, errorType, message) { - if (output.errors) { - for (var error in output.errors) { - error = output.errors[error]; - if (error.type === errorType) { - if (message) { - return error.message.match(message) !== null; - } - return true; - } - } - } - return false; - } - - function expectNoError (output) { - if (output.errors) { - for (var error in output.errors) { - error = output.errors[error]; - if (error.severity === 'error') { - return false; - } - } - } - return true; - } - - tape(versionText, function (t) { - var tape = t.test; - - tape('Version and license', function (t) { - t.test('check version', function (st) { - st.equal(typeof solc.version(), 'string'); - st.end(); - }); - t.test('check semver', function (st) { - st.equal(typeof solc.semver(), 'string'); - st.end(); - }); - t.test('check license', function (st) { - st.ok(typeof solc.license() === 'undefined' || typeof solc.license() === 'string'); - st.end(); - }); - }); - - tape('Compilation', function (t) { - t.test('single files can be compiled (using lowlevel API)', function (st) { - if (typeof solc.lowlevel.compileSingle !== 'function') { - st.skip('Low-level compileSingle interface not implemented by this compiler version.'); - st.end(); - return; - } - - var output = JSON.parse(solc.lowlevel.compileSingle('contract x { function g() public {} }')); - st.ok('contracts' in output); - var bytecode = getBytecode(output, '', 'x'); - st.ok(typeof bytecode === 'string'); - st.ok(bytecode.length > 0); - st.end(); - }); - - t.test('invalid source code fails properly (using lowlevel API)', function (st) { - if (typeof 
solc.lowlevel.compileSingle !== 'function') { - st.skip('Low-level compileSingle interface not implemented by this compiler version.'); - st.end(); - return; - } - - var output = JSON.parse(solc.lowlevel.compileSingle('contract x { this is an invalid contract }')); - if (semver.lt(solc.semver(), '0.1.4')) { - st.ok(output.error.indexOf('Parser error: Expected identifier') !== -1); - st.end(); - return; - } - st.plan(3); - st.ok('errors' in output); - // Check if the ParserError exists, but allow others too - st.ok(output.errors.length >= 1); - for (var error in output.errors) { - // Error should be something like: - // ParserError - // Error: Expected identifier - // Parser error: Expected identifier - if ( - output.errors[error].indexOf('ParserError') !== -1 || - output.errors[error].indexOf('Error: Expected identifier') !== -1 || - output.errors[error].indexOf('Parser error: Expected identifier') !== -1 - ) { - st.ok(true); - } - } - st.end(); - }); - - t.test('multiple files can be compiled (using lowlevel API)', function (st) { - // <0.1.6 doesn't have this - if (typeof solc.lowlevel.compileMulti !== 'function') { - st.skip('Low-level compileMulti interface not implemented by this compiler version.'); - st.end(); - return; - } - - var input = { - 'lib.sol': 'library L { function f() public returns (uint) { return 7; } }', - 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - }; - var output = JSON.parse(solc.lowlevel.compileMulti(JSON.stringify({sources: input}))); - var x = getBytecode(output, 'cont.sol', 'x'); - st.ok(typeof x === 'string'); - st.ok(x.length > 0); - var L = getBytecode(output, 'lib.sol', 'L'); - st.ok(typeof L === 'string'); - st.ok(L.length > 0); - st.end(); - }); - - t.test('lazy-loading callback works (using lowlevel API)', function (st) { - // <0.2.1 doesn't have this - if (typeof solc.lowlevel.compileCallback !== 'function') { - st.skip('Low-level compileCallback interface not implemented by this compiler version.'); - st.end(); - return; - } - - var input = { - 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - }; - function findImports (path) { - if (path === 'lib.sol') { - return { contents: 'library L { function f() public returns (uint) { return 7; } }' }; - } else { - return { error: 'File not found' }; - } - } - var output = JSON.parse(solc.lowlevel.compileCallback(JSON.stringify({sources: input}), 0, findImports)); - var x = getBytecode(output, 'cont.sol', 'x'); - var L = getBytecode(output, 'lib.sol', 'L'); - st.ok(typeof x === 'string'); - st.ok(x.length > 0); - st.ok(typeof L === 'string'); - st.ok(L.length > 0); - st.end(); - }); - - t.test('lazy-loading callback works (with file not found) (using lowlevel API)', function (st) { - // <0.2.1 doesn't have this - if (typeof solc.lowlevel.compileCallback !== 'function') { - st.skip('Low-level compileCallback interface not implemented by this compiler version.'); - st.end(); - return; - } - - var input = { - 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - }; - function findImports (path) { - return { error: 'File not found' }; - } - var output = JSON.parse(solc.lowlevel.compileCallback(JSON.stringify({sources: input}), 0, findImports)); - st.plan(3); - st.ok('errors' in output); - // Check if the ParserError exists, but allow others too - st.ok(output.errors.length >= 1); - for (var error in output.errors) { - // Error should be something like: - // cont.sol:1:1: ParserError: Source "lib.sol" not found: File not found 
- // cont.sol:1:1: Error: Source "lib.sol" not found: File not found - if (output.errors[error].indexOf('Error') !== -1 && output.errors[error].indexOf('File not found') !== -1) { - st.ok(true); - } - } - st.end(); - }); - - t.test('lazy-loading callback works (with exception) (using lowlevel API)', function (st) { - // <0.2.1 doesn't have this - if (typeof solc.lowlevel.compileCallback !== 'function') { - st.skip('Low-level compileCallback interface not implemented by this compiler version.'); - st.end(); - return; - } - - var input = { - 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - }; - function findImports (path) { - throw new Error('Could not implement this interface properly...'); - } - st.throws(function () { - solc.lowlevel.compileCallback(JSON.stringify({sources: input}), 0, findImports); - }, /^Error: Could not implement this interface properly.../); - st.end(); - }); - - t.test('lazy-loading callback fails properly (with invalid callback) (using lowlevel API)', function (st) { - // <0.2.1 doesn't have this - if (typeof solc.lowlevel.compileCallback !== 'function') { - st.skip('Low-level compileCallback interface not implemented by this compiler version.'); - st.end(); - return; - } - - var input = { - 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - }; - st.throws(function () { - solc.lowlevel.compileCallback(JSON.stringify({sources: input}), 0, "this isn't a callback"); - }, /Invalid callback specified./); - st.end(); - }); - - t.test('file import without lazy-loading callback fails properly (using lowlevel API)', function (st) { - // <0.2.1 doesn't have this - if (typeof solc.lowlevel.compileCallback !== 'function') { - st.skip('Low-level compileCallback interface not implemented by this compiler version.'); - st.end(); - return; - } - - var input = { - 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - }; - var output = JSON.parse(solc.lowlevel.compileCallback(JSON.stringify({sources: input}))); - st.plan(3); - st.ok('errors' in output); - // Check if the ParserError exists, but allow others too - st.ok(output.errors.length >= 1); - for (var error in output.errors) { - // Error should be something like: - // cont.sol:1:1: ParserError: Source "lib.sol" not found: File import callback not supported - // cont.sol:1:1: Error: Source "lib.sol" not found: File import callback not supported - if (output.errors[error].indexOf('Error') !== -1 && output.errors[error].indexOf('File import callback not supported') !== -1) { - st.ok(true); - } - } - st.end(); - }); - - t.test('compiling standard JSON (using lowlevel API)', function (st) { - if (typeof solc.lowlevel.compileStandard !== 'function') { - st.skip('Low-level compileStandard interface not implemented by this compiler version.'); - st.end(); - return; - } - - var input = { - 'language': 'Solidity', - 'settings': { - 'outputSelection': { - '*': { - '*': [ 'evm.bytecode' ] - } - } - }, - 'sources': { - 'lib.sol': { - 'content': 'library L { function f() public returns (uint) { return 7; } }' - }, - 'cont.sol': { - 'content': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - } - } - }; - - function bytecodeExists (output, fileName, contractName) { - try { - return output.contracts[fileName][contractName]['evm']['bytecode']['object'].length > 0; - } catch (e) { - return false; - } - } - - var output = JSON.parse(solc.lowlevel.compileStandard(JSON.stringify(input))); - st.ok(bytecodeExists(output, 'cont.sol', 'x')); - 
st.ok(bytecodeExists(output, 'lib.sol', 'L')); - st.end(); - }); - - t.test('invalid source code fails properly with standard JSON (using lowlevel API)', function (st) { - if (typeof solc.lowlevel.compileStandard !== 'function') { - st.skip('Low-level compileStandard interface not implemented by this compiler version.'); - st.end(); - return; - } - - var input = { - 'language': 'Solidity', - 'settings': { - 'outputSelection': { - '*': { - '*': [ 'evm.bytecode' ] - } - } - }, - 'sources': { - 'x.sol': { - 'content': 'contract x { this is an invalid contract }' - } - } - }; - var output = JSON.parse(solc.lowlevel.compileStandard(JSON.stringify(input))); - st.plan(3); - st.ok('errors' in output); - st.ok(output.errors.length >= 1); - // Check if the ParserError exists, but allow others too - for (var error in output.errors) { - if (output.errors[error].type === 'ParserError') { - st.ok(true); - } - } - st.end(); - }); - - t.test('compiling standard JSON (with callback) (using lowlevel API)', function (st) { - if (typeof solc.lowlevel.compileStandard !== 'function') { - st.skip('Low-level compileStandard interface not implemented by this compiler version.'); - st.end(); - return; - } - - var input = { - 'language': 'Solidity', - 'settings': { - 'outputSelection': { - '*': { - '*': [ 'evm.bytecode' ] - } - } - }, - 'sources': { - 'cont.sol': { - 'content': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - } - } - }; - - function findImports (path) { - if (path === 'lib.sol') { - return { contents: 'library L { function f() public returns (uint) { return 7; } }' }; - } else { - return { error: 'File not found' }; - } - } - - function bytecodeExists (output, fileName, contractName) { - try { - return output.contracts[fileName][contractName]['evm']['bytecode']['object'].length > 0; - } catch (e) { - return false; - } - } - - var output = JSON.parse(solc.lowlevel.compileStandard(JSON.stringify(input), findImports)); - st.ok(bytecodeExists(output, 'cont.sol', 'x')); - st.ok(bytecodeExists(output, 'lib.sol', 'L')); - st.end(); - }); - - t.test('compiling standard JSON', function (st) { - // <0.1.6 doesn't have this - if (!solc.features.multipleInputs) { - st.skip('Not supported by solc'); - st.end(); - return; - } - - var input = { - 'language': 'Solidity', - 'settings': { - 'outputSelection': { - '*': { - '*': [ 'evm.bytecode' ] - } - } - }, - 'sources': { - 'lib.sol': { - 'content': 'library L { function f() public returns (uint) { return 7; } }' - }, - 'cont.sol': { - 'content': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - } - } - }; - - var output = JSON.parse(solc.compile(JSON.stringify(input))); - var x = getBytecodeStandard(output, 'cont.sol', 'x'); - st.ok(typeof x === 'string'); - st.ok(x.length > 0); - var L = getBytecodeStandard(output, 'lib.sol', 'L'); - st.ok(typeof L === 'string'); - st.ok(L.length > 0); - st.end(); - }); - - t.test('compiling standard JSON (with imports)', function (st) { - // <0.2.1 doesn't have this - if (!solc.features.importCallback) { - st.skip('Not supported by solc'); - st.end(); - return; - } - - var input = { - 'language': 'Solidity', - 'settings': { - 'outputSelection': { - '*': { - '*': [ 'evm.bytecode' ] - } - } - }, - 'sources': { - 'cont.sol': { - 'content': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - } - } - }; - - function findImports (path) { - if (path === 'lib.sol') { - return { contents: 'library L { function f() public returns (uint) { return 7; } }' }; - } else { - return { error: 
'File not found' }; - } - } - - var output = JSON.parse(solc.compile(JSON.stringify(input), findImports)); - var x = getBytecodeStandard(output, 'cont.sol', 'x'); - st.ok(typeof x === 'string'); - st.ok(x.length > 0); - var L = getBytecodeStandard(output, 'lib.sol', 'L'); - st.ok(typeof L === 'string'); - st.ok(L.length > 0); - - var outputNewApi = JSON.parse(solc.compile(JSON.stringify(input), { import: findImports })); - st.deepEqual(output, outputNewApi); - st.end(); - }); - - t.test('compiling standard JSON (using libraries)', function (st) { - // <0.1.6 doesn't have this - if (!solc.features.multipleInputs) { - st.skip('Not supported by solc'); - st.end(); - return; - } - - var input = { - 'language': 'Solidity', - 'settings': { - 'libraries': { - 'lib.sol': { - 'L': '0x4200000000000000000000000000000000000001' - } - }, - 'outputSelection': { - '*': { - '*': [ 'evm.bytecode' ] - } - } - }, - 'sources': { - 'lib.sol': { - 'content': 'library L { function f() public returns (uint) { return 7; } }' - }, - 'cont.sol': { - 'content': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - } - } - }; - - var output = JSON.parse(solc.compile(JSON.stringify(input))); - var x = getBytecodeStandard(output, 'cont.sol', 'x'); - st.ok(typeof x === 'string'); - st.ok(x.length > 0); - st.ok(Object.keys(linker.findLinkReferences(x)).length === 0); - var L = getBytecodeStandard(output, 'lib.sol', 'L'); - st.ok(typeof L === 'string'); - st.ok(L.length > 0); - st.end(); - }); - - t.test('compiling standard JSON (using libraries) (using lowlevel API)', function (st) { - if (typeof solc.lowlevel.compileStandard !== 'function') { - st.skip('Low-level compileStandard interface not implemented by this compiler version.'); - st.end(); - return; - } - - var input = { - 'language': 'Solidity', - 'settings': { - 'libraries': { - 'lib.sol': { - 'L': '0x4200000000000000000000000000000000000001' - } - }, - 'outputSelection': { - '*': { - '*': [ 'evm.bytecode' ] - } - } - }, - 'sources': { - 'lib.sol': { - 'content': 'library L { function f() public returns (uint) { return 7; } }' - }, - 'cont.sol': { - 'content': 'import "lib.sol"; contract x { function g() public { L.f(); } }' - } - } - }; - - var output = JSON.parse(solc.lowlevel.compileStandard(JSON.stringify(input))); - var x = getBytecodeStandard(output, 'cont.sol', 'x'); - st.ok(typeof x === 'string'); - st.ok(x.length > 0); - st.ok(Object.keys(linker.findLinkReferences(x)).length === 0); - var L = getBytecodeStandard(output, 'lib.sol', 'L'); - st.ok(typeof L === 'string'); - st.ok(L.length > 0); - st.end(); - }); - - t.test('compiling standard JSON (invalid JSON)', function (st) { - var output = JSON.parse(solc.compile('{invalid')); - // TODO: change wrapper to output matching error - st.ok(expectError(output, 'JSONError', 'Line 1, Column 2\n Missing \'}\' or object member name') || expectError(output, 'JSONError', 'Invalid JSON supplied:')); - st.end(); - }); - - t.test('compiling standard JSON (invalid language)', function (st) { - var output = JSON.parse(solc.compile('{"language":"InvalidSolidity","sources":{"cont.sol":{"content":""}}}')); - st.ok(expectError(output, 'JSONError', 'supported as a language.') && expectError(output, 'JSONError', '"Solidity"')); - st.end(); - }); - - t.test('compiling standard JSON (no sources)', function (st) { - var output = JSON.parse(solc.compile('{"language":"Solidity"}')); - st.ok(expectError(output, 'JSONError', 'No input sources specified.')); - st.end(); - }); - - t.test('compiling standard JSON (multiple 
sources on old compiler)', function (st) { - var output = JSON.parse(solc.compile('{"language":"Solidity","sources":{"cont.sol":{"content":"import \\"lib.sol\\";"},"lib.sol":{"content":""}}}')); - if (solc.features.multipleInputs) { - st.ok(expectNoError(output)); - } else { - st.ok(expectError(output, 'JSONError', 'Multiple sources provided, but compiler only supports single input.') || expectError(output, 'Parser error', 'Parser error: Source not found.')); - } - st.end(); - }); - }); - }); - - // Only run on the latest version. - if (versionText === 'latest') { - tape('Loading Legacy Versions', function (t) { - t.test('loading remote version - development snapshot', function (st) { - // getting the development snapshot - st.plan(2); - solc.loadRemoteVersion('latest', function (err, solcSnapshot) { - if (err) { - st.plan(1); - st.skip('Network error - skipping remote loading test'); - st.end(); - return; - } - var input = { - 'language': 'Solidity', - 'settings': { - 'outputSelection': { - '*': { - '*': [ 'evm.bytecode' ] - } - } - }, - 'sources': { - 'cont.sol': { - 'content': 'contract x { function g() public {} }' - } - } - }; - var output = JSON.parse(solcSnapshot.compile(JSON.stringify(input))); - var x = getBytecodeStandard(output, 'cont.sol', 'x'); - st.ok(typeof x === 'string'); - st.ok(x.length > 0); - }); - }); - }); - - tape('API backwards compatibility', function (t) { - t.test('compileStandard and compileStandardWrapper exists', function (st) { - st.equal(solc.compile, solc.compileStandard); - st.equal(solc.compile, solc.compileStandardWrapper); - st.end(); - }); - }); - } -} - -runTests(solc, 'latest'); - -// New features 0.1.6, 0.2.1, 0.4.11, 0.5.0, etc. -const versions = [ - 'v0.1.1+commit.6ff4cd6', - 'v0.1.6+commit.d41f8b7', - 'v0.2.0+commit.4dc2445', - 'v0.2.1+commit.91a6b35', - 'v0.3.6+commit.3fc68da', - 'v0.4.26+commit.4563c3fc' -]; -for (var version in versions) { - version = versions[version]; - execSync(`curl -o /tmp/${version}.js https://ethereum.github.io/solc-bin/bin/soljson-${version}.js`); - const newSolc = require('../wrapper.js')(require(`/tmp/${version}.js`)); - runTests(newSolc, version); -} diff --git a/test/compiler.ts b/test/compiler.ts new file mode 100644 index 00000000..e05185c1 --- /dev/null +++ b/test/compiler.ts @@ -0,0 +1,898 @@ +import assert from 'assert'; +import tape from 'tape'; +import * as semver from 'semver'; +import * as tmp from 'tmp'; +import solc from '../'; +import linker from '../linker'; +import { execSync } from 'child_process'; +import wrapper from '../wrapper'; + +const noRemoteVersions = (process.argv.indexOf('--no-remote-versions') >= 0); + +function runTests (solc, versionText) { + console.log(`Running tests with ${versionText} ${solc.version()}`); + + function resplitFileNameOnFirstColon (fileName, contractName) { + assert(!contractName.includes(':')); + + const contractNameComponents = fileName.split(':'); + const truncatedFileName = contractNameComponents.shift(); + contractNameComponents.push(contractName); + + return [truncatedFileName, contractNameComponents.join(':')]; + } + + function getBytecode (output, fileName, contractName) { + try { + let outputContract; + if (semver.lt(solc.semver(), '0.4.9')) { + outputContract = output.contracts[contractName]; + } else { + outputContract = output.contracts[fileName + ':' + contractName]; + } + return outputContract.bytecode; + } catch (e) { + return ''; + } + } + + function getBytecodeStandard (output, fileName, contractName) { + try { + let outputFile; + if 
(semver.lt(solc.semver(), '0.4.9')) { + outputFile = output.contracts['']; + } else { + if (semver.gt(solc.semver(), '0.4.10') && semver.lt(solc.semver(), '0.4.20')) { + [fileName, contractName] = resplitFileNameOnFirstColon(fileName, contractName); + } + outputFile = output.contracts[fileName]; + } + return outputFile[contractName].evm.bytecode.object; + } catch (e) { + return ''; + } + } + + function getGasEstimate (output, fileName, contractName) { + try { + let outputFile; + if (semver.lt(solc.semver(), '0.4.9')) { + outputFile = output.contracts['']; + } else { + if (semver.gt(solc.semver(), '0.4.10') && semver.gt(solc.semver(), '0.4.20')) { + [fileName, contractName] = resplitFileNameOnFirstColon(fileName, contractName); + } + outputFile = output.contracts[fileName]; + } + return outputFile[contractName].evm.gasEstimates; + } catch (e) { + return ''; + } + } + + function expectError (output: any, errorType: any, message: any) { + if (output.errors) { + for (const errorIndex in output.errors) { + const error = output.errors[errorIndex]; + if (error.type === errorType) { + if (message) { + if (error.message.match(message) !== null) { + return true; + } + } else { + return true; + } + } + } + } + return false; + } + + function expectNoError (output: any) { + if (output.errors) { + for (const errorIndex in output.errors) { + const error = output.errors[errorIndex]; + if (error.severity === 'error') { + return false; + } + } + } + return true; + } + + tape(versionText, function (t) { + const tape = t.test; + + tape('Version and license', function (t) { + t.test('check version', function (st) { + st.equal(typeof solc.version(), 'string'); + st.end(); + }); + t.test('check semver', function (st) { + st.equal(typeof solc.semver(), 'string'); + st.end(); + }); + t.test('check license', function (st) { + st.ok(typeof solc.license() === 'undefined' || typeof solc.license() === 'string'); + st.end(); + }); + }); + + tape('Compilation', function (t) { + t.test('single files can be compiled (using lowlevel API)', function (st) { + if (typeof solc.lowlevel.compileSingle !== 'function') { + st.skip('Low-level compileSingle interface not implemented by this compiler version.'); + st.end(); + return; + } + + const output = JSON.parse(solc.lowlevel.compileSingle('contract A { function g() public {} }')); + st.ok('contracts' in output); + const bytecode = getBytecode(output, '', 'A'); + st.ok(typeof bytecode === 'string'); + st.ok(bytecode.length > 0); + st.end(); + }); + + t.test('invalid source code fails properly (using lowlevel API)', function (st) { + // TODO: try finding an example which doesn't crash it? 
+ if (semver.eq(solc.semver(), '0.4.11')) { + st.skip('Skipping on broken compiler version'); + st.end(); + return; + } + + if (typeof solc.lowlevel.compileSingle !== 'function') { + st.skip('Low-level compileSingle interface not implemented by this compiler version.'); + st.end(); + return; + } + + const output = JSON.parse(solc.lowlevel.compileSingle('contract x { this is an invalid contract }')); + if (semver.lt(solc.semver(), '0.1.4')) { + st.ok(output.error.indexOf('Parser error: Expected identifier') !== -1); + st.end(); + return; + } + st.plan(3); + st.ok('errors' in output); + // Check if the ParserError exists, but allow others too + st.ok(output.errors.length >= 1); + for (const error in output.errors) { + // Error should be something like: + // ParserError + // Error: Expected identifier + // Parser error: Expected identifier + if ( + output.errors[error].indexOf('ParserError') !== -1 || + output.errors[error].indexOf('Error: Expected identifier') !== -1 || + output.errors[error].indexOf('Parser error: Expected identifier') !== -1 || + output.errors[error].indexOf(': Expected identifier') !== -1 // 0.4.12 + ) { + st.ok(true); + } + } + st.end(); + }); + + t.test('multiple files can be compiled (using lowlevel API)', function (st) { + // <0.1.6 doesn't have this + if (typeof solc.lowlevel.compileMulti !== 'function') { + st.skip('Low-level compileMulti interface not implemented by this compiler version.'); + st.end(); + return; + } + + const input = { + 'a.sol': 'contract A { function f() public returns (uint) { return 7; } }', + 'b.sol': 'import "a.sol"; contract B is A { function g() public { f(); } }' + }; + const output = JSON.parse(solc.lowlevel.compileMulti(JSON.stringify({ sources: input }))); + const B = getBytecode(output, 'b.sol', 'B'); + st.ok(typeof B === 'string'); + st.ok(B.length > 0); + const A = getBytecode(output, 'a.sol', 'A'); + st.ok(typeof A === 'string'); + st.ok(A.length > 0); + st.end(); + }); + + t.test('lazy-loading callback works (using lowlevel API)', function (st) { + // <0.2.1 doesn't have this + if (typeof solc.lowlevel.compileCallback !== 'function') { + st.skip('Low-level compileCallback interface not implemented by this compiler version.'); + st.end(); + return; + } + + const input = { + 'b.sol': 'import "a.sol"; contract B is A { function g() public { f(); } }' + }; + function findImports (path) { + if (path === 'a.sol') { + return { contents: 'contract A { function f() public returns (uint) { return 7; } }' }; + } else { + return { error: 'File not found' }; + } + } + const output = JSON.parse(solc.lowlevel.compileCallback(JSON.stringify({ sources: input }), 0, { import: findImports })); + const B = getBytecode(output, 'b.sol', 'B'); + st.ok(typeof B === 'string'); + st.ok(B.length > 0); + const A = getBytecode(output, 'a.sol', 'A'); + st.ok(typeof A === 'string'); + st.ok(A.length > 0); + st.end(); + }); + + t.test('lazy-loading callback works (with file not found) (using lowlevel API)', function (st) { + // <0.2.1 doesn't have this + if (typeof solc.lowlevel.compileCallback !== 'function') { + st.skip('Low-level compileCallback interface not implemented by this compiler version.'); + st.end(); + return; + } + + const input = { + 'b.sol': 'import "a.sol"; contract B { function g() public { f(); } }' + }; + function findImports (path) { + return { error: 'File not found' }; + } + const output = JSON.parse(solc.lowlevel.compileCallback(JSON.stringify({ sources: input }), 0, { import: findImports })); + st.plan(3); + st.ok('errors' in output); + 
// Check if the ParserError exists, but allow others too + st.ok(output.errors.length >= 1); + for (const error in output.errors) { + // Error should be something like: + // cont.sol:1:1: ParserError: Source "lib.sol" not found: File not found + // cont.sol:1:1: Error: Source "lib.sol" not found: File not found + if (output.errors[error].indexOf('Error') !== -1 && output.errors[error].indexOf('File not found') !== -1) { + st.ok(true); + } else if (output.errors[error].indexOf('not found: File not found') !== -1) { + // 0.4.12 had its own weird way: + // b.sol:1:1: : Source "a.sol" not found: File not found + st.ok(true); + } + } + st.end(); + }); + + t.test('lazy-loading callback works (with exception) (using lowlevel API)', function (st) { + // <0.2.1 doesn't have this + if (typeof solc.lowlevel.compileCallback !== 'function') { + st.skip('Low-level compileCallback interface not implemented by this compiler version.'); + st.end(); + return; + } + + const input = { + 'b.sol': 'import "a.sol"; contract B { function g() public { f(); } }' + }; + function findImports (path) { + throw new Error('Could not implement this interface properly...'); + } + st.throws(function () { + solc.lowlevel.compileCallback(JSON.stringify({ sources: input }), 0, { import: findImports }); + }, /^Error: Could not implement this interface properly.../); + st.end(); + }); + + t.test('lazy-loading callback fails properly (with invalid callback) (using lowlevel API)', function (st) { + // <0.2.1 doesn't have this + if (typeof solc.lowlevel.compileCallback !== 'function') { + st.skip('Low-level compileCallback interface not implemented by this compiler version.'); + st.end(); + return; + } + + const input = { + 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' + }; + st.throws(function () { + solc.lowlevel.compileCallback(JSON.stringify({ sources: input }), 0, 'this isn\'t a callback'); + }, /Invalid callback object specified./); + st.end(); + }); + + t.test('file import without lazy-loading callback fails properly (using lowlevel API)', function (st) { + // <0.2.1 doesn't have this + if (typeof solc.lowlevel.compileCallback !== 'function') { + st.skip('Low-level compileCallback interface not implemented by this compiler version.'); + st.end(); + return; + } + + const input = { + 'b.sol': 'import "a.sol"; contract B is A { function g() public { f(); } }' + }; + const output = JSON.parse(solc.lowlevel.compileCallback(JSON.stringify({ sources: input }))); + st.plan(3); + st.ok('errors' in output); + // Check if the ParserError exists, but allow others too + st.ok(output.errors.length >= 1); + for (const error in output.errors) { + // Error should be something like: + // cont.sol:1:1: ParserError: Source "lib.sol" not found: File import callback not supported + // cont.sol:1:1: Error: Source "lib.sol" not found: File import callback not supported + if (output.errors[error].indexOf('Error') !== -1 && output.errors[error].indexOf('File import callback not supported') !== -1) { + st.ok(true); + } else if (output.errors[error].indexOf('not found: File import callback not supported') !== -1) { + // 0.4.12 had its own weird way: + // b.sol:1:1: : Source "a.sol" not found: File import callback not supported + st.ok(true); + } + } + st.end(); + }); + + t.test('compiling standard JSON (using lowlevel API)', function (st) { + if (typeof solc.lowlevel.compileStandard !== 'function') { + st.skip('Low-level compileStandard interface not implemented by this compiler version.'); + st.end(); + return; + } + + 
const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode'] + } + } + }, + sources: { + 'a.sol': { + content: 'contract A { function f() public returns (uint) { return 7; } }' + }, + 'b.sol': { + content: 'import "a.sol"; contract B is A { function g() public { f(); } }' + } + } + }; + + function bytecodeExists (output, fileName, contractName) { + try { + return output.contracts[fileName][contractName].evm.bytecode.object.length > 0; + } catch (e) { + return false; + } + } + + const output = JSON.parse(solc.lowlevel.compileStandard(JSON.stringify(input))); + st.ok(bytecodeExists(output, 'a.sol', 'A')); + st.ok(bytecodeExists(output, 'b.sol', 'B')); + st.end(); + }); + + t.test('invalid source code fails properly with standard JSON (using lowlevel API)', function (st) { + // TODO: try finding an example which doesn't crash it? + if (semver.eq(solc.semver(), '0.4.11')) { + st.skip('Skipping on broken compiler version'); + st.end(); + return; + } + + if (typeof solc.lowlevel.compileStandard !== 'function') { + st.skip('Low-level compileStandard interface not implemented by this compiler version.'); + st.end(); + return; + } + + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode'] + } + } + }, + sources: { + 'x.sol': { + content: 'contract x { this is an invalid contract }' + } + } + }; + const output = JSON.parse(solc.lowlevel.compileStandard(JSON.stringify(input))); + st.plan(3); + st.ok('errors' in output); + st.ok(output.errors.length >= 1); + // Check if the ParserError exists, but allow others too + for (const error in output.errors) { + if (output.errors[error].type === 'ParserError') { + st.ok(true); + } + } + st.end(); + }); + + t.test('compiling standard JSON (with callback) (using lowlevel API)', function (st) { + if (typeof solc.lowlevel.compileStandard !== 'function') { + st.skip('Low-level compileStandard interface not implemented by this compiler version.'); + st.end(); + return; + } + + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode'] + } + } + }, + sources: { + 'b.sol': { + content: 'import "a.sol"; contract B is A { function g() public { f(); } }' + } + } + }; + + function findImports (path) { + if (path === 'a.sol') { + return { contents: 'contract A { function f() public returns (uint) { return 7; } }' }; + } else { + return { error: 'File not found' }; + } + } + + function bytecodeExists (output, fileName, contractName) { + try { + return output.contracts[fileName][contractName].evm.bytecode.object.length > 0; + } catch (e) { + return false; + } + } + + const output = JSON.parse(solc.lowlevel.compileStandard(JSON.stringify(input), { import: findImports })); + st.ok(bytecodeExists(output, 'a.sol', 'A')); + st.ok(bytecodeExists(output, 'b.sol', 'B')); + st.end(); + }); + + t.test('compiling standard JSON (single file)', function (st) { + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode', 'evm.gasEstimates'] + } + } + }, + sources: { + 'c.sol': { + content: 'contract C { function g() public { } function h() internal {} }' + } + } + }; + + const output = JSON.parse(solc.compile(JSON.stringify(input))); + st.ok(expectNoError(output)); + const C = getBytecodeStandard(output, 'c.sol', 'C'); + st.ok(typeof C === 'string'); + st.ok(C.length > 0); + const CGas = getGasEstimate(output, 'c.sol', 'C'); + st.ok(typeof CGas === 'object'); + st.ok(typeof CGas.creation === 
'object'); + st.ok(typeof CGas.creation.codeDepositCost === 'string'); + st.ok(typeof CGas.external === 'object'); + st.ok(typeof CGas.external['g()'] === 'string'); + st.ok(typeof CGas.internal === 'object'); + st.ok(typeof CGas.internal['h()'] === 'string'); + st.end(); + }); + + t.test('compiling standard JSON (multiple files)', function (st) { + // <0.1.6 doesn't have this + if (!solc.features.multipleInputs) { + st.skip('Not supported by solc'); + st.end(); + return; + } + + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode', 'evm.gasEstimates'] + } + } + }, + sources: { + 'a.sol': { + content: 'contract A { function f() public returns (uint) { return 7; } }' + }, + 'b.sol': { + content: 'import "a.sol"; contract B is A { function g() public { f(); } function h() internal {} }' + } + } + }; + + const output = JSON.parse(solc.compile(JSON.stringify(input))); + st.ok(expectNoError(output)); + const B = getBytecodeStandard(output, 'b.sol', 'B'); + st.ok(typeof B === 'string'); + st.ok(B.length > 0); + st.ok(Object.keys(linker.findLinkReferences(B)).length === 0); + const BGas = getGasEstimate(output, 'b.sol', 'B'); + st.ok(typeof BGas === 'object'); + st.ok(typeof BGas.creation === 'object'); + st.ok(typeof BGas.creation.codeDepositCost === 'string'); + st.ok(typeof BGas.external === 'object'); + st.ok(typeof BGas.external['g()'] === 'string'); + st.ok(typeof BGas.internal === 'object'); + st.ok(typeof BGas.internal['h()'] === 'string'); + const A = getBytecodeStandard(output, 'a.sol', 'A'); + st.ok(typeof A === 'string'); + st.ok(A.length > 0); + st.end(); + }); + + t.test('compiling standard JSON (abstract contract)', function (st) { + // <0.1.6 doesn't have this + if (!solc.features.multipleInputs) { + st.skip('Not supported by solc'); + st.end(); + return; + } + + const isVersion6 = semver.gt(solc.semver(), '0.5.99'); + let source; + if (isVersion6) { + source = 'abstract contract C { function f() public virtual; }'; + } else { + source = 'contract C { function f() public; }'; + } + + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode', 'evm.gasEstimates'] + } + } + }, + sources: { + 'c.sol': { + content: source + } + } + }; + + const output = JSON.parse(solc.compile(JSON.stringify(input))); + st.ok(expectNoError(output)); + const C = getBytecodeStandard(output, 'c.sol', 'C'); + st.ok(typeof C === 'string'); + st.ok(C.length === 0); + st.end(); + }); + + t.test('compiling standard JSON (with imports)', function (st) { + // <0.2.1 doesn't have this + if (!solc.features.importCallback) { + st.skip('Not supported by solc'); + st.end(); + return; + } + + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode'] + } + } + }, + sources: { + 'b.sol': { + content: 'import "a.sol"; contract B is A { function g() public { f(); } }' + } + } + }; + + function findImports (path) { + if (path === 'a.sol') { + return { contents: 'contract A { function f() public returns (uint) { return 7; } }' }; + } else { + return { error: 'File not found' }; + } + } + + const output = JSON.parse(solc.compile(JSON.stringify(input), { import: findImports })); + st.ok(expectNoError(output)); + const A = getBytecodeStandard(output, 'a.sol', 'A'); + st.ok(typeof A === 'string'); + st.ok(A.length > 0); + const B = getBytecodeStandard(output, 'b.sol', 'B'); + st.ok(typeof B === 'string'); + st.ok(B.length > 0); + 
st.ok(Object.keys(linker.findLinkReferences(B)).length === 0); + st.end(); + }); + + t.test('compiling standard JSON (using libraries)', function (st) { + // 0.4.0 has a bug with libraries + if (semver.eq(solc.semver(), '0.4.0')) { + st.skip('Skipping on broken compiler version'); + st.end(); + return; + } + + // <0.1.6 doesn't have this + if (!solc.features.multipleInputs) { + st.skip('Not supported by solc'); + st.end(); + return; + } + + const input = { + language: 'Solidity', + settings: { + libraries: { + 'lib.sol': { + L: '0x4200000000000000000000000000000000000001' + } + }, + outputSelection: { + '*': { + '*': ['evm.bytecode'] + } + } + }, + sources: { + 'lib.sol': { + content: 'library L { function f() public returns (uint) { return 7; } }' + }, + 'a.sol': { + content: 'import "lib.sol"; contract A { function g() public { L.f(); } }' + } + } + }; + + const output = JSON.parse(solc.compile(JSON.stringify(input))); + st.ok(expectNoError(output)); + const A = getBytecodeStandard(output, 'a.sol', 'A'); + st.ok(typeof A === 'string'); + st.ok(A.length > 0); + st.ok(Object.keys(linker.findLinkReferences(A)).length === 0); + const L = getBytecodeStandard(output, 'lib.sol', 'L'); + st.ok(typeof L === 'string'); + st.ok(L.length > 0); + st.end(); + }); + + t.test('compiling standard JSON (with warning >=0.4.0)', function (st) { + // In 0.4.0 "pragma solidity" was added. Not including it is a warning. + if (semver.lt(solc.semver(), '0.4.0')) { + st.skip('Not supported by solc'); + st.end(); + return; + } + + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode'] + } + } + }, + sources: { + 'c.sol': { + content: 'contract C { function f() public { } }' + } + } + }; + + const output = JSON.parse(solc.compile(JSON.stringify(input))); + st.ok(expectError(output, 'Warning', 'Source file does not specify required compiler version!')); + st.end(); + }); + + t.test('compiling standard JSON (using libraries) (using lowlevel API)', function (st) { + // 0.4.0 has a bug with libraries + if (semver.eq(solc.semver(), '0.4.0')) { + st.skip('Skipping on broken compiler version'); + st.end(); + return; + } + + if (typeof solc.lowlevel.compileStandard !== 'function') { + st.skip('Low-level compileStandard interface not implemented by this compiler version.'); + st.end(); + return; + } + + const input = { + language: 'Solidity', + settings: { + libraries: { + 'lib.sol': { + L: '0x4200000000000000000000000000000000000001' + } + }, + outputSelection: { + '*': { + '*': ['evm.bytecode'] + } + } + }, + sources: { + 'lib.sol': { + content: 'library L { function f() public returns (uint) { return 7; } }' + }, + 'a.sol': { + content: 'import "lib.sol"; contract A { function g() public { L.f(); } }' + } + } + }; + + const output = JSON.parse(solc.lowlevel.compileStandard(JSON.stringify(input))); + st.ok(expectNoError(output)); + const A = getBytecodeStandard(output, 'a.sol', 'A'); + st.ok(typeof A === 'string'); + st.ok(A.length > 0); + st.ok(Object.keys(linker.findLinkReferences(A)).length === 0); + const L = getBytecodeStandard(output, 'lib.sol', 'L'); + st.ok(typeof L === 'string'); + st.ok(L.length > 0); + st.end(); + }); + + t.test('compiling standard JSON (invalid JSON)', function (st) { + const output = JSON.parse(solc.compile('{invalid')); + // TODO: change wrapper to output matching error + st.ok(expectError(output, 'JSONError', 'Line 1, Column 2\n Missing \'}\' or object member name') || expectError(output, 'JSONError', 'Invalid JSON supplied:')); + 
st.end(); + }); + + t.test('compiling standard JSON (invalid language)', function (st) { + const output = JSON.parse(solc.compile('{"language":"InvalidSolidity","sources":{"cont.sol":{"content":""}}}')); + st.ok(expectError(output, 'JSONError', 'supported as a language.') && expectError(output, 'JSONError', '"Solidity"')); + st.end(); + }); + + t.test('compiling standard JSON (no sources)', function (st) { + const output = JSON.parse(solc.compile('{"language":"Solidity"}')); + st.ok(expectError(output, 'JSONError', 'No input sources specified.')); + st.end(); + }); + + t.test('compiling standard JSON (multiple sources on old compiler)', function (st) { + const output = JSON.parse(solc.compile('{"language":"Solidity","sources":{"cont.sol":{"content":"import \\"lib.sol\\";"},"lib.sol":{"content":""}}}')); + if (solc.features.multipleInputs) { + st.ok(expectNoError(output)); + } else { + st.ok(expectError(output, 'JSONError', 'Multiple sources provided, but compiler only supports single input.') || expectError(output, 'Parser error', 'Parser error: Source not found.')); + } + st.end(); + }); + + t.test('compiling standard JSON (file names containing symbols)', function (st) { + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode'] + } + } + }, + sources: { + '!@#$%^&*()_+-=[]{}\\|"\';:~`<>,.?/': { + content: 'contract C {}' + } + } + }; + + const output = JSON.parse(solc.compile(JSON.stringify(input))); + st.ok(expectNoError(output)); + const C = getBytecodeStandard(output, '!@#$%^&*()_+-=[]{}\\|"\';:~`<>,.?/', 'C'); + st.ok(typeof C === 'string'); + st.ok(C.length > 0); + st.end(); + }); + + t.test('compiling standard JSON (file names containing multiple semicolons)', function (st) { + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode'] + } + } + }, + sources: { + 'a:b:c:d:e:f:G.sol': { + content: 'contract G {}' + } + } + }; + + const output = JSON.parse(solc.compile(JSON.stringify(input))); + st.ok(expectNoError(output)); + const G = getBytecodeStandard(output, 'a:b:c:d:e:f:G.sol', 'G'); + st.ok(typeof G === 'string'); + st.ok(G.length > 0); + st.end(); + }); + }); + }); + + // Only run on the latest version. + if (versionText === 'latest' && !noRemoteVersions) { + tape('Loading Legacy Versions', function (t) { + t.test('loading remote version - development snapshot', function (st) { + // getting the development snapshot + st.plan(2); + solc.loadRemoteVersion('latest', function (err, solcSnapshot) { + if (err) { + st.plan(1); + st.skip('Network error - skipping remote loading test'); + st.end(); + return; + } + const input = { + language: 'Solidity', + settings: { + outputSelection: { + '*': { + '*': ['evm.bytecode'] + } + } + }, + sources: { + 'cont.sol': { + content: 'contract x { function g() public {} }' + } + } + }; + const output = JSON.parse(solcSnapshot.compile(JSON.stringify(input))); + const x = getBytecodeStandard(output, 'cont.sol', 'x'); + st.ok(typeof x === 'string'); + st.ok(x.length > 0); + }); + }); + }); + } +} + +runTests(solc, 'latest'); + +if (!noRemoteVersions) { + // New compiler interface features 0.1.6, 0.2.1, 0.4.11, 0.5.0, etc. 
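+ // Each release below is fetched from binaries.soliditylang.org by the loop further down,
+ // and the full runTests() suite is then repeated against that historical binary.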
+ // 0.4.0 added pragmas (used in tests above) + const versions = [ + 'v0.1.1+commit.6ff4cd6', + 'v0.1.6+commit.d41f8b7', + 'v0.2.0+commit.4dc2445', + 'v0.2.1+commit.91a6b35', + 'v0.3.6+commit.3fc68da', + 'v0.4.0+commit.acd334c9', + 'v0.4.9+commit.364da425', + 'v0.4.10+commit.f0d539ae', + 'v0.4.11+commit.68ef5810', + 'v0.4.12+commit.194ff033', + 'v0.4.19+commit.c4cbbb05', + 'v0.4.20+commit.3155dd80', + 'v0.4.26+commit.4563c3fc' + ]; + for (let version in versions) { + version = versions[version]; + // NOTE: The temporary directory will be removed on process exit. + const tempDir = tmp.dirSync({ unsafeCleanup: true, prefix: 'solc-js-compiler-test-' }).name; + execSync(`curl -L -o ${tempDir}/${version}.js https://binaries.soliditylang.org/bin/soljson-${version}.js`); + const newSolc = wrapper(require(`${tempDir}/${version}.js`)); + runTests(newSolc, version); + } +} diff --git a/test/index.js b/test/index.js deleted file mode 100644 index 660cc7ec..00000000 --- a/test/index.js +++ /dev/null @@ -1,12 +0,0 @@ -const semver = require('semver'); - -require('./linker.js'); -require('./translate.js'); -require('./compiler.js'); -require('./smtchecker.js'); -require('./abi.js'); - -// The CLI doesn't support Node 4 -if (semver.gte(process.version, '5.0.0')) { - require('./cli.js'); -} diff --git a/test/index.ts b/test/index.ts new file mode 100644 index 00000000..9815fa16 --- /dev/null +++ b/test/index.ts @@ -0,0 +1,13 @@ +import * as semver from 'semver'; + +import('./linker'); +import('./translate'); +import('./compiler'); +import('./smtcallback'); +import('./smtchecker'); +import('./abi'); + +// The CLI doesn't support Node 4 +if (semver.gte(process.version, '5.0.0')) { + import('./cli'); +} diff --git a/test/linker.js b/test/linker.ts similarity index 61% rename from test/linker.js rename to test/linker.ts index fd37739f..470f10e9 100644 --- a/test/linker.js +++ b/test/linker.ts @@ -1,5 +1,5 @@ -const tape = require('tape'); -const linker = require('../linker.js'); +import tape from 'tape'; +import linker from '../linker'; tape('Link references', function (t) { t.test('Empty bytecode', function (st) { @@ -13,59 +13,59 @@ tape('Link references', function (t) { }); t.test('One reference', function (st) { - var bytecode = '6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73__lib2.sol:L____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a723058207979b30bd4a07c77b02774a511f2a1dd04d7e5d65b5c2735b5fc96ad61d43ae40029'; - st.deepEqual(linker.findLinkReferences(bytecode), { 'lib2.sol:L': [ { start: 116, length: 20 } ] }); + const bytecode = '6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73__lib2.sol:L____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a723058207979b30bd4a07c77b02774a511f2a1dd04d7e5d65b5c2735b5fc96ad61d43ae40029'; + st.deepEqual(linker.findLinkReferences(bytecode), { 'lib2.sol:L': [{ start: 
116, length: 20 }] }); st.end(); }); t.test('Two references', function (st) { - var bytecode = '6060604052341561000f57600080fd5b61011a8061001e6000396000f30060606040526004361060255763ffffffff60e060020a60003504166326121ff08114602a575b600080fd5b3415603457600080fd5b603a603c565b005b73__lib2.sol:L____________________________6326121ff06040518163ffffffff1660e060020a02815260040160006040518083038186803b1515608157600080fd5b6102c65a03f41515609157600080fd5b50505073__linkref.sol:Lx________________________6326121ff06040518163ffffffff1660e060020a02815260040160006040518083038186803b151560d957600080fd5b6102c65a03f4151560e957600080fd5b5050505600a165627a7a72305820fdfb8eab411d7bc86d7dfbb0c985c30bebf1cc105dc5b807291551b3d5aa29d90029'; + const bytecode = '6060604052341561000f57600080fd5b61011a8061001e6000396000f30060606040526004361060255763ffffffff60e060020a60003504166326121ff08114602a575b600080fd5b3415603457600080fd5b603a603c565b005b73__lib2.sol:L____________________________6326121ff06040518163ffffffff1660e060020a02815260040160006040518083038186803b1515608157600080fd5b6102c65a03f41515609157600080fd5b50505073__linkref.sol:Lx________________________6326121ff06040518163ffffffff1660e060020a02815260040160006040518083038186803b151560d957600080fd5b6102c65a03f4151560e957600080fd5b5050505600a165627a7a72305820fdfb8eab411d7bc86d7dfbb0c985c30bebf1cc105dc5b807291551b3d5aa29d90029'; st.deepEqual( linker.findLinkReferences(bytecode), - { 'lib2.sol:L': [ { start: 92, length: 20 } ], 'linkref.sol:Lx': [ { start: 180, length: 20 } ] } + { 'lib2.sol:L': [{ start: 92, length: 20 }], 'linkref.sol:Lx': [{ start: 180, length: 20 }] } ); st.end(); }); t.test('Library name with leading underscore', function (st) { - var bytecode = '6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73__lib2.sol:_L___________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a7230582089689827bbf0b7dc385ffcb4b1deb9f9e61741f61f89b4af65f806ff2b0d73470029'; + const bytecode = '6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73__lib2.sol:_L___________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a7230582089689827bbf0b7dc385ffcb4b1deb9f9e61741f61f89b4af65f806ff2b0d73470029'; st.deepEqual( linker.findLinkReferences(bytecode), - { 'lib2.sol:_L': [ { start: 116, length: 20 } ] } + { 'lib2.sol:_L': [{ start: 116, length: 20 }] } ); st.end(); }); t.test('Library name with leading underscore (without fqdn)', function (st) { - var bytecode = 
'6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73___L____________________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a7230582089689827bbf0b7dc385ffcb4b1deb9f9e61741f61f89b4af65f806ff2b0d73470029'; + const bytecode = '6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73___L____________________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a7230582089689827bbf0b7dc385ffcb4b1deb9f9e61741f61f89b4af65f806ff2b0d73470029'; st.deepEqual( linker.findLinkReferences(bytecode), - { '_L': [ { start: 116, length: 20 } ] } + { _L: [{ start: 116, length: 20 }] } ); st.end(); }); t.test('Library name with underscore in the name', function (st) { - var bytecode = '6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73__lib2.sol:L_L__________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a723058205cb324a27452cc7f8894a57cb0e3ddce2dce0c423e4fc993a3dd51287abd49110029'; + const bytecode = '6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73__lib2.sol:L_L__________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a723058205cb324a27452cc7f8894a57cb0e3ddce2dce0c423e4fc993a3dd51287abd49110029'; st.deepEqual( linker.findLinkReferences(bytecode), - { 'lib2.sol:L_L': [ { start: 116, length: 20 } ] } + { 'lib2.sol:L_L': [{ start: 116, length: 20 }] } ); st.end(); }); // Note: this is a case the reference finder cannot properly handle as there's no way to tell t.test('Library name with trailing underscore', function (st) { - var bytecode = '6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73__lib2.sol:L____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a7230582058e61511a603707222cfa83fd3ae4269f94eb86513cb9042cf0b44877403d85c0029'; + const bytecode = 
'6060604052341561000f57600080fd5b60f48061001d6000396000f300606060405260043610603e5763ffffffff7c010000000000000000000000000000000000000000000000000000000060003504166326121ff081146043575b600080fd5b3415604d57600080fd5b60536055565b005b73__lib2.sol:L____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160006040518083038186803b151560b357600080fd5b6102c65a03f4151560c357600080fd5b5050505600a165627a7a7230582058e61511a603707222cfa83fd3ae4269f94eb86513cb9042cf0b44877403d85c0029'; st.deepEqual( linker.findLinkReferences(bytecode), - { 'lib2.sol:L': [ { start: 116, length: 20 } ] } + { 'lib2.sol:L': [{ start: 116, length: 20 }] } ); st.end(); }); t.test('Invalid input (too short)', function (st) { - var bytecode = '6060604052341561000____66606060606060'; + const bytecode = '6060604052341561000____66606060606060'; st.deepEqual( linker.findLinkReferences(bytecode), {} @@ -74,7 +74,7 @@ tape('Link references', function (t) { }); t.test('Invalid input (1 byte short)', function (st) { - var bytecode = '6060604052341561000__lib2.sol:L___________________________66606060606060'; + const bytecode = '6060604052341561000__lib2.sol:L___________________________66606060606060'; st.deepEqual( linker.findLinkReferences(bytecode), {} @@ -83,10 +83,10 @@ tape('Link references', function (t) { }); t.test('Two references with same library name', function (st) { - var bytecode = '6060604052341561000f57600080fd5b61011a8061001e6000396000f30060606040526004361060255763ffffffff60e060020a60003504166326121ff08114602a575b600080fd5b3415603457600080fd5b603a603c565b005b73__lib2.sol:L____________________________6326121ff06040518163ffffffff1660e060020a02815260040160006040518083038186803b1515608157600080fd5b6102c65a03f41515609157600080fd5b50505073__lib2.sol:L____________________________6326121ff06040518163ffffffff1660e060020a02815260040160006040518083038186803b151560d957600080fd5b6102c65a03f4151560e957600080fd5b5050505600a165627a7a72305820fdfb8eab411d7bc86d7dfbb0c985c30bebf1cc105dc5b807291551b3d5aa29d90029'; + const bytecode = '6060604052341561000f57600080fd5b61011a8061001e6000396000f30060606040526004361060255763ffffffff60e060020a60003504166326121ff08114602a575b600080fd5b3415603457600080fd5b603a603c565b005b73__lib2.sol:L____________________________6326121ff06040518163ffffffff1660e060020a02815260040160006040518083038186803b1515608157600080fd5b6102c65a03f41515609157600080fd5b50505073__lib2.sol:L____________________________6326121ff06040518163ffffffff1660e060020a02815260040160006040518083038186803b151560d957600080fd5b6102c65a03f4151560e957600080fd5b5050505600a165627a7a72305820fdfb8eab411d7bc86d7dfbb0c985c30bebf1cc105dc5b807291551b3d5aa29d90029'; st.deepEqual( linker.findLinkReferences(bytecode), - { 'lib2.sol:L': [ { start: 92, length: 20 }, { start: 180, length: 20 } ] } + { 'lib2.sol:L': [{ start: 92, length: 20 }, { start: 180, length: 20 }] } ); st.end(); }); @@ -98,7 +98,7 @@ tape('Linking', function (t) { 'lib.sol': 'library L { function f() public returns (uint) { return 7; } }', 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' */ - var bytecode = 
'608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__lib.sol:L_____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a72305820ea2f6353179c181d7162544d637b7fe2d9e8da9803a0e2d9eafc2188d1d59ee30029'; + let bytecode = '608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__lib.sol:L_____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a72305820ea2f6353179c181d7162544d637b7fe2d9e8da9803a0e2d9eafc2188d1d59ee30029'; bytecode = linker.linkBytecode(bytecode, { 'lib.sol:L': '0x123456' }); st.ok(bytecode.indexOf('_') < 0); st.end(); @@ -109,8 +109,8 @@ tape('Linking', function (t) { 'lib.sol': 'library L { function f() public returns (uint) { return 7; } }', 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' */ - var bytecode = '608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__lib.sol:L_____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a72305820ea2f6353179c181d7162544d637b7fe2d9e8da9803a0e2d9eafc2188d1d59ee30029'; - bytecode = linker.linkBytecode(bytecode, { 'lib.sol': { 'L': '0x123456' } }); + let bytecode = '608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__lib.sol:L_____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a72305820ea2f6353179c181d7162544d637b7fe2d9e8da9803a0e2d9eafc2188d1d59ee30029'; + bytecode = linker.linkBytecode(bytecode, { 'lib.sol': { L: '0x123456' } }); st.ok(bytecode.indexOf('_') < 0); st.end(); }); @@ -120,7 +120,7 @@ tape('Linking', function (t) { 'lib.sol': 'library L { function f() public returns (uint) { return 7; } }', 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' */ - var bytecode = 
'608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__lib.sol:L_____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a72305820ea2f6353179c181d7162544d637b7fe2d9e8da9803a0e2d9eafc2188d1d59ee30029'; + let bytecode = '608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__lib.sol:L_____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a72305820ea2f6353179c181d7162544d637b7fe2d9e8da9803a0e2d9eafc2188d1d59ee30029'; bytecode = linker.linkBytecode(bytecode, { }); st.ok(bytecode.indexOf('_') >= 0); st.end(); @@ -131,7 +131,7 @@ tape('Linking', function (t) { 'lib.sol': 'library L { function f() public returns (uint) { return 7; } }', 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' */ - var bytecode = '608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__lib.sol:L_____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a72305820ea2f6353179c181d7162544d637b7fe2d9e8da9803a0e2d9eafc2188d1d59ee30029'; + const bytecode = '608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__lib.sol:L_____________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a72305820ea2f6353179c181d7162544d637b7fe2d9e8da9803a0e2d9eafc2188d1d59ee30029'; st.throws(function () { linker.linkBytecode(bytecode, { 'lib.sol:L': '' }); }); @@ -143,14 +143,14 @@ tape('Linking', function (t) { 'lib.sol': 'library L1234567890123456789012345678901234567890 { function f() public returns (uint) { return 7; } }', 'cont.sol': 'import "lib.sol"; contract x { function g() public { L1234567890123456789012345678901234567890.f(); } }' */ - var bytecode = 
'608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__lib.sol:L123456789012345678901234567__6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a723058209f88ff686bd8ceb0fc08853dc1332d5ff81dbcf5af3a1e9aa366828091761f8c0029'; + let bytecode = '608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__lib.sol:L123456789012345678901234567__6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a723058209f88ff686bd8ceb0fc08853dc1332d5ff81dbcf5af3a1e9aa366828091761f8c0029'; bytecode = linker.linkBytecode(bytecode, { 'lib.sol:L1234567890123456789012345678901234567890': '0x123456' }); st.ok(bytecode.indexOf('_') < 0); st.end(); }); t.test('hashed placeholder', function (st) { - var bytecode = '6060604052341561000__$cb901161e812ceb78cfe30ca65050c4337$__66606060606060'; + let bytecode = '6060604052341561000__$cb901161e812ceb78cfe30ca65050c4337$__66606060606060'; bytecode = linker.linkBytecode(bytecode, { 'lib2.sol:L': '0x123456' }); st.equal(bytecode, '6060604052341561000000000000000000000000000000000000012345666606060606060'); st.end(); @@ -161,8 +161,8 @@ tape('Linking', function (t) { 'lib.sol': 'library L { function f() public returns (uint) { return 7; } }', 'cont.sol': 'import "lib.sol"; contract x { function g() public { L.f(); } }' */ - var bytecode = '608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__libName_______________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a72305820ea2f6353179c181d7162544d637b7fe2d9e8da9803a0e2d9eafc2188d1d59ee30029'; - bytecode = linker.linkBytecode(bytecode, { 'libName': '0x123456' }); + let bytecode = '608060405234801561001057600080fd5b5061011f806100206000396000f300608060405260043610603f576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff168063e2179b8e146044575b600080fd5b348015604f57600080fd5b5060566058565b005b73__libName_______________________________6326121ff06040518163ffffffff167c010000000000000000000000000000000000000000000000000000000002815260040160206040518083038186803b15801560b757600080fd5b505af415801560ca573d6000803e3d6000fd5b505050506040513d602081101560df57600080fd5b8101908080519060200190929190505050505600a165627a7a72305820ea2f6353179c181d7162544d637b7fe2d9e8da9803a0e2d9eafc2188d1d59ee30029'; + bytecode = linker.linkBytecode(bytecode, { libName: '0x123456' }); 
st.ok(bytecode.indexOf('_') < 0); st.end(); }); diff --git a/test/resources/importA.sol b/test/resources/importA.sol new file mode 100644 index 00000000..b0193e9e --- /dev/null +++ b/test/resources/importA.sol @@ -0,0 +1,7 @@ +import "./importB.sol"; + +contract C { + function f() public returns (uint) { + return 0; + } +} diff --git a/test/resources/importB.sol b/test/resources/importB.sol new file mode 100644 index 00000000..c1d8f321 --- /dev/null +++ b/test/resources/importB.sol @@ -0,0 +1,5 @@ +contract D { + function f() public returns (uint) { + return 0; + } +} diff --git a/test/resources/importCallback/base/contractA.sol b/test/resources/importCallback/base/contractA.sol new file mode 100644 index 00000000..a0136e74 --- /dev/null +++ b/test/resources/importCallback/base/contractA.sol @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: GPL-3.0 +pragma solidity >=0.0; + +import "libX.sol"; +import "libY.sol"; +import "libZ.sol"; + +contract A {} diff --git a/test/resources/importCallback/base/contractB.sol b/test/resources/importCallback/base/contractB.sol new file mode 100644 index 00000000..c6792746 --- /dev/null +++ b/test/resources/importCallback/base/contractB.sol @@ -0,0 +1,6 @@ +// SPDX-License-Identifier: GPL-3.0 +pragma solidity >=0.0; + +import "./contractA.sol"; + +contract B {} diff --git a/test/resources/importCallback/contractC.sol b/test/resources/importCallback/contractC.sol new file mode 100644 index 00000000..f123f6c8 --- /dev/null +++ b/test/resources/importCallback/contractC.sol @@ -0,0 +1,4 @@ +// SPDX-License-Identifier: GPL-3.0 +pragma solidity >=0.0; + +contract C {} diff --git a/test/resources/importCallback/includeA/libX.sol b/test/resources/importCallback/includeA/libX.sol new file mode 100644 index 00000000..763fc402 --- /dev/null +++ b/test/resources/importCallback/includeA/libX.sol @@ -0,0 +1,4 @@ +// SPDX-License-Identifier: GPL-3.0 +pragma solidity >=0.0; + +library X {} diff --git a/test/resources/importCallback/includeA/libY.sol b/test/resources/importCallback/includeA/libY.sol new file mode 100644 index 00000000..55be31f6 --- /dev/null +++ b/test/resources/importCallback/includeA/libY.sol @@ -0,0 +1,6 @@ +// SPDX-License-Identifier: GPL-3.0 +pragma solidity >=0.0; + +import "./utils.sol"; + +library Y {} diff --git a/test/resources/importCallback/includeA/utils.sol b/test/resources/importCallback/includeA/utils.sol new file mode 100644 index 00000000..f544b941 --- /dev/null +++ b/test/resources/importCallback/includeA/utils.sol @@ -0,0 +1,4 @@ +// SPDX-License-Identifier: GPL-3.0 +pragma solidity >=0.0; + +library Utils {} diff --git a/test/resources/importCallback/includeB/libZ.sol b/test/resources/importCallback/includeB/libZ.sol new file mode 100644 index 00000000..6e3e6193 --- /dev/null +++ b/test/resources/importCallback/includeB/libZ.sol @@ -0,0 +1,4 @@ +// SPDX-License-Identifier: GPL-3.0 +pragma solidity >=0.0; + +library Z {} diff --git a/test/resources/smtChecker/loop.sol b/test/resources/smtChecker/loop.sol new file mode 100644 index 00000000..24e053d6 --- /dev/null +++ b/test/resources/smtChecker/loop.sol @@ -0,0 +1,8 @@ +contract C { + function f(uint x) public pure { + uint i = 0; + while (i < x) + ++i; + assert(i == x); + } +} diff --git a/test/resources/smtChecker/smoke.sol b/test/resources/smtChecker/smoke.sol new file mode 100644 index 00000000..74796e91 --- /dev/null +++ b/test/resources/smtChecker/smoke.sol @@ -0,0 +1,5 @@ +pragma experimental SMTChecker; +contract C { + function f() public pure { + } +} diff --git 
a/test/resources/smtChecker/smoke_with_engine.sol b/test/resources/smtChecker/smoke_with_engine.sol new file mode 100644 index 00000000..12dd42ce --- /dev/null +++ b/test/resources/smtChecker/smoke_with_engine.sol @@ -0,0 +1,6 @@ +contract C { + function f() public pure { + } +} +// ==== +// SMTEngine: all diff --git a/test/resources/smtChecker/smoke_with_multi_engine.sol b/test/resources/smtChecker/smoke_with_multi_engine.sol new file mode 100644 index 00000000..f5a8428a --- /dev/null +++ b/test/resources/smtChecker/smoke_with_multi_engine.sol @@ -0,0 +1,8 @@ +pragma experimental SMTChecker; +contract C { + function f() public pure { + } +} +// ==== +// SMTEngine: all +// SMTEngine: chc diff --git a/test/smtcallback.ts b/test/smtcallback.ts new file mode 100644 index 00000000..726cc87e --- /dev/null +++ b/test/smtcallback.ts @@ -0,0 +1,259 @@ +import assert from 'assert'; +import tape from 'tape'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as semver from 'semver'; +import solc from '../'; +import smtchecker from '../smtchecker'; +import smtsolver from '../smtsolver'; + +const preamble = 'pragma solidity >=0.0;\n// SPDX-License-Identifier: GPL-3.0\n'; + +function collectErrors (solOutput) { + if (solOutput === undefined) { + return []; + } + + const errors = []; + for (const i in solOutput.errors) { + const error = solOutput.errors[i]; + if (error.message.includes('This is a pre-release compiler version')) { + continue; + } + errors.push(error.message); + } + return errors; +} + +function expectErrors (expectations, errors, ignoreCex) { + if (errors.length !== expectations.length) { + return false; + } + + for (const i in errors) { + if (errors[i].includes('Error trying to invoke SMT solver') || expectations[i].includes('Error trying to invoke SMT solver')) { + continue; + } + // Expectations containing counterexamples might have many '\n' in a single line. + // These are stored escaped in the test format (as '\\n'), whereas the actual error from the compiler has '\n'. + // Therefore we need to replace '\\n' by '\n' in the expectations. + // Function `replace` only replaces the first occurrence, and `replaceAll` is not standard yet. + // Replace all '\\n' by '\n' via split & join. + expectations[i] = expectations[i].split('\\n').join('\n'); + if (ignoreCex) { + expectations[i] = expectations[i].split('\nCounterexample')[0]; + errors[i] = errors[i].split('\nCounterexample')[0]; + } + // `expectations` have "// Warning ... " before the actual message, + // whereas `errors` have only the message. + if (!expectations[i].includes(errors[i])) { + return false; + } + } + + return true; +} + +tape('SMTCheckerCallback', function (t) { + t.test('Interface via callback', function (st) { + if (!semver.gt(solc.semver(), '0.5.99')) { + st.skip('SMT callback not implemented by this compiler version.'); + st.end(); + return; + } + + const satCallback = function (query) { + return { contents: 'sat\n' }; + }; + const unsatCallback = function (query) { + return { contents: 'unsat\n' }; + }; + const errorCallback = function (query) { + return { error: 'Fake SMT solver error.' 
}; + }; + + let pragmaSMT = ''; + let settings = {}; + // `pragma experimental SMTChecker;` was deprecated in 0.8.4 + if (!semver.gt(solc.semver(), '0.8.3')) { + pragmaSMT = 'pragma experimental SMTChecker;\n'; + } else { + settings = { modelChecker: { engine: 'all' } }; + } + + const input = { a: { content: preamble + pragmaSMT + 'contract C { function f(uint x) public pure { assert(x > 0); } }' } }; + const inputJSON = JSON.stringify({ + language: 'Solidity', + sources: input, + settings: settings + }); + + let tests; + if (!semver.gt(solc.semver(), '0.6.8')) { + // Up to version 0.6.8 there were no embedded solvers. + tests = [ + { cb: satCallback, expectations: ['Assertion violation happens here'] }, + { cb: unsatCallback, expectations: [] }, + { cb: errorCallback, expectations: ['BMC analysis was not possible'] } + ]; + } else if (!semver.gt(solc.semver(), '0.6.12')) { + // Solidity 0.6.9 comes with z3. + tests = [ + { cb: satCallback, expectations: ['Assertion violation happens here'] }, + { cb: unsatCallback, expectations: ['At least two SMT solvers provided conflicting answers. Results might not be sound.'] }, + { cb: errorCallback, expectations: ['Assertion violation happens here'] } + ]; + } else { + // Solidity 0.7.0 reports assertion violations via CHC. + tests = [ + { cb: satCallback, expectations: ['Assertion violation happens here'] }, + { cb: unsatCallback, expectations: ['Assertion violation happens here'] }, + { cb: errorCallback, expectations: ['Assertion violation happens here'] } + ]; + } + + for (const i in tests) { + const test = tests[i]; + const output = JSON.parse(solc.compile( + inputJSON, + { smtSolver: test.cb } + )); + const errors = collectErrors(output); + st.ok(expectErrors(errors, test.expectations, false)); + } + st.end(); + }); + + t.test('Solidity smtCheckerTests', function (st) { + const testdir = path.resolve(__dirname, 'resources/smtChecker/'); + if (!fs.existsSync(testdir)) { + st.skip('SMT checker tests not present.'); + st.end(); + return; + } + + // For these tests we actually need z3/Spacer. 
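+ // smtsolver.availableSolvers lists the SMT binaries detected on this machine; only z3
+ // provides the Spacer Horn engine these tests rely on, so filter the list down to it.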
+ const z3HornSolvers = smtsolver.availableSolvers.filter(solver => solver.command === 'z3'); + if (z3HornSolvers.length === 0) { + st.skip('z3/Spacer not available.'); + st.end(); + return; + } + + const sources = []; + + // BFS to get all test files + const dirs = [testdir]; + let i; + while (dirs.length > 0) { + const dir = dirs.shift(); + const files = fs.readdirSync(dir); + for (i in files) { + const file = path.join(dir, files[i]); + if (fs.statSync(file).isDirectory()) { + dirs.push(file); + } else { + sources.push(file); + } + } + } + + // Read tests and collect expectations + const tests = []; + for (i in sources) { + st.comment('Collecting ' + sources[i] + '...'); + const source = fs.readFileSync(sources[i], 'utf8'); + + let engine; + const option = '// SMTEngine: '; + if (source.includes(option)) { + const idx = source.indexOf(option); + if (source.indexOf(option, idx + 1) !== -1) { + st.comment('SMTEngine option given multiple times.'); + continue; + } + const re = new RegExp(option + '(\\w+)'); + const m = source.match(re); + assert(m !== undefined); + assert(m.length >= 2); + engine = m[1]; + } + + let expected = []; + const delimiter = '// ----'; + if (source.includes(delimiter)) { + expected = source.substring(source.indexOf('// ----') + 8, source.length).split('\n'); + // Sometimes the last expectation line ends with a '\n' + if (expected.length > 0 && expected[expected.length - 1] === '') { + expected.pop(); + } + } + tests[sources[i]] = { + expectations: expected, + solidity: { test: { content: preamble + source } }, + ignoreCex: source.includes('// SMTIgnoreCex: yes'), + engine: engine + }; + } + + // Run all tests + for (i in tests) { + const test = tests[i]; + + // Z3's nondeterminism sometimes causes a test to timeout in one context but not in the other, + // so if we see timeout we skip a potentially misleading run. + const findError = (errorMsg) => { return errorMsg.includes('Error trying to invoke SMT solver'); }; + if (test.expectations.find(findError) !== undefined) { + st.skip('Test contains timeout which may have been caused by nondeterminism.'); + continue; + } + + let settings = {}; + // `pragma experimental SMTChecker;` was deprecated in 0.8.4 + if (semver.gt(solc.semver(), '0.8.3')) { + const engine = test.engine !== undefined ? test.engine : 'all'; + settings = { + modelChecker: { + engine: engine, + solvers: [ + 'smtlib2' + ] + } + }; + } + const output = JSON.parse(solc.compile( + JSON.stringify({ + language: 'Solidity', + sources: test.solidity, + settings: settings + }), + // This test needs z3 specifically. + { smtSolver: smtchecker.smtCallback(smtsolver.smtSolver, z3HornSolvers[0]) } + )); + st.ok(output); + + // Collect obtained error messages + test.errors = collectErrors(output); + + // These are errors in the SMTLib2Interface encoding. + if (test.errors.length > 0 && test.errors[test.errors.length - 1].includes('BMC analysis was not possible')) { + continue; + } + + // These are due to CHC not being supported via SMTLib2Interface yet. 
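+ // A count mismatch means some expected findings could not be produced through the
+ // SMTLib2 callback (see the comment above), so skip the comparison rather than fail.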
+ if (test.expectations.length !== test.errors.length) { + continue; + } + + if (test.errors.find(findError) !== undefined) { + st.skip('Test contains timeout which may have been caused by nondeterminism.'); + continue; + } + + // Compare expected vs obtained errors + st.ok(expectErrors(test.expectations, test.errors, test.ignoreCex)); + } + + st.end(); + }); +}); diff --git a/test/smtchecker.js b/test/smtchecker.js deleted file mode 100644 index 0ec2e24d..00000000 --- a/test/smtchecker.js +++ /dev/null @@ -1,25 +0,0 @@ -const tape = require('tape'); -const smtchecker = require('../smtchecker.js'); - -tape('SMTChecker', function (t) { - t.test('smoke test with no axuiliaryInputRequested', function (st) { - var input = {}; - var output = {}; - st.equal(smtchecker.handleSMTQueries(input, output), null); - st.end(); - }); - - t.test('smoke test with no smtlib2queries', function (st) { - var input = {}; - var output = { auxiliaryInputRequested: {} }; - st.equal(smtchecker.handleSMTQueries(input, output), null); - st.end(); - }); - - t.test('smoke test with empty smtlib2queries', function (st) { - var input = {}; - var output = { auxiliaryInputRequested: { smtlib2queries: { } } }; - st.equal(smtchecker.handleSMTQueries(input, output), null); - st.end(); - }); -}); diff --git a/test/smtchecker.ts b/test/smtchecker.ts new file mode 100644 index 00000000..70193339 --- /dev/null +++ b/test/smtchecker.ts @@ -0,0 +1,104 @@ +import tape from 'tape'; +import * as semver from 'semver'; +import solc from '../'; +import smtchecker from '../smtchecker'; +import smtsolver from '../smtsolver'; + +const preamble = 'pragma solidity >=0.0;\n// SPDX-License-Identifier: GPL-3.0\n'; +// +tape('SMTChecker', function (t) { + // We use null for `solverFunction` and `solver` when calling `handleSMTQueries` + // because these tests do not call a solver. + + t.test('smoke test with no axuiliaryInputRequested', function (st) { + const input = {}; + const output = {}; + st.equal(smtchecker.handleSMTQueries(input, output, null, null), null); + st.end(); + }); + + t.test('smoke test with no smtlib2queries', function (st) { + const input = {}; + const output = { auxiliaryInputRequested: {} }; + st.equal(smtchecker.handleSMTQueries(input, output, null, null), null); + st.end(); + }); + + t.test('smoke test with empty smtlib2queries', function (st) { + const input = {}; + const output = { auxiliaryInputRequested: { smtlib2queries: { } } }; + st.equal(smtchecker.handleSMTQueries(input, output, null, null), null); + st.end(); + }); + + t.test('smtCallback should return type function', (st) => { + const response = smtchecker.smtCallback(() => {}); + st.equal(typeof response, 'function'); + st.end(); + }); + + t.test('smtCallback should error when passed parser fails', (st) => { + const cbFun = smtchecker.smtCallback((content) => { throw new Error(content); }); + const response = cbFun('expected-error-message'); + + st.deepEqual(response, { error: new Error('expected-error-message') }); + st.end(); + }); + + t.test('smtCallback should return content when passed parser does not fail', (st) => { + const cbFun = smtchecker.smtCallback((content) => { return content; }); + const response = cbFun('expected-content-message'); + + st.deepEqual(response, { contents: 'expected-content-message' }); + st.end(); + }); +}); + +tape('SMTCheckerWithSolver', function (t) { + // In these tests we require z3 to actually run the solver. + // This uses the SMT double run mechanism instead of the callback. 
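+ // Outline of the double-run flow exercised below:
+ //   1. compile once; the output's auxiliaryInputRequested carries the SMT queries;
+ //   2. smtchecker.handleSMTQueries() feeds them to the solver and builds a new input;
+ //   3. compile that new input and read the final SMTChecker diagnostics from it.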
+ + t.test('Simple test with axuiliaryInputRequested', function (st) { + const z3 = smtsolver.availableSolvers.filter(solver => solver.command === 'z3'); + if (z3.length === 0) { + st.skip('Test requires z3.'); + st.end(); + return; + } + + if (semver.lt(solc.semver(), '0.8.7')) { + st.skip('This test requires Solidity 0.8.7 to enable all SMTChecker options.'); + st.end(); + return; + } + + const settings = { + modelChecker: { + engine: 'chc', + solvers: ['smtlib2'] + } + }; + + const source = { a: { content: preamble + '\ncontract C { function f(uint x) public pure { assert(x > 0); } }' } }; + + const input = { + language: 'Solidity', + sources: source, + settings: settings + }; + + const output = JSON.parse(solc.compile(JSON.stringify(input))); + st.ok(output); + + const newInput = smtchecker.handleSMTQueries(input, output, smtsolver.smtSolver, z3[0]); + st.notEqual(newInput, null); + + const newOutput = JSON.parse(solc.compile(JSON.stringify(newInput))); + st.ok(newOutput); + + const smtErrors = newOutput.errors.filter(e => e.errorCode === '6328'); + st.equal(smtErrors.length, 1); + + st.end(); + }); +}); diff --git a/test/translate.js b/test/translate.ts similarity index 74% rename from test/translate.js rename to test/translate.ts index c03bedd9..ecd15231 100644 --- a/test/translate.js +++ b/test/translate.ts @@ -1,7 +1,8 @@ -const fs = require('fs'); -const path = require('path'); -const tape = require('tape'); -const translate = require('../translate.js'); +import * as fs from 'fs'; +import * as path from 'path'; +import tape from 'tape'; +import translate from '../translate'; + const versionToSemver = translate.versionToSemver; tape('Version string to Semver translator', function (t) { @@ -37,7 +38,10 @@ tape('Version string to Semver translator', function (t) { st.end(); }); t.test('Old style 0.3.5', function (st) { + // The one in the solc-bin list st.equal(versionToSemver('0.3.5-371690f0/Release-Emscripten/clang/Interpreter'), '0.3.5+commit.371690f0'); + // The actual one reported by the compiler + st.equal(versionToSemver('0.3.5-0/Release-Emscripten/clang/Interpreter'), '0.3.5'); st.end(); }); t.test('Old style 0.3.6', function (st) { @@ -48,9 +52,9 @@ tape('Version string to Semver translator', function (t) { tape('prettyPrintLegacyAssemblyJSON', function (t) { t.test('Works properly', function (st) { - var fixtureAsmJson = JSON.parse(fs.readFileSync(path.resolve(__dirname, 'resources/fixtureAsmJson.json')).toString()); - var fixtureAsmJsonSource = fs.readFileSync(path.resolve(__dirname, 'resources/fixtureAsmJson.sol')).toString(); - var fixtureAsmJsonOutput = fs.readFileSync(path.resolve(__dirname, 'resources/fixtureAsmJson.output')).toString(); + const fixtureAsmJson = JSON.parse(fs.readFileSync(path.resolve(__dirname, 'resources/fixtureAsmJson.json')).toString()); + const fixtureAsmJsonSource = fs.readFileSync(path.resolve(__dirname, 'resources/fixtureAsmJson.sol')).toString(); + const fixtureAsmJsonOutput = fs.readFileSync(path.resolve(__dirname, 'resources/fixtureAsmJson.output')).toString(); st.equal(translate.prettyPrintLegacyAssemblyJSON(fixtureAsmJson, fixtureAsmJsonSource), fixtureAsmJsonOutput); st.end(); }); diff --git a/translate.js b/translate.js deleted file mode 100644 index 26e469fb..00000000 --- a/translate.js +++ /dev/null @@ -1,195 +0,0 @@ -var linker = require('./linker.js'); - -/// Translate old style version numbers to semver. 
-/// Old style: 0.3.6-3fc68da5/Release-Emscripten/clang -/// 0.3.5-371690f0/Release-Emscripten/clang/Interpreter -/// 0.2.0-e7098958/.-Emscripten/clang/int linked to libethereum-1.1.1-bbb80ab0/.-Emscripten/clang/int -/// 0.1.3-0/.-/clang/int linked to libethereum-0.9.92-0/.-/clang/int -/// 0.1.2-5c3bfd4b*/.-/clang/int -/// 0.1.1-6ff4cd6b/RelWithDebInfo-Emscripten/clang/int -/// New style: 0.4.5+commit.b318366e.Emscripten.clang -function versionToSemver (version) { - // FIXME: parse more detail, but this is a good start - var parsed = version.match(/^([0-9]+\.[0-9]+\.[0-9]+)-([0-9a-f]{8})[/*].*$/); - if (parsed) { - return parsed[1] + '+commit.' + parsed[2]; - } - if (version.indexOf('0.1.3-0') !== -1) { - return '0.1.3'; - } - // assume it is already semver compatible - return version; -} - -function translateErrors (ret, errors) { - for (var error in errors) { - var type = 'error'; - var extractType = /^(.*):(\d+):(\d+):(.*):/; - extractType = extractType.exec(errors[error]); - if (extractType) { - type = extractType[4].trim(); - } else if (errors[error].indexOf(': Warning:')) { - type = 'Warning'; - } else if (errors[error].indexOf(': Error:')) { - type = 'Error'; - } - ret.push({ - type: type, - component: 'general', - severity: (type === 'Warning') ? 'warning' : 'error', - message: errors[error], - formattedMessage: errors[error] - }); - } -} - -function translateGasEstimates (gasEstimates) { - if (gasEstimates === null) { - return 'infinite'; - } - - if (typeof gasEstimates === 'number') { - return gasEstimates.toString(); - } - - var gasEstimatesTranslated = {}; - for (var func in gasEstimates) { - gasEstimatesTranslated[func] = translateGasEstimates(gasEstimates[func]); - } - return gasEstimatesTranslated; -} - -function translateJsonCompilerOutput (output, libraries) { - var ret = {}; - - ret['errors'] = []; - var errors; - if (output['error']) { - errors = [ output['error'] ]; - } else { - errors = output['errors']; - } - translateErrors(ret['errors'], errors); - - ret['contracts'] = {}; - for (var contract in output['contracts']) { - // Split name first, can be `contract`, `:contract` or `filename:contract` - var tmp = contract.match(/^(([^:]*):)?([^:]+)$/); - if (tmp.length !== 4) { - // Force abort - return null; - } - var fileName = tmp[2]; - if (fileName === undefined) { - // this is the case of `contract` - fileName = ''; - } - var contractName = tmp[3]; - - var contractInput = output['contracts'][contract]; - - var gasEstimates = contractInput['gasEstimates']; - var translatedGasEstimates = {}; - - if (gasEstimates['creation']) { - translatedGasEstimates['creation'] = { - 'codeDepositCost': translateGasEstimates(gasEstimates['creation'][1]), - 'executionCost': translateGasEstimates(gasEstimates['creation'][0]) - }; - } - if (gasEstimates['internal']) { - translatedGasEstimates['internal'] = translateGasEstimates(gasEstimates['internal']); - } - if (gasEstimates['external']) { - translatedGasEstimates['external'] = translateGasEstimates(gasEstimates['external']); - } - - var contractOutput = { - 'abi': JSON.parse(contractInput['interface']), - 'metadata': contractInput['metadata'], - 'evm': { - 'legacyAssembly': contractInput['assembly'], - 'bytecode': { - 'object': linker.linkBytecode(contractInput['bytecode'], libraries), - 'opcodes': contractInput['opcodes'], - 'sourceMap': contractInput['srcmap'], - 'linkReferences': linker.findLinkReferences(contractInput['bytecode']) - }, - 'deployedBytecode': { - 'object': linker.linkBytecode(contractInput['runtimeBytecode'], 
libraries), - 'sourceMap': contractInput['srcmapRuntime'], - 'linkReferences': linker.findLinkReferences(contractInput['runtimeBytecode']) - }, - 'methodIdentifiers': contractInput['functionHashes'], - 'gasEstimates': translatedGasEstimates - } - }; - - if (!ret['contracts'][fileName]) { - ret['contracts'][fileName] = {}; - } - - ret['contracts'][fileName][contractName] = contractOutput; - } - - var sourceMap = {}; - for (var sourceId in output['sourceList']) { - sourceMap[output['sourceList'][sourceId]] = sourceId; - } - - ret['sources'] = {}; - for (var source in output['sources']) { - ret['sources'][source] = { - id: sourceMap[source], - legacyAST: output['sources'][source].AST - }; - } - - return ret; -} - -function escapeString (text) { - return text - .replace(/\n/g, '\\n') - .replace(/\r/g, '\\r') - .replace(/\t/g, '\\t'); -} - -function formatAssemblyText (asm, prefix, source) { - if (typeof asm === typeof '' || asm === null || asm === undefined) { - return prefix + (asm || '') + '\n'; - } - var text = prefix + '.code\n'; - asm['.code'].forEach(function (item, i) { - var v = item.value === undefined ? '' : item.value; - var src = ''; - if (source !== undefined && item.begin !== undefined && item.end !== undefined) { - src = escapeString(source.slice(item.begin, item.end)); - } - if (src.length > 30) { - src = src.slice(0, 30) + '...'; - } - if (item.name !== 'tag') { - text += ' '; - } - text += prefix + item.name + ' ' + v + '\t\t\t' + src + '\n'; - }); - text += prefix + '.data\n'; - var asmData = asm['.data'] || []; - for (var i in asmData) { - var item = asmData[i]; - text += ' ' + prefix + '' + i + ':\n'; - text += formatAssemblyText(item, prefix + ' ', source); - } - return text; -} - -function prettyPrintLegacyAssemblyJSON (assembly, source) { - return formatAssemblyText(assembly, '', source); -} - -module.exports = { - versionToSemver: versionToSemver, - translateJsonCompilerOutput: translateJsonCompilerOutput, - prettyPrintLegacyAssemblyJSON: prettyPrintLegacyAssemblyJSON -}; diff --git a/translate.ts b/translate.ts new file mode 100644 index 00000000..ca0c39f7 --- /dev/null +++ b/translate.ts @@ -0,0 +1,200 @@ +import linker from './linker'; + +/// Translate old style version numbers to semver. +/// Old style: 0.3.6-3fc68da5/Release-Emscripten/clang +/// 0.3.5-371690f0/Release-Emscripten/clang/Interpreter +/// 0.3.5-0/Release-Emscripten/clang/Interpreter +/// 0.2.0-e7098958/.-Emscripten/clang/int linked to libethereum-1.1.1-bbb80ab0/.-Emscripten/clang/int +/// 0.1.3-0/.-/clang/int linked to libethereum-0.9.92-0/.-/clang/int +/// 0.1.2-5c3bfd4b*/.-/clang/int +/// 0.1.1-6ff4cd6b/RelWithDebInfo-Emscripten/clang/int +/// New style: 0.4.5+commit.b318366e.Emscripten.clang +function versionToSemver (version) { + // FIXME: parse more detail, but this is a good start + const parsed = version.match(/^([0-9]+\.[0-9]+\.[0-9]+)-([0-9a-f]{8})[/*].*$/); + if (parsed) { + return parsed[1] + '+commit.' 
+ parsed[2]; + } + if (version.indexOf('0.1.3-0') !== -1) { + return '0.1.3'; + } + if (version.indexOf('0.3.5-0') !== -1) { + return '0.3.5'; + } + // assume it is already semver compatible + return version; +} + +function translateErrors (ret, errors) { + for (const error in errors) { + let type = 'error'; + let extractType: any = /^(.*):(\d+):(\d+):(.*):/; + extractType = extractType.exec(errors[error]); + if (extractType) { + type = extractType[4].trim(); + } else if (errors[error].indexOf(': Warning:')) { + type = 'Warning'; + } else if (errors[error].indexOf(': Error:')) { + type = 'Error'; + } + ret.push({ + type: type, + component: 'general', + severity: (type === 'Warning') ? 'warning' : 'error', + message: errors[error], + formattedMessage: errors[error] + }); + } +} + +function translateGasEstimates (gasEstimates) { + if (gasEstimates === null) { + return 'infinite'; + } + + if (typeof gasEstimates === 'number') { + return gasEstimates.toString(); + } + + const gasEstimatesTranslated = {}; + for (const func in gasEstimates) { + gasEstimatesTranslated[func] = translateGasEstimates(gasEstimates[func]); + } + return gasEstimatesTranslated; +} + +function translateJsonCompilerOutput (output, libraries) { + const ret: any = {}; + + ret.errors = []; + let errors; + if (output.error) { + errors = [output.error]; + } else { + errors = output.errors; + } + translateErrors(ret.errors, errors); + + ret.contracts = {}; + for (const contract in output.contracts) { + // Split name first, can be `contract`, `:contract` or `filename:contract` + const tmp = contract.match(/^((.*):)?([^:]+)$/); + if (tmp.length !== 4) { + // Force abort + return null; + } + let fileName = tmp[2]; + if (fileName === undefined) { + // this is the case of `contract` + fileName = ''; + } + const contractName = tmp[3]; + + const contractInput = output.contracts[contract]; + + const gasEstimates = contractInput.gasEstimates; + const translatedGasEstimates: any = {}; + + if (gasEstimates.creation) { + translatedGasEstimates.creation = { + codeDepositCost: translateGasEstimates(gasEstimates.creation[1]), + executionCost: translateGasEstimates(gasEstimates.creation[0]) + }; + } + if (gasEstimates.internal) { + translatedGasEstimates.internal = translateGasEstimates(gasEstimates.internal); + } + if (gasEstimates.external) { + translatedGasEstimates.external = translateGasEstimates(gasEstimates.external); + } + + const contractOutput = { + abi: JSON.parse(contractInput.interface), + metadata: contractInput.metadata, + evm: { + legacyAssembly: contractInput.assembly, + bytecode: { + object: contractInput.bytecode && linker.linkBytecode(contractInput.bytecode, libraries || {}), + opcodes: contractInput.opcodes, + sourceMap: contractInput.srcmap, + linkReferences: contractInput.bytecode && linker.findLinkReferences(contractInput.bytecode) + }, + deployedBytecode: { + object: contractInput.runtimeBytecode && linker.linkBytecode(contractInput.runtimeBytecode, libraries || {}), + sourceMap: contractInput.srcmapRuntime, + linkReferences: contractInput.runtimeBytecode && linker.findLinkReferences(contractInput.runtimeBytecode) + }, + methodIdentifiers: contractInput.functionHashes, + gasEstimates: translatedGasEstimates + } + }; + + if (!ret.contracts[fileName]) { + ret.contracts[fileName] = {}; + } + + ret.contracts[fileName][contractName] = contractOutput; + } + + const sourceMap = {}; + for (const sourceId in output.sourceList) { + sourceMap[output.sourceList[sourceId]] = sourceId; + } + + ret.sources = {}; + for (const source 
in output.sources) { + ret.sources[source] = { + id: sourceMap[source], + legacyAST: output.sources[source].AST + }; + } + + return ret; +} + +function escapeString (text) { + return text + .replace(/\n/g, '\\n') + .replace(/\r/g, '\\r') + .replace(/\t/g, '\\t'); +} + +// 'asm' can be an object or a string +function formatAssemblyText (asm, prefix, source) { + if (typeof asm === 'string' || asm === null || asm === undefined) { + return prefix + (asm || '') + '\n'; + } + let text = prefix + '.code\n'; + asm['.code'].forEach(function (item, i) { + const v = item.value === undefined ? '' : item.value; + let src = ''; + if (source !== undefined && item.begin !== undefined && item.end !== undefined) { + src = escapeString(source.slice(item.begin, item.end)); + } + if (src.length > 30) { + src = src.slice(0, 30) + '...'; + } + if (item.name !== 'tag') { + text += ' '; + } + text += prefix + item.name + ' ' + v + '\t\t\t' + src + '\n'; + }); + text += prefix + '.data\n'; + const asmData = asm['.data'] || []; + for (const i in asmData) { + const item = asmData[i]; + text += ' ' + prefix + '' + i + ':\n'; + text += formatAssemblyText(item, prefix + ' ', source); + } + return text; +} + +function prettyPrintLegacyAssemblyJSON (assembly, source) { + return formatAssemblyText(assembly, '', source); +} + +export = { + versionToSemver, + translateJsonCompilerOutput, + prettyPrintLegacyAssemblyJSON +}; diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..a4420494 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,34 @@ +{ + // Configuration reference: https://www.typescriptlang.org/tsconfig + "compilerOptions": { + "target": "esnext", + "module": "commonjs", + "resolveJsonModule": true, + // This is needed for backwards-compatibility, to keep imports of the form `wrapper = require('solc/wrapper)` + // working like they did before the TypeScript migration. + // TODO: Drop it in the next breaking release. + "esModuleInterop": true, + "outDir": "./dist", + "forceConsistentCasingInFileNames": true, + // Allow JS must be included to ensure that the built binary is included + // in the output. This could be copied directly in the future if required. + "allowJs": true, + // TODO: + // In order to gracefully move our project to TypeScript without having + // TS immediately yell at you, we'll disable strict mode for now. 
+ "strict": false, + "noImplicitAny": false + }, + "include": [ + "**/*.js", + "**/*.ts", + "**/*.json" + ], + "exclude": [ + "coverage", + "dist" + ], + "ts-node": { + "transpileOnly": true + } +} diff --git a/verifyVersion.js b/verifyVersion.ts similarity index 63% rename from verifyVersion.js rename to verifyVersion.ts index 6cd0568c..863e6bee 100755 --- a/verifyVersion.js +++ b/verifyVersion.ts @@ -1,9 +1,11 @@ #!/usr/bin/env node -var semver = require('semver'); +import * as semver from 'semver'; +import solc from './'; -var packageVersion = require('./package.json').version; -var solcVersion = require('./index.js').version(); +const { version: packageVersion } = require('./package.json'); + +const solcVersion = (solc as any).version(); console.log('solcVersion: ' + solcVersion); console.log('packageVersion: ' + packageVersion); diff --git a/wrapper.js b/wrapper.js deleted file mode 100644 index b43c0030..00000000 --- a/wrapper.js +++ /dev/null @@ -1,269 +0,0 @@ -var assert = require('assert'); -var translate = require('./translate.js'); -var requireFromString = require('require-from-string'); -var https = require('https'); -var MemoryStream = require('memorystream'); - -function setupMethods (soljson) { - var version; - if ('_solidity_version' in soljson) { - version = soljson.cwrap('solidity_version', 'string', []); - } else { - version = soljson.cwrap('version', 'string', []); - } - - var versionToSemver = function () { - return translate.versionToSemver(version()); - }; - - var license; - if ('_solidity_license' in soljson) { - license = soljson.cwrap('solidity_license', 'string', []); - } else if ('_license' in soljson) { - license = soljson.cwrap('license', 'string', []); - } else { - // pre 0.4.14 - license = function () { - // return undefined - }; - } - - var copyString = function (str, ptr) { - var length = soljson.lengthBytesUTF8(str); - var buffer = soljson._malloc(length + 1); - soljson.stringToUTF8(str, buffer, length + 1); - soljson.setValue(ptr, buffer, '*'); - }; - - var wrapCallback = function (callback) { - assert(typeof callback === 'function', 'Invalid callback specified.'); - return function (path, contents, error) { - var result = callback(soljson.Pointer_stringify(path)); - if (typeof result.contents === 'string') { - copyString(result.contents, contents); - } - if (typeof result.error === 'string') { - copyString(result.error, error); - } - }; - }; - - // This calls compile() with args || cb - var runWithReadCallback = function (readCallback, compile, args) { - // Forward compatibility with 0.6.x - if (typeof readCallback === 'object') { - readCallback = readCallback.import; - } - - if (readCallback === undefined) { - readCallback = function (path) { - return { - error: 'File import callback not supported' - }; - }; - } - - // This is to support multiple versions of Emscripten. 
- var addFunction = soljson.addFunction || soljson.Runtime.addFunction; - var removeFunction = soljson.removeFunction || soljson.Runtime.removeFunction; - - var cb = addFunction(wrapCallback(readCallback)); - var output; - try { - args.push(cb); - output = compile.apply(undefined, args); - } catch (e) { - removeFunction(cb); - throw e; - } - removeFunction(cb); - return output; - }; - - var compileJSON = null; - if ('_compileJSON' in soljson) { - compileJSON = soljson.cwrap('compileJSON', 'string', ['string', 'number']); - } - - var compileJSONMulti = null; - if ('_compileJSONMulti' in soljson) { - compileJSONMulti = soljson.cwrap('compileJSONMulti', 'string', ['string', 'number']); - } - - var compileJSONCallback = null; - if ('_compileJSONCallback' in soljson) { - var compileInternal = soljson.cwrap('compileJSONCallback', 'string', ['string', 'number', 'number']); - compileJSONCallback = function (input, optimize, readCallback) { - return runWithReadCallback(readCallback, compileInternal, [ input, optimize ]); - }; - } - - var compileStandard = null; - if ('_compileStandard' in soljson) { - var compileStandardInternal = soljson.cwrap('compileStandard', 'string', ['string', 'number']); - compileStandard = function (input, readCallback) { - return runWithReadCallback(readCallback, compileStandardInternal, [ input ]); - }; - } - if ('_solidity_compile' in soljson) { - var solidityCompile = soljson.cwrap('solidity_compile', 'string', ['string', 'number']); - compileStandard = function (input, readCallback) { - return runWithReadCallback(readCallback, solidityCompile, [ input ]); - }; - } - - // Expects a Standard JSON I/O but supports old compilers - var compileStandardWrapper = function (input, readCallback) { - if (compileStandard !== null) { - return compileStandard(input, readCallback); - } - - function formatFatalError (message) { - return JSON.stringify({ - errors: [ - { - 'type': 'JSONError', - 'component': 'solcjs', - 'severity': 'error', - 'message': message, - 'formattedMessage': 'Error: ' + message - } - ] - }); - } - - // Forward compatibility with 0.6.x - if (typeof readCallback === 'object') { - readCallback = readCallback.import; - } - - if (readCallback !== undefined) { - assert(typeof readCallback === 'function', 'Invalid callback specified.'); - } - - try { - input = JSON.parse(input); - } catch (e) { - return formatFatalError('Invalid JSON supplied: ' + e.message); - } - - if (input['language'] !== 'Solidity') { - return formatFatalError('Only "Solidity" is supported as a language.'); - } - - // NOTE: this is deliberately `== null` - if (input['sources'] == null || input['sources'].length === 0) { - return formatFatalError('No input sources specified.'); - } - - // Bail out early - if ((input['sources'].length > 1) && (compileJSONMulti === null)) { - return formatFatalError('Multiple sources provided, but compiler only supports single input.'); - } - - function isOptimizerEnabled (input) { - return input['settings'] && input['settings']['optimizer'] && input['settings']['optimizer']['enabled']; - } - - function translateSources (input) { - var sources = {}; - for (var source in input['sources']) { - if (input['sources'][source]['content'] !== null) { - sources[source] = input['sources'][source]['content']; - } else { - // force failure - return null; - } - } - return sources; - } - - function librariesSupplied (input) { - if (input['settings']) { - return input['settings']['libraries']; - } - } - - function translateOutput (output, libraries) { - try { - output = 
JSON.parse(output); - } catch (e) { - return formatFatalError('Compiler returned invalid JSON: ' + e.message); - } - output = translate.translateJsonCompilerOutput(output, libraries); - if (output == null) { - return formatFatalError('Failed to process output.'); - } - return JSON.stringify(output); - } - - var sources = translateSources(input); - if (sources === null || Object.keys(sources).length === 0) { - return formatFatalError('Failed to process sources.'); - } - - // Try linking if libraries were supplied - var libraries = librariesSupplied(input); - - // Try to wrap around old versions - if (compileJSONCallback !== null) { - return translateOutput(compileJSONCallback(JSON.stringify({ 'sources': sources }), isOptimizerEnabled(input), readCallback), libraries); - } - - if (compileJSONMulti !== null) { - return translateOutput(compileJSONMulti(JSON.stringify({ 'sources': sources }), isOptimizerEnabled(input)), libraries); - } - - // Try our luck with an ancient compiler - if (compileJSON !== null) { - return translateOutput(compileJSON(sources[Object.keys(sources)[0]], isOptimizerEnabled(input)), libraries); - } - - return formatFatalError('Compiler does not support any known interface.'); - }; - - return { - version: version, - semver: versionToSemver, - license: license, - lowlevel: { - compileSingle: compileJSON, - compileMulti: compileJSONMulti, - compileCallback: compileJSONCallback, - compileStandard: compileStandard - }, - features: { - legacySingleInput: compileJSON !== null, - multipleInputs: compileJSONMulti !== null || compileStandard !== null, - importCallback: compileJSONCallback !== null || compileStandard !== null, - nativeStandardJSON: compileStandard !== null - }, - compile: compileStandardWrapper, - // Temporary wrappers to minimise breaking with other projects. - // NOTE: to be removed in 0.5.2 - compileStandard: compileStandardWrapper, - compileStandardWrapper: compileStandardWrapper, - // Loads the compiler of the given version from the github repository - // instead of from the local filesystem. - loadRemoteVersion: function (versionString, cb) { - var mem = new MemoryStream(null, {readable: false}); - var url = 'https://ethereum.github.io/solc-bin/bin/soljson-' + versionString + '.js'; - https.get(url, function (response) { - if (response.statusCode !== 200) { - cb(new Error('Error retrieving binary: ' + response.statusMessage)); - } else { - response.pipe(mem); - response.on('end', function () { - cb(null, setupMethods(requireFromString(mem.toString(), 'soljson-' + versionString + '.js'))); - }); - } - }).on('error', function (error) { - cb(error); - }); - }, - // Use this if you want to add wrapper functions around the pure module. 
- setupMethods: setupMethods - }; -} - -module.exports = setupMethods; diff --git a/wrapper.ts b/wrapper.ts new file mode 100755 index 00000000..e9bf471e --- /dev/null +++ b/wrapper.ts @@ -0,0 +1,169 @@ +import MemoryStream from 'memorystream'; +import { https } from 'follow-redirects'; + +import { formatFatalError } from './formatters'; +import { isNil } from './common/helpers'; +import setupBindings from './bindings'; +import translate from './translate'; + +const Module = module.constructor as any; + +function wrapper (soljson) { + const { + coreBindings, + compileBindings, + methodFlags + } = setupBindings(soljson); + + return { + version: coreBindings.version, + semver: coreBindings.versionToSemver, + license: coreBindings.license, + lowlevel: { + compileSingle: compileBindings.compileJson, + compileMulti: compileBindings.compileJsonMulti, + compileCallback: compileBindings.compileJsonCallback, + compileStandard: compileBindings.compileStandard + }, + features: { + legacySingleInput: methodFlags.compileJsonStandardSupported, + multipleInputs: methodFlags.compileJsonMultiSupported || methodFlags.compileJsonStandardSupported, + importCallback: methodFlags.compileJsonCallbackSuppported || methodFlags.compileJsonStandardSupported, + nativeStandardJSON: methodFlags.compileJsonStandardSupported + }, + compile: compileStandardWrapper.bind(this, compileBindings), + // Loads the compiler of the given version from the github repository + // instead of from the local filesystem. + loadRemoteVersion, + // Use this if you want to add wrapper functions around the pure module. + setupMethods: wrapper + }; +} + +function loadRemoteVersion (versionString, callback) { + const memoryStream = new MemoryStream(null, { readable: false }); + const url = `https://binaries.soliditylang.org/bin/soljson-${versionString}.js`; + + https.get(url, response => { + if (response.statusCode !== 200) { + callback(new Error(`Error retrieving binary: ${response.statusMessage}`)); + } else { + response.pipe(memoryStream); + response.on('end', () => { + // Based on the require-from-string package. + const soljson = new Module(); + soljson._compile(memoryStream.toString(), `soljson-${versionString}.js`); + + if (module.parent && module.parent.children) { + // Make sure the module is plugged into the hierarchy correctly to have parent + // properly garbage collected. 
+ module.parent.children.splice(module.parent.children.indexOf(soljson), 1); + } + + callback(null, wrapper(soljson.exports)); + }); + } + }).on('error', function (error) { + callback(error); + }); +} + +// Expects a Standard JSON I/O but supports old compilers +function compileStandardWrapper (compile, inputRaw, readCallback) { + if (!isNil(compile.compileStandard)) { + return compile.compileStandard(inputRaw, readCallback); + } + + let input: { language: string, sources: any[], settings: any }; + + try { + input = JSON.parse(inputRaw); + } catch (e) { + return formatFatalError(`Invalid JSON supplied: ${e.message}`); + } + + if (input.language !== 'Solidity') { + return formatFatalError('Only "Solidity" is supported as a language.'); + } + + // NOTE: this is deliberately `== null` + if (isNil(input.sources) || input.sources.length === 0) { + return formatFatalError('No input sources specified.'); + } + + const sources = translateSources(input); + const optimize = isOptimizerEnabled(input); + const libraries = librariesSupplied(input); + + if (isNil(sources) || Object.keys(sources).length === 0) { + return formatFatalError('Failed to process sources.'); + } + + // Try to wrap around old versions + if (!isNil(compile.compileJsonCallback)) { + const inputJson = JSON.stringify({ sources: sources }); + const output = compile.compileJsonCallback(inputJson, optimize, readCallback); + return translateOutput(output, libraries); + } + + if (!isNil(compile.compileJsonMulti)) { + const output = compile.compileJsonMulti(JSON.stringify({ sources: sources }), optimize); + return translateOutput(output, libraries); + } + + // Try our luck with an ancient compiler + if (!isNil(compile.compileJson)) { + if (Object.keys(sources).length > 1) { + return formatFatalError('Multiple sources provided, but compiler only supports single input.'); + } + + const input = sources[Object.keys(sources)[0]]; + const output = compile.compileJson(input, optimize); + return translateOutput(output, libraries); + } + + return formatFatalError('Compiler does not support any known interface.'); +} + +function isOptimizerEnabled (input) { + return input.settings && input.settings.optimizer && input.settings.optimizer.enabled; +} + +function translateSources (input) { + const sources = {}; + + for (const source in input.sources) { + if (input.sources[source].content !== null) { + sources[source] = input.sources[source].content; + } else { + // force failure + return null; + } + } + + return sources; +} + +function librariesSupplied (input) { + if (!isNil(input.settings)) return input.settings.libraries; +} + +function translateOutput (outputRaw, libraries) { + let parsedOutput; + + try { + parsedOutput = JSON.parse(outputRaw); + } catch (e) { + return formatFatalError(`Compiler returned invalid JSON: ${e.message}`); + } + + const output = translate.translateJsonCompilerOutput(parsedOutput, libraries); + + if (isNil(output)) { + return formatFatalError('Failed to process output.'); + } + + return JSON.stringify(output); +} + +export = wrapper;
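For reference, a minimal consumer of the rewritten wrapper might look like the sketch below. The module paths and the remote version string are placeholders (any entry from the solc-bin list published at binaries.soliditylang.org should work), and error handling is kept to the bare minimum.

import wrapper from 'solc/wrapper';

// Wrap a locally available Emscripten binary.
const solc = wrapper(require('./soljson.js'));

// Compile a trivial contract via the Standard JSON interface.
const input = {
  language: 'Solidity',
  sources: { 'C.sol': { content: 'pragma solidity >=0.0; contract C {}' } },
  settings: { outputSelection: { '*': { '*': ['abi', 'evm.bytecode'] } } }
};
const output = JSON.parse(solc.compile(JSON.stringify(input)));
console.log(output.contracts['C.sol'].C.abi);

// loadRemoteVersion fetches a soljson binary from binaries.soliditylang.org
// and hands back another wrapped compiler instance.
solc.loadRemoteVersion('v0.8.18+commit.87f61d96', (err, remoteSolc) => {
  if (err) throw err;
  console.log(remoteSolc.version());
});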