Merge pull request #460 from sc-forks/beta

Merge 0.7.x beta to master
cgewecke 5 years ago committed by GitHub
commit d6b86fa387
Changed files (lines changed in parentheses):

  1. .circleci/config.yml (70)
  2. .gitignore (7)
  3. .npmignore (4)
  4. BUIDLER_README.md (121)
  5. README.md (246)
  6. api.js (4)
  7. bin/exec.js (23)
  8. docs/advanced.md (88)
  9. docs/api.md (367)
  10. docs/faq.md (143)
  11. docs/testrpc-sc.md (91)
  12. docs/upgrade.md (138)
  13. lib/api.js (307)
  14. lib/app.js (476)
  15. lib/collector.js (61)
  16. lib/coverage.js (107)
  17. lib/coverageMap.js (145)
  18. lib/injector.js (276)
  19. lib/instrumentSolidity.js (65)
  20. lib/instrumenter.js (346)
  21. lib/parse.js (135)
  22. lib/preprocessor.js (53)
  23. lib/registrar.js (238)
  24. lib/ternary/conditional.js (22)
  25. lib/ternary/ternary.js (121)
  26. lib/ui.js (119)
  27. lib/validator.js (68)
  28. package.json (52)
  29. plugins/bin.js (8)
  30. plugins/buidler.plugin.js (142)
  31. plugins/resources/buidler.ui.js (85)
  32. plugins/resources/buidler.utils.js (108)
  33. plugins/resources/plugin.utils.js (273)
  34. plugins/resources/truffle.library.js (493393)
  35. plugins/resources/truffle.library.js.map (1)
  36. plugins/resources/truffle.ui.js (107)
  37. plugins/resources/truffle.utils.js (217)
  38. plugins/truffle.plugin.js (128)
  39. scripts/run-buidler.sh (88)
  40. scripts/run-metacoin.sh (36)
  41. scripts/run-zeppelin.sh (34)
  42. test/app.js (468)
  43. test/assembly.js (30)
  44. test/assert.js (109)
  45. test/cli/block-gas-limit.js (9)
  46. test/cli/command-options.js (24)
  47. test/cli/empty.js (8)
  48. test/cli/events.js (21)
  49. test/cli/inheritance.js (14)
  50. test/cli/only-call.js (17)
  51. test/cli/oraclize.js (10)
  52. test/cli/requires-externally.js (17)
  53. test/cli/sign.js (31)
  54. test/cli/simple.js (16)
  55. test/cli/sol-parse-fail.js (17)
  56. test/cli/testrpc-options.js (22)
  57. test/cli/truffle-test-fail.js (16)
  58. test/comments.js (38)
  59. test/expressions.js (29)
  60. test/function.js (192)
  61. test/if.js (274)
  62. test/integration/generic/assets/SimpleError.sol (6)
  63. test/integration/generic/assets/asset.js (1)
  64. test/integration/generic/contracts/Migrations.sol (25)
  65. test/integration/generic/migrations/1_initial.js (4)
  66. test/integration/generic/test/.marker (1)
  67. test/integration/projects/bad-solcoverjs/.solcover.js (3)
  68. test/integration/projects/bad-solcoverjs/buidler.config.js (8)
  69. test/integration/projects/bad-solcoverjs/truffle-config.js (7)
  70. test/integration/projects/ganache-solcoverjs/.gitignore (1)
  71. test/integration/projects/ganache-solcoverjs/.solcover.js (5)
  72. test/integration/projects/ganache-solcoverjs/buidler.config.js (8)
  73. test/integration/projects/ganache-solcoverjs/contracts/ContractA.sol (17)
  74. test/integration/projects/ganache-solcoverjs/contracts/ContractB.sol (17)
  75. test/integration/projects/ganache-solcoverjs/contracts/ContractC.sol (17)
  76. test/integration/projects/ganache-solcoverjs/contracts/Migrations.sol (23)
  77. test/integration/projects/ganache-solcoverjs/test/contracta.js (15)
  78. test/integration/projects/ganache-solcoverjs/test/contractb.js (15)
  79. test/integration/projects/ganache-solcoverjs/test/contractc.js (20)
  80. test/integration/projects/ganache-solcoverjs/truffle-config.js (7)
  81. test/integration/projects/import-paths/.gitignore (1)
  82. test/integration/projects/import-paths/.solcover.js (4)
  83. test/integration/projects/import-paths/assets/RelativePathImport.sol (10)
  84. test/integration/projects/import-paths/buidler.config.js (8)
  85. test/integration/projects/import-paths/contracts/Migrations.sol (23)
  86. test/integration/projects/import-paths/contracts/OnlyUsesImports.sol (3)
  87. test/integration/projects/import-paths/contracts/UsesImports.sol (17)
  88. test/integration/projects/import-paths/migrations/1_initial_migration.js (5)
  89. test/integration/projects/import-paths/node_modules/package/AnotherImport.sol (10)
  90. test/integration/projects/import-paths/node_modules/package/NodeModulesImport.sol (10)
  91. test/integration/projects/import-paths/node_modules/package/package.json (1)
  92. test/integration/projects/import-paths/test/uses_imports.js (16)
  93. test/integration/projects/import-paths/truffle-config.js (7)
  94. test/integration/projects/libraries/buidler.config.js (7)
  95. test/integration/projects/libraries/contracts/CLibrary.sol (0)
  96. test/integration/projects/libraries/contracts/Migrations.sol (23)
  97. test/integration/projects/libraries/contracts/PureView.sol (0)
  98. test/integration/projects/libraries/contracts/UsesPure.sol (8)
  99. test/integration/projects/libraries/contracts/_Interface.sol (6)
  100. test/integration/projects/libraries/migrations/1_initial_migration.js (5)
Some files were not shown because too many files have changed in this diff.

.circleci/config.yml
@@ -1,6 +1,10 @@
version: 2.0
# Necessary for running in machine mode, which is necessary to execute the
# Zeppelin and MetaCoin E2E scripts
version: 2.1
orbs:
win: circleci/windows@2.2.0
# Necessary for running in machine mode,
# which is necessary to execute the E2E scripts
step_install_nvm: &step_install_nvm
run:
name: "Install nvm for machine"
@@ -12,16 +16,25 @@ step_install_nvm: &step_install_nvm
nvm alias default v8.15.0
echo 'export NVM_DIR="/opt/circleci/.nvm"' >> $BASH_ENV
echo "[ -s \"$NVM_DIR/nvm.sh\" ] && . \"$NVM_DIR/nvm.sh\"" >> $BASH_ENV
jobs:
unit-test:
docker:
- image: circleci/node:8.15.0
machine: true
steps:
- checkout
- <<: *step_install_nvm
- run:
name: Delete any old node_modules
command: |
rm -rf node_modules/
- run:
name: Install yarn
command: |
npm install -g yarn
- run:
name: Install truffle (globally)
command: |
npm install -g truffle
- run:
name: Install dependencies
command: |
@@ -29,24 +42,11 @@ jobs:
- run:
name: Run tests
command: |
npm run test-cov
npm run test:ci
- run:
name: Upload coverage
command: |
bash <(curl -s https://codecov.io/bash)
# This works but takes a while....
e2e-colony:
docker:
- image: circleci/node:10.12-stretch
working_directory: ~/colonyNetwork
steps:
- checkout
- run:
name: ColonyNetwork E2E
command: |
./scripts/run-colony.sh
e2e-zeppelin:
machine: true
steps:
@@ -56,7 +56,6 @@ jobs:
name: Zeppelin E2E
command: |
./scripts/run-zeppelin.sh
e2e-metacoin:
machine: true
steps:
@@ -66,6 +65,24 @@ jobs:
name: MetaCoin E2E
command: |
./scripts/run-metacoin.sh
e2e-metacoin-windows:
executor: win/default
steps:
- checkout
- run: dotnet tool install --global PowerShell
- run:
name: Windows Metacoin E2E
command: |
bash ./scripts/run-metacoin.sh
e2e-buidler:
machine: true
steps:
- checkout
- <<: *step_install_nvm
- run:
name: Buidler E2E
command: |
./scripts/run-buidler.sh
workflows:
version: 2
build:
@@ -73,14 +90,5 @@ workflows:
- unit-test
- e2e-zeppelin
- e2e-metacoin
nightly:
triggers:
- schedule:
cron: "0 1 * * *" # 1am UTC
filters:
branches:
only:
- master
jobs:
- e2e-zeppelin
#- e2e-colony
- e2e-metacoin-windows
- e2e-buidler

.gitignore

@@ -1,8 +1,9 @@
allFiredEvents
scTopics
scDebugLog
coverage.json
coverage/
node_modules/
.changelog
.DS_Store
test/artifacts
test/cache
temp
.nyc_output/

.npmignore
@@ -0,0 +1,4 @@
test/
.circleci/
docs/
.nyc_output/

BUIDLER_README.md
@@ -0,0 +1,121 @@
[![Gitter chat](https://badges.gitter.im/sc-forks/solidity-coverage.svg)][18]
![npm (tag)](https://img.shields.io/npm/v/solidity-coverage/beta)
[![CircleCI](https://circleci.com/gh/sc-forks/solidity-coverage.svg?style=svg)][20]
[![codecov](https://codecov.io/gh/sc-forks/solidity-coverage/branch/beta/graph/badge.svg)][21]
[![buidler](https://buidler.dev/buidler-plugin-badge.svg?1)][26]
# solidity-coverage
Solidity code coverage plugin for [buidler](http://getbuidler.com).
## What
![coverage example][22]
+ For more details about how it works and potential limitations, see [the accompanying article][16].
+ `solidity-coverage` is also [JoinColony/solcover][17]
## Installation
```bash
$ npm install --save-dev solidity-coverage@beta
```
And add the following to your `buidler.config.js`:
```js
usePlugin("solidity-coverage");
```
## Tasks
This plugin implements a `coverage` task
```bash
npx buidler coverage [options]
```
| Option <img width=200/> | Example <img width=750/>| Description <img width=1000/> |
|--------------|------------------------------------|--------------------------------|
| testfiles | `--testfiles test/file.js` | JS test file(s) to run.|
| solcoverjs | `--solcoverjs ./../.solcover.js` | Relative path from working directory to config. Useful for monorepo packages that share settings. (Path must be "./" prefixed) |
| network | `--network development` | Use network settings defined in the Buidler config |
## Configuration
Options can be specified in a `.solcover.js` config file located in the root directory of your project.
**Project Examples:**
+ Simple: [buidler-metacoin][29]
+ More complex: [MolochDao/moloch][30]
**Config Example:**
```javascript
module.exports = {
skipFiles: ['Routers/EtherRouter.sol']
};
```
| Option <img width=200/>| Type <img width=200/> | Default <img width=1300/> | Description <img width=800/> |
| ------ | ---- | ------- | ----------- |
| silent | *Boolean* | false | Suppress logging output |
| client | *Object* | `require("ganache-core")` | Useful if you need a specific ganache version. |
| providerOptions | *Object* | `{ }` | [ganache-core options][1] |
| skipFiles | *Array* | `['Migrations.sol']` | Array of contracts or folders (with paths expressed relative to the `contracts` directory) that should be skipped when doing instrumentation. |
| istanbulFolder | *String* | `./coverage` | Folder location for Istanbul coverage reports. |
| istanbulReporter | *Array* | `['html', 'lcov', 'text', 'json']` | [Istanbul coverage reporters][2] |
| mocha | *Object* | `{ }` | [Mocha options][3] to merge into existing mocha config. `grep` and `invert` are useful for skipping certain tests under coverage using tags in the test descriptions.|
| onServerReady[<sup>*</sup>][14] | *Function* | | Hook run *after* server is launched, *before* the tests execute. Useful if you need to use the Oraclize bridge or have setup scripts which rely on the server's availability. [More...][23] |
| onCompileComplete[<sup>*</sup>][14] | *Function* | | Hook run *after* compilation completes, *before* tests are run. Useful if you have secondary compilation steps or need to modify built artifacts. [More...][23]|
| onTestsComplete[<sup>*</sup>][14] | *Function* | | Hook run *after* the tests complete, *before* Istanbul reports are generated. [More...][23]|
| onIstanbulComplete[<sup>*</sup>][14] | *Function* | | Hook run *after* the Istanbul reports are generated, *before* the ganache server is shut down. Useful if you need to clean resources up. [More...][23]|
[<sup>*</sup> Advanced use][14]
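For illustration, a `.solcover.js` sketch combining several of the options above (the values shown are placeholders, not recommendations):
```javascript
// .solcover.js — illustrative sketch only; adjust values for your project
module.exports = {
  skipFiles: ['Migrations.sol', 'mocks/'],   // paths relative to the `contracts` directory
  istanbulReporter: ['html', 'text'],        // a subset of the default reporters
  providerOptions: {                         // passed through to ganache-core
    network_id: 1999,
    default_balance_ether: 10000
  },
  mocha: {
    grep: '@skip-on-coverage',               // tag tests to exclude under coverage
    invert: true
  }
};
```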
## Usage
+ Coverage runs tests a little more slowly.
+ Coverage launches its own in-process ganache server.
+ You can set [ganache options][1] using the `providerOptions` key in your `.solcover.js` [config][15].
+ Coverage [distorts gas consumption][13]. Tests that check exact gas consumption should be [skipped][24].
+ :warning: Contracts are compiled **without optimization**. Please report unexpected compilation faults to [issue 417][25]
## Documentation
More documentation, including FAQ and information about solidity-coverage's API [is available here][28].
[1]: https://github.com/trufflesuite/ganache-core#options
[2]: https://istanbul.js.org/docs/advanced/alternative-reporters/
[3]: https://mochajs.org/api/mocha
[4]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#running-out-of-gas
[5]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#running-out-of-memory
[6]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#running-out-of-time
[7]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#continuous-integration
[8]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#notes-on-branch-coverage
[9]: https://sc-forks.github.io/metacoin/
[10]: https://coveralls.io/github/OpenZeppelin/openzeppelin-solidity?branch=master
[11]: https://github.com/sc-forks/solidity-coverage/tree/beta/test/units
[12]: https://github.com/sc-forks/solidity-coverage/issues
[13]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#notes-on-gas-distortion
[14]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/advanced.md
[15]: #config-options
[16]: https://blog.colony.io/code-coverage-for-solidity-eecfa88668c2
[17]: https://github.com/JoinColony/solcover
[18]: https://gitter.im/sc-forks/solidity-coverage?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
[19]: https://badge.fury.io/js/solidity-coverage
[20]: https://circleci.com/gh/sc-forks/solidity-coverage
[21]: https://codecov.io/gh/sc-forks/solidity-coverage
[22]: https://cdn-images-1.medium.com/max/800/1*uum8t-31bUaa6dTRVVhj6w.png
[23]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/advanced.md#workflow-hooks
[24]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/advanced.md#skipping-tests
[25]: https://github.com/sc-forks/solidity-coverage/issues/417
[26]: https://buidler.dev/
[27]: https://www.trufflesuite.com/docs
[28]: https://github.com/sc-forks/solidity-coverage/blob/beta/README.md
[29]: https://github.com/sc-forks/buidler-e2e/tree/coverage
[30]: https://github.com/sc-forks/moloch

README.md
@@ -1,132 +1,131 @@
# solidity-coverage
[![Join the chat at https://gitter.im/sc-forks/solidity-coverage](https://badges.gitter.im/sc-forks/solidity-coverage.svg)](https://gitter.im/sc-forks/solidity-coverage?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![npm version](https://badge.fury.io/js/solidity-coverage.svg)](https://badge.fury.io/js/solidity-coverage)
[![CircleCI](https://circleci.com/gh/sc-forks/solidity-coverage.svg?style=svg)](https://circleci.com/gh/sc-forks/solidity-coverage)
[![codecov](https://codecov.io/gh/sc-forks/solidity-coverage/branch/master/graph/badge.svg)](https://codecov.io/gh/sc-forks/solidity-coverage)
### Code coverage for Solidity testing
![coverage example](https://cdn-images-1.medium.com/max/800/1*uum8t-31bUaa6dTRVVhj6w.png)
+ For more details about what this is, how it works and potential limitations, see
[the accompanying article](https://blog.colony.io/code-coverage-for-solidity-eecfa88668c2).
+ `solidity-coverage` is in development and **its accuracy is unknown.** If you
find discrepancies between the coverage report and your suite's behavior, please open an
[issue](https://github.com/sc-forks/solidity-coverage/issues).
+ `solidity-coverage` is [Solcover](https://github.com/JoinColony/solcover)
### A new (beta) version of solidity-coverage is out (0.7.x)
+ Installs as a plugin
+ Uses any ganache
+ Just works. (Read the [instructions](https://github.com/sc-forks/solidity-coverage/tree/beta#solidity-coverage))
+ [0.7.0-beta release notes](https://github.com/sc-forks/solidity-coverage/releases/tag/v0.7.0-beta.2)
+ [0.6.x to 0.7.x upgrade guide](https://github.com/sc-forks/solidity-coverage/blob/beta/docs/upgrade.md#upgrading-from-06x-to-070-betax).
### Install 0.6.x
```
$ npm install --save-dev solidity-coverage
```
[![Gitter chat](https://badges.gitter.im/sc-forks/solidity-coverage.svg)][18]
![npm (tag)](https://img.shields.io/npm/v/solidity-coverage/beta)
[![CircleCI](https://circleci.com/gh/sc-forks/solidity-coverage.svg?style=svg)][20]
[![codecov](https://codecov.io/gh/sc-forks/solidity-coverage/branch/beta/graph/badge.svg)][21]
[![buidler](https://buidler.dev/buidler-plugin-badge.svg?1)][26]
## Code coverage for Solidity testing
![coverage example][22]
+ For more details about what this is, how it works and potential limitations,
see [the accompanying article][16].
+ `solidity-coverage` is [Solcover][17]
### Run 0.6.x
Set a `coverage` network in truffle-config.js (see [Network Configuration](#network-configuration)) and then run:
## Install
```
$ npx solidity-coverage
$ npm install --save-dev solidity-coverage@beta
```
### Usage notes:
+ For pragma solidity >=0.4.22 <0.6.0 / Petersburg / Truffle v4 and v5
+ Tests run more slowly while coverage is being generated.
+ Your contracts will be double-compiled and a (long) delay between compilation and
the beginning of test execution is possible if your contracts are large.
+ Truffle should be globally installed in your environment. If you prefer running truffle as
a local dependency, please see [this section](https://github.com/sc-forks/solidity-coverage/blob/master/docs/faq.md#running-truffle-as-a-local-dependency) of the FAQ.
+ If your suite uses native Solidity testing or accesses contracts via mocks stored in `tests/` (a la Zeppelin), coverage will trigger test errors because it can't control the way truffle compiles that folder. Mocks should be relocated to the root `contracts` directory. More on why this is necessary at issue [146](https://github.com/sc-forks/solidity-coverage/issues/146)
### Truffle V5
### Network Configuration
By default, this tool connects to a coverage-enabled fork of ganache-cli
called **testrpc-sc** on port 8555.
+ it's a dependency - there's nothing extra to download.
+ the solidity-coverage command launches it automatically in the background. (See [this section of the FAQ](https://github.com/sc-forks/solidity-coverage/blob/master/docs/faq.md#running-testrpc-sc-on-its-own) if you need to launch it separately yourself)
**Add** this package to your plugins array in `truffle-config.js` ([Truffle docs][27])
```javascript
module.exports = {
networks: {...},
plugins: ["solidity-coverage"]
}
```
**Run**
```
truffle run coverage [command-options]
```
In `truffle-config.js`, add a coverage network following the example below.
### Buidler
**Example**
**Add** the plugin in `buidler.config.js` ([Buidler docs][26])
```javascript
usePlugin('solidity-coverage')
module.exports = {
networks: {
development: {
host: "localhost",
port: 8545,
network_id: "*"
},
coverage: {
host: "localhost",
network_id: "*",
port: 8555, // <-- If you change this, also set the port option in .solcover.js.
gas: 0xfffffffffff, // <-- Use this high gas value
gasPrice: 0x01 // <-- Use this low gas price
},
...etc...
}
};
networks: {...},
}
```
### Options
**Run**
```
npx buidler coverage [command-options]
```
## Usage notes:
+ Coverage runs tests a little more slowly.
+ Coverage launches its own in-process ganache server.
+ You can set [ganache options][1] using the `providerOptions` key in your `.solcover.js` [config][15].
+ Coverage [distorts gas consumption][13]. Tests that check exact gas consumption should be [skipped][24].
+ :warning: Contracts are compiled **without optimization**. Please report unexpected compilation faults to [issue 417][25]
## Command Options
| Option <img width=200/> | Example <img width=750/>| Description <img width=1000/> |
|--------------|------------------------------------|--------------------------------|
| file | `--file="test/registry/*.js"` | (Truffle) Filename or glob describing a subset of JS tests to run. (Globs must be enclosed by quotes.)|
| testfiles | `--testfiles test/file.js` | (Buidler) JS test file(s) to run.|
| solcoverjs | `--solcoverjs ./../.solcover.js` | Relative path from working directory to config. Useful for monorepo packages that share settings. (Path must be "./" prefixed) |
| network | `--network development` | Use network settings defined in the Truffle or Buidler config |
| temp[<sup>*</sup>][14] | `--temp build` | :warning: **Caution** :warning: Path to a *disposable* folder to store compilation artifacts in. Useful when your test setup scripts include hard-coded paths to a build directory. [More...][14] |
Additional options can be specified in a `.solcover.js` config file located in
the root directory of your project.
[<sup>*</sup> Advanced use][14]
## Config Options
Additional options can be specified in a `.solcover.js` config file located in the root directory
of your project.
**Example:**
```javascript
module.exports = {
port: 6545,
testrpcOptions: '-p 6545 -u 0x54fd80d6ae7584d8e9a19fe1df43f04e5282cc43',
testCommand: 'mocha --timeout 5000',
norpc: true,
dir: './secretDirectory',
copyPackages: ['openzeppelin-solidity'],
skipFiles: ['Routers/EtherRouter.sol']
skipFiles: ['Routers/EtherRouter.sol']
};
```
| Option | Type | Default | Description |
| Option <img width=200/>| Type <img width=200/> | Default <img width=1300/> | Description <img width=800/> |
| ------ | ---- | ------- | ----------- |
| accounts | *Number* | 35 | Number of accounts to launch testrpc with. |
| port | *Number* | 8555 | Port to run testrpc on / have truffle connect to |
| norpc | *Boolean* | false | Prevent solidity-coverage from launching its own testrpc. Useful if you are managing a complex test suite with a [shell script](https://github.com/OpenZeppelin/openzeppelin-solidity/blob/ed872ca0a11c4926f8bb91dd103bea1378a3384c/scripts/coverage.sh) |
| testCommand | *String* | `truffle test` | Run an arbitrary test command. ex: `mocha --timeout 5000`. NB: Also set the `port` option to whatever your tests require (probably 8545). |
| testrpcOptions | *String* | `--port 8555` | options to append to a command line invocation of testrpc. NB: Using this overwrites the defaults so always specify a port in this string *and* in the `port` option |
| copyNodeModules | *Boolean* | false | :warning: **DEPRECATED** use `copyPackages` instead :warning: Copies `node_modules` into the coverage environment. May significantly increase the time for coverage to complete if enabled. Useful if your contracts import solidity files from an npm installed package (and your node_modules is small). |
| copyPackages | *Array* | `[]` | Copies specific `node_modules` packages into the coverage environment. May significantly reduce the time for coverage to complete compared to `copyNodeModules`. Useful if your contracts import solidity files from an npm installed package. |
| skipFiles | *Array* | `['Migrations.sol']` | Array of contracts or folders (with paths expressed relative to the `contracts` directory) that should be skipped when doing instrumentation. `Migrations.sol` is skipped by default, and does not need to be added to this configuration option if it is used. |
| deepSkip | boolean | false | Use this if instrumentation hangs on large, skipped files (like Oraclize). It's faster. |
| dir | *String* | `.` | Solidity-coverage copies all the assets in your root directory (except `node_modules`) to a special folder where it instruments the contracts and executes the tests. `dir` allows you to define a relative path from the root directory to those assets. Useful if your contracts & tests are within their own folder as part of a larger project.|
| buildDirPath | *String* | `/build/contracts` | Build directory path for compiled smart contracts
### FAQ
Solutions to common problems people run into:
+ [Running out of gas](https://github.com/sc-forks/solidity-coverage/blob/master/docs/faq.md#running-out-of-gas)
+ [Running out of memory (locally and in CI)](https://github.com/sc-forks/solidity-coverage/blob/master/docs/faq.md#running-out-of-memory-locally-and-in-ci)
+ [Running out of time (in mocha)](https://github.com/sc-forks/solidity-coverage/blob/master/docs/faq.md#running-out-of-time-in-mocha)
+ [Running on windows](https://github.com/sc-forks/solidity-coverage/blob/master/docs/faq.md#running-on-windows)
+ [Running testrpc-sc on its own](https://github.com/sc-forks/solidity-coverage/blob/master/docs/faq.md#running-testrpc-sc-on-its-own)
+ [Running truffle as a local dependency](https://github.com/sc-forks/solidity-coverage/blob/master/docs/faq.md#running-truffle-as-a-local-dependency)
+ [Integrating into CI](https://github.com/sc-forks/solidity-coverage/blob/master/docs/faq.md#continuous-integration-installing-metacoin-on-travisci-with-coveralls)
+ [Why are asserts and requires highlighted as branch points?](https://github.com/sc-forks/solidity-coverage/blob/master/docs/faq.md#why-has-my-branch-coverage-decreased-why-is-assert-being-shown-as-a-branch-point)
### Example reports
+ [metacoin](https://sc-forks.github.io/metacoin/) (Istanbul HTML)
+ [OpenZeppelin Contracts](https://codecov.io/gh/OpenZeppelin/openzeppelin-contracts) (Codecov)
### Contribution Guidelines
Contributions are welcome! If you're opening a PR that adds features please consider writing some
[unit tests](https://github.com/sc-forks/solidity-coverage/tree/master/test) for them. Bugs can be
reported in the [issues](https://github.com/sc-forks/solidity-coverage/issues).
| silent | *Boolean* | false | Suppress logging output |
| client | *Object* | `require("ganache-core")` | Useful if you need a specific ganache version. |
| providerOptions | *Object* | `{ }` | [ganache-core options][1] |
| skipFiles | *Array* | `['Migrations.sol']` | Array of contracts or folders (with paths expressed relative to the `contracts` directory) that should be skipped when doing instrumentation. |
| istanbulFolder | *String* | `./coverage` | Folder location for Istanbul coverage reports. |
| istanbulReporter | *Array* | `['html', 'lcov', 'text', 'json']` | [Istanbul coverage reporters][2] |
| mocha | *Object* | `{ }` | [Mocha options][3] to merge into existing mocha config. `grep` and `invert` are useful for skipping certain tests under coverage using tags in the test descriptions.|
| onServerReady[<sup>*</sup>][14] | *Function* | | Hook run *after* server is launched, *before* the tests execute. Useful if you need to use the Oraclize bridge or have setup scripts which rely on the server's availability. [More...][23] |
| onCompileComplete[<sup>*</sup>][14] | *Function* | | Hook run *after* compilation completes, *before* tests are run. Useful if you have secondary compilation steps or need to modify built artifacts. [More...][23]|
| onTestsComplete[<sup>*</sup>][14] | *Function* | | Hook run *after* the tests complete, *before* Istanbul reports are generated. [More...][23]|
| onIstanbulComplete[<sup>*</sup>][14] | *Function* | | Hook run *after* the Istanbul reports are generated, *before* the ganache server is shut down. Useful if you need to clean resources up. [More...][23]|
[<sup>*</sup> Advanced use][14]
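For example, a `.solcover.js` sketch that pins the coverage client and runs a cleanup hook (the hook body here is hypothetical):
```javascript
// .solcover.js — illustrative sketch only
module.exports = {
  client: require('ganache-cli'),   // pin a specific ganache version for coverage runs
  skipFiles: ['Migrations.sol'],
  onIstanbulComplete: async function(config) {
    // hypothetical cleanup step: runs after reports are generated,
    // before the coverage client shuts down
    console.log('Coverage reports generated; cleaning up...');
  }
};
```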
## API
Solidity-coverage's core methods and many utilities are available as an API.
```javascript
const CoverageAPI = require('solidity-coverage/api');
```
[Documentation available here][28].
## FAQ
Common problems & questions:
+ [Running in CI][7]
+ [Running out of gas][13]
+ [Running out of time][6]
+ [Running out of memory][5]
+ [Why are `require` statements highlighted as branch points?][8]
## Example reports
+ [metacoin][9] (Istanbul HTML)
+ [openzeppelin-solidity][10] (Coveralls)
## Contribution Guidelines
Contributions are welcome! If you're opening a PR that adds features or options *please consider
writing full [unit tests][11] for them*. (We've built simple fixtures for almost everything
and are happy to add some for your case if necessary).
Set up the development environment with:
```
@@ -134,7 +133,7 @@ $ git clone https://github.com/sc-forks/solidity-coverage.git
$ yarn
```
### Contributors
## Contributors
+ [@area](https://github.com/area)
+ [@cgewecke](https://github.com/cgewecke)
+ [@adriamb](https://github.com/adriamb)
@@ -153,3 +152,34 @@ $ yarn
+ [@pinkiebell](https://github.com/pinkiebell)
+ [@obernardovieira](https://github.com/obernardovieira)
+ [@angus-hamill](https://github.com/angus-hamill)
+ [@kandrianov](https://github.com/kandrianov)
+ [@yxliang01](https://github.com/yxliang01)
[1]: https://github.com/trufflesuite/ganache-core#options
[2]: https://istanbul.js.org/docs/advanced/alternative-reporters/
[3]: https://mochajs.org/api/mocha
[4]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#running-out-of-gas
[5]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#running-out-of-memory
[6]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#running-out-of-time
[7]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#continuous-integration
[8]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#notes-on-branch-coverage
[9]: https://sc-forks.github.io/metacoin/
[10]: https://coveralls.io/github/OpenZeppelin/openzeppelin-solidity?branch=master
[11]: https://github.com/sc-forks/solidity-coverage/tree/beta/test/units
[12]: https://github.com/sc-forks/solidity-coverage/issues
[13]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/faq.md#notes-on-gas-distortion
[14]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/advanced.md
[15]: #config-options
[16]: https://blog.colony.io/code-coverage-for-solidity-eecfa88668c2
[17]: https://github.com/JoinColony/solcover
[18]: https://gitter.im/sc-forks/solidity-coverage?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
[19]: https://badge.fury.io/js/solidity-coverage
[20]: https://circleci.com/gh/sc-forks/solidity-coverage
[21]: https://codecov.io/gh/sc-forks/solidity-coverage
[22]: https://cdn-images-1.medium.com/max/800/1*uum8t-31bUaa6dTRVVhj6w.png
[23]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/advanced.md#workflow-hooks
[24]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/advanced.md#skipping-tests
[25]: https://github.com/sc-forks/solidity-coverage/issues/417
[26]: https://buidler.dev/
[27]: https://www.trufflesuite.com/docs
[28]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/api.md

api.js
@@ -0,0 +1,4 @@
// For require('solidity-coverage/api');
const api = require('./lib/api');
module.exports = api;

bin/exec.js
@@ -1,23 +0,0 @@
#!/usr/bin/env node
const App = require('./../lib/app.js');
const reqCwd = require('req-cwd');
const death = require('death');
const log = console.log;
const config = reqCwd.silent('./.solcover.js') || {};
const app = new App(config);
death((signal, err) => app.cleanUp(err));
app.generateCoverageEnvironment();
app.instrumentTarget();
app.launchTestrpc()
.then(() => {
app.runTestCommand();
app.generateReport();
})
.catch(err => log(err));

docs/advanced.md
@@ -0,0 +1,88 @@
# Advanced Use
## Skipping tests
Sometimes it's convenient to skip specific tests when running coverage. You can do this by
tagging your test descriptions and setting appropriate filters in the `.solcover.js` mocha options.
**Example**
```javascript
// Mocha test to skip
it("is a gas usage simulation [ @skip-on-coverage ]", async function(){
...
})
```
```javascript
//.solcover.js
module.exports = {
mocha: {
grep: "@skip-on-coverage", // Find everything with this tag
invert: true // Run the grep's inverse set.
}
}
```
## Workflow hooks
The plugin exposes a set of workflow hooks that let you run arbitrary async logic between the main
stages of the coverage generation process. These are useful for tasks like launching secondary
services which need to connect to a running ganache instance (ex: the Oraclize/Provable bridge),
or reading data from the compilation artifacts to run special preparatory steps for your tests.
The stages/hooks are (in order of execution):
| Stage | Post-stage hook |
|----------------------------------------|--------------------|
| Launch server | onServerReady |
| Instrument and compile contracts | onCompileComplete |
| Run tests using instrumented artifacts | onTestsComplete |
| Generate istanbul coverage reports | onIstanbulComplete |
The tool's general workflow is:
+ Launch an ethereum client, attaching special listeners that monitor each opcode execution step
+ Read Solidity contract sources from a standard contracts directory
+ Rewrite the sources so the code execution path can be tracked by the opcode monitors.
+ Compile the modified sources, without optimization
+ Save the compilation artifacts to a temporary folder
+ Tell the testing framework to use the instrumented artifacts & run tests to completion.
+ Transfer collected data to a coverage reporter & report.
Each hook is passed a `config` object provided by your plugin's dev platform which will contain
relevant source/artifact paths and network info for that stage.
**Example**
```javascript
// .solcover.js
const { provableBridge } = require('./helpers');
async function serverReadyHandler(config){
await provableBridge(config.port);
}
module.exports = {
onServerReady: serverReadyHandler,
}
```
## Setting the temporary artifacts directory
The `temp` command line option lets you specify the name of a disposable folder to
stage the compilation artifacts of instrumented contracts in before the tests are run.
**Example**
```
$ truffle run coverage --temp build
```
By default this folder is called `.coverage_artifacts`. If you already have
preparatory scripts which run between compilation and the tests, you'll probably
find it inconvenient to modify them to handle an alternate path.
This option allows you to avoid that but it's important to realise that the temp
folder is **automatically deleted** when coverage completes. You shouldn't use it if your preferred
build target contains information you want to preserve between test runs.

docs/api.md
@@ -0,0 +1,367 @@
# Solidity-Coverage API
Solidity-coverage tracks which lines are hit as your tests run by instrumenting the contracts with special solidity statements and detecting their execution in a coverage-enabled EVM.
As such, the API spans the full set of tasks typically required to run a solidity test suite. The
table below shows how its core methods relate to the stages of a test run:
| Test Stage <img width=200/> | API Method <img width=200/> | Description <img width=800/> |
|---------------|--------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| compilation | `instrument` | A **pre-compilation** step: Rewrites contracts and generates an instrumentation data map. |
| client launch | `ganache` | A **substitute** step: Launches a ganache client with coverage collection enabled in its VM. As the client runs it will mark line/branch hits on the instrumentation data map. |
| test | `report` | A **post-test** step: Generates a coverage report from the data collected by the VM after tests complete. |
| exit | `finish` | A **substitute** step: Shuts client down |
[3]: https://github.com/gotwarlost/istanbul
**Additional Resources:**
+ the library includes [file system utilities](#Utils) for managing the
disposable set of contracts/artifacts which coverage must use in lieu of the 'real' (uninstrumented)
contracts.
+ there are two complete [coverage tool/plugin implementations][5] (for Buidler and Truffle)
which can be used as sources if you're building something similar.
[5]: https://github.com/sc-forks/solidity-coverage/tree/beta/plugins
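As a rough sketch of how these pieces fit together (compilation and test execution, which depend on your dev platform, are elided here):
```javascript
const path = require('path');
const CoverageAPI = require('solidity-coverage/api');
const utils = require('solidity-coverage/utils');
const client = require('ganache-cli');

async function runCoverage() {
  // Minimal config consumed by the utils methods (see Utils below)
  const config = {
    workingDir: process.cwd(),
    contractsDir: path.join(process.cwd(), 'contracts'),
  };

  const api = new CoverageAPI({ client });

  // Load sources, instrument them, and stage them in disposable folders
  const { targets } = utils.assembleFiles(config, ['Migrations.sol']);
  const instrumented = api.instrument(targets);
  const { tempContractsDir, tempArtifactsDir } = utils.getTempLocations(config);
  utils.setupTempFolders(config, tempContractsDir, tempArtifactsDir);
  utils.save(instrumented, config.contractsDir, tempContractsDir);

  // Launch the coverage-enabled client...
  const address = await api.ganache(client);
  console.log('Coverage client listening at', address);

  // ...compile the staged sources and run your tests against the client here...

  // Generate reports and shut everything down
  await api.report();
  await utils.finish(config, api);
}
```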
# Table of Contents
- [API Methods](#api)
* [constructor](#constructor)
* [instrument](#instrument)
* [ganache](#ganache)
* [report](#report)
* [finish](#finish)
* [getInstrumentationData](#getinstrumentationdata)
* [setInstrumentationData](#setinstrumentationdata)
- [Utils Methods](#utils)
* [loadSolcoverJS](#loadsolcoverjs)
* [assembleFiles](#assemblefiles)
* [getTempLocations](#gettemplocations)
* [setupTempFolders](#setuptempfolders)
* [save](#save)
* [finish](#finish-1)
# API
**Example**
```javascript
const CoverageAPI = require("solidity-coverage/api");
const api = new CoverageAPI(options);
```
## constructor
Creates a coverage API instance. Configurable.
**Parameters**
- `options` **Object** : API options
| Option <img width=200/>| Type <img width=200/> | Default <img width=1300/> | Description <img width=800/> |
| ------ | ---- | ------- | ----------- |
| port | *Number* | 8555 | Port to launch client on |
| silent | *Boolean* | false | Suppress logging output |
| client | *Object* | `require("ganache-core")` | JS Ethereum client |
| providerOptions | *Object* | `{ }` | [ganache-core options][1] |
| skipFiles | *Array* | `[]` | Array of contracts or folders (with paths expressed relative to the `contracts` directory) that should be skipped when doing instrumentation. |
| istanbulFolder | *String* | `./coverage` | Folder location for Istanbul coverage reports. |
| istanbulReporter | *Array* | `['html', 'lcov', 'text', 'json']` | [Istanbul coverage reporters][2] |
[1]: https://github.com/trufflesuite/ganache-core#options
[2]: https://istanbul.js.org/docs/advanced/alternative-reporters/
--------------
## instrument
Instruments a set of sources to prepare them for compilation.
:warning: **Important:** Instrumented sources must be compiled with **solc optimization OFF** :warning:
**Parameters**
- `contracts` **Object[]**: Array of solidity sources and their paths
Returns **Object[]** in the same format as the `contracts` param, but with sources instrumented.
**Example**
```javascript
const contracts = [{
source: "contract Simple { uint x = 5; }",
canonicalPath: "/Users/user/project/contracts/Simple.sol",
relativePath: "Simple.sol" // Optional, used for pretty printing.
},...]
const instrumented = api.instrument(contracts)
```
--------------
## ganache
Enables coverage data collection on an in-process ganache server. By default, this method launches
the server, begins listening on the port specified in the [config](#constructor) (or 8555 if unspecified), and
returns a url string. When `autoLaunchServer` is false, method returns `ganache.server` so you can control
the `server.listen` invocation yourself.
**Parameters**
- `client` **Object**: (*Optional*) ganache module
- `autoLaunchServer` **Boolean**: (*Optional*)
Returns **Promise** Address of server to connect to, or initialized, unlaunched server
**Example**
```javascript
const client = require('ganache-cli');
const api = new CoverageAPI( { client: client } );
const address = await api.ganache();
> http://127.0.0.1:8555
// Alternatively...
const server = await api.ganache(client, false);
await pify(server.listen)(8545);
```
--------------
## report
Generates coverage report using IstanbulJS
**Parameters**
- `istanbulFolder` **String**: (*Optional*) path to folder Istanbul will deposit coverage reports in.
Returns **Promise**
**Example**
```javascript
await api.report('./coverage_4A3cd2b'); // Default folder name is 'coverage'
```
-------------
## finish
Shuts down coverage-enabled ganache server instance
Returns **Promise**
**Example**
```javascript
const client = require('ganache-cli');
await api.ganache(client); // Server listening...
await api.finish(); // Server shut down.
```
-------------
## getInstrumentationData
Returns a copy of the hit map created during instrumentation. Useful if you'd like to delegate
coverage collection to multiple processes.
Returns **Object** instrumentation data;
**Example**
```javascript
const instrumented = api.instrument(contracts);
const data = api.getInstrumentationData();
save(data); // Pseudo-code
```
-------------
## setInstrumentationData
Sets the hit map object generated during instrumentation. Useful if you'd like
to collect or convert data to coverage for an instrumentation which was generated
in a different process.
**Example**
```javascript
const data = load(); // Pseudo-code: e.g. read a map saved by another process
api.setInstrumentationData(data);
// Client will collect data for the loaded map
const address = await api.ganache(client);
// Or to `report` instrumentation data which was collected in a different process.
const data = load(); // Pseudo-code
api.setInstrumentationData(data);
api.report();
```
----------------------------------------------------------------------------------------------------
# Utils
```javascript
const utils = require('solidity-coverage/utils');
```
Many of the utils methods take a `config` object param which
defines the absolute paths to your project root and contracts directory.
**Example**
```javascript
const config = {
workingDir: process.cwd(),
contractsDir: path.join(process.cwd(), 'contracts'),
}
```
-------------
## loadSolcoverJS
Loads `.solcover.js`. Users may specify [options][7] in a `.solcover.js` config file which your
application needs to consume.
**Parameters**
- `config` **Object**: [See *config* above](#Utils)
Returns **Object** Normalized coverage config
**Example**
```javascript
const solcoverJS = utils.loadSolcoverJS(config);
const api = new CoverageAPI(solcoverJS);
```
[7]: https://github.com/sc-forks/solidity-coverage/tree/beta#config-options
-------------
## assembleFiles
Loads contracts from the filesystem in a format that can be passed directly to the
[api.instrument](#instrument) method. Filters by an optional `skipFiles` parameter.
**Parameters**
- `config` **Object**: [See *config* above](#Utils)
- `skipFiles` **String[]**: (*Optional*) Array of files or folders to skip
[See API *constructor*](#constructor)
Returns **Object** with `targets` and `skipped` keys. These are Object arrays of contract sources
and paths.
**Example**
```javascript
const {
targets,
skipped
} = utils.assembleFiles(config, ['Migrations.sol'])
const instrumented = api.instrument(targets);
```
--------------
## getTempLocations
Returns a pair of canonically named temporary directory paths for contracts
and artifacts. Instrumented assets can be compiled from and written to these so the unit tests can
use them as sources.
**Parameters**
- `config` **Object**: [See *config* above](#Utils)
Returns **Object** with two absolute paths to disposable folders, `tempContractsDir`, `tempArtifactsDir`.
These directories are named `.coverage_contracts` and `.coverage_artifacts`.
**Example**
```javascript
const {
tempContractsDir,
tempArtifactsDir
} = utils.getTempLocations(config)
utils.setupTempFolders(config, tempContractsDir, tempArtifactsDir)
// Later, you can call `utils.finish` to delete these...
utils.finish(config, api)
```
----------
## setupTempFolders
Creates temporary directories to store instrumented contracts and their compilation artifacts in.
**Parameters**
- `config` **Object**: [See *config* above](#Utils)
- `tempContractsDir` **String**: absolute path to temporary contracts directory
- `tempArtifactsDir` **String**: absolute path to temporary artifacts directory
**Example**
```javascript
const {
tempContractsDir,
tempArtifactsDir
} = utils.getTempLocations(config)
utils.setupTempFolders(config, tempContractsDir, tempArtifactsDir);
```
-------------
## save
Writes an array of instrumented sources in the object format returned by
[api.instrument](#instrument) to a temporary directory.
**Parameters**
- `contracts` **Object[]**: array of contracts & paths generated by [api.instrument](#instrument)
- `originalDir` **String**: absolute path to original contracts directory
- `tempDir` **String**: absolute path to temp contracts directory (the destination of the save)
**Example**
```javascript
const {
tempContractsDir,
tempArtifactsDir
} = utils.getTempLocations(config)
utils.setupTempFolders(config, tempContractsDir, tempArtifactsDir);
const instrumented = api.instrument(contracts);
utils.save(instrumented, config.contractsDir, tempContractsDir);
```
-------------
## finish
Deletes temporary folders and shuts the ganache server down. Is tolerant - if folders or ganache
server don't exist it will return silently.
**Parameters**
- `config` **Object**: [See *config* above](#Utils)
- `api` **Object**: (*Optional*) coverage api instance whose own `finish` method will be called
Returns **Promise**
**Example**
```javascript
await utils.finish(config, api);
```

docs/faq.md
@@ -1,106 +1,107 @@
# FAQ
### Continuous Integration: installing Metacoin on TravisCI with Coveralls
- [Table of Contents](#contents)
* [Continuous Integration](#continuous-integration)
* [Running out of memory](#running-out-of-memory)
* [Running out of time](#running-out-of-time)
* [Notes on gas distortion](#notes-on-gas-distortion)
* [Notes on branch coverage](#notes-on-branch-coverage)
## Continuous Integration
An example using Truffle MetaCoin, TravisCI, and Coveralls:
**Step 1: Create a metacoin project & install coverage tools**
```bash
$ mkdir metacoin && cd metacoin
$ truffle unbox metacoin
$ rm test/TestMetacoin.sol # No solidity tests, sorry.
# Install coverage dependencies
# Install coverage and development dependencies
$ npm init
$ npm install --save-dev truffle
$ npm install --save-dev coveralls
$ npm install --save-dev solidity-coverage
```
**Step 2: Add test and coverage scripts to the `package.json`:**
**Step 2: Add solidity-coverage to the plugins array in truffle-config.js:**
```javascript
"scripts": {
"test": "truffle test",
"coverage": "npx solidity-coverage"
},
module.exports = {
networks: {...},
plugins: ["solidity-coverage"]
}
```
**Step 3: Create a .travis.yml:**
```yml
sudo: required
dist: trusty
language: node_js
node_js:
- '10'
install:
- npm install -g truffle
- npm install -g ganache-cli
- npm install
script:
- npm test
before_script:
- testrpc > /dev/null &
- sleep 5
after_script:
- npm run coverage && cat coverage/lcov.info | coveralls
- npx truffle run coverage
- cat coverage/lcov.info | coveralls
```
**NB:** It's best practice to run coverage in a [parallel CI build](https://github.com/OpenZeppelin/zeppelin-solidity/blob/master/.travis.yml) rather than assume its equivalence to `truffle test`. Coverage's `testrpc-sc` uses gasLimits far above the current blocklimit and rewrites your contracts in ways that might affect their behavior. It's also less robust than Truffle and may fail more frequently.
**NB:** It's best practice to run coverage as a separate CI job rather than assume its
equivalence to `test`. Coverage uses block gas settings far above the network limits,
ignores [EIP 170][4] and rewrites your contracts in ways that might affect
their behavior.
**Step 4: Toggle the project on at Travis and Coveralls and push.**
[It should look like this](https://coveralls.io/github/sc-forks/metacoin)
[It should look like this][1]
**Appendix: Coveralls vs. Codecov**
[Codecov.io](https://codecov.io/) is another CI coverage provider (we use it for this project). They're very reliable, easy to integrate with and have a nice UI. Unfortunately we haven't found a way to get their reports to show branch coverage. Coveralls has excellent branch coverage reporting out of the box (see below).
![missed_branch](https://user-images.githubusercontent.com/7332026/28502310-6851f79c-6fa4-11e7-8c80-c8fd80808092.png)
**TLDR: We recommend Coveralls for the accuracy of its branch reporting.**
We use [Codecov.io][2] here as a coverage provider for our JS tests - they're great. Unfortunately we haven't found a way to get their reports to show branch coverage for Solidity. Coveralls has excellent Solidity branch coverage reporting out of the box (see below).
![missed_branch][3]
### Running out of gas
If you have hardcoded gas costs into your tests some of them may fail when using solidity-coverage.
This is because the instrumentation process increases the gas costs for using the contracts, due to
the extra events. If this is the case, then the coverage may be incomplete. To avoid this, using
`estimateGas` to estimate your gas costs should be more resilient in most cases.
## Running out of memory
### Running out of memory (Locally and in CI)
(See [issue #59](https://github.com/sc-forks/solidity-coverage/issues/59)).
If your target contains dozens of contracts, you may run up against node's 1.7GB memory cap during the
contract compilation step. This can be addressed by setting the `testCommand` option in `.solcover.js` as
below:
```javascript
testCommand: 'node --max-old-space-size=4096 ../node_modules/.bin/truffle test --network coverage'
If your target contains dozens of large contracts, you may run up against node's memory cap during the
contract compilation step. This can be addressed by setting the size of the memory space allocated to the command
when you run it. (NB: you must use the relative path to the truffle `bin` in node_modules)
```
$ node --max-old-space-size=4096 ../node_modules/.bin/truffle run coverage [options]
```
Note the path: it reaches outside a temporarily generated `coverageEnv` folder to access a locally
installed version of truffle in your root directory's `node_modules`.
Large projects may also hit their CI container memcap running coverage after unit tests. This can be
addressed on TravisCI by adding `sudo: required` to the `travis.yml`, which raises the container's
limit to 7.5GB (ProTip courtesy of [@federicobond](https://github.com/federicobond)).
## Running out of time
Truffle sets a default mocha timeout of 5 minutes. Because tests run slower under coverage, it's possible to hit this limit with a test that iterates hundreds of times before producing a result. Timeouts can be disabled by configuring the mocha option in `.solcover.js` as below: (ProTip courtesy of [@cag](https://github.com/cag))
### Running out of time (in mocha)
Truffle sets a default mocha timeout of 5 minutes. Because tests run slower under coverage, it's possible to hit this limit with a test that iterates hundreds of times before producing a result. Timeouts can be disabled by configuring the mocha option in `truffle.js` as below: (ProTip courtesy of [@cag](https://github.com/cag))
```javascript
module.exports = {
networks: {
development: {
host: "localhost",
port: 8545,
network_id: "*"
},
...etc...
},
mocha: {
enableTimeouts: false
enableTimeouts: false
}
}
```
### Why has my branch coverage decreased? Why is assert being shown as a branch point?
## Notes on gas distortion
Solidity-coverage instruments by injecting statements into your code, increasing its execution costs.
`assert` and `require` check whether a condition is true or not. If it is, they allow execution to proceed. If not, they throw, and all changes are reverted. Indeed, prior to [Solidity 0.4.10](https://github.com/ethereum/solidity/releases/tag/v0.4.10), when `assert` and `require` were introduced, this functionality was achieved by code that looked like
+ If you are running gas usage simulations, they will **not be accurate**.
+ If you have hardcoded gas costs into your tests, some of them may **error**.
+ If your solidity logic constrains gas usage within narrow bounds, it may **fail**.
+ Solidity's `.send` and `.transfer` methods usually work fine though.
Using `estimateGas` to calculate your gas costs or allowing your transactions to use the default gas
settings should be more resilient in most cases.
Gas metering within Solidity is increasingly seen as an anti-pattern because EVM gas costs are recalibrated from fork to fork. Relying on their exact values can result in deployed contracts ceasing to behave as intended.
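For example, a Truffle test can estimate the cost instead of hardcoding it (`MetaCoin`/`sendCoin` are just the stock Truffle box example, and `accounts` comes from the surrounding `contract()` block):
```javascript
// Sketch: estimate gas so instrumentation overhead doesn't break the test
it('sends coin without a hardcoded gas value', async function() {
  const instance = await MetaCoin.deployed();
  const gas = await instance.sendCoin.estimateGas(accounts[1], 10);
  await instance.sendCoin(accounts[1], 10, { gas });
});
```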
## Notes on branch coverage
Solidity-coverage treats `assert` and `require` as code branches because they check whether a condition is true or not. If it is, they allow execution to proceed. If not, they throw, and all changes are reverted. Indeed, prior to [Solidity 0.4.10](https://github.com/ethereum/solidity/releases/tag/v0.4.10), when `assert` and `require` were introduced, this functionality was achieved by code that looked like
```
if (!x) throw;
@@ -115,37 +116,7 @@ Clearly, the coverage should be the same in these situations, as the code is (fu
If an `assert` or `require` is marked with an `I` in the coverage report, then during your tests the conditional is never true. If it is marked with an `E`, then it is never false.
### Running on windows
Since `v0.2.6` it's possible to produce a report on Windows (thanks to [@phiferd](https://github.com/phiferd),
who also maintains their own windows-compatible fork of solidity-coverage with other useful improvements). However,
problems remain with the tool's internal launch of `testrpc-sc` so you should create a `.solcover.js` config
file in your root directory and set the `norpc` option to `true`. Then follow the directions below for
launching `testrpc-sc` on its own from the command line before running `solidity-coverage` itself.
### Running testrpc-sc on its own
Sometimes its useful to launch `testrpc-sc` separately at the command line or with a script, after
setting the `norpc` config option in `.solcover.js` to true:
```
$ npx testrpc-sc <options>
```
### Running truffle as a local dependency
If your project ships with Truffle as a dev dependency and expects that instance to be
invoked when running tests, you should either set the `copyNodeModules` option to `true`
in your `.solcover.js` config file OR (if doing so results in poor run time performance), set
the config's `testCommand` and `compileCommand` options as below:
```javascript
compileCommand: '../node_modules/.bin/truffle compile',
testCommand: '../node_modules/.bin/truffle test --network coverage',
```
[1]: https://coveralls.io/builds/25886294
[2]: https://codecov.io/
[3]: https://user-images.githubusercontent.com/7332026/28502310-6851f79c-6fa4-11e7-8c80-c8fd80808092.png
[4]: https://github.com/ethereum/EIPs/blob/master/EIPS/eip-170.md

docs/testrpc-sc.md
@@ -1,91 +0,0 @@
# How to upgrade testrpc-sc
Warning: this is a bird's nest. Any ideas for improvement, however small, are welcome.
### testrpc-sc:
+ published on `npm` as `ethereumjs-testrpc-sc`
+ published **from the coverage branch** of [`sc-forks/testrpc-sc`](https://github.com/sc-forks/testrpc-sc/tree/coverage)
+ a webpack bundle of `sc-forks/ganache-core-sc#coverage` and all of its dependencies.
+ changes to `sc-forks/ganache-core` do not propagate until `testrpc-sc` is rebuilt and published
+ publishing `testrpc-sc` does not propagate until `solidity-coverage`'s deps are updated.
To publish a new version:
```
$ git checkout develop
$ git pull upstream develop
$ git checkout coverage
$ git rebase develop
> Update your ganache-core hash
> NOTE TO CGEWECKE: MAKE SURE YOU RENAMED THE PACKAGE (and the .bin command)!!!!
> OTHERWISE YOU WILL PUBLISH OVER THE REAL GANACHE-CLI
>
$ rm -rf node_modules
$ yarn install // This step seems to be absolutely necessary.
$ npm run build // Check build, just to make sure
$ npm version patch // If helpful. If you're tracking the upstream with a ganache-core sync, use theirs.
$ git push
$ npm publish // This also runs build.
// Go to `solidity-coverage` and pin its `testrpc-sc` dependency to the new version.
```
### sc-forks/ganache-core-sc:
+ is what testrpc-sc used to be
+ set by default to [its `coverage` branch](https://github.com/sc-forks/ganache-core-sc)
+ depends on `sc-forks/ethereumjs-vm-sc.git`
+ depends on `sc-forks/provider-engine-sc.git#8.1.19`
+ differs from `truffle-suite/ganache-core` by these deps and
[two lines](https://github.com/sc-forks/ganache-core/blob/ae31080cdc581fef416a1c68cbe28ff71b6fb7c9/lib/blockchain_double.js#L36-L37)
in `blockchain_double.js` which set the block and transaction default gas limits.
To sync `ganache-core-sc` with its upstream parent at `truffle-suite`
```
$ git checkout master
$ git remote add upstream https://github.com/trufflesuite/ganache-core.git
$ git pull upstream master
$ git push
$ git checkout coverage
$ git rebase -i master (there will probably be conflicts)
$ git push
```
### How can I modify ethereumjs-vm-sc and test the changes at `solidity-coverage`?
Since `solidity-coverage@0.1.10`, ethereumjs-vm-sc is an independent dev dependency,
required by the coverage unit tests. The new testrpc has a separate webpacked copy. The simplest
thing to do is open a branch at `solidity-coverage` and develop directly on the `vm` dep.
When you're satisfied that tests pass with your changes, copy your work over to the `ethereumjs-vm-sc` repo itself.
In `test/util/vm.js` the `results` object passed back by `vm.runTx` at [callMethod](https://github.com/sc-forks/solidity-coverage/blob/master/test/util/vm.js#L120)
also contains things like the runState and the logs: ex: `results.vm.runState.logs`.
+ To merge / publish the changes:
+ Merge `ethereumjs-vm-sc#my-new-vm` to master.
+ follow the `testrpc-sc` publication steps above.
There's no reason to worry about changing ethereumjs-vm-sc at master. If that affects anyone (unlikely)
they have safe harbour at any solidity-coverage installation @0.1.9 and up. They can update.
### E2E Testing
[sc-forks/zeppelin-solidity](https://github.com/sc-forks/zeppelin-solidity) has been configured to
serve as a simple pre-publication E2E test. By default the package pulls solidity-coverage from the repo's master branch.
You can trigger a [CI build](https://travis-ci.org/sc-forks/zeppelin-solidity) and [Coveralls report](https://coveralls.io/github/sc-forks/zeppelin-solidity) by running:
```
$ npm run ci
```
### solidity-parser-sc
We also publish `solidity-parser-sc` because `consensys/solidity-parser` in its .pegjs form has been
left to die in the wild, unloved by all. Publish from the `publish` branch by running `npm version patch` and `npm publish`.
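That is, from the `solidity-parser-sc` repo (sketch):
```
$ git checkout publish
$ npm version patch
$ npm publish
```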

@ -0,0 +1,138 @@
# How to install 0.7.0
**Install**
```
$ npm install --save-dev solidity-coverage@beta
```
**Add** this package to your plugins array in `truffle-config.js`
```javascript
module.exports = {
networks: {...},
plugins: ["solidity-coverage"]
}
```
**Run**
```
truffle run coverage [command-options]
```
A full list of options and other information are [available here][8]
# Upgrading from 0.6.x to 0.7.0-beta.x
First, follow [the installation instructions](#how-to-install-070) and see if it works.
:rabbit2: It does!? Bye.
:elephant: It does not. Good...
#### Are you using Truffle V5?
+ Everything works best with Truffle versions >= 5.0.31.
#### Are you launching testrpc-sc yourself as a stand-alone client?
+ Stop launching it. The coverage plugin needs to initialize the client itself so it can hook into the EVM.
+ By default it uses the ganache bundled with Truffle, but you can use any version (see below).
#### Were you passing testrpc-sc lots of options as flags? :jp: :jp: :jp: :jp: :jp:
+ If the flags were `allowUnlimitedContractSize`, `gasLimit`, `gasPrice` or `emitFreeLogs`,
you can safely ignore them. Ditto if your port was `8555`.
+ If the flags were things like `accounts` or `network_id`, you'll need to transfer them as
[ganache-core options][1] to the `providerOptions` key in .solcover.js.
+ Ganache's "cli flag" and "js option" formats are slightly different. Check out [their docs][1]
**Example**
*Before (at the command line)*
```
$ testrpc-sc --account="0x2b...7cd,1000000000000000000000000" -i 1999 --noVmErrorsOnRPCResponse
```
*Now (in .solcover.js)*
```javascript
providerOptions: {
accounts: [{
secretKey: "0x2b.............7cd",
balance: "0xD3C21BCECCEDA1000000" // <-- Must be hex
}],
network_id: 1999,
vmErrorsOnRPCResponse: false
}
```
#### Do you have a 'coverage' network in truffle-config.js?
+ If you copy-pasted it from the <= 0.6.x docs, **you can safely delete it**.
+ You should be able to `truffle run coverage --network <network-name>` and use the same config you
run your regular tests with.
+ You can also omit the network flag and you'll be given default settings which look like this:
```javascript
'soliditycoverage': {
port: 8555,
host: "127.0.0.1",
network_id: "*",
}
```
#### Do your tests depend on the specific ganache version you have as a local dependency?
+ Declare it in .solcover.js using the `client` option:
```javascript
client: require('ganache-cli'),
```
#### Does your config contain any deprecated options?
+ They are:
```
accounts, # Now: `providerOptions: { total_accounts: <number> }`
buildDirPath, # Now: `--temp <path>` (At the command line, see Advanced Use)
copyPackages,
copyNodeModules,
deepSkip,
testCommand,
compileCommand,
noRpc
```
+ You can delete them. (See the example below for the `accounts` replacement.)
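For example, the old `accounts` setting maps onto ganache's `total_accounts`, declared in .solcover.js like the other provider options (a minimal sketch, using the same shape as the example above):
```javascript
providerOptions: {
  total_accounts: 35
}
```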
#### Do you usually: (1) launch testrpc-sc, (2) do something special, (3) run solidity-coverage?
+ See [the workflow hooks documentation][3]. The "something special" will likely need to run within
an async function declared in .solcover.js, for example:
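A minimal sketch, where `deployThing` stands in for whatever your custom setup step is (hypothetical name, not part of this package):
```javascript
// .solcover.js
module.exports = {
  onServerReady: async function(config) {
    // `config` is assumed here to be the resolved project config the plugin passes to hooks
    await deployThing(config); // hypothetical custom setup step
  }
}
```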
#### Are you what some might call an 'advanced user'?
+ See [Advanced Use][2]
#### Would you like to see some real-world installation examples?
+ [metacoin][4]
+ [openzeppelin-contracts][5]
+ [joinColony/colonyNetwork][6]
+ [aragon/aragon-court][7]
#### :tada: It's still not working!! :tada:
+ If your project is public, please open an issue linking to it and we will advise and/or
open a PR into your repo installing solidity-coverage after patching any relevant bugs here.
+ If your project is private, see if you can generate a reproduction case for the
problem and we'll try to fix that.
[1]: https://github.com/trufflesuite/ganache-core#options
[2]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/advanced.md
[3]: https://github.com/sc-forks/solidity-coverage/blob/beta/docs/advanced.md#workflow-hooks
[4]: https://github.com/sc-forks/metacoin
[5]: https://github.com/OpenZeppelin/openzeppelin-contracts/pull/1923
[6]: https://github.com/JoinColony/colonyNetwork/pull/716
[7]: https://github.com/aragon/aragon-court/pull/123
[8]: https://github.com/sc-forks/solidity-coverage/tree/beta#command-options

@ -0,0 +1,307 @@
const shell = require('shelljs');
const pify = require('pify');
const fs = require('fs');
const path = require('path');
const istanbul = require('istanbul');
const assert = require('assert');
const detect = require('detect-port');
const _ = require('lodash/lang');
const ConfigValidator = require('./validator');
const Instrumenter = require('./instrumenter');
const Coverage = require('./coverage');
const DataCollector = require('./collector');
const AppUI = require('./ui').AppUI;
/**
* Coverage Runner
*/
class API {
constructor(config={}) {
this.coverage = new Coverage();
this.instrumenter = new Instrumenter();
this.validator = new ConfigValidator()
this.config = config || {};
// Validate
this.validator.validate(this.config);
// Options
this.testsErrored = false;
this.cwd = config.cwd || process.cwd();
this.defaultHook = () => {};
this.onServerReady = config.onServerReady || this.defaultHook;
this.onTestsComplete = config.onTestsComplete || this.defaultHook;
this.onCompileComplete = config.onCompileComplete || this.defaultHook;
this.onIstanbulComplete = config.onIstanbulComplete || this.defaultHook;
this.server = null;
this.defaultPort = 8555;
this.client = config.client;
this.defaultNetworkName = 'soliditycoverage';
this.port = config.port || this.defaultPort;
this.host = config.host || "127.0.0.1";
this.providerOptions = config.providerOptions || {};
this.autoLaunchServer = config.autoLaunchServer === false ? false : true;
this.skipFiles = config.skipFiles || [];
this.log = config.log || console.log;
this.gasLimit = 0xffffffffff; // default "gas sent" with transactions
this.gasLimitString = "0xfffffffffff"; // block gas limit for ganache (higher than "gas sent")
this.gasPrice = 0x01;
this.istanbulFolder = config.istanbulFolder || false;
this.istanbulReporter = config.istanbulReporter || ['html', 'lcov', 'text', 'json'];
this.setLoggingLevel(config.silent);
this.ui = new AppUI(this.log);
}
/**
* Instruments a set of sources to prepare them for running under coverage
* @param {Object[]} targets (see below)
* @return {Object[]} (see below)
* @example of input/output array:
* [{
* source: (required) <solidity-source>,
* canonicalPath: (required) <absolute path to source file>
* relativePath: (optional) <rel path to source file for logging>
* }]
*/
instrument(targets=[]) {
let currentFile; // Keep track of filename in case we crash...
let started = false;
let outputs = [];
try {
for (let target of targets) {
currentFile = target.relativePath || target.canonicalPath;
if(!started){
started = true;
this.ui.report('instr-start');
}
this.ui.report('instr-item', [currentFile]);
const instrumented = this.instrumenter.instrument(
target.source,
target.canonicalPath
);
this.coverage.addContract(instrumented, target.canonicalPath);
outputs.push({
canonicalPath: target.canonicalPath,
relativePath: target.relativePath,
source: instrumented.contract
})
}
} catch (err) {
err.message = this.ui.generate('instr-fail', [currentFile]) + err.message;
throw err;
}
return outputs;
}
/**
* Returns a copy of the hit map created during instrumentation.
* Useful if you'd like to delegate coverage collection to multiple processes.
* @return {Object} instrumentationData
*/
getInstrumentationData(){
return _.cloneDeep(this.instrumenter.instrumentationData)
}
/**
* Sets the hit map object generated during instrumentation. Useful if you'd like
* to collect data for a pre-existing instrumentation.
* @param {Object} data
*/
setInstrumentationData(data={}){
this.instrumenter.instrumentationData = _.cloneDeep(data);
}
/**
* Enables coverage collection on an in-process ethereum client server, hooking the DataCollector
* to its VM. By default the method returns a url after the server has begun listening on the port
* specified in the config. When `autoLaunchServer` is false, the method returns `ganache.server` so
* the consumer can control the `server.listen` invocation themselves.
* @param {Object} client ganache client
* @param {Boolean} autoLaunchServer boolean
* @return {<Promise> (String | Server) } address of server to connect to, or initialized, unlaunched server.
*/
async ganache(client, autoLaunchServer){
// Check for port-in-use
if (await detect(this.port) !== this.port){
throw new Error(this.ui.generate('server-fail', [this.port]))
}
this.collector = new DataCollector(this.instrumenter.instrumentationData);
this.providerOptions.gasLimit = this.gasLimitString;
this.providerOptions.allowUnlimitedContractSize = true;
// Attach to vm step of supplied client
try {
if (this.config.forceBackupServer) throw new Error()
await this.attachToVM(client)
}
// Fall back to the bundled ganache-cli
catch(err) {
const _ganache = require('ganache-cli');
this.ui.report('vm-fail', [_ganache.version]);
await this.attachToVM(_ganache);
}
if (autoLaunchServer === false || this.autoLaunchServer === false){
return this.server;
}
await pify(this.server.listen)(this.port);
const address = `http://${this.host}:${this.port}`;
this.ui.report('server', [address]);
return address;
}
/**
* Generate coverage / write coverage report / run istanbul
*/
async report(_folder) {
const folder = _folder || this.istanbulFolder;
const collector = new istanbul.Collector();
const reporter = new istanbul.Reporter(false, folder);
return new Promise((resolve, reject) => {
try {
this.coverage.generate(this.instrumenter.instrumentationData);
const mapping = this.makeKeysRelative(this.coverage.data, this.cwd);
this.saveCoverage(mapping);
collector.add(mapping);
this.istanbulReporter.forEach(report => reporter.add(report));
// Pify doesn't like this one...
reporter.write(collector, true, (err) => {
if (err) return reject(err);
this.ui.report('istanbul');
resolve();
});
} catch (error) {
error.message = this.ui.generate('istanbul-fail') + error.message;
throw error;
}
})
}
/**
* Removes coverage build artifacts, kills testrpc.
*/
async finish() {
if (this.server && this.server.close){
this.ui.report('finish');
await pify(this.server.close)();
}
}
// ------------------------------------------ Utils ----------------------------------------------
// ========
// Provider
// ========
async attachToVM(client){
const self = this;
// Fallback to client from options
if(!client) client = this.client;
this.server = client.server(this.providerOptions);
this.assertHasBlockchain(this.server.provider);
await this.vmIsResolved(this.server.provider);
const blockchain = this.server.provider.engine.manager.state.blockchain;
const createVM = blockchain.createVMFromStateTrie;
// Attach to VM which ganache has already created for transactions
blockchain.vm.on('step', self.collector.step.bind(self.collector));
// Hijack createVM method which ganache runs for each `eth_call`
blockchain.createVMFromStateTrie = function(state, activatePrecompiles) {
const vm = createVM.apply(blockchain, arguments);
vm.on('step', self.collector.step.bind(self.collector));
return vm;
}
}
assertHasBlockchain(provider){
assert(provider.engine.manager.state.blockchain !== undefined);
assert(provider.engine.manager.state.blockchain.createVMFromStateTrie !== undefined);
}
async vmIsResolved(provider){
return new Promise(resolve => {
const interval = setInterval(() => {
if (provider.engine.manager.state.blockchain.vm !== undefined){
clearInterval(interval);
resolve();
}
});
})
}
// ========
// File I/O
// ========
saveCoverage(data){
const covPath = path.join(this.cwd, "coverage.json");
fs.writeFileSync(covPath, JSON.stringify(data));
}
// =====
// Paths
// =====
//
/**
* Relativizes path keys so that istanbul report can be read on Windows
* @param {Object} map coverage map generated by coverageMap
* @param {String} wd working directory
* @return {Object} map with relativized keys
*/
makeKeysRelative(map, wd) {
const newCoverage = {};
Object
.keys(map)
.forEach(pathKey => newCoverage[path.relative(wd, pathKey)] = map[pathKey]);
return newCoverage;
}
// =======
// Logging
// =======
/**
* Turn logging off (for CI)
* @param {Boolean} isSilent
*/
setLoggingLevel(isSilent) {
if (isSilent) this.log = () => {};
}
}
module.exports = API;

@ -1,476 +0,0 @@
const shell = require('shelljs');
const fs = require('fs');
const path = require('path');
const childprocess = require('child_process');
const readline = require('readline');
const reqCwd = require('req-cwd');
const istanbul = require('istanbul');
const treeKill = require('tree-kill');
const getInstrumentedVersion = require('./instrumentSolidity.js');
const CoverageMap = require('./coverageMap.js');
const preprocessor = require('./preprocessor');
const isWin = /^win/.test(process.platform);
const gasLimitHex = 0xfffffffffff; // High gas block limit / contract deployment limit
const gasPriceHex = 0x01; // Low gas price
/**
* Coverage Runner
*/
class App {
constructor(config) {
this.coverageDir = './coverageEnv'; // Env that instrumented .sols are tested in
// Options
this.network = ''; // Default truffle network execution flag
this.silence = ''; // Default log level passed to shell
this.log = console.log;
// Other
this.testrpcProcess = null; // ref to testrpc server we need to close on exit
this.events = null; // ref to string array loaded from 'allFiredEvents'
this.testsErrored = null; // flag set to non-null if truffle tests error
this.coverage = new CoverageMap(); // initialize a coverage map
this.originalArtifacts = []; // Artifacts from original build (we swap these in)
this.skippedFolders = [];
// Config
this.config = config || {};
this.workingDir = config.dir || '.'; // Relative path to contracts folder
this.accounts = config.accounts || 35; // Number of accounts to testrpc launches with
this.skipFiles = config.skipFiles || []; // Which files should be skipped during instrumentation
this.norpc = config.norpc || false; // Launch testrpc-sc internally?
this.port = config.port || 8555; // Port testrpc should listen on
this.buildDirPath = config.buildDirPath || '/build/contracts' // Build directory path for compiled smart contracts
this.copyNodeModules = config.copyNodeModules || false; // Copy node modules into coverageEnv?
this.copyPackages = config.copyPackages || []; // Only copy specific node_modules packages into coverageEnv
this.testrpcOptions = config.testrpcOptions || null; // Options for testrpc-sc
this.testCommand = config.testCommand || null; // Optional test command
this.compileCommand = config.compileCommand || null; // Optional compile command
this.deepSkip = config.deepSkip || null; // Don't post-process skipped files
this.setLoggingLevel(config.silent);
}
// -------------------------------------- Methods ------------------------------------------------
/**
* Generates a copy of the target project configured for solidity-coverage and saves to
* the coverage environment folder. The process exits(1) if this fails.
*/
generateCoverageEnvironment() {
this.log('Generating coverage environment');
try {
this.sanityCheckContext();
let files = shell.ls('-A', this.workingDir);
const nmIndex = files.indexOf('node_modules');
// Removes node_modules from array (unless requested).
if (!this.copyNodeModules && nmIndex > -1) {
files.splice(nmIndex, 1);
}
// Identify folders to exclude
this.skipFiles.forEach(item => {
if (path.extname(item) !== '.sol')
this.skippedFolders.push(item);
});
files = files.map(file => `${this.workingDir}/${file}`);
shell.mkdir(this.coverageDir);
shell.cp('-R', files, this.coverageDir);
// Add specific node_modules packages.
if (!this.copyNodeModules && this.copyPackages.length) {
shell.mkdir(this.coverageDir + '/node_modules');
this.copyPackages.forEach((nodePackage) => {
shell.mkdir('-p', this.coverageDir + '/node_modules/' + nodePackage);
shell.cp('-rfL', 'node_modules/' + nodePackage + '/.', this.coverageDir + '/node_modules/' + nodePackage);
});
}
// Load config
const coverageNetwork = {
host: 'localhost',
network_id: '*',
port: this.port,
gas: gasLimitHex,
gasPrice: gasPriceHex
};
let truffleConfig = {
networks: {
coverage: coverageNetwork
}
};
let newTrufflePath = `${this.workingDir}/truffle-config.js`;
let oldTrufflePath = `${this.workingDir}/truffle.js`;
if (shell.test('-e', newTrufflePath)) truffleConfig = reqCwd.silent(newTrufflePath);
else if (shell.test('-e', oldTrufflePath)) truffleConfig = reqCwd.silent(oldTrufflePath);
this.network = '--network coverage';
// Coverage network opts specified: use port if declared
if (truffleConfig.networks && truffleConfig.networks.coverage) {
this.port = truffleConfig.networks.coverage.port || this.port;
} else {
// Put the coverage network in the existing config
if (!truffleConfig.networks) truffleConfig.networks = {};
truffleConfig.networks.coverage = coverageNetwork;
const configString = `module.exports = ${JSON.stringify(truffleConfig)}`;
fs.writeFileSync(`${this.coverageDir}/truffle-config.js`, configString);
}
// Compile the contracts before instrumentation and preserve their ABI's.
// We will be stripping out access modifiers like view before we recompile
// post-instrumentation.
if (shell.test('-e', `${this.coverageDir}${this.buildDirPath}`)){
shell.rm('-Rf', `${this.coverageDir}${this.buildDirPath}`)
}
this.runCompileCommand();
this.originalArtifacts = this.loadArtifacts();
shell.rm('-Rf', `${this.coverageDir}${this.buildDirPath}`);
} catch (err) {
const msg = ('There was a problem generating the coverage environment: ');
this.cleanUp(msg + err);
}
}
/**
* For each contract except migrations.sol (or those in skipFiles):
* + Generate file path reference for coverage report
* + Load contract as string
* + Instrument contract
* + Save instrumented contract in the coverage environment folder where covered tests will run
* + Add instrumentation info to the coverage map
*/
instrumentTarget() {
this.skipFiles = this.skipFiles.map(contract => `${this.coverageDir}/contracts/${contract}`);
this.skipFiles.push(`${this.coverageDir}/contracts/Migrations.sol`);
const instrumentedFiles = [];
let currentFile;
try {
shell.ls(`${this.coverageDir}/contracts/**/*.sol`).forEach(file => {
currentFile = file;
if (!this.skipFiles.includes(file) && !this.inSkippedFolder(file)) {
this.log('Instrumenting ', file);
const contractPath = this.platformNeutralPath(file);
const working = this.workingDir.substring(1);
const canonicalPath = contractPath.split('/coverageEnv').join(working);
const contract = fs.readFileSync(contractPath).toString();
const instrumentedContractInfo = getInstrumentedVersion(contract, canonicalPath);
fs.writeFileSync(contractPath, instrumentedContractInfo.contract);
this.coverage.addContract(instrumentedContractInfo, canonicalPath);
instrumentedFiles.push(file);
} else {
this.log('Skipping instrumentation of ', file);
}
});
} catch (err) {
const msg = `There was a problem instrumenting ${currentFile}: `;
this.cleanUp(msg + err);
}
// Strip any view / pure modifiers in other files in case they depend on any instrumented files
shell
.ls(`${this.coverageDir}/**/*.sol`)
.filter(file => !instrumentedFiles.includes(file))
.forEach(file => {
// Skip post-processing of skipped files
if (this.deepSkip && (this.skipFiles.includes(file) || this.inSkippedFolder(file))) return;
const contractPath = this.platformNeutralPath(file);
const contract = fs.readFileSync(contractPath).toString();
const contractProcessed = preprocessor.run(contract);
if (contractProcessed.name && contractProcessed.name === 'SyntaxError' && file.slice(-15) !== 'SimpleError.sol') {
console.log(`Warning: The file at ${file} was identified as a Solidity Contract, ` +
'but did not parse correctly. You may ignore this warning if it is not a Solidity file, ' +
'or your project does not use it');
} else {
fs.writeFileSync(contractPath, contractProcessed);
}
});
// Now that they've been modified, compile all the contracts again
this.runCompileCommand();
// And swap the original abis into the instrumented artifacts so that truffle etc uses 'call'
// on them.
this.modifyArtifacts();
}
/**
* Run modified testrpc with large block limit, on (hopefully) unused port.
* Changes here should also be added to the before() block of test/run.js.
* @return {Promise} Resolves when testrpc prints 'Listening' to std out / norpc is true.
*/
launchTestrpc() {
return new Promise((resolve, reject) => {
if (!this.norpc) {
const defaultRpcOptions = `--accounts ${this.accounts} --port ${this.port}`;
const options = (this.testrpcOptions || defaultRpcOptions) + ` --gasLimit ${gasLimitHex}`;
// Launch
const execOpts = {maxBuffer: 1024 * 1024 * 100};
this.testrpcProcess = childprocess.exec(`npx testrpc-sc ${options}`, execOpts, (err, stdout, stderr) => {
if (err) {
if (stdout) this.log(`testRpc stdout:\n${stdout}`);
if (stderr) this.log(`testRpc stderr:\n${stderr}`);
this.cleanUp('testRpc errored after launching as a childprocess.');
}
});
// Resolve when testrpc logs that it's listening.
this.testrpcProcess.stdout.on('data', data => {
if (data.includes('Listening')) {
this.log(`Launched testrpc on port ${this.port}`);
return resolve();
}
});
} else {
return resolve();
}
});
}
/**
* Run truffle (or config.testCommand) over instrumented contracts in the
* coverage environment folder. Shell cd command needs to be invoked
* as its own statement for command line options to work, apparently.
* Also reads the 'allFiredEvents' log.
*/
runTestCommand() {
try {
const defaultCommand = `truffle test ${this.network} ${this.silence}`;
const command = this.testCommand || defaultCommand;
this.log(`Running: ${command}\n(this can take a few seconds)...`);
shell.cd(this.coverageDir);
shell.exec(command);
this.testsErrored = shell.error();
shell.cd('./..');
} catch (err) {
const msg =
`
There was an error generating coverage. Possible reasons include:
1. Another application is using port ${this.port}
2. Your test runner (Truffle?) crashed because the tests encountered an error.
`;
this.cleanUp(msg + err);
}
}
/**
* Run truffle compile (or config.compileCommand) over instrumented contracts in the
* coverage environment folder. Shell cd command needs to be invoked
* as its own statement for command line options to work, apparently.
*/
runCompileCommand() {
try {
const defaultCommand = `truffle compile ${this.network} ${this.silence}`;
const command = this.compileCommand || defaultCommand;
this.log(`Running: ${command}\n(this can take a few seconds)...`);
shell.cd(this.coverageDir);
shell.exec(command);
this.testsErrored = shell.error();
shell.cd('./..');
} catch (err) {
const msg =
`
There was an error compiling the contracts.
`;
this.cleanUp(msg + err);
}
}
/**
* Loads artifacts generated by compiling the contracts before we instrument them.
* @return {Array} Array of artifact objects
*/
loadArtifacts() {
const artifacts = [];
shell.ls(`${this.coverageDir}${this.buildDirPath}/*.json`).forEach(file => {
const artifactPath = this.platformNeutralPath(file);
const artifactRaw = fs.readFileSync(artifactPath);
const artifact = JSON.parse(artifactRaw);
artifacts.push(artifact);
})
return artifacts;
}
/**
* Swaps original ABIs into artifacts generated post-instrumentation. We are stripping
* access modifiers like `view` out of the source during that step and need to ensure
* truffle automatically invokes those methods by `.call`, based on the ABI sig.
*/
modifyArtifacts(){
shell.ls(`${this.coverageDir}${this.buildDirPath}/*.json`).forEach((file, index) => {
const artifactPath = this.platformNeutralPath(file);
const artifactRaw = fs.readFileSync(artifactPath);
const artifact = JSON.parse(artifactRaw);
artifact.abi = this.originalArtifacts[index].abi;
fs.writeFileSync(artifactPath, JSON.stringify(artifact));
})
}
/**
* Generate coverage / write coverage report / run istanbul
*/
generateReport() {
const collector = new istanbul.Collector();
const reporter = new istanbul.Reporter();
return new Promise((resolve, reject) => {
// Get events fired during instrumented contracts execution.
const stream = fs.createReadStream(`./allFiredEvents`);
stream.on('error', err => this.cleanUp('Event trace could not be read.\n' + err));
const reader = readline.createInterface({
input: stream,
});
this.events = [];
reader
.on('line', line => this.events.push(line))
.on('close', () => {
// Generate Istanbul report
try {
this.coverage.generate(this.events, `${this.workingDir}/contracts`);
const relativeMapping = this.makeKeysRelative(this.coverage.coverage, this.workingDir);
const json = JSON.stringify(relativeMapping);
fs.writeFileSync('./coverage.json', json);
collector.add(relativeMapping);
reporter.add('html');
reporter.add('lcov');
reporter.add('text');
reporter.write(collector, true, () => {
this.log('Istanbul coverage reports generated');
this.cleanUp();
resolve();
});
} catch (err) {
const msg = 'There was a problem generating the coverage map / running Istanbul.\n';
console.log(err.stack);
this.cleanUp(msg + err);
}
});
});
}
// ------------------------------------------ Utils ----------------------------------------------
sanityCheckContext(){
if (!shell.test('-e', `${this.workingDir}/contracts`)){
this.cleanUp("Couldn't find a 'contracts' folder to instrument.");
}
if (shell.test('-e', `${this.workingDir}/${this.coverageDir}`)){
shell.rm('-Rf', this.coverageDir);
}
if (shell.test('-e', `${this.workingDir}/scTopics`)){
shell.rm(`${this.workingDir}/scTopics`);
}
}
/**
* Relativizes path keys so that istanbul report can be read on Windows
* @param {Object} map coverage map generated by coverageMap
* @param {String} root working directory
* @return {Object} map with relativized keys
*/
makeKeysRelative(map, root) {
const newCoverage = {};
Object.keys(map).forEach(pathKey => {
newCoverage[path.relative(root, pathKey)] = map[pathKey];
});
return newCoverage;
}
/**
* Convert absolute paths from Windows, if necessary
* @param {String} file path
* @return {String} normalized path
*/
platformNeutralPath(file) {
return (isWin)
? path.resolve(file).split('\\').join('/')
: path.resolve(file);
}
/**
* Determines if a file is in a folder marked skippable.
* @param {String} file file path
* @return {Boolean}
*/
inSkippedFolder(file){
let shouldSkip;
this.skippedFolders.forEach(folderToSkip => {
folderToSkip = `${this.coverageDir}/contracts/${folderToSkip}`;
if (file.indexOf(folderToSkip) === 0)
shouldSkip = true;
});
return shouldSkip;
}
/**
* Allows config to turn logging off (for CI)
* @param {Boolean} isSilent
*/
setLoggingLevel(isSilent) {
if (isSilent) {
this.silence = '> /dev/null 2>&1';
this.log = () => {};
}
}
/**
* Removes coverage build artifacts, kills testrpc.
* Exits (1) and prints msg on error, exits (0) otherwise.
* @param {String} err error message
*/
cleanUp(err) {
const self = this;
function exit(err){
if (err) {
self.log(`${err}\nExiting without generating coverage...`);
process.exit(1);
} else if (self.testsErrored) {
self.log('Some truffle tests failed while running coverage');
process.exit(1);
} else {
self.log('Done.');
process.exit(0);
}
}
self.log('Cleaning up...');
shell.config.silent = true;
shell.rm('-Rf', self.coverageDir);
shell.rm('./allFiredEvents');
shell.rm('./scTopics');
if (self.testrpcProcess) {
treeKill(self.testrpcProcess.pid, function(killError){
self.log(`Shutting down testrpc-sc (pid ${self.testrpcProcess.pid})`)
exit(err)
});
} else {
exit(err);
}
}
}
module.exports = App;

@ -0,0 +1,61 @@
const web3Utils = require('web3-utils')
/**
* Writes data from the VM step to the in-memory
* coverage map constructed by the Instrumenter.
*/
class DataCollector {
constructor(instrumentationData={}){
this.instrumentationData = instrumentationData;
this.validOpcodes = {
"PUSH1": true,
}
}
/**
* VM step event handler. Detects instrumentation hashes when they are pushed to the
* top of the stack. This runs millions of times - trying to keep it fast.
* @param {Object} info vm step info
*/
step(info){
try {
if (this.validOpcodes[info.opcode.name] && info.stack.length > 0){
const idx = info.stack.length - 1;
let hash = web3Utils.toHex(info.stack[idx]).toString();
hash = this._normalizeHash(hash);
if(this.instrumentationData[hash]){
this.instrumentationData[hash].hits++;
}
}
} catch (err) { /*Ignore*/ };
}
/**
* Left-pads zero prefixed bytes 32 hashes to length 66. The '59' in the
* comparison below is arbitrary. It provides a margin for recurring zeros
* but prevents left-padding shorter irrelevant hashes (like fn sigs)
*
* @param {String} hash data hash from evm stack.
* @return {String} 0x prefixed hash of length 66.
*/
_normalizeHash(hash){
if (hash.length < 66 && hash.length > 59){
hash = hash.slice(2);
while(hash.length < 64) hash = '0' + hash;
hash = '0x' + hash
}
return hash;
}
/**
* Unit test helper
* @param {Object} data Instrumenter.instrumentationData
*/
_setInstrumentationData(data){
this.instrumentationData = data;
}
}
module.exports = DataCollector;

@ -0,0 +1,107 @@
/**
* Converts instrumentation data accumulated at the VM steps to an istanbul-spec coverage object.
* @type {Coverage}
*/
const util = require('util');
class Coverage {
constructor() {
this.data = {};
this.assertData = {};
}
/**
* Initializes an entry in the coverage map for an instrumented contract. Tracks by
* its canonical contract path, e.g. *not* by its location in the temp folder.
* @param {Object} info 'info = instrumenter.instrument(contract, fileName, true)'
* @param {String} contractPath canonical path to contract file
*/
addContract(info, contractPath) {
this.data[contractPath] = {
l: {},
path: contractPath,
s: {},
b: {},
f: {},
fnMap: {},
statementMap: {},
branchMap: {},
};
this.assertData[contractPath] = { };
info.runnableLines.forEach((item, idx) => {
this.data[contractPath].l[info.runnableLines[idx]] = 0;
});
this.data[contractPath].fnMap = info.fnMap;
for (let x = 1; x <= Object.keys(info.fnMap).length; x++) {
this.data[contractPath].f[x] = 0;
}
this.data[contractPath].branchMap = info.branchMap;
for (let x = 1; x <= Object.keys(info.branchMap).length; x++) {
this.data[contractPath].b[x] = [0, 0];
this.assertData[contractPath][x] = {
preEvents: 0,
postEvents: 0,
};
}
this.data[contractPath].statementMap = info.statementMap;
for (let x = 1; x <= Object.keys(info.statementMap).length; x++) {
this.data[contractPath].s[x] = 0;
}
}
/**
* Populates an empty coverage map with values derived from a hash map of
* data collected as the instrumented contracts are tested
* @param {Object} collectedData map of collected instrumentation data
* @return {Object} coverage map.
*/
generate(collectedData) {
const hashes = Object.keys(collectedData);
for (let hash of hashes){
const data = collectedData[hash];
const contractPath = collectedData[hash].contractPath;
const id = collectedData[hash].id;
const hits = collectedData[hash].hits;
switch(collectedData[hash].type){
case 'line': this.data[contractPath].l[id] = hits; break;
case 'function': this.data[contractPath].f[id] = hits; break;
case 'statement': this.data[contractPath].s[id] = hits; break;
case 'branch': this.data[contractPath].b[id][data.locationIdx] = hits; break;
case 'assertPre': this.assertData[contractPath][id].preEvents = hits; break;
case 'assertPost': this.assertData[contractPath][id].postEvents = hits; break;
}
}
// Finally, interpret the assert pre/post events
const contractPaths = Object.keys(this.assertData);
for (let contractPath of contractPaths){
const contract = this.data[contractPath];
for (let i = 1; i <= Object.keys(contract.b).length; i++) {
const branch = this.assertData[contractPath][i];
// Was it an assert branch?
if (branch && branch.preEvents > 0){
this.data[contractPath].b[i] = [
branch.postEvents,
branch.preEvents - branch.postEvents
]
}
}
}
return Object.assign({}, this.data);
}
};
module.exports = Coverage;

@ -1,145 +0,0 @@
/**
* This file contains methods that produce a coverage map to pass to istanbul
* from data generated by `instrumentSolidity.js`
*/
const { AbiCoder } = require('web3-eth-abi');
const SolidityCoder = AbiCoder();
const path = require('path');
const keccak = require('keccakjs');
const fs = require('fs');
/**
* Converts solcover event data into an object that can be
* passed to istanbul to produce coverage reports.
* @type {CoverageMap}
*/
module.exports = class CoverageMap {
constructor() {
this.coverage = {};
this.assertCoverage = {};
this.lineTopics = [];
this.functionTopics = [];
this.branchTopics = [];
this.statementTopics = [];
this.assertPreTopics = [];
this.assertPostTopics = [];
}
/**
* Initializes a coverage map object for contract instrumented per `info` and located
* at `canonicalContractPath`
* @param {Object} info `info = getInstrumentedVersion(contract, fileName, true)`
* @param {String} canonicalContractPath target file location
* @return {Object} coverage map with all values set to zero
*/
addContract(info, canonicalContractPath) {
this.coverage[canonicalContractPath] = {
l: {},
path: canonicalContractPath,
s: {},
b: {},
f: {},
fnMap: {},
statementMap: {},
branchMap: {},
};
this.assertCoverage[canonicalContractPath] = { };
info.runnableLines.forEach((item, idx) => {
this.coverage[canonicalContractPath].l[info.runnableLines[idx]] = 0;
});
this.coverage[canonicalContractPath].fnMap = info.fnMap;
for (let x = 1; x <= Object.keys(info.fnMap).length; x++) {
this.coverage[canonicalContractPath].f[x] = 0;
}
this.coverage[canonicalContractPath].branchMap = info.branchMap;
for (let x = 1; x <= Object.keys(info.branchMap).length; x++) {
this.coverage[canonicalContractPath].b[x] = [0, 0];
this.assertCoverage[canonicalContractPath][x] = {
preEvents: 0,
postEvents: 0,
};
}
this.coverage[canonicalContractPath].statementMap = info.statementMap;
for (let x = 1; x <= Object.keys(info.statementMap).length; x++) {
this.coverage[canonicalContractPath].s[x] = 0;
}
const keccakhex = (x => {
const hash = new keccak(256); // eslint-disable-line new-cap
hash.update(x);
return hash.digest('hex');
});
const lineHash = keccakhex('__Coverage' + info.contractName + '(string,uint256)');
const fnHash = keccakhex('__FunctionCoverage' + info.contractName + '(string,uint256)');
const branchHash = keccakhex('__BranchCoverage' + info.contractName + '(string,uint256,uint256)');
const statementHash = keccakhex('__StatementCoverage' + info.contractName + '(string,uint256)');
const assertPreHash = keccakhex('__AssertPreCoverage' + info.contractName + '(string,uint256)');
const assertPostHash = keccakhex('__AssertPostCoverage' + info.contractName + '(string,uint256)');
this.lineTopics.push(lineHash);
this.functionTopics.push(fnHash);
this.branchTopics.push(branchHash);
this.statementTopics.push(statementHash);
this.assertPreTopics.push(assertPreHash);
this.assertPostTopics.push(assertPostHash);
const topics = `${lineHash}\n${fnHash}\n${branchHash}\n${statementHash}\n${assertPreHash}\n${assertPostHash}\n`;
fs.appendFileSync('./scTopics', topics);
}
/**
* Populates an empty coverage map with values derived from an array of events
* fired by instrumented contracts as they are tested
* @param {Array} events
* @param {String} pathPrefix relative path to host contracts eg: './../contracts'
* @return {Object} coverage map.
*/
generate(events, pathPrefix) {
for (let idx = 0; idx < events.length; idx++) {
const event = JSON.parse(events[idx]);
if (event.topics.filter(t => this.lineTopics.indexOf(t) >= 0).length > 0) {
const data = SolidityCoder.decodeParameters(['string', 'uint256'], `0x${event.data}`);
const canonicalContractPath = data[0];
this.coverage[canonicalContractPath].l[parseInt(data[1], 10)] += 1;
} else if (event.topics.filter(t => this.functionTopics.indexOf(t) >= 0).length > 0) {
const data = SolidityCoder.decodeParameters(['string', 'uint256'], `0x${event.data}`);
const canonicalContractPath = data[0];
this.coverage[canonicalContractPath].f[parseInt(data[1], 10)] += 1;
} else if (event.topics.filter(t => this.branchTopics.indexOf(t) >= 0).length > 0) {
const data = SolidityCoder.decodeParameters(['string', 'uint256', 'uint256'], `0x${event.data}`);
const canonicalContractPath = data[0];
this.coverage[canonicalContractPath].b[parseInt(data[1], 10)][parseInt(data[2], 10)] += 1;
} else if (event.topics.filter(t => this.statementTopics.indexOf(t) >= 0).length > 0) {
const data = SolidityCoder.decodeParameters(['string', 'uint256'], `0x${event.data}`);
const canonicalContractPath = data[0];
this.coverage[canonicalContractPath].s[parseInt(data[1], 10)] += 1;
} else if (event.topics.filter(t => this.assertPreTopics.indexOf(t) >= 0).length > 0) {
const data = SolidityCoder.decodeParameters(['string', 'uint256'], `0x${event.data}`);
const canonicalContractPath = data[0];
this.assertCoverage[canonicalContractPath][parseInt(data[1], 10)].preEvents += 1;
} else if (event.topics.filter(t => this.assertPostTopics.indexOf(t) >= 0).length > 0) {
const data = SolidityCoder.decodeParameters(['string', 'uint256'], `0x${event.data}`);
const canonicalContractPath = data[0];
this.assertCoverage[canonicalContractPath][parseInt(data[1], 10)].postEvents += 1;
}
}
// Finally, interpret the assert pre/post events
Object.keys(this.assertCoverage).forEach(contractPath => {
const contract = this.coverage[contractPath];
for (let i = 1; i <= Object.keys(contract.b).length; i++) {
const branch = this.assertCoverage[contractPath][i];
if (branch.preEvents > 0) {
// Then it was an assert branch.
this.coverage[contractPath].b[i] = [branch.postEvents, branch.preEvents - branch.postEvents];
}
}
});
return Object.assign({}, this.coverage);
}
};

@ -1,77 +1,203 @@
const injector = {};
// These functions are used to actually inject the instrumentation events.
injector.callEvent = function injectCallEvent(contract, fileName, injectionPoint) {
const linecount = (contract.instrumented.slice(0, injectionPoint).match(/\n/g) || []).length + 1;
contract.runnableLines.push(linecount);
contract.instrumented = contract.instrumented.slice(0, injectionPoint) +
'emit __Coverage' + contract.contractName + '(\'' + fileName + '\',' + linecount + ');\n' +
contract.instrumented.slice(injectionPoint);
};
injector.callFunctionEvent = function injectCallFunctionEvent(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) +
'emit __FunctionCoverage' + contract.contractName + '(\'' + fileName + '\',' + injection.fnId + ');\n' +
contract.instrumented.slice(injectionPoint);
};
injector.callBranchEvent = function injectCallFunctionEvent(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) +
(injection.openBracket ? '{' : '') +
'emit __BranchCoverage' + contract.contractName + '(\'' + fileName + '\',' + injection.branchId + ',' + injection.locationIdx + ')' +
(injection.comma ? ',' : ';') +
contract.instrumented.slice(injectionPoint);
};
injector.callEmptyBranchEvent = function injectCallEmptyBranchEvent(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) +
'else { emit __BranchCoverage' + contract.contractName + '(\'' + fileName + '\',' + injection.branchId + ',' + injection.locationIdx + ');}\n' +
contract.instrumented.slice(injectionPoint);
};
injector.callAssertPreEvent = function callAssertPreEvent(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) +
'emit __AssertPreCoverage' + contract.contractName + '(\'' + fileName + '\',' + injection.branchId + ');\n' +
contract.instrumented.slice(injectionPoint);
};
injector.callAssertPostEvent = function callAssertPostEvent(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) +
'emit __AssertPostCoverage' + contract.contractName + '(\'' + fileName + '\',' + injection.branchId + ');\n' +
contract.instrumented.slice(injectionPoint);
const web3Utils = require("web3-utils");
class Injector {
constructor(){
this.hashCounter = 0;
}
_split(contract, injectionPoint){
return {
start: contract.instrumented.slice(0, injectionPoint),
end: contract.instrumented.slice(injectionPoint)
}
}
_getInjectable(fileName, hash, type){
return `${this._getMethodIdentifier(fileName)}(${hash}); /* ${type} */ \n`;
}
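/**
 * Generates a unique bytes32 hash for each injection point. These hashes key the
 * instrumentation map and are what the VM step listener watches for on the stack.
 * @param  {String} fileName
 * @return {String} 0x-prefixed keccak256 hash
 */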
_getHash(fileName) {
this.hashCounter++;
return web3Utils.keccak256(`${fileName}:${this.hashCounter}`);
}
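/**
 * Returns the name of the no-op method injected into each instrumented file,
 * derived from a truncated hash of the file path (e.g. `coverage_0x12345678`).
 * @param  {String} fileName
 * @return {String} method identifier
 */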
_getMethodIdentifier(fileName){
return `coverage_${web3Utils.keccak256(fileName).slice(0,10)}`
}
_getInjectionComponents(contract, injectionPoint, fileName, type){
const { start, end } = this._split(contract, injectionPoint);
const hash = this._getHash(fileName)
const injectable = this._getInjectable(fileName, hash, type);
return {
start: start,
end: end,
hash: hash,
injectable: injectable
}
}
/**
* Generates the definition of the no-op method which receives the injected hashes.
* The definition is the same everywhere in a file (its name is derived from the file path).
* @param {String} fileName
* @return {String} ex: function coverage_0x12345678(bytes32 c__0x12345678) public pure {}
*/
_getHashMethodDefinition(fileName){
const hash = web3Utils.keccak256(fileName).slice(0,10);
const method = this._getMethodIdentifier(fileName);
return `\nfunction ${method}(bytes32 c__${hash}) public pure {}\n`;
}
injectLine(contract, fileName, injectionPoint, injection, instrumentation){
const type = 'line';
const { start, end } = this._split(contract, injectionPoint);
const newLines = start.match(/\n/g);
const linecount = ( newLines || []).length + 1;
contract.runnableLines.push(linecount);
const hash = this._getHash(fileName)
const injectable = this._getInjectable(fileName, hash, type);
instrumentation[hash] = {
id: linecount,
type: type,
contractPath: fileName,
hits: 0
}
contract.instrumented = `${start}${injectable}${end}`;
}
injectStatement(contract, fileName, injectionPoint, injection, instrumentation) {
const type = 'statement';
const {
start,
end,
hash,
injectable
} = this._getInjectionComponents(contract, injectionPoint, fileName, type);
instrumentation[hash] = {
id: injection.statementId,
type: type,
contractPath: fileName,
hits: 0
}
contract.instrumented = `${start}${injectable}${end}`;
};
injectFunction(contract, fileName, injectionPoint, injection, instrumentation){
const type = 'function';
const {
start,
end,
hash,
injectable
} = this._getInjectionComponents(contract, injectionPoint, fileName, type);
instrumentation[hash] = {
id: injection.fnId,
type: type,
contractPath: fileName,
hits: 0
}
contract.instrumented = `${start}${injectable}${end}`;
}
injectBranch(contract, fileName, injectionPoint, injection, instrumentation){
const type = 'branch';
const {
start,
end,
hash,
injectable
} = this._getInjectionComponents(contract, injectionPoint, fileName, type);
instrumentation[hash] = {
id: injection.branchId,
locationIdx: injection.locationIdx,
type: type,
contractPath: fileName,
hits: 0
}
contract.instrumented = `${start}${injectable}${end}`;
}
injectEmptyBranch(contract, fileName, injectionPoint, injection, instrumentation) {
const type = 'branch';
const {
start,
end,
hash,
injectable
} = this._getInjectionComponents(contract, injectionPoint, fileName, type);
instrumentation[hash] = {
id: injection.branchId,
locationIdx: injection.locationIdx,
type: type,
contractPath: fileName,
hits: 0
}
contract.instrumented = `${start}else { ${injectable}}${end}`;
}
injectAssertPre(contract, fileName, injectionPoint, injection, instrumentation) {
const type = 'assertPre';
const {
start,
end,
hash,
injectable
} = this._getInjectionComponents(contract, injectionPoint, fileName, type);
instrumentation[hash] = {
id: injection.branchId,
type: type,
contractPath: fileName,
hits: 0
}
contract.instrumented = `${start}${injectable}${end}`;
}
injectAssertPost(contract, fileName, injectionPoint, injection, instrumentation) {
const type = 'assertPost';
const {
start,
end,
hash,
injectable
} = this._getInjectionComponents(contract, injectionPoint, fileName, type);
instrumentation[hash] = {
id: injection.branchId,
type: type,
contractPath: fileName,
hits: 0
}
contract.instrumented = `${start}${injectable}${end}`;
}
injectHashMethod(contract, fileName, injectionPoint, injection, instrumentation){
const start = contract.instrumented.slice(0, injectionPoint);
const end = contract.instrumented.slice(injectionPoint);
contract.instrumented = `${start}${this._getHashMethodDefinition(fileName)}${end}`;
}
};
injector.openParen = function injectOpenParen(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) + '(' + contract.instrumented.slice(injectionPoint);
};
injector.closeParen = function injectCloseParen(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) + ')' + contract.instrumented.slice(injectionPoint);
};
injector.literal = function injectLiteral(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) + injection.string + contract.instrumented.slice(injectionPoint);
};
injector.statement = function injectStatement(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) +
'emit __StatementCoverage' + contract.contractName + '(\'' + fileName + '\',' + injection.statementId + ');\n' +
contract.instrumented.slice(injectionPoint);
};
injector.eventDefinition = function injectEventDefinition(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) +
'event __Coverage' + contract.contractName + '(string fileName, uint256 lineNumber);\n' +
'event __FunctionCoverage' + contract.contractName + '(string fileName, uint256 fnId);\n' +
'event __StatementCoverage' + contract.contractName + '(string fileName, uint256 statementId);\n' +
'event __BranchCoverage' + contract.contractName + '(string fileName, uint256 branchId, uint256 locationIdx);\n' +
'event __AssertPreCoverage' + contract.contractName + '(string fileName, uint256 branchId);\n' +
'event __AssertPostCoverage' + contract.contractName + '(string fileName, uint256 branchId);\n' +
contract.instrumented.slice(injectionPoint);
};
module.exports = injector;
module.exports = Injector;

@ -1,65 +0,0 @@
const SolidityParser = require('solidity-parser-antlr');
const preprocessor = require('./preprocessor');
const injector = require('./injector');
const parse = require('./parse');
const path = require('path');
module.exports = function instrumentSolidity(contractSource, fileName) {
const contract = {};
contract.source = contractSource;
contract.instrumented = contractSource;
contract.runnableLines = [];
contract.fnMap = {};
contract.fnId = 0;
contract.branchMap = {};
contract.branchId = 0;
contract.statementMap = {};
contract.statementId = 0;
contract.injectionPoints = {};
// First, we run over the original contract to get the source mapping.
let ast = SolidityParser.parse(contract.source, {range: true});
parse[ast.type](contract, ast);
const retValue = JSON.parse(JSON.stringify(contract));
// Now, we reset almost everything and use the preprocessor first to increase our effectiveness.
contract.runnableLines = [];
contract.fnMap = {};
contract.fnId = 0;
contract.branchMap = {};
contract.branchId = 0;
contract.statementMap = {};
contract.statementId = 0;
contract.injectionPoints = {};
contract.preprocessed = preprocessor.run(contract.source);
contract.instrumented = contract.preprocessed;
ast = SolidityParser.parse(contract.preprocessed, {range: true});
const contractStatement = ast.children.filter(node => (node.type === 'ContractDefinition' ||
node.type === 'LibraryDefinition' ||
node.type === 'InterfaceDefinition'));
contract.contractName = contractStatement[0].name;
parse[ast.type](contract, ast);
// We have to iterate through these injection points in descending order to not mess up
// the injection process.
const sortedPoints = Object.keys(contract.injectionPoints).sort((a, b) => b - a);
sortedPoints.forEach(injectionPoint => {
// Line instrumentation has to happen first
contract.injectionPoints[injectionPoint].sort((a, b) => {
const eventTypes = ['openParen', 'callBranchEvent', 'callEmptyBranchEvent', 'callEvent'];
return eventTypes.indexOf(b.type) - eventTypes.indexOf(a.type);
});
contract.injectionPoints[injectionPoint].forEach(injection => {
injector[injection.type](contract, fileName, injectionPoint, injection);
});
});
retValue.runnableLines = contract.runnableLines;
retValue.contract = contract.instrumented;
retValue.contractName = contractStatement[0].name;
return retValue;
};

@ -1,254 +1,110 @@
const instrumenter = {};
// These functions work out where in an expression we can inject our
// instrumentation events.
const SolidityParser = require('solidity-parser-antlr');
const path = require('path');
const Injector = require('./injector');
const preprocess = require('./preprocessor');
const parse = require('./parse');
/**
* Top level controller for the instrumentation sequence. Also hosts the instrumentation data map
* which the vm step listener writes its output to. This only needs to be instantiated once
* per coverage run.
*/
class Instrumenter {
constructor(){
this.instrumentationData = {};
this.injector = new Injector();
}
function createOrAppendInjectionPoint(contract, key, value) {
if (contract.injectionPoints[key]) {
contract.injectionPoints[key].push(value);
} else {
contract.injectionPoints[key] = [value];
_isRootNode(node){
return (node.type === 'ContractDefinition' ||
node.type === 'LibraryDefinition' ||
node.type === 'InterfaceDefinition');
}
}
instrumenter.prePosition = function prePosition(expression) {
if (expression.right.type === 'ConditionalExpression' &&
expression.left.type === 'MemberExpression') {
expression.range[0] -= 2;
_initializeCoverageFields(contract){
contract.runnableLines = [];
contract.fnMap = {};
contract.fnId = 0;
contract.branchMap = {};
contract.branchId = 0;
contract.statementMap = {};
contract.statementId = 0;
contract.injectionPoints = {};
}
};
instrumenter.instrumentAssignmentExpression = function instrumentAssignmentExpression(contract, expression) {
// This is suspended for 0.5.0 which tries to accommodate the new `emit` keyword.
// Solc is not allowing us to use the construction `emit SomeEvent()` within the parens :/
return;
// --------------------------------------------------------------------------------------------
// The only time we instrument an assignment expression is if there's a conditional expression on
// the right
/*if (expression.right.type === 'ConditionalExpression') {
if (expression.left.type === 'DeclarativeExpression' || expression.left.type === 'Identifier') {
// Then we need to go from bytes32 varname = (conditional expression)
// to bytes32 varname; (,varname) = (conditional expression)
createOrAppendInjectionPoint(contract, expression.left.range[1], {
type: 'literal', string: '; (,' + expression.left.name + ')',
});
instrumenter.instrumentConditionalExpression(contract, expression.right);
} else if (expression.left.type === 'MemberExpression') {
createOrAppendInjectionPoint(contract, expression.left.range[0], {
type: 'literal', string: '(,',
/**
* Per `contractSource`:
* - wraps any unbracketed singleton consequents of if, for, while stmts (preprocessor.js)
* - walks the file's AST, creating an instrumentation map (parse.js, registrar.js)
* - injects `instrumentation` solidity statements into the target solidity source (injector.js)
*
* @param {String} contractSource solidity source code
* @param {String} fileName absolute path to source file
* @return {Object} instrumented `contract` object
* {
* contract: instrumented solidity source code,
* contractName: contract name,
* runnableLines: integer
* }
*
*/
instrument(contractSource, fileName) {
const contract = {};
contract.source = contractSource;
contract.instrumented = contractSource;
this._initializeCoverageFields(contract);
// First, we run over the original contract to get the source mapping.
let ast = SolidityParser.parse(contract.source, {range: true});
parse[ast.type](contract, ast);
const retValue = JSON.parse(JSON.stringify(contract)); // Possibly apotropaic.
// Now, we reset almost everything and use the preprocessor to increase our effectiveness.
this._initializeCoverageFields(contract);
contract.instrumented = preprocess(contract.source);
// Walk the AST, recording injection points
ast = SolidityParser.parse(contract.instrumented, {range: true});
const root = ast.children.filter(node => this._isRootNode(node));
// Handle contracts which only contain import statements
contract.contractName = (root.length) ? root[0].name : null;
parse[ast.type](contract, ast);
// We have to iterate through these points in descending order
const sortedPoints = Object.keys(contract.injectionPoints).sort((a, b) => b - a);
sortedPoints.forEach(injectionPoint => {
// Line instrumentation has to happen first
contract.injectionPoints[injectionPoint].sort((a, b) => {
const injections = ['injectBranch', 'injectEmptyBranch', 'injectLine'];
return injections.indexOf(b.type) - injections.indexOf(a.type);
});
createOrAppendInjectionPoint(contract, expression.left.range[1], {
type: 'literal', string: ')',
contract.injectionPoints[injectionPoint].forEach(injection => {
this.injector[injection.type](
contract,
fileName,
injectionPoint,
injection,
this.instrumentationData
);
});
instrumenter.instrumentConditionalExpression(contract, expression.right);
} else {
const err = 'Error instrumenting assignment expression @ solidity-coverage/lib/instrumenter.js';
console.log(err, contract, expression.left);
process.exit();
}
}*/
};
instrumenter.instrumentConditionalExpression = function instrumentConditionalExpression(contract, expression) {
// ----------------------------------------------------------------------------------------------
// This is suspended for 0.5.0 which tries to accommodate the new `emit` keyword.
// Solc is not allowing us to use the construction `emit SomeEvent()` within the parens :/
// Very sad, this is the coolest thing in here.
return;
// ----------------------------------------------------------------------------------------------
/*contract.branchId += 1;
const startline = (contract.instrumented.slice(0, expression.range[0]).match(/\n/g) || []).length + 1;
const startcol = expression.range[0] - contract.instrumented.slice(0, expression.range[0]).lastIndexOf('\n') - 1;
const consequentStartCol = startcol + (contract, expression.trueBody.range[0] - expression.range[0]);
const consequentEndCol = consequentStartCol + (contract, expression.trueBody.range[1] - expression.trueBody.range[0]);
const alternateStartCol = startcol + (contract, expression.falseBody.range[0] - expression.range[0]);
const alternateEndCol = alternateStartCol + (contract, expression.falseBody.range[1] - expression.falseBody.range[0]);
// NB locations for conditional branches in istanbul are length 1 and associated with the : and ?.
contract.branchMap[contract.branchId] = {
line: startline,
type: 'cond-expr',
locations: [{
start: {
line: startline, column: consequentStartCol,
},
end: {
line: startline, column: consequentEndCol,
},
}, {
start: {
line: startline, column: alternateStartCol,
},
end: {
line: startline, column: alternateEndCol,
},
}],
};
// Right, this could be being used just by itself or as an assignment. In the case of the latter, because
// the comma operator doesn't exist, we're going to have to get funky.
// if we're on a line by ourselves, this is easier
//
// Now if we've got to wrap the expression it's being set equal to, do that...
// Wrap the consequent
createOrAppendInjectionPoint(contract, expression.trueBody.range[0], {
type: 'openParen',
});
createOrAppendInjectionPoint(contract, expression.trueBody.range[0], {
type: 'callBranchEvent', comma: true, branchId: contract.branchId, locationIdx: 0,
});
createOrAppendInjectionPoint(contract, expression.trueBody.range[1], {
type: 'closeParen',
});
// Wrap the alternate
createOrAppendInjectionPoint(contract, expression.falseBody.range[0], {
type: 'openParen',
});
createOrAppendInjectionPoint(contract, expression.falseBody.range[0], {
type: 'callBranchEvent', comma: true, branchId: contract.branchId, locationIdx: 1,
});
createOrAppendInjectionPoint(contract, expression.falseBody.range[1], {
type: 'closeParen',
});*/
};
instrumenter.instrumentStatement = function instrumentStatement(contract, expression) {
contract.statementId += 1;
// We need to work out the lines and columns the expression starts and ends
const startline = (contract.instrumented.slice(0, expression.range[0]).match(/\n/g) || []).length + 1;
const startcol = expression.range[0] - contract.instrumented.slice(0, expression.range[0]).lastIndexOf('\n') - 1;
const expressionContent = contract.instrumented.slice(expression.range[0], expression.range[1] + 1);
const endline = startline + (contract, expressionContent.match('/\n/g') || []).length;
let endcol;
if (expressionContent.lastIndexOf('\n') >= 0) {
endcol = contract.instrumented.slice(expressionContent.lastIndexOf('\n'), expression.range[1]).length;
} else {
endcol = startcol + (contract, expressionContent.length - 1);
}
contract.statementMap[contract.statementId] = {
start: {
line: startline, column: startcol,
},
end: {
line: endline, column: endcol,
},
};
createOrAppendInjectionPoint(contract, expression.range[0], {
type: 'statement', statementId: contract.statementId,
});
};
instrumenter.instrumentLine = function instrumentLine(contract, expression) {
// what's the position of the most recent newline?
const startchar = expression.range[0];
const endchar = expression.range[1] + 1;
const lastNewLine = contract.instrumented.slice(0, startchar).lastIndexOf('\n');
const nextNewLine = startchar + contract.instrumented.slice(startchar).indexOf('\n');
const contractSnipped = contract.instrumented.slice(lastNewLine, nextNewLine);
const restOfLine = contract.instrumented.slice(endchar, nextNewLine);
if (contract.instrumented.slice(lastNewLine, startchar).trim().length === 0 &&
(restOfLine.replace(';', '').trim().length === 0 || restOfLine.replace(';', '').trim().substring(0, 2) === '//')) {
createOrAppendInjectionPoint(contract, lastNewLine + 1, {
type: 'callEvent',
});
} else if (contract.instrumented.slice(lastNewLine, startchar).replace('{', '').trim().length === 0 &&
contract.instrumented.slice(endchar, nextNewLine).replace(/[;}]/g, '').trim().length === 0) {
createOrAppendInjectionPoint(contract, expression.range[0], {
type: 'callEvent',
});
}
// Is everything before us and after us on this line whitespace?
};
instrumenter.instrumentFunctionDeclaration = function instrumentFunctionDeclaration(contract, expression) {
contract.fnId += 1;
const startline = (contract.instrumented.slice(0, expression.range[0]).match(/\n/g) || []).length + 1;
// We need to work out the lines and columns the function declaration starts and ends
const startcol = expression.range[0] - contract.instrumented.slice(0, expression.range[0]).lastIndexOf('\n') - 1;
const endlineDelta = contract.instrumented.slice(expression.range[0]).indexOf('{');
const functionDefinition = contract.instrumented.slice(expression.range[0], expression.range[0] + endlineDelta);
const endline = startline + (functionDefinition.match(/\n/g) || []).length;
const endcol = functionDefinition.length - functionDefinition.lastIndexOf('\n');
contract.fnMap[contract.fnId] = {
name: expression.isConstructor ? 'constructor' : expression.name,
line: startline,
loc: {
start: {
line: startline, column: startcol,
},
end: {
line: endline, column: endcol,
},
},
};
createOrAppendInjectionPoint(contract, expression.range[0] + endlineDelta + 1, {
type: 'callFunctionEvent', fnId: contract.fnId,
});
};
instrumenter.addNewBranch = function addNewBranch(contract, expression) {
contract.branchId += 1;
const startline = (contract.instrumented.slice(0, expression.range[0]).match(/\n/g) || []).length + 1;
const startcol = expression.range[0] - contract.instrumented.slice(0, expression.range[0]).lastIndexOf('\n') - 1;
// NB locations for if branches in istanbul are zero length and associated with the start of the if.
contract.branchMap[contract.branchId] = {
line: startline,
type: 'if',
locations: [{
start: {
line: startline, column: startcol,
},
end: {
line: startline, column: startcol,
},
}, {
start: {
line: startline, column: startcol,
},
end: {
line: startline, column: startcol,
},
}],
};
};
instrumenter.instrumentAssertOrRequire = function instrumentAssertOrRequire(contract, expression) {
instrumenter.addNewBranch(contract, expression);
createOrAppendInjectionPoint(contract, expression.range[0], {
type: 'callAssertPreEvent', branchId: contract.branchId,
});
createOrAppendInjectionPoint(contract, expression.range[1] + 2, {
type: 'callAssertPostEvent', branchId: contract.branchId,
});
};
instrumenter.instrumentIfStatement = function instrumentIfStatement(contract, expression) {
instrumenter.addNewBranch(contract, expression);
if (expression.trueBody.type === 'Block') {
createOrAppendInjectionPoint(contract, expression.trueBody.range[0] + 1, {
type: 'callBranchEvent', branchId: contract.branchId, locationIdx: 0,
});
}
if (expression.falseBody && expression.falseBody.type === 'IfStatement') {
// Do nothing - we must be pre-preprocessor, so don't bother instrumenting -
// when we're actually instrumenting, this will never happen (we've wrapped it in
// a block statement)
} else if (expression.falseBody && expression.falseBody.type === 'Block') {
createOrAppendInjectionPoint(contract, expression.falseBody.range[0] + 1, {
type: 'callBranchEvent', branchId: contract.branchId, locationIdx: 1,
});
} else {
createOrAppendInjectionPoint(contract, expression.trueBody.range[1] + 1, {
type: 'callEmptyBranchEvent', branchId: contract.branchId, locationIdx: 1,
});
retValue.runnableLines = contract.runnableLines;
retValue.contract = contract.instrumented;
retValue.contractName = contract.contractName;
return retValue;
}
};
}
module.exports = instrumenter;
module.exports = Instrumenter;
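For orientation: the calls above all funnel into createOrAppendInjectionPoint, which accumulates work for the injector in contract.injectionPoints, keyed by character offset into contract.instrumented. A minimal sketch of the resulting shape (the offsets below are illustrative only):

// Illustrative only: real keys depend on the source being instrumented.
// Each key is a character position; each value is the list of injections
// registered at that position, applied later by the injector.
const exampleInjectionPoints = {
  142: [ { type: 'callFunctionEvent', fnId: 1 } ],
  171: [
    { type: 'statement', statementId: 1 },
    { type: 'callEvent' },
  ],
  205: [ { type: 'callBranchEvent', branchId: 1, locationIdx: 0 } ],
};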

@ -1,39 +1,36 @@
/* eslint no-unused-expressions: ["error", { "allowShortCircuit": true }] */
/**
* Methods in this file walk the AST and call the instrumenter
* functions where appropriate, which determine where to inject events.
* (Listed in alphabetical order)
*/
const Registrar = require('./registrar');
const register = new Registrar();
const parse = {};
const instrumenter = require('./instrumenter');
parse.AssignmentExpression = function parseAssignmentExpression(contract, expression) {
instrumenter.prePosition(expression);
instrumenter.instrumentStatement(contract, expression);
instrumenter.instrumentAssignmentExpression(contract, expression);
parse.AssignmentExpression = function(contract, expression) {
register.statement(contract, expression);
};
parse.Block = function parseBlock(contract, expression) {
parse.Block = function(contract, expression) {
for (let x = 0; x < expression.statements.length; x++) {
instrumenter.instrumentLine(contract, expression.statements[x]);
register.line(contract, expression.statements[x]);
parse[expression.statements[x].type] &&
parse[expression.statements[x].type](contract, expression.statements[x]);
}
};
parse.BinaryOperation = function parseBinaryOperation(contract, expression) {
instrumenter.instrumentStatement(contract, expression);
parse.BinaryOperation = function(contract, expression) {
register.statement(contract, expression);
}
parse.FunctionCall = function parseCallExpression(contract, expression) {
// In any given chain of call expressions, only the last one will fail this check. This makes sure
// we don't instrument a chain of expressions multiple times.
parse.FunctionCall = function(contract, expression) {
// In any given chain of call expressions, only the last one will fail this check.
// This makes sure we don't instrument a chain of expressions multiple times.
if (expression.expression.type !== 'FunctionCall') {
instrumenter.instrumentStatement(contract, expression);
register.statement(contract, expression);
if (expression.expression.name === 'assert' || expression.expression.name === 'require') {
instrumenter.instrumentAssertOrRequire(contract, expression);
register.assertOrRequire(contract, expression);
}
parse[expression.expression.type] &&
parse[expression.expression.type](contract, expression.expression);
@ -43,26 +40,28 @@ parse.FunctionCall = function parseCallExpression(contract, expression) {
}
};
parse.ConditionalExpression = function parseConditionalExpression(contract, expression) {
instrumenter.instrumentStatement(contract, expression);
instrumenter.instrumentConditionalExpression(contract, expression);
parse.Conditional = function(contract, expression) {
register.statement(contract, expression);
// TODO: Investigate node structure
// There are potential substatements here we aren't measuring
};
parse.ContractDefinition = function ParseContractStatement(contract, expression) {
parse.ContractDefinition = function(contract, expression) {
parse.ContractOrLibraryStatement(contract, expression);
};
parse.ContractOrLibraryStatement = function parseContractOrLibraryStatement(contract, expression) {
// From the start of this contract statement, find the first '{', and inject there.
const injectionPoint = expression.range[0] + contract.instrumented.slice(expression.range[0]).indexOf('{') + 1;
if (contract.injectionPoints[injectionPoint]) {
contract.injectionPoints[expression.range[0] + contract.instrumented.slice(expression.range[0]).indexOf('{') + 1].push({
type: 'eventDefinition',
});
} else {
contract.injectionPoints[expression.range[0] + contract.instrumented.slice(expression.range[0]).indexOf('{') + 1] = [{
type: 'eventDefinition',
}];
parse.ContractOrLibraryStatement = function(contract, expression) {
// We need to define a method to pass coverage hashes into at top of each contract.
// This lets us get a fresh stack for the hash and avoid stack-too-deep errors.
if (expression.kind !== 'interface'){
const start = expression.range[0];
const end = contract.instrumented.slice(expression.range[0]).indexOf('{') + 1;
const loc = start + end;
(contract.injectionPoints[loc])
? contract.injectionPoints[loc].push({ type: 'injectHashMethod'})
: contract.injectionPoints[loc] = [{ type: 'injectHashMethod'}];
}
if (expression.subNodes) {
@ -73,33 +72,33 @@ parse.ContractOrLibraryStatement = function parseContractOrLibraryStatement(cont
}
};
parse.EmitStatement = function parseExpressionStatement(contract, expression){
instrumenter.instrumentStatement(contract, expression);
parse.EmitStatement = function(contract, expression){
register.statement(contract, expression);
};
parse.ExpressionStatement = function parseExpressionStatement(contract, content) {
parse.ExpressionStatement = function(contract, content) {
parse[content.expression.type] &&
parse[content.expression.type](contract, content.expression);
};
parse.ForStatement = function parseForStatement(contract, expression) {
instrumenter.instrumentStatement(contract, expression);
parse.ForStatement = function(contract, expression) {
register.statement(contract, expression);
parse[expression.body.type] &&
parse[expression.body.type](contract, expression.body);
};
parse.FunctionDefinition = function parseFunctionDefinition(contract, expression) {
parse.FunctionDefinition = function(contract, expression) {
parse.Modifiers(contract, expression.modifiers);
if (expression.body) {
instrumenter.instrumentFunctionDeclaration(contract, expression);
register.functionDeclaration(contract, expression);
parse[expression.body.type] &&
parse[expression.body.type](contract, expression.body);
}
};
parse.IfStatement = function parseIfStatement(contract, expression) {
instrumenter.instrumentStatement(contract, expression);
instrumenter.instrumentIfStatement(contract, expression);
parse.IfStatement = function(contract, expression) {
register.statement(contract, expression);
register.ifStatement(contract, expression);
parse[expression.trueBody.type] &&
parse[expression.trueBody.type](contract, expression.trueBody);
@ -109,20 +108,13 @@ parse.IfStatement = function parseIfStatement(contract, expression) {
}
};
parse.InterfaceStatement = function parseInterfaceStatement(contract, expression) {
parse.ContractOrLibraryStatement(contract, expression);
};
parse.LibraryStatement = function parseLibraryStatement(contract, expression) {
parse.ContractOrLibraryStatement(contract, expression);
};
parse.MemberExpression = function parseMemberExpression(contract, expression) {
// TODO: Investigate Node structure
/*parse.MemberAccess = function(contract, expression) {
parse[expression.object.type] &&
parse[expression.object.type](contract, expression.object);
};
};*/
parse.Modifiers = function parseModifier(contract, modifiers) {
parse.Modifiers = function(contract, modifiers) {
if (modifiers) {
modifiers.forEach(modifier => {
parse[modifier.type] && parse[modifier.type](contract, modifier);
@ -130,52 +122,45 @@ parse.Modifiers = function parseModifier(contract, modifiers) {
}
};
parse.ModifierDefinition = function parseModifierDefinition(contract, expression) {
instrumenter.instrumentFunctionDeclaration(contract, expression);
parse.ModifierDefinition = function(contract, expression) {
register.functionDeclaration(contract, expression);
parse[expression.body.type] &&
parse[expression.body.type](contract, expression.body);
};
parse.NewExpression = function parseNewExpression(contract, expression) {
parse.NewExpression = function(contract, expression) {
parse[expression.typeName.type] &&
parse[expression.typeName.type](contract, expression.typeName);
};
parse.SourceUnit = function parseSourceUnit(contract, expression) {
parse.SourceUnit = function(contract, expression) {
expression.children.forEach(construct => {
parse[construct.type] &&
parse[construct.type](contract, construct);
});
};
parse.ReturnStatement = function parseReturnStatement(contract, expression) {
instrumenter.instrumentStatement(contract, expression);
parse.ReturnStatement = function(contract, expression) {
register.statement(contract, expression);
};
parse.UnaryExpression = function parseUnaryExpression(contract, expression) {
parse[expression.argument.type] &&
parse[expression.argument.type](contract, expression.argument);
};
// TODO:Investigate node structure
/*parse.UnaryOperation = function(contract, expression) {
parse[subExpression.argument.type] &&
parse[subExpression.argument.type](contract, expression.argument);
};*/
parse.UsingStatement = function parseUsingStatement(contract, expression) {
parse.UsingStatement = function (contract, expression) {
parse[expression.for.type] &&
parse[expression.for.type](contract, expression.for);
};
parse.VariableDeclarationStatement = function parseVariableDeclarationStatement(contract, expression) {
instrumenter.instrumentStatement(contract, expression);
// parse[expression.declarations[0].id.type] &&
// parse[expression.declarations[0].id.type](contract, expression.declarations[0].id);
};
parse.VariableDeclarationTuple = function parseVariableDeclarationTuple(contract, expression) {
instrumenter.instrumentStatement(contract, expression);
parse[expression.declarations[0].id.type] &&
parse[expression.declarations[0].id.type](contract, expression.declarations[0].id);
parse.VariableDeclarationStatement = function (contract, expression) {
register.statement(contract, expression);
};
parse.WhileStatement = function parseWhileStatement(contract, expression) {
instrumenter.instrumentStatement(contract, expression);
parse.WhileStatement = function (contract, expression) {
register.statement(contract, expression);
parse[expression.body.type] &&
parse[expression.body.type](contract, expression.body);
};

@ -1,4 +1,3 @@
const SolExplore = require('sol-explore');
const SolidityParser = require('solidity-parser-antlr');
const crRegex = /[\r\n ]+$/g;
@ -6,52 +5,31 @@ const OPEN = '{';
const CLOSE = '}';
/**
* Splices enclosing brackets into `contract` around `expression`;
* Inserts an open or close brace, e.g. `{` or `}`, at a specified position in the solidity source
*
* @param {String} contract solidity source
* @param {Object} node AST node to bracket
* @param {Object} item AST node to bracket
* @param {Number} offset tracks the number of previously inserted braces
* @return {String} contract
*/
function insertBrace(contract, item, offset) {
return contract.slice(0,item.pos + offset) + item.type + contract.slice(item.pos + offset)
}
/** Remove 'pure' and 'view' from the function declaration.
* @param {String} contract solidity source
* @param {Object} function AST node
* @return {String} contract with the modifiers removed from the given function.
*/
function removePureView(contract, node){
let fDefStart = node.range[0];
if (node.body){
fDefEnd = node.body.range[0];
} else if (node.returnParameters) {
fDefEnd = node.returnParameters.range[0];
} else {
fDefEnd = node.range[1];
}
let fDef = contract.slice(fDefStart, fDefEnd + 1);
fDef = fDef.replace(/\bview\b/i, ' ');
fDef = fDef.replace(/\bpure\b/i, ' ');
return contract.slice(0, fDefStart) + fDef + contract.slice(fDefEnd + 1);
}
/**
* Locates unbracketed singleton statements attached to if, else, for and while statements
* and brackets them. Instrumenter needs to inject events at these locations and having
* them pre-bracketed simplifies the process. Each time a modification is made the contract
* is passed back to the parser and re-walked because all the starts and ends get shifted.
*
* Also removes pure and view modifiers.
* them pre-bracketed simplifies the process.
*
* @param {String} contract solidity code
* @return {String} contract
* @param {String} contract solidity source code
* @return {String} modified solidity source code
*/
module.exports.run = function r(contract) {
function preprocess(contract) {
try {
const ast = SolidityParser.parse(contract, { range: true });
insertions = [];
viewPureToRemove = [];
SolidityParser.visit(ast, {
IfStatement: function(node) {
if (node.trueBody.type !== 'Block') {
@ -74,23 +52,18 @@ module.exports.run = function r(contract) {
insertions.push({type: OPEN, pos: node.body.range[0]});
insertions.push({type: CLOSE, pos: node.body.range[1] + 1});
}
},
FunctionDefinition: function(node){
if (node.stateMutability === 'view' || node.stateMutability === 'pure'){
viewPureToRemove.push(node);
}
}
})
// Firstly, remove pures and views. Note that we replace 'pure' and 'view' with spaces, so
// character counts remain the same, so we can do this in any order
viewPureToRemove.forEach(node => contract = removePureView(contract, node));
// Sort the insertion points.
insertions.sort((a,b) => a.pos - b.pos);
insertions.forEach((item, idx) => contract = insertBrace(contract, item, idx));
} catch (err) {
contract = err;
keepRunning = false;
}
return contract;
};
module.exports = preprocess;
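A minimal usage sketch of the rewritten preprocessor (the singleton-statement example below and its expected output are illustrative):

const preprocess = require('./lib/preprocessor');

// An unbracketed singleton `if` body...
const source = 'contract A { function f(uint x) public { if (x > 0) x = 1; } }';

// ...comes back bracketed so the instrumenter has a safe injection site,
// roughly: 'contract A { function f(uint x) public { if (x > 0) {x = 1;} } }'
const bracketed = preprocess(source);
console.log(bracketed);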

@ -0,0 +1,238 @@
/**
* Registers injection points (e.g. source location, type) and their associated data with
* a contract / instrumentation target. Run during the `parse` step. This data is
* consumed by the Injector as it modifies the source code in instrumentation's final step.
*/
class Registrar {
constructor(){}
/**
* Adds injection point to injection points map
* @param {Object} contract instrumentation target
* @param {String} key injection point `type`
* @param {Number} value injection point `id`
*/
_createInjectionPoint(contract, key, value) {
(contract.injectionPoints[key])
? contract.injectionPoints[key].push(value)
: contract.injectionPoints[key] = [value];
}
/**
* Registers injections for statement measurements
* @param {Object} contract instrumentation target
* @param {Object} expression AST node
*/
statement(contract, expression) {
const startContract = contract.instrumented.slice(0, expression.range[0]);
const startline = ( startContract.match(/\n/g) || [] ).length + 1;
const startcol = expression.range[0] - startContract.lastIndexOf('\n') - 1;
const expressionContent = contract.instrumented.slice(
expression.range[0],
expression.range[1] + 1
);
const endline = startline + (expressionContent.match(/\n/g) || []).length;
let endcol;
if (expressionContent.lastIndexOf('\n') >= 0) {
endcol = contract.instrumented.slice(
expressionContent.lastIndexOf('\n'),
expression.range[1]
).length;
} else endcol = startcol + (contract, expressionContent.length - 1);
contract.statementId += 1;
contract.statementMap[contract.statementId] = {
start: { line: startline, column: startcol },
end: { line: endline, column: endcol },
};
this._createInjectionPoint(contract, expression.range[0], {
type: 'injectStatement', statementId: contract.statementId,
});
};
/**
* Registers injections for line measurements
* @param {Object} contract instrumentation target
* @param {Object} expression AST node
*/
line(contract, expression) {
const startchar = expression.range[0];
const endchar = expression.range[1] + 1;
const lastNewLine = contract.instrumented.slice(0, startchar).lastIndexOf('\n');
const nextNewLine = startchar + contract.instrumented.slice(startchar).indexOf('\n');
const contractSnipped = contract.instrumented.slice(lastNewLine, nextNewLine);
const restOfLine = contract.instrumented.slice(endchar, nextNewLine);
if (
contract.instrumented.slice(lastNewLine, startchar).trim().length === 0 &&
(
restOfLine.replace(';', '').trim().length === 0 ||
restOfLine.replace(';', '').trim().substring(0, 2) === '//'
)
)
{
this._createInjectionPoint(contract, lastNewLine + 1, { type: 'injectLine' });
} else if (
contract.instrumented.slice(lastNewLine, startchar).replace('{', '').trim().length === 0 &&
contract.instrumented.slice(endchar, nextNewLine).replace(/[;}]/g, '').trim().length === 0)
{
this._createInjectionPoint(contract, expression.range[0], { type: 'injectLine' });
}
};
/**
* Registers injections for function measurements
* @param {Object} contract instrumentation target
* @param {Object} expression AST node
*/
functionDeclaration(contract, expression) {
const startContract = contract.instrumented.slice(0, expression.range[0]);
const startline = ( startContract.match(/\n/g) || [] ).length + 1;
const startcol = expression.range[0] - startContract.lastIndexOf('\n') - 1;
const endlineDelta = contract.instrumented.slice(expression.range[0]).indexOf('{');
const functionDefinition = contract.instrumented.slice(
expression.range[0],
expression.range[0] + endlineDelta
);
const endline = startline + (functionDefinition.match(/\n/g) || []).length;
const endcol = functionDefinition.length - functionDefinition.lastIndexOf('\n');
contract.fnId += 1;
contract.fnMap[contract.fnId] = {
name: expression.isConstructor ? 'constructor' : expression.name,
line: startline,
loc: {
start: { line: startline, column: startcol },
end: { line: endline, column: endcol },
},
};
this._createInjectionPoint(
contract,
expression.range[0] + endlineDelta + 1,
{
type: 'injectFunction',
fnId: contract.fnId,
}
);
};
/**
* Registers injections for branch measurements. This generic is consumed by
* the `assert/require` and `if` registration methods.
* @param {Object} contract instrumentation target
* @param {Object} expression AST node
*/
addNewBranch(contract, expression) {
const startContract = contract.instrumented.slice(0, expression.range[0]);
const startline = ( startContract.match(/\n/g) || [] ).length + 1;
const startcol = expression.range[0] - startContract.lastIndexOf('\n') - 1;
contract.branchId += 1;
// NB locations for if branches in istanbul are zero
// length and associated with the start of the if.
contract.branchMap[contract.branchId] = {
line: startline,
type: 'if',
locations: [{
start: {
line: startline, column: startcol,
},
end: {
line: startline, column: startcol,
},
}, {
start: {
line: startline, column: startcol,
},
end: {
line: startline, column: startcol,
},
}],
};
};
/**
* Registers injections for assert/require statement measurements (branches)
* @param {Object} contract instrumentation target
* @param {Object} expression AST node
*/
assertOrRequire(contract, expression) {
this.addNewBranch(contract, expression);
this._createInjectionPoint(
contract,
expression.range[0],
{
type: 'injectAssertPre',
branchId: contract.branchId,
}
);
this._createInjectionPoint(
contract,
expression.range[1] + 2,
{
type: 'injectAssertPost',
branchId: contract.branchId,
}
);
};
/**
* Registers injections for if statement measurements (branches)
* @param {Object} contract instrumentation target
* @param {Object} expression AST node
*/
ifStatement(contract, expression) {
this.addNewBranch(contract, expression);
if (expression.trueBody.type === 'Block') {
this._createInjectionPoint(
contract,
expression.trueBody.range[0] + 1,
{
type: 'injectBranch',
branchId: contract.branchId,
locationIdx: 0,
}
);
}
if (expression.falseBody && expression.falseBody.type === 'IfStatement') {
// Do nothing - we must be pre-preprocessing
} else if (expression.falseBody && expression.falseBody.type === 'Block') {
this._createInjectionPoint(
contract,
expression.falseBody.range[0] + 1,
{
type: 'injectBranch',
branchId: contract.branchId,
locationIdx: 1,
}
);
} else {
this._createInjectionPoint(
contract,
expression.trueBody.range[1] + 1,
{
type: 'injectEmptyBranch',
branchId: contract.branchId,
locationIdx: 1,
}
);
}
}
}
module.exports = Registrar;
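A small sketch of driving the Registrar directly; the contract fields are the ones the methods above read and write, and the AST node is hand-built rather than produced by the parser:

const Registrar = require('./lib/registrar');
const register = new Registrar();

const contract = {
  instrumented: 'uint x = 1;',
  injectionPoints: {},
  statementId: 0,
  statementMap: {},
};

// A fake node whose range spans the whole statement ([start, end] char offsets).
register.statement(contract, { range: [0, 10] });

console.log(contract.statementMap[1]);
// { start: { line: 1, column: 0 }, end: { line: 1, column: 10 } }
console.log(contract.injectionPoints[0]);
// [ { type: 'injectStatement', statementId: 1 } ]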

@ -1,6 +1,6 @@
/* eslint-env node, mocha */
const path = require('path');
/*const path = require('path');
const getInstrumentedVersion = require('./../lib/instrumentSolidity.js');
const util = require('./util/util.js');
const CoverageMap = require('./../lib/coverageMap');
@ -181,23 +181,5 @@ describe.skip('conditional statements', () => {
}).catch(done);
});
// Solcover has trouble with this case. The conditional coverage strategy relies on being able to
// reference the left-hand variable before its value is assigned. Solidity doesn't allow this
// for 'var'.
/* it('should cover a var decl assignment by conditional that reaches the alternate', (done) => {
const contract = util.getCode('conditional/variable-decl-assignment-alternate.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
// Runs var z = (x) ? y = false : y = false;
vm.execute(info.contract, 'a', []).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {5: 1, 6: 1, 7: 1});
assert.deepEqual(mapping[filePath].b, {'1': [0, 1]});
assert.deepEqual(mapping[filePath].s, {1: 1, 2: 1, 3: 1});
assert.deepEqual(mapping[filePath].f, {1: 1});
done();
}).catch(done);
}); */
});
*/

@ -0,0 +1,121 @@
/**
* This is logic to instrument ternary conditional assignment statements. It is preserved
* here for the time being because instrumentation of these became impossible in
* solc >= 0.5.0.
*/
function instrumentAssignmentExpression(contract, expression) {
// This is suspended for 0.5.0, which tries to accommodate the new `emit` keyword.
// Solc is not allowing us to use the construction `emit SomeEvent()` within the parens :/
return;
// --------------------------------------------------------------------------------------------
// The only time we instrument an assignment expression is if there's a conditional expression on
// the right
/*if (expression.right.type === 'ConditionalExpression') {
if (expression.left.type === 'DeclarativeExpression' || expression.left.type === 'Identifier') {
// Then we need to go from bytes32 varname = (conditional expression)
// to bytes32 varname; (,varname) = (conditional expression)
createOrAppendInjectionPoint(contract, expression.left.range[1], {
type: 'literal', string: '; (,' + expression.left.name + ')',
});
instrumenter.instrumentConditionalExpression(contract, expression.right);
} else if (expression.left.type === 'MemberExpression') {
createOrAppendInjectionPoint(contract, expression.left.range[0], {
type: 'literal', string: '(,',
});
createOrAppendInjectionPoint(contract, expression.left.range[1], {
type: 'literal', string: ')',
});
instrumenter.instrumentConditionalExpression(contract, expression.right);
} else {
const err = 'Error instrumenting assignment expression @ solidity-coverage/lib/instrumenter.js';
console.log(err, contract, expression.left);
process.exit();
}
}*/
};
function instrumentConditionalExpression(contract, expression) {
// ----------------------------------------------------------------------------------------------
// This is suspended for 0.5.0, which tries to accommodate the new `emit` keyword.
// Solc is not allowing us to use the construction `emit SomeEvent()` within the parens :/
// Very sad, this is the coolest thing in here.
return;
// ----------------------------------------------------------------------------------------------
/*contract.branchId += 1;
const startline = (contract.instrumented.slice(0, expression.range[0]).match(/\n/g) || []).length + 1;
const startcol = expression.range[0] - contract.instrumented.slice(0, expression.range[0]).lastIndexOf('\n') - 1;
const consequentStartCol = startcol + (contract, expression.trueBody.range[0] - expression.range[0]);
const consequentEndCol = consequentStartCol + (contract, expression.trueBody.range[1] - expression.trueBody.range[0]);
const alternateStartCol = startcol + (contract, expression.falseBody.range[0] - expression.range[0]);
const alternateEndCol = alternateStartCol + (contract, expression.falseBody.range[1] - expression.falseBody.range[0]);
// NB locations for conditional branches in istanbul are length 1 and associated with the : and ?.
contract.branchMap[contract.branchId] = {
line: startline,
type: 'cond-expr',
locations: [{
start: {
line: startline, column: consequentStartCol,
},
end: {
line: startline, column: consequentEndCol,
},
}, {
start: {
line: startline, column: alternateStartCol,
},
end: {
line: startline, column: alternateEndCol,
},
}],
};
// Right, this could be being used just by itself or as an assignment. In the case of the latter, because
// the comma operator doesn't exist, we're going to have to get funky.
// if we're on a line by ourselves, this is easier
//
// Now if we've got to wrap the expression it's being set equal to, do that...
// Wrap the consequent
createOrAppendInjectionPoint(contract, expression.trueBody.range[0], {
type: 'openParen',
});
createOrAppendInjectionPoint(contract, expression.trueBody.range[0], {
type: 'callBranchEvent', comma: true, branchId: contract.branchId, locationIdx: 0,
});
createOrAppendInjectionPoint(contract, expression.trueBody.range[1], {
type: 'closeParen',
});
// Wrap the alternate
createOrAppendInjectionPoint(contract, expression.falseBody.range[0], {
type: 'openParen',
});
createOrAppendInjectionPoint(contract, expression.falseBody.range[0], {
type: 'callBranchEvent', comma: true, branchId: contract.branchId, locationIdx: 1,
});
createOrAppendInjectionPoint(contract, expression.falseBody.range[1], {
type: 'closeParen',
});*/
};
// Paren / Literal injectors
/*
injector.openParen = function injectOpenParen(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) + '(' + contract.instrumented.slice(injectionPoint);
};
injector.closeParen = function injectCloseParen(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) + ')' + contract.instrumented.slice(injectionPoint);
};
injector.literal = function injectLiteral(contract, fileName, injectionPoint, injection) {
contract.instrumented = contract.instrumented.slice(0, injectionPoint) + injection.string + contract.instrumented.slice(injectionPoint);
};
*/

@ -0,0 +1,119 @@
const chalk = require('chalk');
const emoji = require('node-emoji');
/**
* Coverage tool output formatters. These classes support any logging the solidity-coverage API
* (or plugins which consume it) does on its own behalf. NB, most output is generated by the host
* dev stack (ex: the truffle compile command, or istanbul).
*/
class UI {
constructor(log){
this.log = log || console.log;
this.chalk = chalk;
}
/**
* Writes a formatted message
* @param {String} kind message selector
* @param {String[]} args info to inject into template
*/
report(kind, args=[]){}
/**
* Returns a formatted message. Useful for error messages.
* @param {String} kind message selector
* @param {String[]} args info to inject into template
* @return {String} message
*/
generate(kind, args=[]){}
_write(msg){
this.log(this._format(msg))
}
_format(msg){
return emoji.emojify(msg)
}
}
/**
* UI for solidity-coverage/lib/app.js
*/
class AppUI extends UI {
constructor(log){
super(log);
}
/**
* Writes a formatted message via log
* @param {String} kind message selector
* @param {String[]} args info to inject into template
*/
report(kind, args=[]){
const c = this.chalk;
const ct = c.bold.green('>');
const ds = c.bold.yellow('>');
const w = ":warning:";
const kinds = {
'vm-fail': `${w} ${c.red('There was a problem attaching to the ganache VM.')}\n` +
`${w} ${c.red('For help, see the "client" & "providerOptions" syntax in solidity-coverage docs.')}\n`+
`${w} ${c.red(`Using ganache-cli (v${args[0]}) instead.`)}\n`,
'instr-start': `\n${c.bold('Instrumenting for coverage...')}` +
`\n${c.bold('=============================')}\n`,
'instr-item': `${ct} ${args[0]}`,
'istanbul': `${ct} ${c.grey('Istanbul reports written to')} ./coverage/ ` +
`${c.grey('and')} ./coverage.json`,
'finish': `${ct} ${c.grey('solidity-coverage cleaning up, shutting down ganache server')}`,
'server': `${ct} ${c.bold('server: ')} ${c.grey(args[0])}`,
'command': `\n${w} ${c.red.bold('solidity-coverage >= 0.7.0 is no longer a shell command.')} ${w}\n` +
`${c.bold('=============================================================')}\n\n` +
`Instead, you should use the plugin produced for your development stack\n` +
`(like Truffle, Buidler) or design a custom workflow using the package API\n\n` +
`> See https://github.com/sc-forks/solidity-coverage for help with configuration.\n\n` +
`${c.green.bold('Thanks! - sc-forks')}\n`,
};
this._write(kinds[kind]);
}
/**
* Returns a formatted message. Useful for error messages.
* @param {String} kind message selector
* @param {String[]} args info to inject into template
* @return {String} message
*/
generate(kind, args=[]){
const c = this.chalk;
const kinds = {
'config-fail':`${c.red('A config option (.solcover.js) is incorrectly formatted: ')}` +
`${c.red(args[0])}.`,
'instr-fail': `${c.red('Could not instrument:')} ${args[0]}. ` +
`${c.red('(Please verify solc can compile this file without errors.) ')}`,
'istanbul-fail': `${c.red('Istanbul coverage reports could not be generated. ')}`,
'sources-fail': `${c.red('Cannot locate expected contract sources folder: ')} ${args[0]}`,
'server-fail': `${c.red('Port')} ${args[0]} ${c.red('is already in use.\n')}` +
`${c.red('\tRun: "lsof -i" to find the pid of the process using it.\n')}` +
`${c.red('\tRun: "kill -9 <pid>" to kill it.\n')}`
}
return this._format(kinds[kind])
}
}
module.exports = {
AppUI: AppUI,
UI: UI
};
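A short sketch of the two entry points above, with a logger injected so output can be captured:

const { AppUI } = require('./lib/ui');

const lines = [];
const ui = new AppUI(msg => lines.push(msg));

ui.report('instr-start');    // pushes the emojified "Instrumenting for coverage..." banner
ui.report('istanbul');       // pushes the reports-written notice

// `generate` returns the formatted string instead of logging it;
// the argument here is just an example validation message.
const err = ui.generate('config-fail', ['"port" is not of a type(s) number']);
console.log(err);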

@ -0,0 +1,68 @@
const Validator = require('jsonschema').Validator;
const AppUI = require('./ui').AppUI;
const util = require('util')
Validator.prototype.customFormats.isFunction = function(input) {
return typeof input === "function"
};
const configSchema = {
id: "/solcoverjs",
type: "object",
properties: {
client: {type: "object"},
cwd: {type: "string"},
host: {type: "string"},
port: {type: "number"},
providerOptions: {type: "object"},
silent: {type: "boolean"},
autoLaunchServer: {type: "boolean"},
istanbulFolder: {type: "string"},
// Hooks:
onServerReady: {type: "function", format: "isFunction"},
onCompileComplete: {type: "function", format: "isFunction"},
onTestComplete: {type: "function", format: "isFunction"},
onIstanbulComplete: {type: "function", format: "isFunction"},
// Arrays
skipFiles: {
type: "array",
items: {type: "string"}
},
istanbulReporter: {
type: "array",
items: {type: "string"}
},
},
};
class ConfigValidator {
constructor(){
this.validator = new Validator();
this.validator.addSchema(configSchema);
this.ui = new AppUI();
}
validate(config){
let result = this.validator.validate(config, configSchema);
if (result.errors.length){
let msg;
const option = `"${result.errors[0].property.replace('instance.', '')}"`;
(result.errors[0].argument === 'isFunction')
? msg = `${option} is not a function`
: msg = `${option} ${result.errors[0].message}`;
throw new Error(this.ui.generate('config-fail', [msg]));
}
return true;
}
}
module.exports = ConfigValidator;
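For example, checking a couple of .solcover.js shapes against the schema above (a sketch; the exact jsonschema wording of the thrown message may differ):

const ConfigValidator = require('./lib/validator');
const validator = new ConfigValidator();

// Valid: known keys must match their declared types; unknown keys pass through.
validator.validate({ port: 8555, skipFiles: ['Migrations.sol'] });   // true

// Invalid: `port` must be a number, so this throws a 'config-fail' error.
try {
  validator.validate({ port: '8555' });
} catch (err) {
  console.log(err.message);
}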

@ -1,16 +1,19 @@
{
"name": "solidity-coverage",
"version": "0.6.7",
"version": "0.7.0-beta.3",
"description": "",
"main": "plugins/buidler.plugin.js",
"bin": {
"solidity-coverage": "./bin/exec.js"
"solidity-coverage": "./plugins/bin.js"
},
"directories": {
"test": "test"
},
"scripts": {
"test": "mocha --timeout 60000",
"test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --timeout 60000 --exit"
"nyc": "SILENT=true nyc --exclude '**/sc_temp/**' --exclude '**/test/**'",
"test": "SILENT=true node --max-old-space-size=4096 ./node_modules/.bin/nyc -- mocha test/units/* --timeout 100000 --no-warnings --exit",
"test:ci": "SILENT=true node --max-old-space-size=4096 ./node_modules/.bin/nyc --reporter=lcov --exclude '**/sc_temp/**' --exclude '**/test/**/' -- mocha test/units/* --timeout 100000 --no-warnings --exit",
"test:debug": "node --max-old-space-size=4096 ./node_modules/.bin/mocha test/units/* --timeout 100000 --no-warnings --exit"
},
"homepage": "https://github.com/sc-forks/solidity-coverage",
"repository": {
@ -20,28 +23,37 @@
"author": "",
"license": "ISC",
"dependencies": {
"@truffle/provider": "^0.1.17",
"chalk": "^2.4.2",
"death": "^1.1.0",
"ethereumjs-testrpc-sc": "6.5.1-sc.1",
"detect-port": "^1.3.0",
"fs-extra": "^8.1.0",
"ganache-cli": "6.7.0",
"ghost-testrpc": "^0.0.2",
"global-modules": "^2.0.0",
"globby": "^10.0.1",
"istanbul": "^0.4.5",
"keccakjs": "^0.2.1",
"req-cwd": "^1.0.1",
"jsonschema": "^1.2.4",
"lodash": "^4.17.15",
"node-emoji": "^1.10.0",
"pify": "^4.0.1",
"recursive-readdir": "^2.2.2",
"shelljs": "^0.8.3",
"sol-explore": "^1.6.2",
"solidity-parser-antlr": "0.4.7",
"tree-kill": "^1.2.0",
"web3": "1.2.1",
"web3-eth-abi": "1.0.0-beta.55"
"web3-utils": "^1.0.0"
},
"devDependencies": {
"crypto-js": "^3.1.9-1",
"ethereumjs-account": "~2.0.4",
"ethereumjs-tx": "^1.2.2",
"ethereumjs-util": "^5.0.1",
"ethereumjs-vm": "https://github.com/sc-forks/ethereumjs-vm-sc.git#336d8841ab2c37da079d290ea5c5af6b34f20495",
"merkle-patricia-tree": "~2.1.2",
"mocha": "^4.1.0",
"request": "^2.88.0",
"solc": "^0.5.3",
"truffle": "^5.0.30"
"@nomiclabs/buidler": "^1.0.1",
"@nomiclabs/buidler-truffle5": "^1.0.1",
"@nomiclabs/buidler-web3": "^1.0.1",
"@truffle/contract": "^4.0.36",
"decache": "^4.5.1",
"ganache-core-sc": "^2.7.0-sc.0",
"mocha": "5.2.0",
"nyc": "^14.1.1",
"solc": "^0.5.10",
"truffle": "5.0.31",
"truffle-config": "^1.1.18"
}
}

@ -0,0 +1,8 @@
/*
Logs a warning / informational message when a user tries to
invoke 'solidity-coverage' as a shell command. This file
is listed as the package.json "bin".
*/
const AppUI = require('../lib/ui').AppUI;
(new AppUI()).report('command');

@ -0,0 +1,142 @@
const API = require('./../lib/api');
const utils = require('./resources/plugin.utils');
const buidlerUtils = require('./resources/buidler.utils');
const PluginUI = require('./resources/buidler.ui');
const pkg = require('./../package.json');
const death = require('death');
const path = require('path');
const Web3 = require('web3');
const ganache = require('ganache-cli');
const { task, types } = require("@nomiclabs/buidler/config");
const { ensurePluginLoadedWithUsePlugin } = require("@nomiclabs/buidler/plugins");
const {
TASK_TEST,
TASK_COMPILE,
} = require("@nomiclabs/buidler/builtin-tasks/task-names");
ensurePluginLoadedWithUsePlugin();
function plugin() {
// UI for the task flags...
const ui = new PluginUI();
task("coverage", "Generates a code coverage report for tests")
.addOptionalParam("testfiles", ui.flags.file, null, types.string)
.addOptionalParam("solcoverjs", ui.flags.solcoverjs, null, types.string)
.addOptionalParam('temp', ui.flags.temp, null, types.string)
.setAction(async function(args, env){
let error;
let ui;
let api;
let config;
try {
death(buidlerUtils.finish.bind(null, config, api)); // Catch interrupt signals
config = buidlerUtils.normalizeConfig(env.config, args);
ui = new PluginUI(config.logger.log);
api = new API(utils.loadSolcoverJS(config));
// ==============
// Server launch
// ==============
const network = buidlerUtils.setupNetwork(env, api, ui);
const client = api.client || ganache;
const address = await api.ganache(client);
const web3 = new Web3(address);
const accounts = await web3.eth.getAccounts();
const nodeInfo = await web3.eth.getNodeInfo();
const ganacheVersion = nodeInfo.split('/')[1];
// Set default account
network.from = accounts[0];
// Version Info
ui.report('versions', [
ganacheVersion,
pkg.version
]);
ui.report('network', [
env.network.name,
api.port
]);
// Run post-launch server hook;
await api.onServerReady(config);
// ================
// Instrumentation
// ================
const skipFiles = api.skipFiles || [];
let {
targets,
skipped
} = utils.assembleFiles(config, skipFiles);
targets = api.instrument(targets);
utils.reportSkipped(config, skipped);
// ==============
// Compilation
// ==============
config.temp = args.temp;
const {
tempArtifactsDir,
tempContractsDir
} = utils.getTempLocations(config);
utils.setupTempFolders(config, tempContractsDir, tempArtifactsDir)
utils.save(targets, config.paths.sources, tempContractsDir);
utils.save(skipped, config.paths.sources, tempContractsDir);
config.paths.sources = tempContractsDir;
config.paths.artifacts = tempArtifactsDir;
config.paths.cache = buidlerUtils.tempCacheDir(config);
config.solc.optimizer.enabled = false;
await env.run(TASK_COMPILE);
await api.onCompileComplete(config);
// ======
// Tests
// ======
const testfiles = args.testfiles ? [args.testfiles] : [];
try {
await env.run(TASK_TEST, {testFiles: testfiles})
} catch (e) {
error = e;
}
await api.onTestsComplete(config);
// ========
// Istanbul
// ========
await api.report();
await api.onIstanbulComplete(config);
} catch(e) {
error = e;
}
await buidlerUtils.finish(config, api);
if (error !== undefined ) throw error;
if (process.exitCode > 0) throw new Error(ui.generate('tests-fail', [process.exitCode]));
})
}
module.exports = plugin;
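For context, a usage sketch of the task this plugin registers (the flags come from the addOptionalParam calls above; the usePlugin pattern follows the package's "main" entry in package.json):

// buidler.config.js (sketch)
usePlugin('solidity-coverage');   // loads plugins/buidler.plugin.js, registering the `coverage` task

module.exports = {
  defaultNetwork: 'buidlerevm',
};

// Then, from the project root:
//   npx buidler coverage
//   npx buidler coverage --testfiles "test/tokens/*.js" --temp coverage-artifacts --solcoverjs ./.solcover.js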

@ -0,0 +1,85 @@
const UI = require('./../../lib/ui').UI;
/**
* Buidler Plugin logging
*/
class PluginUI extends UI {
constructor(log){
super(log);
this.flags = {
file: `Path (or glob) defining a subset of tests to run`,
solcoverjs: `Relative path from working directory to config. ` +
`Useful for monorepo packages that share settings.`,
temp: `Path to a disposable folder to store compilation artifacts in. ` +
`Useful when your test setup scripts include hard-coded paths to ` +
`a build directory.`,
}
}
/**
* Writes a formatted message via log
* @param {String} kind message selector
* @param {String[]} args info to inject into template
*/
report(kind, args=[]){
const c = this.chalk;
const ct = c.bold.green('>');
const ds = c.bold.yellow('>');
const w = ":warning:";
const kinds = {
'instr-skip': `\n${c.bold('Coverage skipped for:')}` +
`\n${c.bold('=====================')}\n`,
'instr-skipped': `${ds} ${c.grey(args[0])}`,
'versions': `${ct} ${c.bold('ganache-core')}: ${args[0]}\n` +
`${ct} ${c.bold('solidity-coverage')}: v${args[1]}`,
'network': `\n${c.bold('Network Info')}` +
`\n${c.bold('============')}\n` +
`${ct} ${c.bold('port')}: ${args[1]}\n` +
`${ct} ${c.bold('network')}: ${args[0]}\n`,
'port-clash': `${w} ${c.red("The 'port' values in your Buidler url ")}` +
`${c.red("and .solcover.js are different. Using Buidler's: ")} ${c.bold(args[0])}.\n`,
}
this._write(kinds[kind]);
}
/**
* Returns a formatted message. Useful for error messages.
* @param {String} kind message selector
* @param {String[]} args info to inject into template
* @return {String} message
*/
generate(kind, args=[]){
const c = this.chalk;
const x = ":x:";
const kinds = {
'sources-fail': `${c.red('Cannot locate expected contract sources folder: ')} ${args[0]}`,
'solcoverjs-fail': `${c.red('Could not load .solcover.js config file. ')}` +
`${c.red('This can happen if it has a syntax error or ')}` +
`${c.red('the path you specified for it is wrong.')}`,
'tests-fail': `${x} ${c.bold(args[0])} ${c.red('test(s) failed under coverage.')}`,
}
return this._format(kinds[kind])
}
}
module.exports = PluginUI;

@ -0,0 +1,108 @@
const shell = require('shelljs');
const globby = require('globby');
const pluginUtils = require("./plugin.utils");
const path = require('path');
const util = require('util');
const { createProvider } = require("@nomiclabs/buidler/internal/core/providers/construction");
// =============================
// Buidler Plugin Utils
// =============================
/**
* Normalizes buidler paths / logging for use by the plugin utilities and
* attaches them to the config
* @param {BuidlerConfig} config
* @return {BuidlerConfig} updated config
*/
function normalizeConfig(config, args={}){
config.workingDir = config.paths.root;
config.contractsDir = config.paths.sources;
config.testDir = config.paths.tests;
config.artifactsDir = config.paths.artifacts;
config.logger = config.logger ? config.logger : {log: null};
config.solcoverjs = args.solcoverjs
return config;
}
function setupNetwork(env, api, ui){
let networkConfig = {};
let networkName = (env.buidlerArguments.network !== 'buidlerevm')
? env.buidlerArguments.network
: api.defaultNetworkName;
if (networkName !== api.defaultNetworkName){
networkConfig = env.config.networks[networkName];
const configPort = networkConfig.url.split(':')[2];
// Warn: port conflicts
if (api.port !== api.defaultPort && api.port !== configPort){
ui.report('port-clash', [ configPort ])
}
// Prefer network port
api.port = parseInt(configPort);
}
networkConfig.url = `http://${api.host}:${api.port}`;
networkConfig.gas = api.gasLimit;
networkConfig.gasPrice = api.gasPrice;
const provider = createProvider(networkName, networkConfig);
env.config.networks[networkName] = networkConfig;
env.config.defaultNetwork = networkName;
env.network = {
name: networkName,
config: networkConfig,
provider: provider,
}
env.ethereum = provider;
// Return a reference so we can set the from account
return env.network;
}
/**
* Generates a path to a temporary compilation cache directory
* @param {BuidlerConfig} config
* @return {String} .../.coverage_cache
*/
function tempCacheDir(config){
return path.join(config.paths.root, '.coverage_cache');
}
/**
* Silently removes temporary folders and calls api.finish to shut server down
* @param {BuidlerConfig} config
* @param {SolidityCoverage} api
* @return {Promise}
*/
async function finish(config, api){
const {
tempContractsDir,
tempArtifactsDir
} = pluginUtils.getTempLocations(config);
shell.config.silent = true;
shell.rm('-Rf', tempContractsDir);
shell.rm('-Rf', tempArtifactsDir);
shell.rm('-Rf', path.join(config.paths.root, '.coverage_cache'));
shell.config.silent = false;
if (api) await api.finish();
}
module.exports = {
normalizeConfig: normalizeConfig,
finish: finish,
tempCacheDir: tempCacheDir,
setupNetwork: setupNetwork
}
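A sketch of what normalizeConfig adds, using a stubbed BuidlerConfig (paths are illustrative):

const { normalizeConfig } = require('./plugins/resources/buidler.utils');

const stub = {
  paths: {
    root: '/proj',
    sources: '/proj/contracts',
    tests: '/proj/test',
    artifacts: '/proj/artifacts',
  },
};

const config = normalizeConfig(stub, { solcoverjs: './.solcover.js' });

console.log(config.workingDir);    // '/proj'
console.log(config.contractsDir);  // '/proj/contracts'
console.log(config.solcoverjs);    // './.solcover.js'
console.log(config.logger);        // { log: null } (downstream UI classes fall back to console.log)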

@ -0,0 +1,273 @@
/**
* A collection of utilities for common tasks plugins will need in the course
* of composing a workflow using the solidity-coverage API
*/
const PluginUI = require('./truffle.ui');
const path = require('path');
const fs = require('fs-extra');
const shell = require('shelljs');
const util = require('util')
// ===
// UI
// ===
/**
* Displays a list of skipped contracts
* @param {TruffleConfig} config
* @return {Object[]} skipped array of objects generated by `assembleTargets` method
*/
function reportSkipped(config, skipped=[]){
let started = false;
const ui = new PluginUI(config.logger.log);
for (let item of skipped){
if (!started) {
ui.report('instr-skip', []);
started = true;
}
ui.report('instr-skipped', [item.relativePath]);
}
}
// ========
// File I/O
// ========
/**
* Loads source
* @param {String} _path absolute path
* @return {String} source file
*/
function loadSource(_path){
return fs.readFileSync(_path).toString();
}
/**
* Sets up temporary folders for instrumented contracts and their compilation artifacts
* @param {PlatformConfig} config
* @param {String} tempContractsDir
* @param {String} tempArtifactsDir
*/
function setupTempFolders(config, tempContractsDir, tempArtifactsDir){
checkContext(config, tempContractsDir, tempArtifactsDir);
shell.mkdir(tempContractsDir);
shell.mkdir(tempArtifactsDir);
}
/**
* Save a set of instrumented files to a temporary directory.
* @param {Object[]} targets array of targets generated by `assembleTargets`
* @param {String} originalDir absolute path to original contracts directory
* @param {String} tempDir absolute path to temp contracts directory
*/
function save(targets, originalDir, tempDir){
let _path;
for (const target of targets) {
_path = path.normalize(target.canonicalPath)
.replace(originalDir, tempDir);
fs.outputFileSync(_path, target.source);
}
}
/**
* Relativizes an absolute file path, given an absolute parent path
* @param {String} pathToFile
* @param {String} pathToParent
* @return {String} relative path
*/
function toRelativePath(pathToFile, pathToParent){
return pathToFile.replace(`${pathToParent}${path.sep}`, '');
}
/**
* Returns a pair of canonically named temporary directory paths for contracts
* and artifacts. Instrumented assets can be written & compiled to these.
* Then the unit tests can be run, consuming them as sources.
* @param {TruffleConfig} config
* @return {Object} temp paths
*/
function getTempLocations(config){
const cwd = config.workingDir;
const contractsDirName = '.coverage_contracts';
const artifactsDirName = config.temp || '.coverage_artifacts';
return {
tempContractsDir: path.join(cwd, contractsDirName),
tempArtifactsDir: path.join(cwd, artifactsDirName)
}
}
/**
* Checks for existence of contract sources, and sweeps away debris
* left over from an uncontrolled crash.
*/
function checkContext(config, tempContractsDir, tempArtifactsDir){
const ui = new PluginUI(config.logger.log);
if (!shell.test('-e', config.contractsDir)){
const msg = ui.generate('sources-fail', [config.contractsDir])
throw new Error(msg);
}
if (shell.test('-e', tempContractsDir)){
shell.rm('-Rf', tempContractsDir);
}
if (shell.test('-e', tempArtifactsDir)){
shell.rm('-Rf', tempArtifactsDir);
}
}
// =============================
// Instrumentation Set Assembly
// =============================
function assembleFiles(config, skipFiles=[]){
const targetsPath = path.join(config.contractsDir, '**', '*.sol');
const targets = shell.ls(targetsPath);
skipFiles = assembleSkipped(config, targets, skipFiles);
return assembleTargets(config, targets, skipFiles)
}
function assembleTargets(config, targets=[], skipFiles=[]){
const skipped = [];
const filtered = [];
const cd = config.contractsDir;
for (let target of targets){
if (skipFiles.includes(target)){
skipped.push({
canonicalPath: target,
relativePath: toRelativePath(target, cd),
source: loadSource(target)
})
} else {
filtered.push({
canonicalPath: target,
relativePath: toRelativePath(target, cd),
source: loadSource(target)
})
}
}
return {
skipped: skipped,
targets: filtered
}
}
/**
* Parses the skipFiles option (which also accepts folders)
*/
function assembleSkipped(config, targets, skipFiles=[]){
// Make paths absolute
skipFiles = skipFiles.map(contract => path.join(config.contractsDir, contract));
// Enumerate files in skipped folders
const skipFolders = skipFiles.filter(item => path.extname(item) !== '.sol')
for (let folder of skipFolders){
for (let target of targets ) {
if (target.indexOf(folder) === 0)
skipFiles.push(target);
}
};
return skipFiles;
}
function loadSolcoverJS(config={}){
let solcoverjs;
let coverageConfig;
let log = config.logger ? config.logger.log : console.log;
let ui = new PluginUI(log);
// Handle --solcoverjs flag
(config.solcoverjs)
? solcoverjs = path.join(config.workingDir, config.solcoverjs)
: solcoverjs = path.join(config.workingDir, '.solcover.js');
// Catch solcoverjs syntax errors
if (shell.test('-e', solcoverjs)){
try {
coverageConfig = require(solcoverjs);
} catch(error){
error.message = ui.generate('solcoverjs-fail') + error.message;
throw new Error(error)
}
// Config is optional
} else {
coverageConfig = {};
}
// Truffle writes to coverage config
coverageConfig.log = log;
coverageConfig.cwd = config.workingDir;
coverageConfig.originalContractsDir = config.contractsDir;
// Solidity-Coverage writes to Truffle config
config.mocha = config.mocha || {};
if (coverageConfig.mocha && typeof coverageConfig.mocha === 'object'){
config.mocha = Object.assign(
config.mocha,
coverageConfig.mocha
);
}
return coverageConfig;
}
// ==========================
// Finishing / Cleanup
// ==========================
/**
* Silently removes temporary folders and calls api.finish to shut server down
* @param {TruffleConfig} config
* @param {SolidityCoverage} api
* @return {Promise}
*/
async function finish(config, api){
const {
tempContractsDir,
tempArtifactsDir
} = getTempLocations(config);
shell.config.silent = true;
shell.rm('-Rf', tempContractsDir);
shell.rm('-Rf', tempArtifactsDir);
shell.config.silent = false;
if (api) await api.finish();
}
module.exports = {
assembleFiles: assembleFiles,
assembleSkipped: assembleSkipped,
assembleTargets: assembleTargets,
checkContext: checkContext,
finish: finish,
getTempLocations: getTempLocations,
loadSource: loadSource,
loadSolcoverJS: loadSolcoverJS,
reportSkipped: reportSkipped,
save: save,
toRelativePath: toRelativePath,
setupTempFolders: setupTempFolders
}
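A sketch tying a few of these helpers together with a stubbed config (paths illustrative; assembleFiles and setupTempFolders shell out, so they expect the contracts folder to actually exist):

const utils = require('./plugins/resources/plugin.utils');

const config = {
  workingDir: '/proj',
  contractsDir: '/proj/contracts',
  logger: { log: console.log },
};

const { tempContractsDir, tempArtifactsDir } = utils.getTempLocations(config);
// { tempContractsDir: '/proj/.coverage_contracts',
//   tempArtifactsDir: '/proj/.coverage_artifacts' }

const { targets, skipped } = utils.assembleFiles(config, ['Migrations.sol']);
// Entries in `targets` / `skipped` look like { canonicalPath, relativePath, source },
// ready for api.instrument() and utils.save() respectively.

utils.setupTempFolders(config, tempContractsDir, tempArtifactsDir);
utils.save(targets, config.contractsDir, tempContractsDir);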

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -0,0 +1,107 @@
const UI = require('./../../lib/ui').UI;
/**
* Truffle Plugin logging
*/
class PluginUI extends UI {
constructor(log){
super(log);
}
/**
* Writes a formatted message via log
* @param {String} kind message selector
* @param {String[]} args info to inject into template
*/
report(kind, args=[]){
const c = this.chalk;
const ct = c.bold.green('>');
const ds = c.bold.yellow('>');
const w = ":warning:";
const kinds = {
'instr-skip': `\n${c.bold('Coverage skipped for:')}` +
`\n${c.bold('=====================')}\n`,
'instr-skipped': `${ds} ${c.grey(args[0])}`,
'sol-tests': `${w} ${c.red("This plugin cannot run Truffle's native solidity tests: ")}`+
`${args[0]} test(s) will be skipped.\n`,
'id-clash': `${w} ${c.red("The 'network_id' values in your truffle network ")}` +
`${c.red("and .solcover.js are different. Using truffle's: ")} ${c.bold(args[0])}.\n`,
'port-clash': `${w} ${c.red("The 'port' values in your truffle network ")}` +
`${c.red("and .solcover.js are different. Using truffle's: ")} ${c.bold(args[0])}.\n`,
'no-port': `${w} ${c.red("No 'port' was declared in your truffle network. ")}` +
`${c.red("Using solidity-coverage's: ")} ${c.bold(args[0])}.\n`,
'lib-local': `\n${ct} ${c.grey('Using Truffle library from local node_modules.')}\n`,
'lib-global': `\n${ct} ${c.grey('Using Truffle library from global node_modules.')}\n`,
'lib-warn': `${w} ${c.red('Unable to require Truffle library locally or globally.\n')}`+
`${w} ${c.red('Using fallback Truffle library module instead (v5.0.31)')}\n` +
`${w} ${c.red('Truffle V5 must be a local dependency for fallback to work.')}\n`,
'help': `Usage: truffle run coverage [options]\n\n` +
`Options:\n` +
` --file: path (or glob) to subset of JS test files. (Quote your globs)\n` +
` --solcoverjs: relative path to .solcover.js (ex: ./../.solcover.js)\n` +
` --version: version info\n`,
'versions': `${ct} ${c.bold('truffle')}: v${args[0]}\n` +
`${ct} ${c.bold('ganache-core')}: ${args[1]}\n` +
`${ct} ${c.bold('solidity-coverage')}: v${args[2]}`,
'network': `\n${c.bold('Network Info')}` +
`\n${c.bold('============')}\n` +
`${ct} ${c.bold('id')}: ${args[1]}\n` +
`${ct} ${c.bold('port')}: ${args[2]}\n` +
`${ct} ${c.bold('network')}: ${args[0]}\n`,
}
this._write(kinds[kind]);
}
/**
* Returns a formatted message. Useful for error messages.
* @param {String} kind message selector
* @param {String[]} args info to inject into template
* @return {String} message
*/
generate(kind, args=[]){
const c = this.chalk;
const x = ":x:";
const kinds = {
'sources-fail': `${c.red('Cannot locate expected contract sources folder: ')} ${args[0]}`,
'lib-fail': `${c.red('Unable to load plugin copy of Truffle library module. ')}` +
`${c.red('Try installing Truffle >= v5.0.31 locally or globally.\n')}` +
`Caught error message: ${args[0]}\n`,
'solcoverjs-fail': `${c.red('Could not load .solcover.js config file. ')}` +
`${c.red('This can happen if it has a syntax error or ')}` +
`${c.red('the path you specified for it is wrong.')}`,
'tests-fail': `${x} ${c.bold(args[0])} ${c.red('test(s) failed under coverage.')}`,
'no-network': `${c.red('Network: ')} ${args[0]} ` +
`${c.red(' is not defined in your truffle-config networks. ')}`,
}
return this._format(kinds[kind])
}
}
module.exports = PluginUI;

@ -0,0 +1,217 @@
const PluginUI = require('./truffle.ui');
const globalModules = require('global-modules');
const TruffleProvider = require('@truffle/provider');
const recursive = require('recursive-readdir');
const globby = require('globby');
const path = require('path');
// =============================
// Truffle Specific Plugin Utils
// ==============================
/**
* Returns a list of test files to pass to mocha.
* @param {Object} config truffleConfig
* @return {String[]} list of files to pass to mocha
*/
async function getTestFilePaths(config){
let target;
let ui = new PluginUI(config.logger.log);
// Handle --file <path|glob> cli option (subset of tests)
(typeof config.file === 'string')
? target = globby.sync([config.file])
: target = await recursive(config.testDir);
// Filter native solidity tests and warn that they're skipped
const solregex = /.*\.(sol)$/;
const hasSols = target.filter(f => f.match(solregex) != null);
if (hasSols.length > 0) ui.report('sol-tests', [hasSols.length]);
// Return list of test files
const testregex = /.*\.(js|ts|es|es6|jsx)$/;
return target.filter(f => f.match(testregex) != null);
}
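// Example (hedged; the glob is illustrative): `truffle run coverage --file "test/token/*.js"`
// restricts mocha to the matched files; any `.sol` tests found are filtered out and
// reported via the 'sol-tests' warning.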
/**
* Configures the network. Runs before the server is launched.
* The user can request a network from truffle-config with "--network <name>".
* There are overlapping options in .solcover.js (like port and providerOptions.network_id).
* Where there are mismatches the user is warned and the truffle network settings are preferred.
*
* Also generates a default config, setting the gas limit high and the gas price low.
*
* @param {TruffleConfig} config
* @param {SolidityCoverage} api
*/
function setNetwork(config, api){
const ui = new PluginUI(config.logger.log);
// --network <network-name>
if (config.network){
const network = config.networks[config.network];
// Check network:
if (!network){
throw new Error(ui.generate('no-network', [config.network]));
}
// Check network id
if (!isNaN(parseInt(network.network_id))){
// Warn: non-matching provider options id and network id
if (api.providerOptions.network_id &&
api.providerOptions.network_id !== parseInt(network.network_id)){
ui.report('id-clash', [ parseInt(network.network_id) ]);
}
// Prefer network defined id.
api.providerOptions.network_id = parseInt(network.network_id);
} else {
network.network_id = "*";
}
// Check port: use solcoverjs || default if undefined
if (!network.port) {
ui.report('no-port', [api.port]);
network.port = api.port;
}
// Warn: port conflicts
if (api.port !== api.defaultPort && api.port !== network.port){
ui.report('port-clash', [ network.port ])
}
// Prefer network port if defined;
api.port = network.port;
network.gas = api.gasLimit;
network.gasPrice = api.gasPrice;
setOuterConfigKeys(config, api, network.network_id);
return;
}
// Default Network Configuration
config.network = 'soliditycoverage';
setOuterConfigKeys(config, api, "*");
config.networks[config.network] = {
network_id: "*",
port: api.port,
host: api.host,
gas: api.gasLimit,
gasPrice: api.gasPrice
}
}
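// Worked example (a hedged sketch; the numbers are illustrative): with a truffle network
// { port: 8545, network_id: 3 } and a .solcover.js containing
// { port: 8777, providerOptions: { network_id: 1 } }, the 'id-clash' and 'port-clash'
// warnings fire and truffle's values win (network_id 3, port 8545); solidity-coverage's
// own port is only used when the truffle network declares none ('no-port').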
/**
* Sets the default `from` account field in the truffle network that will be used.
* This needs to be done after accounts are fetched from the launched client.
* @param {TruffleConfig} config
* @param {Array} accounts
*/
function setNetworkFrom(config, accounts){
if (!config.networks[config.network].from){
config.networks[config.network].from = accounts[0];
}
}
// Truffle complains that these outer keys *are not* set when the plugin fn is run directly,
// but throws saying they *cannot* be set manually when run as a truffle command.
function setOuterConfigKeys(config, api, id){
try {
config.network_id = id;
config.port = api.port;
config.host = api.host;
config.provider = TruffleProvider.create(config);
} catch (err){}
}
/**
* Tries to load the Truffle library module and reports its source. The user can force use of
* a non-local version with cli flags (see options). It's necessary to maintain
* a fail-safe lib because the feature was only introduced in 5.0.30. Load order is:
*
* 1. local node_modules
* 2. global node_modules
* 3. fail-safe (truffle lib v 5.0.31 at ./plugin-assets/truffle.library)
*
* @param {Object} truffleConfig config
* @return {Module}
*/
function loadLibrary(config){
const ui = new PluginUI(config.logger.log);
// Local
try {
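// `throw null` bails out of this try block so resolution falls through to the next
// loader when the user has forced global or plugin-copy Truffle via cli flags.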
if (config.useGlobalTruffle || config.usePluginTruffle) throw null;
const lib = require("truffle");
ui.report('lib-local');
return lib;
} catch(err) {};
// Global
try {
if (config.usePluginTruffle) throw null;
const globalTruffle = path.join(globalModules, 'truffle');
const lib = require(globalTruffle);
ui.report('lib-global');
return lib;
} catch(err) {};
// Plugin Copy @ v 5.0.31
try {
if (config.forceLibFailure) throw null; // For err unit testing
ui.report('lib-warn');
return require("./truffle.library")
} catch(err) {
throw new Error(ui.generate('lib-fail', [err]));
};
}
/**
* Maps the Truffle-specific path keys (sources, tests, artifacts) to the generic
* keys required by the plugin utils.
* @param  {TruffleConfig} config
* @return {Object} normalized truffle-config object
*/
function normalizeConfig(config){
config.workingDir = config.working_directory;
config.contractsDir = config.contracts_directory;
config.testDir = config.test_directory;
config.artifactsDir = config.build_directory;
// eth-gas-reporter freezes the in-process client because it uses sync calls
if (typeof config.mocha === "object" && config.mocha.reporter === 'eth-gas-reporter'){
config.mocha.reporter = 'spec';
delete config.mocha.reporterOptions;
}
// Truffle V4 style solc settings are honored over V5 settings. Apparently it's common
// for both to be present in the same config (as an error).
if (typeof config.solc === "object" ){
config.solc.optimizer = { enabled: false };
}
return config;
}
module.exports = {
getTestFilePaths: getTestFilePaths,
setNetwork: setNetwork,
setNetworkFrom: setNetworkFrom,
loadLibrary: loadLibrary,
normalizeConfig: normalizeConfig,
}

@ -0,0 +1,128 @@
const API = require('./../lib/api');
const utils = require('./resources/plugin.utils');
const truffleUtils = require('./resources/truffle.utils');
const PluginUI = require('./resources/truffle.ui');
const pkg = require('./../package.json');
const death = require('death');
const path = require('path');
const Web3 = require('web3');
/**
* Truffle Plugin: `truffle run coverage [options]`
* @param {Object} config @truffle/config config
* @return {Promise}
*/
async function plugin(config){
let ui;
let api;
let error;
let truffle;
let failures;
let testsErrored = false;
try {
death(utils.finish.bind(null, config, api)); // Catch interrupt signals
config = truffleUtils.normalizeConfig(config);
ui = new PluginUI(config.logger.log);
if(config.help) return ui.report('help'); // Exit if --help
truffle = truffleUtils.loadLibrary(config);
api = new API(utils.loadSolcoverJS(config));
truffleUtils.setNetwork(config, api);
// Server launch
const client = api.client || truffle.ganache;
const address = await api.ganache(client);
const web3 = new Web3(address);
const accounts = await web3.eth.getAccounts();
const nodeInfo = await web3.eth.getNodeInfo();
const ganacheVersion = nodeInfo.split('/')[1];
truffleUtils.setNetworkFrom(config, accounts);
// Version Info
ui.report('versions', [
truffle.version,
ganacheVersion,
pkg.version
]);
// Exit if --version
if (config.version) return await utils.finish(config, api);
ui.report('network', [
config.network,
config.networks[config.network].network_id,
config.networks[config.network].port
]);
// Run post-launch server hook;
await api.onServerReady(config);
// Instrument
const skipFiles = api.skipFiles || [];
skipFiles.push('Migrations.sol');
let {
targets,
skipped
} = utils.assembleFiles(config, skipFiles);
targets = api.instrument(targets);
utils.reportSkipped(config, skipped);
// Filesystem & Compiler Re-configuration
const {
tempArtifactsDir,
tempContractsDir
} = utils.getTempLocations(config);
utils.setupTempFolders(config, tempContractsDir, tempArtifactsDir)
utils.save(targets, config.contracts_directory, tempContractsDir);
utils.save(skipped, config.contracts_directory, tempContractsDir);
config.contracts_directory = tempContractsDir;
config.build_directory = tempArtifactsDir;
config.contracts_build_directory = path.join(
tempArtifactsDir,
path.basename(config.contracts_build_directory)
);
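// Force full recompilation so the instrumented sources (not cached artifacts) are used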
config.all = true;
config.test_files = await truffleUtils.getTestFilePaths(config);
config.compilers.solc.settings.optimizer.enabled = false;
// Compile Instrumented Contracts
await truffle.contracts.compile(config);
await api.onCompileComplete(config);
// Run tests
try {
failures = await truffle.test.run(config)
} catch (e) {
error = e.stack;
}
await api.onTestsComplete(config);
// Run Istanbul
await api.report();
await api.onIstanbulComplete(config);
} catch(e){
error = e;
}
// Finish
await utils.finish(config, api);
if (error !== undefined) throw error;
if (failures > 0) throw new Error(ui.generate('tests-fail', [failures]));
}
module.exports = plugin;
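// Invocation sketch (hedged; the paths and flags are illustrative): with
// `plugins: ['solidity-coverage']` declared in truffle-config.js, running
// `truffle run coverage --network development --file "test/foo/*.js"`
// hands the parsed truffle config to this function.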

@ -0,0 +1,88 @@
#!/usr/bin/env bash
#
# E2E CI: installs PR candidate on sc-forks/buidler-e2e (a simple example,
# similar to Metacoin) and runs coverage
#
set -o errexit
function verifyCoverageExists {
if [ ! -d "coverage" ]; then
echo "ERROR: no coverage folder was created."
exit 1
fi
}
# Get rid of any caches
sudo rm -rf node_modules
echo "NVM CURRENT >>>>>" && nvm current
# Use PR env variables (for forks) or fallback on local if PR not available
SED_REGEX="s/git@github.com:/https:\/\/github.com\//"
if [[ -v CIRCLE_PR_REPONAME ]]; then
PR_PATH="https://github.com/$CIRCLE_PR_USERNAME/$CIRCLE_PR_REPONAME#$CIRCLE_SHA1"
else
PR_PATH=$(echo "$CIRCLE_REPOSITORY_URL#$CIRCLE_SHA1" | sudo sed "$SED_REGEX")
fi
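# e.g. PR_PATH resolves to something like https://github.com/sc-forks/solidity-coverage#<sha>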
echo "PR_PATH >>>>> $PR_PATH"
echo ""
echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
echo "Simple buidler/buidler-trufflev5 "
echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
echo ""
# Install buidler-e2e
git clone https://github.com/sc-forks/buidler-e2e.git
cd buidler-e2e
npm install
# Install and run solidity-coverage @ PR
npm install --save-dev $PR_PATH
cat package.json
npx buidler coverage
verifyCoverageExists
# Install buidler-ethers
echo ""
echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
echo "Simple buidler/buidler-ethers "
echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
echo ""
cd ..
git clone https://github.com/sc-forks/example-buidler-ethers.git
cd example-buidler-ethers
npm install
# Install and run solidity-coverage @ PR
npm install --save-dev $PR_PATH
cat package.json
npx buidler coverage
verifyCoverageExists
echo ""
echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
echo "Complex: MolochDao/moloch "
echo ">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"
echo ""
# Install sc-forks/moloch
cd ..
git clone https://github.com/sc-forks/moloch.git
cd moloch
npm install
npm uninstall --save-dev solidity-coverage
# Install and run solidity-coverage @ PR
# Should run on network 'localhost'
npm install --save-dev $PR_PATH
npm run coverage
verifyCoverageExists

@ -2,9 +2,13 @@
#
# E2E CI: installs PR candidate on Truffle's MetaCoin and runs coverage
#
# Also verifies that everything works w/ truffle installed globally.
#
set -o errexit
# Get rid of any caches
sudo rm -rf node_modules
rm -rf node_modules
echo "NVM CURRENT >>>>>" && nvm current
# Use PR env variables (for forks) or fallback on local if PR not available
@ -13,19 +17,39 @@ SED_REGEX="s/git@github.com:/https:\/\/github.com\//"
if [[ -v CIRCLE_PR_REPONAME ]]; then
PR_PATH="https://github.com/$CIRCLE_PR_USERNAME/$CIRCLE_PR_REPONAME#$CIRCLE_SHA1"
else
PR_PATH=$(echo "$CIRCLE_REPOSITORY_URL#$CIRCLE_SHA1" | sudo sed "$SED_REGEX")
PR_PATH=$(echo "$CIRCLE_REPOSITORY_URL#$CIRCLE_SHA1" | sed "$SED_REGEX")
fi
echo "PR_PATH >>>>> $PR_PATH"
# Install truffle and metacoin box
npm install -g yarn
npm install -g truffle
mkdir metacoin
cd metacoin
truffle unbox metacoin --force
rm test/TestMetacoin.sol
npm init --yes
# Install config with plugin
rm truffle-config.js
echo "module.exports={plugins:['solidity-coverage']}" > truffle-config.js
cat truffle-config.js
# Install and run solidity-coverage @ PR
npm install --save-dev $PR_PATH
npx solidity-coverage
npm init --yes
yarn add $PR_PATH --dev
yarn add truffle --dev
# require("truffle") not working on global install in Circle's Windows env
if [ "$CIRCLE_JOB" == "e2e-metacoin-windows" ]; then
yarn add truffle --dev
fi
npx truffle run coverage
# Test that coverage/ was generated
if [ ! -d "coverage" ]; then
echo "ERROR: no coverage folder was created."
exit 1
fi

@ -3,6 +3,8 @@
# E2E CI: installs PR candidate on openzeppelin-solidity and runs coverage
#
set -o errexit
# Get rid of any caches
sudo rm -rf node_modules
echo "NVM CURRENT >>>>>" && nvm current
@ -18,24 +20,26 @@ fi
echo "PR_PATH >>>>> $PR_PATH"
# Install Zeppelin
git clone https://github.com/OpenZeppelin/openzeppelin-solidity.git
cd openzeppelin-solidity
npm install -g yarn;
# Install sc-forks Zeppelin fork (temporarily). It's set up to
# consume the plugin and skip a small set of GSN tests that rely on
# the client being stand-alone. (See OZ issue #1918 for discussion)
git clone https://github.com/sc-forks/openzeppelin-contracts.git
cd openzeppelin-contracts
# Update Zeppelin's script to use 0.6.x
sed -i 's/if/# /g' scripts/coverage.sh
sed -i 's/curl/# /g' scripts/coverage.sh
sed -i 's/fi/# /g' scripts/coverage.sh
sed -i 's/npx oz compile/npx oz compile --no-interactive/g' scripts/compile.sh
echo ">>>>> checkout provider-benchmarks branch"
git checkout provider-benchmarks
# Swap installed coverage for PR branch version
echo ">>>>> npm install"
npm install
echo ">>>>> yarn install"
yarn install
echo ">>>>> npm uninstall --save-dev solidity-coverage"
npm uninstall --save-dev solidity-coverage
echo ">>>>> yarn remove --dev solidity-coverage"
yarn remove solidity-coverage --dev
echo ">>>>> npm install --save-dev PR_PATH"
npm install --save-dev "$PR_PATH"
echo ">>>>> yarn add -dev $PR_PATH"
yarn add "$PR_PATH" --dev
npm run coverage
# Track perf
time npx truffle run coverage --network development

@ -1,468 +0,0 @@
/* eslint-env node, mocha */
const assert = require('assert');
const shell = require('shelljs');
const fs = require('fs');
const childprocess = require('child_process');
const mock = require('./util/mockTruffle.js');
// shell.test alias for legibility
function pathExists(path) { return shell.test('-e', path); }
// tests run out of memory in CI without this
function collectGarbage() {
if (global.gc) { global.gc(); }
}
describe('app', () => {
let testrpcProcess = null;
const script = 'node ./bin/exec.js';
const port = 8555;
const config = {
dir: './mock',
port,
testing: true,
silent: true, // <-- Set to false to debug tests
norpc: true,
};
before(done => {
const command = `npx testrpc-sc --gasLimit 0xfffffffffff --port ${port}`;
testrpcProcess = childprocess.exec(command);
testrpcProcess.stdout.on('data', data => {
if (data.includes('Listening')) {
done();
}
});
});
afterEach(() => {
mock.remove();
});
after(() => {
testrpcProcess.kill();
});
// #1: The 'config' tests ask exec.js to run testrpc on special ports; the subsequent tests use
// the testrpc launched in the before() block. For some reason the config tests fail randomly
// unless they are at the top of the suite. This is hard to debug since they pass if logging is
// turned on - there might be a timing issue around resource cleanup or something.
//
// #2: Creating repeated instances of testrpc hits the container memory limit on
// CI so these tests are disabled for that context
it('config with testrpc options string: should generate coverage, cleanup & exit(0)', () => {
if (!process.env.CI) {
const privateKey = '0x3af46c9ac38ee1f01b05f9915080133f644bf57443f504d339082cb5285ccae4';
const balance = '0xfffffffffffffff';
const testConfig = Object.assign({}, config);
testConfig.testrpcOptions = `--account="${privateKey},${balance}" --port 8777`;
testConfig.dir = './mock';
testConfig.norpc = false;
testConfig.port = 8777;
// Installed test will process.exit(1) and crash truffle if the test isn't
// loaded with the account specified above
mock.install('Simple.sol', 'testrpc-options.js', testConfig);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
collectGarbage();
}
});
it('config with test command options string: should run test', () => {
if (!process.env.CI) {
assert(pathExists('./allFiredEvents') === false, 'should start without: events log');
const testConfig = Object.assign({}, config);
testConfig.testCommand = 'mocha --timeout 5000';
testConfig.dir = './mock';
testConfig.norpc = false;
testConfig.port = 8888;
// Installed test will write a fake allFiredEvents to ./ after 4000ms
// allowing test to pass
mock.install('Simple.sol', 'command-options.js', testConfig);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
collectGarbage();
}
});
it('config racing test command: should run test after testrpc has started', () => {
if (!process.env.CI) {
assert(pathExists('./allFiredEvents') === false, 'should start without: events log');
const testConfig = Object.assign({}, config);
testConfig.testCommand = 'node ../test/util/mockTestCommand.js';
testConfig.dir = './mock';
testConfig.norpc = false;
testConfig.port = 8888;
// Installed test will write a fake allFiredEvents to ./ after 4000ms
// allowing test to pass
mock.install('Simple.sol', 'command-options.js', testConfig);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
collectGarbage();
}
});
it('contract tests events: tests should pass without errors', () => {
if (!process.env.CI) {
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
const testConfig = Object.assign({}, config);
testConfig.dir = './mock';
testConfig.norpc = false;
testConfig.port = 8889;
mock.install('Events.sol', 'events.js', testConfig);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
// Directory should have coverage report
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
// Coverage should be real.
// This test is tightly bound to the function names in Simple.sol
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const path = Object.keys(produced)[0];
assert(produced[path].fnMap['1'].name === 'test', 'coverage.json should map "test"');
assert(produced[path].fnMap['2'].name === 'getX', 'coverage.json should map "getX"');
collectGarbage();
}
});
it('trufflejs specifies coverage network: should generate coverage, cleanup and exit(0)', () => {
if (!process.env.CI) {
const trufflejs =
`module.exports = {
networks: {
development: {
host: "localhost",
port: 8545,
network_id: "*"
},
coverage: {
host: "localhost",
port: 8999,
network_id: "*",
gas: 0xfffffffffff,
gasPrice: 0x01
}
},
compilers: {
solc: {
version: "0.5.3",
}
}
};`;
const testConfig = Object.assign({}, config);
testConfig.dir = './mock';
testConfig.norpc = false;
testConfig.port = 8555; // Manually inspect that port is actually set to 8999
// Directory should be clean
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
// Run script (exits 0);
mock.install('Simple.sol', 'simple.js', testConfig, trufflejs);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
// Directory should have coverage report
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
// Coverage should be real.
// This test is tightly bound to the function names in Simple.sol
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const path = Object.keys(produced)[0];
assert(produced[path].fnMap['1'].name === 'test', 'coverage.json should map "test"');
assert(produced[path].fnMap['2'].name === 'getX', 'coverage.json should map "getX"');
collectGarbage();
}
});
it('large contract w/ many unbracketed statements (Oraclize)', () => {
const trufflejs =
`module.exports = {
networks: {
coverage: {
host: "localhost",
network_id: "*",
port: 8555,
gas: 0xfffffffffff,
gasPrice: 0x01
},
},
compilers: {
solc: {
version: "0.4.24",
}
}
};`;
// Directory should be clean
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
// Run script (exits 0);
mock.install('Oraclize.sol', 'oraclize.js', config, trufflejs, null, true);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
});
it('simple contract: should generate coverage, cleanup & exit(0)', () => {
// Directory should be clean
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
// Run script (exits 0);
mock.install('Simple.sol', 'simple.js', config);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
// Directory should have coverage report
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
// Coverage should be real.
// This test is tightly bound to the function names in Simple.sol
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const path = Object.keys(produced)[0];
assert(produced[path].fnMap['1'].name === 'test', 'coverage.json should map "test"');
assert(produced[path].fnMap['2'].name === 'getX', 'coverage.json should map "getX"');
collectGarbage();
});
it('project uses truffle-config.js: should generate coverage, cleanup and exit(0)', () => {
// Directory should be clean
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
// Run script (exits 0);
mock.install('Simple.sol', 'simple.js', config, null, 'truffle-config.js');
shell.exec(script);
assert(shell.error() === null, 'script should not error');
// Directory should have coverage report
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
// Coverage should be real.
// This test is tightly bound to the function names in Simple.sol
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const path = Object.keys(produced)[0];
assert(produced[path].fnMap['1'].name === 'test', 'coverage.json should map "test"');
assert(produced[path].fnMap['2'].name === 'getX', 'coverage.json should map "getX"');
collectGarbage();
});
it('testrpc-sc signs and recovers messages correctly', () => {
// sign.js signs and recovers
mock.install('Simple.sol', 'sign.js', config);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
collectGarbage();
});
it('tests use pure and view modifiers, including with libraries', () => {
// Directory should be clean
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
// Run script (exits 0);
mock.installLibraryTest(config);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
// Directory should have coverage report
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
// Coverage should be real.
// This test is tightly bound to the function names in TotallyPure.sol
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const path = Object.keys(produced)[0];
assert(produced[path].fnMap['1'].name === 'usesThem', 'coverage.json should map "usesThem"');
assert(produced[path].fnMap['2'].name === 'isPure', 'coverage.json should map "isPure"');
collectGarbage();
});
it('tests require assets outside of test folder: should generate coverage, cleanup & exit(0)', () => {
// Directory should be clean
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
// Run script (exits 0);
mock.install('Simple.sol', 'requires-externally.js', config);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
// Directory should have coverage report
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
// Coverage should be real.
// This test is tightly bound to the function names in Simple.sol
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const path = Object.keys(produced)[0];
assert(produced[path].fnMap['1'].name === 'test', 'coverage.json should map "test"');
assert(produced[path].fnMap['2'].name === 'getX', 'coverage.json should map "getX"');
collectGarbage();
});
it('contract only uses .call: should generate coverage, cleanup & exit(0)', () => {
// Run against contract that only uses method.call.
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
mock.install('OnlyCall.sol', 'only-call.js', config);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const path = Object.keys(produced)[0];
assert(produced[path].fnMap['1'].name === 'addTwo', 'coverage.json should map "addTwo"');
collectGarbage();
});
it('contract sends / transfers to instrumented fallback: coverage, cleanup & exit(0)', () => {
// Skipped due to https://github.com/sc-forks/solidity-coverage/issues/106
// Validate ethereumjs-vm hack to remove gas constraints on transfer() and send()
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
mock.install('Wallet.sol', 'wallet.js', config);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const path = Object.keys(produced)[0];
assert(produced[path].fnMap['1'].name === 'transferPayment', 'should map "transferPayment"');
collectGarbage();
});
it('contract uses inheritance: should generate coverage, cleanup & exit(0)', () => {
// Run against a contract that 'is' another contract
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
mock.installInheritanceTest(config);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const ownedPath = Object.keys(produced)[0];
const proxyPath = Object.keys(produced)[1];
assert(produced[ownedPath].fnMap['1'].name === 'constructor', 'coverage.json should map "constructor"');
assert(produced[proxyPath].fnMap['1'].name === 'isOwner', 'coverage.json should map "isOwner"');
collectGarbage();
});
it('contracts are skipped: should generate coverage, cleanup & exit(0)', () => {
// Skip instrumentation of some contracts
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
const testConfig = Object.assign({}, config);
testConfig.skipFiles = ['Owned.sol'];
mock.installInheritanceTest(testConfig);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const firstKey = Object.keys(produced)[0];
assert(Object.keys(produced).length === 1, 'coverage.json should only contain instrumentation for one contract');
assert(firstKey.substr(firstKey.length - 9) === 'Proxy.sol', 'coverage.json should only contain instrumentation for Proxy.sol');
collectGarbage();
});
it('truffle tests failing: should generate coverage, cleanup & exit(1)', () => {
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
// Run with Simple.sol and a failing assertion in a truffle test
mock.install('Simple.sol', 'truffle-test-fail.js', config);
shell.exec(script);
assert(shell.error() !== null, 'script should exit 1');
assert(pathExists('./coverage') === true, 'script should gen coverage folder');
assert(pathExists('./coverage.json') === true, 'script should gen coverage.json');
const produced = JSON.parse(fs.readFileSync('./coverage.json', 'utf8'));
const path = Object.keys(produced)[0];
assert(produced[path].fnMap['1'].name === 'test', 'coverage.json should map "test"');
assert(produced[path].fnMap['2'].name === 'getX', 'coverage.json should map "getX"');
collectGarbage();
});
it('deployment cost > block gasLimit: should generate coverage, cleanup & exit(0)', () => {
// Just making sure Expensive.sol compiles and deploys here.
mock.install('Expensive.sol', 'block-gas-limit.js', config);
shell.exec(script);
assert(shell.error() === null, 'script should not error');
collectGarbage();
});
it('truffle crashes: should generate NO coverage, cleanup and exit(1)', () => {
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
// Run with Simple.sol and a syntax error in the truffle test
mock.install('Simple.sol', 'truffle-crash.js', config);
shell.exec(script);
assert(shell.error() !== null, 'script should error');
assert(pathExists('./coverage') !== true, 'script should NOT gen coverage folder');
assert(pathExists('./coverage.json') !== true, 'script should NOT gen coverage.json');
collectGarbage();
});
it('instrumentation errors: should generate NO coverage, cleanup and exit(1)', () => {
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
// Run with SimpleError.sol (has syntax error) and working truffle test
mock.install('SimpleError.sol', 'simple.js', config);
shell.exec(script);
assert(shell.error() !== null, 'script should error');
assert(pathExists('./coverage') !== true, 'script should NOT gen coverage folder');
assert(pathExists('./coverage.json') !== true, 'script should NOT gen coverage.json');
collectGarbage();
});
it('no events log produced: should generate NO coverage, cleanup and exit(1)', () => {
// Run contract and test that pass but fire no events
assert(pathExists('./coverage') === false, 'should start without: coverage');
assert(pathExists('./coverage.json') === false, 'should start without: coverage.json');
mock.install('Empty.sol', 'empty.js', config);
shell.exec(script);
assert(shell.error() !== null, 'script should error');
assert(pathExists('./coverage') !== true, 'script should NOT gen coverage folder');
assert(pathExists('./coverage.json') !== true, 'script should NOT gen coverage.json');
collectGarbage();
});
});

@ -1,30 +0,0 @@
/* eslint-env node, mocha */
const solc = require('solc');
const getInstrumentedVersion = require('./../lib/instrumentSolidity.js');
const util = require('./util/util.js');
const path = require('path');
/**
* NB: passing '1' to solc as an option activates the optimiser
* NB: solc will throw if there is a compilation error, causing the test to fail
* and passing the error to mocha.
*/
describe('assembly expressions', () => {
const filePath = path.resolve('./test.sol');
it('should compile after instrumenting an assembly function with spaces in parameters', () => {
const contract = util.getCode('assembly/spaces-in-function.sol');
const info = getInstrumentedVersion(contract, filePath);
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should compile after instrumenting an assembly if statement', () => {
const contract = util.getCode('assembly/if.sol');
const info = getInstrumentedVersion(contract, filePath);
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
});

@ -1,109 +0,0 @@
/* eslint-env node, mocha */
const path = require('path');
const getInstrumentedVersion = require('./../lib/instrumentSolidity.js');
const util = require('./util/util.js');
const CoverageMap = require('./../lib/coverageMap');
const vm = require('./util/vm');
const assert = require('assert');
describe('asserts and requires', () => {
const filePath = path.resolve('./test.sol');
const pathPrefix = './';
it('should cover assert statements as if they are if statements when they pass', done => {
const contract = util.getCode('assert/Assert.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', [true]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [1, 0],
});
assert.deepEqual(mapping[filePath].s, {
1: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
it('should cover assert statements as if they are if statements when they fail', done => {
const contract = util.getCode('assert/Assert.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', [false]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [0, 1],
});
assert.deepEqual(mapping[filePath].s, {
1: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
it('should cover multi-line require statements as if they are if statements when they pass', done => {
const contract = util.getCode('assert/RequireMultiline.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', [true, true, true]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [1, 0],
});
assert.deepEqual(mapping[filePath].s, {
1: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
it('should cover multi-line require statements as if they are if statements when they fail', done => {
const contract = util.getCode('assert/RequireMultiline.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', [true, true, false]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [0, 1],
});
assert.deepEqual(mapping[filePath].s, {
1: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
});

@ -1,9 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract */
const Expensive = artifacts.require('./Expensive.sol');
contract('Expensive', () => {
it('should deploy', async () => {
const instance = await Expensive.new()
});
});

@ -1,24 +0,0 @@
/* eslint-env node, mocha */
const assert = require('assert');
const fs = require('fs');
// Fake event for Simple.sol
const fakeEvent = {"address":"6d6cf716c2a7672047e15a255d4c9624db60f215","topics":["34b35f4b1a8c3eb2caa69f05fb5aadc827cedd2d8eb3bb3623b6c4bba3baec17"],"data":"00000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000003a2f55736572732f757365722f53697465732f73632d666f726b732f6d657461636f696e2f636f6e7472616374732f4d657461436f696e2e736f6c000000000000"}
/* {
address: '7c548f8a5ba3a37774440587743bb50f58c7e91c',
topics: ['1accf53d733f86cbefdf38d52682bc905cf6715eb3d860be0b5b052e58b0741d'],
data: '0',
};*/
// Tests whether or not the testCommand option is invoked by exec.js
// Mocha's default timeout is 2000 - here we fake the creation of
// allFiredEvents at 4000.
describe('Test uses mocha', () => {
it('should run "mocha --timeout 5000" successfully', done => {
setTimeout(() => {
fs.writeFileSync('./../allFiredEvents', JSON.stringify(fakeEvent) + '\n');
done();
}, 4000);
});
});

@ -1,8 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract */
const Empty = artifacts.require('./Empty.sol');
contract('Empty', () => {
it('should deploy', () => Empty.deployed());
});

@ -1,21 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract, assert */
const Events = artifacts.require('./Events.sol');
contract('Events', accounts => {
it('logs events correctly', done => {
const loggedEvents = [];
Events.deployed().then(instance => {
const allEvents = instance.allEvents();
allEvents.on("data", event => { loggedEvents.push(event); });
instance.test(5).then(() => {
const bad = loggedEvents.filter(e => e.event !== 'LogEventOne' && e.event !== 'LogEventTwo');
assert(bad.length === 0, 'Did not filter events correctly');
done();
});
});
});
});

@ -1,14 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract, assert */
const Owned = artifacts.require('./Owned.sol');
const Proxy = artifacts.require('./Proxy.sol');
contract('Proxy', accounts => {
it('Should compile and run when one contract inherits from another', () => Owned.deployed()
.then(() => Proxy.deployed())
.then(instance => instance.isOwner.call({
from: accounts[0],
}))
.then(val => assert.equal(val, true)));
});

@ -1,17 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract, assert */
const OnlyCall = artifacts.require('./OnlyCall.sol');
contract('OnlyCall', accounts => {
it('should return val + 2', done => {
OnlyCall.deployed().then(instance => {
instance.addTwo.call(5, {
from: accounts[0],
}).then(val => {
assert.equal(val, 7);
done();
});
});
});
});

@ -1,10 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract, assert */
const usingOraclize = artifacts.require('usingOraclize');
contract('Nothing', () => {
it('nothing', async () => {
const ora = await usingOraclize.new();
await ora.test();
});
});

@ -1,17 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract, assert */
const asset = require('../assets/asset.js');
const Simple = artifacts.require('./Simple.sol');
contract('Simple', () => {
it('should be able to require an external asset', () => {
let simple;
return Simple.deployed().then(instance => {
simple = instance;
assert.equal(asset.value, true);
return simple.test(5); // Make sure we generate an event;
});
});
});

@ -1,31 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract, assert */
const ethUtil = require('ethereumjs-util');
const Simple = artifacts.require('./Simple.sol');
contract('Simple', accounts => {
it('should set x to 5', () => {
let simple;
let messageSha3;
return Simple.deployed()
.then(instance => instance.test(5)) // We need this line to generate some coverage
.then(() => {
const message = 'Enclosed is my formal application for permanent residency in New Zealand';
messageSha3 = web3.utils.sha3(message);
const signature = web3.eth.sign(messageSha3, accounts[0]);
return signature;
})
.then((signature) => {
const messageBuffer = new Buffer(messageSha3.replace('0x', ''), 'hex');
const messagePersonalHash = ethUtil.hashPersonalMessage(messageBuffer);
const sigParams = ethUtil.fromRpcSig(signature);
const publicKey = ethUtil.ecrecover(messagePersonalHash, sigParams.v, sigParams.r, sigParams.s);
const senderBuffer = ethUtil.pubToAddress(publicKey);
const sender = ethUtil.bufferToHex(senderBuffer);
assert.equal(sender, accounts[0].toLowerCase());
});
});
});

@ -1,16 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract, assert */
const Simple = artifacts.require('./Simple.sol');
contract('Simple', () => {
it('should set x to 5', () => {
let simple;
return Simple.deployed().then(instance => {
simple = instance;
return simple.test(5);
})
.then(() => simple.getX.call())
.then(val => assert.equal(val.toNumber(), 5));
});
});

@ -1,17 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract, assert */
const Simple = artifacts.require('./Simple.sol');
// This test is constructed correctly but the SimpleError.sol has a syntax error
contract('SimpleError', () => {
it('should set x to 5', () => {
let simple;
return Simple.deployed().then(instance => {
simple = instance;
return simple.test(5);
})
.then(() => simple.getX.call())
.then(val => assert.equal(val, 5));
});
});

@ -1,22 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract, assert */
const Simple = artifacts.require('./Simple.sol');
contract('Simple', accounts => {
// Crash truffle if the account loaded in the options string isn't found here.
it('should load with expected account', () => {
assert(accounts[0] === '0xA4860CEDd5143Bd63F347CaB453Bf91425f8404f');
});
// Generate some coverage so the script doesn't exit(1) because there are no events
it('should set x to 5', () => {
let simple;
return Simple.deployed().then(instance => {
simple = instance;
return simple.test(5);
})
.then(() => simple.getX.call())
.then(val => assert.equal(val.toNumber(), 5));
});
});

@ -1,16 +0,0 @@
/* eslint-env node, mocha */
/* global artifacts, contract, assert */
const Simple = artifacts.require('./Simple.sol');
contract('Simple', () => {
it('should set x to 5', () => {
let simple;
return Simple.deployed().then(instance => {
simple = instance;
return simple.test(5);
})
.then(() => simple.getX.call())
.then(val => assert.equal(val.toNumber(), 4)); // <-- Wrong result: test fails
});
});

@ -1,38 +0,0 @@
/* eslint-env node, mocha */
const path = require('path');
const getInstrumentedVersion = require('./../lib/instrumentSolidity.js');
const util = require('./util/util.js');
const solc = require('solc');
const assert = require('assert');
describe('comments', () => {
const filePath = path.resolve('./test.sol');
const pathPrefix = './';
it('should cover functions even if comments are present immediately after the opening {', () => {
const contract = util.getCode('comments/postFunctionDeclarationComment.sol');
const info = getInstrumentedVersion(contract, filePath);
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should cover lines even if comments are present', () => {
const contract = util.getCode('comments/postLineComment.sol');
const info = getInstrumentedVersion(contract, filePath);
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
assert.deepEqual([6, 5], info.runnableLines);
util.report(output.errors);
});
it('should cover contracts even if comments are present', () => {
const contract = util.getCode('comments/postContractComment.sol');
const info = getInstrumentedVersion(contract, filePath);
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should cover if statements even if comments are present immediately after opening { ', () => {
const contract = util.getCode('comments/postIfStatementComment.sol');
const info = getInstrumentedVersion(contract, filePath);
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
});

@ -1,29 +0,0 @@
/* eslint-env node, mocha */
const solc = require('solc');
const getInstrumentedVersion = require('./../lib/instrumentSolidity.js');
const util = require('./util/util.js');
const path = require('path');
/**
* NB: passing '1' to solc as an option activates the optimiser
* NB: solc will throw if there is a compilation error, causing the test to fail
* and passing the error to mocha.
*/
describe('generic expressions', () => {
const filePath = path.resolve('./test.sol');
it('should compile after instrumenting a single binary expression', () => {
const contract = util.getCode('expressions/single-binary-expression.sol');
const info = getInstrumentedVersion(contract, filePath);
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should compile after instrumenting a new expression', () => {
const contract = util.getCode('expressions/new-expression.sol');
const info = getInstrumentedVersion(contract, filePath);
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
});

@ -1,192 +0,0 @@
/* eslint-env node, mocha */
const solc = require('solc');
const getInstrumentedVersion = require('./../lib/instrumentSolidity.js');
const util = require('./util/util.js');
const path = require('path');
const CoverageMap = require('./../lib/coverageMap');
const vm = require('./util/vm');
const assert = require('assert');
/**
* NB: passing '1' to solc as an option activates the optimiser
* NB: solc will throw if there is a compilation error, causing the test to fail
* and passing the error to mocha.
*/
describe('function declarations', () => {
const filePath = path.resolve('./test.sol');
const pathPrefix = './';
it('should compile after instrumenting an ordinary function declaration', () => {
const contract = util.getCode('function/function.sol');
const info = getInstrumentedVersion(contract, 'test.sol');
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should compile after instrumenting an abstract function declaration', () => {
const contract = util.getCode('function/abstract.sol');
const info = getInstrumentedVersion(contract, 'test.sol');
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should compile after instrumenting a function declaration with an empty body', () => {
const contract = util.getCode('function/empty-body.sol');
const info = getInstrumentedVersion(contract, 'test.sol');
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should compile after instrumenting lots of declarations in row', () => {
const contract = util.getCode('function/multiple.sol');
const info = getInstrumentedVersion(contract, 'test.sol');
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should compile after instrumenting a new->constructor-->method chain', () => {
const contract = util.getCode('function/chainable-new.sol');
const info = getInstrumentedVersion(contract, 'test.sol');
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should compile after instrumenting a constructor call that chains to a method call', () => {
const contract = util.getCode('function/chainable.sol');
const info = getInstrumentedVersion(contract, 'test.sol');
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should compile after instrumenting a function with calldata keyword', () => {
const contract = util.getCode('function/calldata.sol');
const info = getInstrumentedVersion(contract, 'test.sol');
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should compile after instrumenting a constructor-->method-->value chain', () => {
const contract = util.getCode('function/chainable-value.sol');
const info = getInstrumentedVersion(contract, 'test.sol');
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should cover a simple invoked function call', done => {
const contract = util.getCode('function/function-call.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', []).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
7: 1,
});
assert.deepEqual(mapping[filePath].b, {});
assert.deepEqual(mapping[filePath].s, {
1: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
2: 1,
});
done();
}).catch(done);
});
it('should cover a modifier used on a function', done => {
const contract = util.getCode('function/modifier.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', [0]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1, 6: 1, 9: 1,
});
assert.deepEqual(mapping[filePath].b, {});
assert.deepEqual(mapping[filePath].s, {
1: 1
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
2: 1,
});
done();
}).catch(done);
});
it('should cover a constructor that uses the `constructor` keyword', done => {
const contract = util.getCode('function/constructor-keyword.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', []).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
6: 1, 11: 1
});
assert.deepEqual(mapping[filePath].b, {});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 1
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
2: 1,
});
done();
}).catch(done);
});
it('should cover a constructor call that chains to a method call', done => {
const contract = util.getCode('function/chainable.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
// We try and call a contract at an address where it doesn't exist and the VM
// throws, but we can verify line / statement / fn coverage is getting mapped.
vm.execute(info.contract, 'a', []).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
9: 1,
});
assert.deepEqual(mapping[filePath].b, {});
assert.deepEqual(mapping[filePath].s, {
1: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 0,
2: 1,
});
done();
}).catch(done);
});
it('should cover a constructor call that chains to a method call', done => {
const contract = util.getCode('function/chainable-value.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
// The vm runs out of gas here - but we can verify line / statement / fn
// coverage is getting mapped.
vm.execute(info.contract, 'a', []).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
10: 1,
});
assert.deepEqual(mapping[filePath].b, {});
assert.deepEqual(mapping[filePath].s, {
1: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 0,
2: 1,
});
done();
}).catch(done);
});
});

@ -1,274 +0,0 @@
/* eslint-env node, mocha */
const solc = require('solc');
const path = require('path');
const getInstrumentedVersion = require('./../lib/instrumentSolidity.js');
const util = require('./util/util.js');
const CoverageMap = require('./../lib/coverageMap');
const vm = require('./util/vm');
const assert = require('assert');
describe('if, else, and else if statements', () => {
const filePath = path.resolve('./test.sol');
const pathPrefix = './';
it('should compile after instrumenting multiple if-elses', () => {
const contract = util.getCode('if/else-if-unbracketed-multi.sol');
const info = getInstrumentedVersion(contract, filePath);
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should compile after instrumenting unbracketed if-elses', () => {
const contract = util.getCode('if/if-else-no-brackets.sol');
const info = getInstrumentedVersion(contract, filePath);
const output = JSON.parse(solc.compile(util.codeToCompilerInput(info.contract)));
util.report(output.errors);
});
it('should cover an if statement with a bracketed consequent', done => {
const contract = util.getCode('if/if-with-brackets.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
// Runs: a(1) => if (x == 1) { x = 3; }
vm.execute(info.contract, 'a', [1]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [1, 0],
});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
// Runs: a(1) => if (x == 1) x = 2;
it('should cover an unbracketed if consequent (single line)', done => {
const contract = util.getCode('if/if-no-brackets.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
// Same results as previous test
vm.execute(info.contract, 'a', [1]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [1, 0],
});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
it('should cover an if statement with multiline bracketed consequent', done => {
const contract = util.getCode('if/if-with-brackets-multiline.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
// Runs: a(1) => if (x == 1){\n x = 3; }
vm.execute(info.contract, 'a', [1]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1, 6: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [1, 0],
});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
// Runs: a(1) => if (x == 1)\n x = 3;
it('should cover an unbracketed if consequent (multi-line)', done => {
const contract = util.getCode('if/if-no-brackets-multiline.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
// Same results as previous test
vm.execute(info.contract, 'a', [1]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1, 6: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [1, 0],
});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
it('should cover a simple if statement with a failing condition', done => {
const contract = util.getCode('if/if-with-brackets.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
// Runs: a(2) => if (x == 1) { x = 3; }
vm.execute(info.contract, 'a', [2]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [0, 1],
});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 0,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
// Runs: a(2) => if (x == 1){\n throw;\n }else{\n x = 5; \n}
it('should cover an if statement with a bracketed alternate', done => {
const contract = util.getCode('if/else-with-brackets.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', [2]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1, 6: 0, 8: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [0, 1],
});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 0, 3: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
it('should cover an if statement with an unbracketed alternate', done => {
const contract = util.getCode('if/else-without-brackets.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', [2]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1, 6: 0, 8: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [0, 1],
});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 0, 3: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
it('should cover an else if statement with an unbracketed alternate', done => {
const contract = util.getCode('if/else-if-without-brackets.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', [2]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1, 6: 0, 8: 0,
});
assert.deepEqual(mapping[filePath].b, {
1: [0, 1], 2: [0, 1]
});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 0, 3: 1, 4: 0
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
it('should cover nested if statements with missing else statements', done => {
const contract = util.getCode('if/nested-if-missing-else.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', [2, 3, 3]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1, 7: 1,
});
assert.deepEqual(mapping[filePath].b, {
1: [0, 1], 2: [1, 0], 3: [1, 0],
});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 1, 3: 1,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
it('should cover if-elseif-else statements that are at the same depth as each other', done => {
const contract = util.getCode('if/if-elseif-else.sol');
const info = getInstrumentedVersion(contract, filePath);
const coverage = new CoverageMap();
coverage.addContract(info, filePath);
vm.execute(info.contract, 'a', [2, 3, 3]).then(events => {
const mapping = coverage.generate(events, pathPrefix);
assert.deepEqual(mapping[filePath].l, {
5: 1, 6: 0, 8: 1, 10: 0, 13: 1, 14: 0, 16: 1, 18: 0,
});
assert.deepEqual(mapping[filePath].b, {
1: [0, 1], 2: [1, 0], 3: [0, 1], 4: [1, 0]
});
assert.deepEqual(mapping[filePath].s, {
1: 1, 2: 0, 3: 1, 4: 1, 5: 0, 6: 1, 7: 0, 8: 1, 9: 1, 10: 0,
});
assert.deepEqual(mapping[filePath].f, {
1: 1,
});
done();
}).catch(done);
});
});
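For orientation, the mapping checked throughout these tests is an Istanbul-style coverage object keyed by file path, where l, b, s and f hold hit counts for lines, branches, statements and functions. Below is a minimal illustrative sketch (not part of this diff) of one entry, mirroring the bracketed-if assertions above; the numeric keys are ids assigned by the instrumenter, not source offsets.

// Hypothetical shape of the object returned by coverage.generate(events, pathPrefix)
// for the truthy run of if-with-brackets.sol; reuses the filePath variable from these tests.
const exampleMapping = {
  [filePath]: {
    l: { 5: 1, 6: 1 },   // line id -> hit count
    b: { 1: [1, 0] },    // branch id -> [consequent hits, alternate hits]
    s: { 1: 1, 2: 1 },   // statement id -> hit count
    f: { 1: 1 }          // function id -> hit count
  }
};
// The tests then compare each sub-map with assert.deepEqual, e.g.
// assert.deepEqual(mapping[filePath].b, exampleMapping[filePath].b);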

@ -3,12 +3,12 @@ pragma solidity ^0.5.0;
contract SimpleError {
uint x = 0;
function test(uint val) public {
x = x + val // <-- no semi-colon
x = x + val // <-- no semi-colon
}
function getX() public returns (uint){
return x;
}
}
}

@ -0,0 +1 @@
module.exports = { value: true };

@ -0,0 +1,25 @@
pragma solidity >=0.4.22 <0.6.0;
contract Migrations {
address public owner;
uint public last_completed_migration;
modifier restricted() {
if (msg.sender == owner) { _; }
}
constructor() public {
owner = msg.sender;
}
function setCompleted(uint completed) public restricted {
last_completed_migration = completed;
}
function upgrade(address new_address) public restricted {
Migrations upgraded = Migrations(new_address);
upgraded.setCompleted(last_completed_migration);
}
}

@ -0,0 +1,4 @@
const Migrations = artifacts.require('./Migrations.sol');
module.exports = async function(deployer) {
await deployer.deploy(Migrations);
};

@ -0,0 +1 @@
// because CircleCI won't copy the folder without contents

@ -0,0 +1,3 @@
module.exports = {
wrong, noooooo oh noooooooo.!!!!!
}

@ -0,0 +1,8 @@
const { loadPluginFile } = require("@nomiclabs/buidler/plugins-testing");
loadPluginFile(__dirname + "/../plugins/buidler.plugin");
usePlugin("@nomiclabs/buidler-truffle5");
module.exports={
defaultNetwork: "buidlerevm",
logger: process.env.SILENT ? { log: () => {} } : console,
};
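These buidler.config.js files (the same boilerplate recurs in the integration projects below) load the local copy of the plugin via loadPluginFile rather than the published package, and pull in buidler-truffle5 so the truffle-style test helpers are available. A hedged sketch of driving the plugin programmatically from inside such a project, assuming it registers a coverage task (see plugins/buidler.plugin.js in this diff):

// Hypothetical driver script, not a file in this PR. Requiring the package from inside
// a buidler project resolves buidler.config.js and returns the Buidler Runtime Environment.
const env = require("@nomiclabs/buidler");

async function main() {
  await env.run("coverage");   // task name assumed from the plugin under test
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});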

@ -0,0 +1,7 @@
module.exports = {
networks: {},
mocha: {},
compilers: {
solc: {}
}
}

@ -0,0 +1,5 @@
module.exports = {
client: require('ganache-cli'),
silent: process.env.SILENT ? true : false,
istanbulReporter: ['json-summary', 'text'],
}
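This project's .solcover.js swaps in ganache-cli as the coverage client and trims the istanbul reporters. For comparison, a hedged sketch of a fuller 0.7.x config follows; the option names are taken from the README/docs in this PR, and the values are illustrative rather than part of this test project.

// Illustrative .solcover.js sketch (assumed 0.7.x option names, example values):
module.exports = {
  client: require('ganache-cli'),                     // ganache instance the plugin launches
  providerOptions: { default_balance_ether: 1000 },   // passed straight through to the client
  skipFiles: ['Migrations.sol'],                      // contracts excluded from instrumentation
  istanbulReporter: ['html', 'json-summary', 'text'], // istanbul report formats to generate
  silent: !!process.env.SILENT
};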

@ -0,0 +1,8 @@
const { loadPluginFile } = require("@nomiclabs/buidler/plugins-testing");
loadPluginFile(__dirname + "/../plugins/buidler.plugin");
usePlugin("@nomiclabs/buidler-truffle5");
module.exports={
defaultNetwork: "buidlerevm",
logger: process.env.SILENT ? { log: () => {} } : console,
};

@ -0,0 +1,17 @@
pragma solidity ^0.5.0;
contract ContractA {
uint x;
constructor() public {
}
function sendFn() public {
x = 5;
}
function callFn() public pure returns (uint){
uint y = 5;
return y;
}
}

@ -0,0 +1,17 @@
pragma solidity ^0.5.0;
contract ContractB {
uint x;
constructor() public {
}
function sendFn() public {
x = 5;
}
function callFn() public pure returns (uint){
uint y = 5;
return y;
}
}

@ -0,0 +1,17 @@
pragma solidity ^0.5.0;
contract ContractC {
uint x;
constructor() public {
}
function sendFn() public {
x = 5;
}
function callFn() public pure returns (uint){
uint y = 5;
return y;
}
}

@ -0,0 +1,23 @@
pragma solidity >=0.4.21 <0.6.0;
contract Migrations {
address public owner;
uint public last_completed_migration;
constructor() public {
owner = msg.sender;
}
modifier restricted() {
if (msg.sender == owner) _;
}
function setCompleted(uint completed) public restricted {
last_completed_migration = completed;
}
function upgrade(address new_address) public restricted {
Migrations upgraded = Migrations(new_address);
upgraded.setCompleted(last_completed_migration);
}
}

@ -0,0 +1,15 @@
const ContractA = artifacts.require("ContractA");
contract("contracta", function(accounts) {
let instance;
before(async () => instance = await ContractA.new())
it('sends [ @skipForCoverage ]', async function(){
await instance.sendFn();
});
it('calls [ @skipForCoverage ]', async function(){
await instance.callFn();
})
});

@ -0,0 +1,15 @@
const ContractB = artifacts.require("ContractB");
contract("contractB [ @skipForCoverage ]", function(accounts) {
let instance;
before(async () => instance = await ContractB.new())
it('sends', async function(){
await instance.sendFn();
});
it('calls', async function(){
await instance.callFn();
})
});
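The [ @skipForCoverage ] strings in the test titles above are plain mocha grep tags. A hedged sketch (not a file in this diff) of the .solcover.js mocha settings a project would use to skip those tests while coverage runs; grep and invert are standard mocha options that the 0.7.x config forwards to the test run.

// Illustrative .solcover.js fragment: skip tests tagged [ @skipForCoverage ] under coverage.
module.exports = {
  mocha: {
    grep: '@skipForCoverage',  // match test titles containing the tag...
    invert: true               // ...and run everything except those matches
  }
};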

@ -0,0 +1,20 @@
const ContractC = artifacts.require("ContractC");
contract("contractc", function(accounts) {
let instance;
before(async () => instance = await ContractC.new())
it('sends', async function(){
await instance.sendFn();
});
it('calls', async function(){
await instance.callFn();
})
it('sends', async function(){
await instance.sendFn();
});
});

@ -0,0 +1,7 @@
module.exports = {
networks: {},
mocha: {},
compilers: {
solc: {}
}
}

@ -0,0 +1,4 @@
module.exports = {
silent: process.env.SILENT ? true : false,
istanbulReporter: ['json-summary', 'text']
}

@ -0,0 +1,10 @@
pragma solidity >=0.4.21 <0.6.0;
contract RelativePathImport {
uint r;
constructor() public {}
function isRelativePathMethod() public {
r = 5;
}
}

@ -0,0 +1,8 @@
const { loadPluginFile } = require("@nomiclabs/buidler/plugins-testing");
loadPluginFile(__dirname + "/../plugins/buidler.plugin");
usePlugin("@nomiclabs/buidler-truffle5");
module.exports={
defaultNetwork: "buidlerevm",
logger: process.env.SILENT ? { log: () => {} } : console,
};

@ -0,0 +1,23 @@
pragma solidity >=0.4.21 <0.6.0;
contract Migrations {
address public owner;
uint public last_completed_migration;
constructor() public {
owner = msg.sender;
}
modifier restricted() {
if (msg.sender == owner) _;
}
function setCompleted(uint completed) public restricted {
last_completed_migration = completed;
}
function upgrade(address new_address) public restricted {
Migrations upgraded = Migrations(new_address);
upgraded.setCompleted(last_completed_migration);
}
}

@ -0,0 +1,3 @@
pragma solidity >=0.4.21 <0.6.0;
import "package/AnotherImport.sol";

@ -0,0 +1,17 @@
pragma solidity >=0.4.21 <0.6.0;
import "../assets/RelativePathImport.sol";
import "package/NodeModulesImport.sol";
contract UsesImports is RelativePathImport, NodeModulesImport {
constructor() public {}
function wrapsRelativePathMethod() public {
isRelativePathMethod();
}
function wrapsNodeModulesMethod() public {
isNodeModulesMethod();
}
}

@ -0,0 +1,5 @@
const Migrations = artifacts.require("Migrations");
module.exports = function(deployer) {
deployer.deploy(Migrations);
};

@ -0,0 +1,10 @@
pragma solidity >=0.4.21 <0.6.0;
contract AnotherImport {
uint x;
constructor() public {}
function isNodeModulesMethod() public {
x = 5;
}
}

@ -0,0 +1,10 @@
pragma solidity >=0.4.21 <0.6.0;
contract NodeModulesImport {
uint x;
constructor() public {}
function isNodeModulesMethod() public {
x = 5;
}
}

@ -0,0 +1,16 @@
var UsesImports = artifacts.require("UsesImports");
contract("UsesImports", function(accounts) {
let instance;
before(async () => instance = await UsesImports.new());
it('uses a method from a relative import', async () => {
await instance.wrapsRelativePathMethod();
})
it('uses an import from node_modules', async () => {
await instance.wrapsNodeModulesMethod();
})
});

@ -0,0 +1,7 @@
module.exports = {
networks: {},
mocha: {},
compilers: {
solc: {}
}
}

@ -0,0 +1,7 @@
const { loadPluginFile } = require("@nomiclabs/buidler/plugins-testing");
loadPluginFile(__dirname + "/../plugins/buidler.plugin");
usePlugin("@nomiclabs/buidler-truffle5");
module.exports={
logger: process.env.SILENT ? { log: () => {} } : console,
};

@ -0,0 +1,23 @@
pragma solidity >=0.4.21 <0.6.0;
contract Migrations {
address public owner;
uint public last_completed_migration;
constructor() public {
owner = msg.sender;
}
modifier restricted() {
if (msg.sender == owner) _;
}
function setCompleted(uint completed) public restricted {
last_completed_migration = completed;
}
function upgrade(address new_address) public restricted {
Migrations upgraded = Migrations(new_address);
upgraded.setCompleted(last_completed_migration);
}
}

@ -1,10 +1,10 @@
pragma solidity ^0.5.0;
import "./../assets/Face.sol";
import "./../assets/PureView.sol";
import "./../assets/CLibrary.sol";
import "./_Interface.sol";
import "./PureView.sol";
import "./CLibrary.sol";
contract TotallyPure is PureView, Face {
contract UsesPure is PureView, _Interface {
uint onehundred = 99;
function usesThem() public view {

@ -0,0 +1,6 @@
pragma solidity ^0.5.0;
interface _Interface {
function stare(uint a, uint b) external;
function cry() external;
}

@ -0,0 +1,5 @@
const Migrations = artifacts.require("Migrations");
module.exports = function(deployer) {
deployer.deploy(Migrations);
};

Some files were not shown because too many files have changed in this diff.