Merge with develop

pull/534/head
Nikhil Parasaram 6 years ago
commit 2f2375eb87
  35   .circleci/config.yml
  16   Dockerfile
  29   README.md
  12   docker_build_and_deploy.sh
  2    mythril/__init__.py
  261  mythril/analysis/callgraph.py
  56   mythril/analysis/modules/delegatecall.py
  106  mythril/analysis/modules/dependence_on_predictable_vars.py
  26   mythril/analysis/modules/deprecated_ops.py
  143  mythril/analysis/modules/ether_send.py
  46   mythril/analysis/modules/exceptions.py
  142  mythril/analysis/modules/external_calls.py
  165  mythril/analysis/modules/integer.py
  42   mythril/analysis/modules/multiple_sends.py
  38   mythril/analysis/modules/suicide.py
  39   mythril/analysis/modules/transaction_order_dependence.py
  58   mythril/analysis/modules/unchecked_retval.py
  18   mythril/analysis/ops.py
  89   mythril/analysis/report.py
  83   mythril/analysis/solver.py
  27   mythril/analysis/swc_data.py
  139  mythril/analysis/symbolic.py
  2    mythril/analysis/templates/callgraph.html
  2    mythril/analysis/templates/report_as_markdown.jinja2
  1    mythril/analysis/templates/report_as_text.jinja2
  129  mythril/analysis/traceexplore.py
  107  mythril/disassembler/asm.py
  110  mythril/disassembler/disassembly.py
  156  mythril/ether/asm.py
  42   mythril/ether/ethcontract.py
  54   mythril/ether/evm.py
  66   mythril/ether/soliditycontract.py
  28   mythril/ether/util.py
  0    mythril/ethereum/__init__.py
  0    mythril/ethereum/interface/__init__.py
  0    mythril/ethereum/interface/leveldb/__init__.py
  78   mythril/ethereum/interface/leveldb/accountindexing.py
  185  mythril/ethereum/interface/leveldb/client.py
  16   mythril/ethereum/interface/leveldb/eth_db.py
  108  mythril/ethereum/interface/leveldb/state.py
  0    mythril/ethereum/interface/rpc/__init__.py
  53   mythril/ethereum/interface/rpc/base_client.py
  38   mythril/ethereum/interface/rpc/client.py
  4    mythril/ethereum/interface/rpc/constants.py
  0    mythril/ethereum/interface/rpc/exceptions.py
  21   mythril/ethereum/interface/rpc/utils.py
  381  mythril/interfaces/cli.py
  249  mythril/interfaces/epic.py
  101  mythril/laser/ethereum/call.py
  21   mythril/laser/ethereum/cfg.py
  4    mythril/laser/ethereum/evm_exceptions.py
  690  mythril/laser/ethereum/instructions.py
  1    mythril/laser/ethereum/keccak.py
  8    mythril/laser/ethereum/natives.py
  220  mythril/laser/ethereum/state.py
  25   mythril/laser/ethereum/strategy/__init__.py
  91   mythril/laser/ethereum/strategy/basic.py
  221  mythril/laser/ethereum/svm.py
  148  mythril/laser/ethereum/taint_analysis.py
  5    mythril/laser/ethereum/transaction/__init__.py
  44   mythril/laser/ethereum/transaction/concolic.py
  34   mythril/laser/ethereum/transaction/symbolic.py
  110  mythril/laser/ethereum/transaction/transaction_models.py
  20   mythril/laser/ethereum/util.py
  342  mythril/mythril.py
  8    mythril/rpc/constants.py
  19   mythril/support/loader.py
  117  mythril/support/signatures.py
  68   mythril/support/truffle.py
  2    mythril/version.py
  5    requirements.txt
  135  setup.py
  2    solidity_examples/rubixi.sol
  2    static/Ownable.html
  3    static/assertions.html
  2    static/mythril.html
  BIN  static/mythril.png
  BIN  static/mythril_new.png
  16   tests/__init__.py
  107  tests/analysis/test_delegatecall.py
  37   tests/cmd_line_test.py
  0    tests/disassembler/__init__.py
  124  tests/disassembler/asm.py
  61   tests/disassembler/disassembly.py
  5    tests/disassembler_test.py
  28   tests/ethcontract_test.py
  18   tests/graph_test.py
  52   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_0.json
  52   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_1.json
  52   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_2.json
  37   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_3.json
  37   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_4.json
  37   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_5.json
  37   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_6.json
  37   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_bigOffset.json
  52   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_bigOffset2.json
  37   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_bigSize.json
  52   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_memSizeNoQuadraticCost31.json
  52   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_memSizeQuadraticCost32.json
  52   tests/laser/evm_testsuite/VMTests/vmSha3Test/sha3_memSizeQuadraticCost32_zeroSize.json
  Some files were not shown because too many files have changed in this diff.

@@ -33,6 +33,12 @@ jobs:
- .tox/py*
- /root/.cache/pip/wheels/
- run:
name: Black style check
command: |
pip3 install --user black
python3 -m black --check /home/mythril/
- run:
background: true
name: Launch of background geth instance
@@ -61,10 +67,10 @@ jobs:
name: Sonar analysis
command: if [ -z "$CIRCLE_PR_NUMBER" ]; then if [ -z "$CIRCLE_TAG" ]; then sonar-scanner -Dsonar.projectKey=$SONAR_PROJECT_KEY -Dsonar.organization=$SONAR_ORGANIZATION -Dsonar.branch.name=$CIRCLE_BRANCH -Dsonar.projectBaseDir=/home/mythril -Dsonar.sources=mythril -Dsonar.host.url=$SONAR_HOST_URL -Dsonar.tests=/home/mythril/tests -Dsonar.login=$SONAR_LOGIN; fi; fi
- run:
name: Integration tests
command: if [ -z "$CIRCLE_PR_NUMBER" ]; then ./run-integration-tests.sh; fi
working_directory: /home
# - run:
# name: Integration tests
# command: if [ -z "$CIRCLE_PR_NUMBER" ]; then ./run-integration-tests.sh; fi
# working_directory: /home
pypi_release:
<<: *defaults
@@ -84,6 +90,8 @@ jobs:
command: twine upload dist/*
working_directory: /home/mythril
# Release of the mainstream (current stable) version as mythril/myth
# container.
dockerhub_release:
docker:
- image: docker:stable
@@ -92,7 +100,18 @@ jobs:
- setup_remote_docker
- run:
name: Building Docker Image
command: ./docker_build_and_deploy.sh
command: ./docker_build_and_deploy.sh mythril/myth
# Release of the latest development version as mythril/myth-dev container.
dockerhub_dev_release:
docker:
- image: docker:stable
steps:
- checkout
- setup_remote_docker
- run:
name: Building Docker Image
command: ./docker_build_and_deploy.sh mythril/myth-dev
workflows:
version: 2
@@ -110,6 +129,12 @@ workflows:
only: /v[0-9]+(\.[0-9]+)*/
requires:
- test
- dockerhub_dev_release:
filters:
branches:
only: develop
# requires:
# - test
- dockerhub_release:
filters:
branches:

@@ -1,7 +1,5 @@
FROM ubuntu:bionic
COPY . /opt/mythril
RUN apt-get update \
&& apt-get install -y \
build-essential \
@@ -18,14 +16,20 @@ RUN apt-get update \
python3-dev \
pandoc \
git \
&& ln -s /usr/bin/python3 /usr/local/bin/python \
&& cd /opt/mythril \
&& pip3 install -r requirements.txt \
&& python setup.py install
&& ln -s /usr/bin/python3 /usr/local/bin/python
COPY ./requirements.txt /opt/mythril/requirements.txt
RUN cd /opt/mythril \
&& pip3 install -r requirements.txt
RUN locale-gen en_US.UTF-8
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US.en
ENV LC_ALL en_US.UTF-8
COPY . /opt/mythril
RUN cd /opt/mythril \
&& python setup.py install
ENTRYPOINT ["/usr/local/bin/myth"]

@@ -1,17 +1,21 @@
# Mythril OSS [![Tweet](https://img.shields.io/twitter/url/http/shields.io.svg?style=social)](https://twitter.com/intent/tweet?text=Mythril%20-%20Security%20Analyzer%20for%20Ethereum%20Smart%20Contracts&url=https://www.github.com/ConsenSys/mythril)
# Mythril Classic
<p align="center">
<img src="/static/mythril_new.png" height="320px"/>
</p>
[![Discord](https://img.shields.io/discord/481002907366588416.svg)](https://discord.gg/E3YrVtG)
[![PyPI](https://badge.fury.io/py/mythril.svg)](https://pypi.python.org/pypi/mythril)
![Master Build Status](https://img.shields.io/circleci/project/github/ConsenSys/mythril/master.svg)
[![Waffle.io - Columns and their card count](https://badge.waffle.io/ConsenSys/mythril.svg?columns=all)](https://waffle.io/ConsenSys/mythril)
![Master Build Status](https://img.shields.io/circleci/project/github/ConsenSys/mythril-classic/master.svg)
[![Waffle.io - Columns and their card count](https://badge.waffle.io/ConsenSys/mythril-classic.svg?columns=In%20Progress)](https://waffle.io/ConsenSys/mythril-classic/)
[![Sonarcloud - Maintainability](https://sonarcloud.io/api/project_badges/measure?project=mythril&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=mythril)
[![PyPI Statistics](https://pypistats.com/badge/mythril.svg)](https://pypistats.com/package/mythril)
<img height="120px" align="right" src="https://github.com/ConsenSys/mythril/raw/master/static/mythril.png" alt="mythril" />
Mythril Classic is an open-source security analysis tool for Ethereum smart contracts. It uses concolic analysis, taint analysis and control flow checking to detect a variety of security vulnerabilities.
Mythril OSS is the classic security analysis tool for Ethereum smart contracts. It uses concolic analysis, taint analysis and control flow checking to detect a variety of security vulnerabilities.
Whether you want to contribute, need support, or want to learn what we have cooking for the future, our [Discord server](https://discord.gg/E3YrVtG) will serve your needs.
Whether you want to contribute, need support, or want to learn what we have cooking for the future, our [Discord server](https://discord.gg/E3YrVtG) will serve your needs!
Oh and by the way, we're now building a whole security tools ecosystem with [Mythril Platform API](https://mythril.ai). You should definitely check that out as well.
Oh and by the way, we're also building an easy-to-use security analysis platform (a.k.a. "the INFURA for smart contract security") that anybody can use to create purpose-built security tools. It's called [Mythril Platform](https://mythril.ai) and you should definitely [check it out](https://media.consensys.net/mythril-platform-api-is-upping-the-smart-contract-security-game-eee1d2642488).
## Installation and setup
@@ -31,13 +35,10 @@ See the [Wiki](https://github.com/ConsenSys/mythril/wiki/Installation-and-Setup)
## Usage
Instructions for using the 'myth' tool are found on the [Wiki](https://github.com/ConsenSys/mythril/wiki).
Instructions for using Mythril Classic are found on the [Wiki](https://github.com/ConsenSys/mythril-classic/wiki).
For support or general discussions please join the Mythril community on [Discord](https://discord.gg/E3YrVtG).
## Presentations, papers and videos
- [HITBSecConf 2018 conference paper](https://github.com/b-mueller/smashing-smart-contracts/blob/master/smashing-smart-contracts-1of1.pdf)
- [HITBSecConf 2018 - Smashing Ethereum smart contracts for fun and real profit](https://www.youtube.com/watch?v=iqf6epACgds)
- [EDCon Toronto 2018 - Mythril: Find bugs and verify security properties in your contracts](https://www.youtube.com/watch?v=NJ9StJThxZY&feature=youtu.be&t=3h3m18s)
## Vulnerability Remediation
Visit the [Smart Contract Vulnerability Classification Registry](https://smartcontractsecurity.github.io/SWC-registry/) to find detailed information and remediation guidance for the vulnerabilities reported.

@@ -1,9 +1,17 @@
#!/bin/sh
set -eo pipefail
NAME=mythril/myth
NAME=$1
if [ ! -z $CIRCLE_TAG ];
then
VERSION=${CIRCLE_TAG#?}
else
VERSION=${CIRCLE_SHA1}
fi
VERSION_TAG=${NAME}:${CIRCLE_TAG#?}
VERSION_TAG=${NAME}:${VERSION}
LATEST_TAG=${NAME}:latest
docker build -t ${VERSION_TAG} .

@@ -1,6 +1,6 @@
# We use RsT document formatting in docstring. For example :param to mark parameters.
# See PEP 287
__docformat__ = 'restructuredtext'
__docformat__ = "restructuredtext"
# Accept mythril.VERSION to get mythril's current version number
from .version import VERSION # NOQA

@@ -5,87 +5,119 @@ from mythril.laser.ethereum.svm import NodeFlags
import z3
default_opts = {
'autoResize': True,
'height': '100%',
'width': '100%',
'manipulation': False,
'layout': {
'improvedLayout': True,
'hierarchical': {
'enabled': True,
'levelSeparation': 450,
'nodeSpacing': 200,
'treeSpacing': 100,
'blockShifting': True,
'edgeMinimization': True,
'parentCentralization': False,
'direction': 'LR',
'sortMethod': 'directed'
}
"autoResize": True,
"height": "100%",
"width": "100%",
"manipulation": False,
"layout": {
"improvedLayout": True,
"hierarchical": {
"enabled": True,
"levelSeparation": 450,
"nodeSpacing": 200,
"treeSpacing": 100,
"blockShifting": True,
"edgeMinimization": True,
"parentCentralization": False,
"direction": "LR",
"sortMethod": "directed",
},
},
'nodes': {
'color': '#000000',
'borderWidth': 1,
'borderWidthSelected': 2,
'chosen': True,
'shape': 'box',
'font': {'align': 'left', 'color': '#FFFFFF'},
"nodes": {
"color": "#000000",
"borderWidth": 1,
"borderWidthSelected": 2,
"chosen": True,
"shape": "box",
"font": {"align": "left", "color": "#FFFFFF"},
},
'edges': {
'font': {
'color': '#FFFFFF',
'face': 'arial',
'background': 'none',
'strokeWidth': 0,
'strokeColor': '#ffffff',
'align': 'horizontal',
'multi': False,
'vadjust': 0,
"edges": {
"font": {
"color": "#FFFFFF",
"face": "arial",
"background": "none",
"strokeWidth": 0,
"strokeColor": "#ffffff",
"align": "horizontal",
"multi": False,
"vadjust": 0,
}
},
'physics': {'enabled': False}
"physics": {"enabled": False},
}
phrack_opts = {
'nodes': {
'color': '#000000',
'borderWidth': 1,
'borderWidthSelected': 1,
'shapeProperties': {
'borderDashes': False,
'borderRadius': 0,
"nodes": {
"color": "#000000",
"borderWidth": 1,
"borderWidthSelected": 1,
"shapeProperties": {"borderDashes": False, "borderRadius": 0},
"chosen": True,
"shape": "box",
"font": {"face": "courier new", "align": "left", "color": "#000000"},
},
'chosen': True,
'shape': 'box',
'font': {'face': 'courier new', 'align': 'left', 'color': '#000000'},
},
'edges': {
'font': {
'color': '#000000',
'face': 'courier new',
'background': 'none',
'strokeWidth': 0,
'strokeColor': '#ffffff',
'align': 'horizontal',
'multi': False,
'vadjust': 0,
}
"edges": {
"font": {
"color": "#000000",
"face": "courier new",
"background": "none",
"strokeWidth": 0,
"strokeColor": "#ffffff",
"align": "horizontal",
"multi": False,
"vadjust": 0,
}
},
}
default_colors = [
{'border': '#26996f', 'background': '#2f7e5b', 'highlight': {'border': '#26996f', 'background': '#28a16f'}},
{'border': '#9e42b3', 'background': '#842899', 'highlight': {'border': '#9e42b3', 'background': '#933da6'}},
{'border': '#b82323', 'background': '#991d1d', 'highlight': {'border': '#b82323', 'background': '#a61f1f'}},
{'border': '#4753bf', 'background': '#3b46a1', 'highlight': {'border': '#4753bf', 'background': '#424db3'}},
{'border': '#26996f', 'background': '#2f7e5b', 'highlight': {'border': '#26996f', 'background': '#28a16f'}},
{'border': '#9e42b3', 'background': '#842899', 'highlight': {'border': '#9e42b3', 'background': '#933da6'}},
{'border': '#b82323', 'background': '#991d1d', 'highlight': {'border': '#b82323', 'background': '#a61f1f'}},
{'border': '#4753bf', 'background': '#3b46a1', 'highlight': {'border': '#4753bf', 'background': '#424db3'}},
{
"border": "#26996f",
"background": "#2f7e5b",
"highlight": {"border": "#26996f", "background": "#28a16f"},
},
{
"border": "#9e42b3",
"background": "#842899",
"highlight": {"border": "#9e42b3", "background": "#933da6"},
},
{
"border": "#b82323",
"background": "#991d1d",
"highlight": {"border": "#b82323", "background": "#a61f1f"},
},
{
"border": "#4753bf",
"background": "#3b46a1",
"highlight": {"border": "#4753bf", "background": "#424db3"},
},
{
"border": "#26996f",
"background": "#2f7e5b",
"highlight": {"border": "#26996f", "background": "#28a16f"},
},
{
"border": "#9e42b3",
"background": "#842899",
"highlight": {"border": "#9e42b3", "background": "#933da6"},
},
{
"border": "#b82323",
"background": "#991d1d",
"highlight": {"border": "#b82323", "background": "#a61f1f"},
},
{
"border": "#4753bf",
"background": "#3b46a1",
"highlight": {"border": "#4753bf", "background": "#424db3"},
},
]
phrack_color = {'border': '#000000', 'background': '#ffffff',
'highlight': {'border': '#000000', 'background': '#ffffff'}}
phrack_color = {
"border": "#000000",
"background": "#ffffff",
"highlight": {"border": "#000000", "background": "#ffffff"},
}
def extract_nodes(statespace, color_map):
@@ -95,28 +127,43 @@ def extract_nodes(statespace, color_map):
instructions = [state.get_current_instruction() for state in node.states]
code_split = []
for instruction in instructions:
if instruction['opcode'].startswith("PUSH"):
code_line = "%d %s %s" % (instruction['address'], instruction['opcode'], instruction['argument'])
elif instruction['opcode'].startswith("JUMPDEST") and NodeFlags.FUNC_ENTRY in node.flags and instruction['address'] == node.start_addr:
if instruction["opcode"].startswith("PUSH"):
code_line = "%d %s %s" % (
instruction["address"],
instruction["opcode"],
instruction["argument"],
)
elif (
instruction["opcode"].startswith("JUMPDEST")
and NodeFlags.FUNC_ENTRY in node.flags
and instruction["address"] == node.start_addr
):
code_line = node.function_name
else:
code_line = "%d %s" % (instruction['address'], instruction['opcode'])
code_line = "%d %s" % (instruction["address"], instruction["opcode"])
code_line = re.sub("([0-9a-f]{8})[0-9a-f]+", lambda m: m.group(1) + "(...)", code_line)
code_line = re.sub(
"([0-9a-f]{8})[0-9a-f]+", lambda m: m.group(1) + "(...)", code_line
)
code_split.append(code_line)
truncated_code = '\n'.join(code_split) if (len(code_split) < 7) \
else '\n'.join(code_split[:6]) + "\n(click to expand +)"
nodes.append({
'id': str(node_key),
'color': color_map[node.get_cfg_dict()['contract_name']],
'size': 150,
'fullLabel': '\n'.join(code_split),
'label': truncated_code,
'truncLabel': truncated_code,
'isExpanded': False
})
truncated_code = (
"\n".join(code_split)
if (len(code_split) < 7)
else "\n".join(code_split[:6]) + "\n(click to expand +)"
)
nodes.append(
{
"id": str(node_key),
"color": color_map[node.get_cfg_dict()["contract_name"]],
"size": 150,
"fullLabel": "\n".join(code_split),
"label": truncated_code,
"truncLabel": truncated_code,
"isExpanded": False,
}
)
return nodes
@@ -131,21 +178,33 @@ def extract_edges(statespace):
except z3.Z3Exception:
label = str(edge.condition).replace("\n", "")
label = re.sub(r'([^_])([\d]{2}\d+)', lambda m: m.group(1) + hex(int(m.group(2))), label)
label = re.sub(
r"([^_])([\d]{2}\d+)", lambda m: m.group(1) + hex(int(m.group(2))), label
)
edges.append({
'from': str(edge.as_dict['from']),
'to': str(edge.as_dict['to']),
'arrows': 'to',
'label': label,
'smooth': {'type': 'cubicBezier'}
})
edges.append(
{
"from": str(edge.as_dict["from"]),
"to": str(edge.as_dict["to"]),
"arrows": "to",
"label": label,
"smooth": {"type": "cubicBezier"},
}
)
return edges
def generate_graph(statespace, title="Mythril / Ethereum LASER Symbolic VM", physics=False, phrackify=False):
env = Environment(loader=PackageLoader('mythril.analysis'), autoescape=select_autoescape(['html', 'xml']))
template = env.get_template('callgraph.html')
def generate_graph(
statespace,
title="Mythril / Ethereum LASER Symbolic VM",
physics=False,
phrackify=False,
):
env = Environment(
loader=PackageLoader("mythril.analysis"),
autoescape=select_autoescape(["html", "xml"]),
)
template = env.get_template("callgraph.html")
graph_opts = default_opts
accounts = statespace.accounts
@@ -154,13 +213,17 @@ def generate_graph(statespace, title="Mythril / Ethereum LASER Symbolic VM", phy
color_map = {accounts[k].contract_name: phrack_color for k in accounts}
graph_opts.update(phrack_opts)
else:
color_map = {accounts[k].contract_name: default_colors[i % len(default_colors)] for i, k in enumerate(accounts)}
color_map = {
accounts[k].contract_name: default_colors[i % len(default_colors)]
for i, k in enumerate(accounts)
}
graph_opts['physics']['enabled'] = physics
graph_opts["physics"]["enabled"] = physics
return template.render(title=title,
return template.render(
title=title,
nodes=extract_nodes(statespace, color_map),
edges=extract_edges(statespace),
phrackify=phrackify,
opts=graph_opts
opts=graph_opts,
)
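
For context: generate_graph() above follows the standard Jinja2 package-loader pattern. A minimal standalone sketch of that pattern, assuming mythril and Jinja2 are importable; the render arguments here are placeholders rather than real graph data:

from jinja2 import Environment, PackageLoader, select_autoescape

# PackageLoader("mythril.analysis") resolves templates from the package's
# templates/ directory, which is where callgraph.html lives.
env = Environment(
    loader=PackageLoader("mythril.analysis"),
    autoescape=select_autoescape(["html", "xml"]),
)
template = env.get_template("callgraph.html")

# Placeholder data; in the module these come from extract_nodes() and
# extract_edges() over the symbolic-execution statespace.
html = template.render(title="CFG", nodes=[], edges=[], phrackify=False, opts={})
print(len(html))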

@@ -1,14 +1,15 @@
import re
from mythril.analysis.swc_data import DELEGATECALL_TO_UNTRUSTED_CONTRACT
from mythril.analysis.ops import get_variable, VarType
from mythril.analysis.report import Issue
import logging
'''
"""
MODULE DESCRIPTION:
Check for invocations of delegatecall(msg.data) in the fallback function.
'''
"""
def execute(statespace):
@@ -27,7 +28,7 @@ def execute(statespace):
continue
state = call.state
address = state.get_current_instruction()['address']
address = state.get_current_instruction()["address"]
meminstart = get_variable(state.mstate.stack[-3])
if meminstart.type == VarType.CONCRETE:
@@ -40,16 +41,24 @@ def execute(statespace):
def _concrete_call(call, state, address, meminstart):
if not re.search(r'calldata.*_0', str(state.mstate.memory[meminstart.val])):
if not re.search(r"calldata.*_0", str(state.mstate.memory[meminstart.val])):
return []
issue = Issue(call.node.contract_name, call.node.function_name, address,
"Call data forwarded with delegatecall()", "Informational")
issue.description = \
"This contract forwards its call data via DELEGATECALL in its fallback function. " \
"This means that any function in the called contract can be executed. Note that the callee contract will have " \
issue = Issue(
contract=call.node.contract_name,
function_name=call.node.function_name,
address=address,
swc_id=DELEGATECALL_TO_UNTRUSTED_CONTRACT,
bytecode=state.environment.code.bytecode,
title="Call data forwarded with delegatecall()",
_type="Informational",
)
issue.description = (
"This contract forwards its call data via DELEGATECALL in its fallback function. "
"This means that any function in the called contract can be executed. Note that the callee contract will have "
"access to the storage of the calling contract.\n "
)
target = hex(call.to.val) if call.to.type == VarType.CONCRETE else str(call.to)
issue.description += "DELEGATECALL target: {}".format(target)
@@ -58,22 +67,35 @@ def _concrete_call(call, state, address, meminstart):
def _symbolic_call(call, state, address, statespace):
issue = Issue(call.node.contract_name, call.node.function_name, address, call.type + " to a user-supplied address")
issue = Issue(
contract=call.node.contract_name,
function_name=call.node.function_name,
address=address,
swc_id=DELEGATECALL_TO_UNTRUSTED_CONTRACT,
bytecode=state.environment.code.bytecode,
title=call.type + " to a user-supplied address",
)
if "calldata" in str(call.to):
issue.description = \
"This contract delegates execution to a contract address obtained from calldata. "
issue.description = "This contract delegates execution to a contract address obtained from calldata. "
else:
m = re.search(r'storage_([a-z0-9_&^]+)', str(call.to))
m = re.search(r"storage_([a-z0-9_&^]+)", str(call.to))
if m:
idx = m.group(1)
func = statespace.find_storage_write(state.environment.active_account.address, idx)
func = statespace.find_storage_write(
state.environment.active_account.address, idx
)
if func:
issue.description = "This contract delegates execution to a contract address in storage slot " + str(
idx) + ". This storage slot can be written to by calling the function `" + func + "`. "
issue.description = (
"This contract delegates execution to a contract address in storage slot "
+ str(idx)
+ ". This storage slot can be written to by calling the function `"
+ func
+ "`. "
)
else:
logging.debug("[DELEGATECALL] No storage writes to index " + str(idx))

@@ -3,10 +3,11 @@ from z3 import *
from mythril.analysis.ops import VarType
from mythril.analysis import solver
from mythril.analysis.report import Issue
from mythril.analysis.swc_data import TIMESTAMP_DEPENDENCE, PREDICTABLE_VARS_DEPENDENCE
from mythril.exceptions import UnsatError
import logging
'''
"""
MODULE DESCRIPTION:
Check for CALLs that send >0 Ether as a result of computation based on predictable variables such as
@@ -16,7 +17,7 @@ TODO:
- block.blockhash(block.number-1)
- block.blockhash(some_block_past_256_blocks_from_now)==0
- external source of random numbers (e.g. Oraclize)
'''
"""
def execute(statespace):
@@ -27,17 +28,16 @@ def execute(statespace):
for call in statespace.calls:
if ("callvalue" in str(call.value)):
if "callvalue" in str(call.value):
logging.debug("[DEPENDENCE_ON_PREDICTABLE_VARS] Skipping refund function")
continue
# We're only interested in calls that send Ether
if call.value.type == VarType.CONCRETE:
if call.value.val == 0:
if call.value.type == VarType.CONCRETE and call.value.val == 0:
continue
address = call.state.get_current_instruction()['address']
address = call.state.get_current_instruction()["address"]
description = "In the function `" + call.node.function_name + "` "
description += "the following predictable state variables are used to determine Ether recipient:\n"
@@ -56,8 +56,21 @@ def execute(statespace):
for item in found:
description += "- block.{}\n".format(item)
if solve(call):
issue = Issue(call.node.contract_name, call.node.function_name, address, "Dependence on predictable environment variable", "Warning",
description)
swc_type = (
TIMESTAMP_DEPENDENCE
if item == "timestamp"
else PREDICTABLE_VARS_DEPENDENCE
)
issue = Issue(
contract=call.node.contract_name,
function_name=call.node.function_name,
address=address,
swc_id=swc_type,
bytecode=call.state.environment.code.bytecode,
title="Dependence on predictable environment variable",
_type="Warning",
description=description,
)
issues.append(issue)
# Second check: blockhash
@@ -66,46 +79,77 @@ def execute(statespace):
if "blockhash" in str(constraint):
description = "In the function `" + call.node.function_name + "` "
if "number" in str(constraint):
m = re.search(r'blockhash\w+(\s-\s(\d+))*', str(constraint))
m = re.search(r"blockhash\w+(\s-\s(\d+))*", str(constraint))
if m and solve(call):
found = m.group(1)
if found: # block.blockhash(block.number - N)
description += "predictable expression 'block.blockhash(block.number - " + m.group(2) + \
")' is used to determine Ether recipient"
description += (
"predictable expression 'block.blockhash(block.number - "
+ m.group(2)
+ ")' is used to determine Ether recipient"
)
if int(m.group(2)) > 255:
description += ", this expression will always be equal to zero."
elif "storage" in str(constraint): # block.blockhash(block.number - storage_0)
description += "predictable expression 'block.blockhash(block.number - " + \
"some_storage_var)' is used to determine Ether recipient"
description += (
", this expression will always be equal to zero."
)
elif "storage" in str(
constraint
): # block.blockhash(block.number - storage_0)
description += (
"predictable expression 'block.blockhash(block.number - "
+ "some_storage_var)' is used to determine Ether recipient"
)
else: # block.blockhash(block.number)
description += "predictable expression 'block.blockhash(block.number)'" + \
" is used to determine Ether recipient"
description += ", this expression will always be equal to zero."
issue = Issue(call.node.contract_name, call.node.function_name, address, "Dependence on predictable variable",
"Warning", description)
description += (
"predictable expression 'block.blockhash(block.number)'"
+ " is used to determine Ether recipient"
)
description += (
", this expression will always be equal to zero."
)
issue = Issue(
contract=call.node.contract_name,
function_name=call.node.function_name,
address=address,
bytecode=call.state.environment.code.bytecode,
title="Dependence on predictable variable",
_type="Warning",
description=description,
swc_id=PREDICTABLE_VARS_DEPENDENCE,
)
issues.append(issue)
break
else:
r = re.search(r'storage_([a-z0-9_&^]+)', str(constraint))
r = re.search(r"storage_([a-z0-9_&^]+)", str(constraint))
if r: # block.blockhash(storage_0)
'''
"""
We actually can do better here by adding a constraint blockhash_block_storage_0 == 0
and checking model satisfiability. When this is done, severity can be raised
from 'Informational' to 'Warning'.
Checking that storage at the given index can be tainted is not necessary, since it usually contains
the block.number of the 'commit' transaction in a commit-reveal workflow.
'''
"""
index = r.group(1)
if index and solve(call):
description += 'block.blockhash() is calculated using a value from storage ' \
'at index {}'.format(index)
issue = Issue(call.node.contract_name, call.node.function_name, address, "Dependence on predictable variable",
"Informational", description)
description += (
"block.blockhash() is calculated using a value from storage "
"at index {}".format(index)
)
issue = Issue(
contract=call.node.contract_name,
function_name=call.node.function_name,
address=address,
bytecode=call.state.environment.code.bytecode,
title="Dependence on predictable variable",
_type="Informational",
description=description,
swc_id=PREDICTABLE_VARS_DEPENDENCE,
)
issues.append(issue)
break
return issues
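
For reference, the blockhash regex above can be exercised on its own. This is an illustrative sketch only; the constraint string is a made-up stand-in for what the symbolic engine actually produces:

import re

# Hypothetical constraint text shaped like "block.blockhash(block.number - N)".
constraint = "blockhash_block_number - 256 == storage_1"

m = re.search(r"blockhash\w+(\s-\s(\d+))*", constraint)
if m and m.group(1):
    offset = int(m.group(2))
    # Offsets greater than 255 always make blockhash() return zero on-chain,
    # which is what the "> 255" branch above points out.
    print(offset, offset > 255)  # -> 256 True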
@@ -115,9 +159,11 @@ def solve(call):
try:
model = solver.get_model(call.node.constraints)
logging.debug("[DEPENDENCE_ON_PREDICTABLE_VARS] MODEL: " + str(model))
pretty_model = solver.pretty_print_model(model)
for d in model.decls():
logging.debug("[DEPENDENCE_ON_PREDICTABLE_VARS] main model: %s = 0x%x" % (d.name(), model[d].as_long()))
logging.debug(
"[DEPENDENCE_ON_PREDICTABLE_VARS] main model: \n%s" % pretty_model
)
return True
except UnsatError:

@@ -1,12 +1,13 @@
from mythril.analysis.report import Issue
from mythril.analysis.swc_data import TX_ORIGIN_USAGE
import logging
'''
"""
MODULE DESCRIPTION:
Check for constraints on tx.origin (i.e., access to some functionality is restricted to a specific origin).
'''
"""
def execute(statespace):
@@ -22,12 +23,25 @@ def execute(statespace):
instruction = state.get_current_instruction()
if(instruction['opcode'] == "ORIGIN"):
issue = Issue(node.contract_name, node.function_name, instruction['address'], "Use of tx.origin", "Warning",
"Function " + node.function_name + " retrieves the transaction origin (tx.origin) using the ORIGIN opcode. Use msg.sender instead.\nSee also: https://solidity.readthedocs.io/en/develop/security-considerations.html#tx-origin"
if instruction["opcode"] == "ORIGIN":
description = (
"The function `{}` retrieves the transaction origin (tx.origin) using the ORIGIN opcode. "
"Use msg.sender instead.\nSee also: "
"https://solidity.readthedocs.io/en/develop/security-considerations.html#tx-origin".format(
node.function_name
)
)
issue = Issue(
contract=node.contract_name,
function_name=node.function_name,
address=instruction["address"],
title="Use of tx.origin",
bytecode=state.environment.code.bytecode,
_type="Warning",
swc_id=TX_ORIGIN_USAGE,
description=description,
)
issues.append(issue)
return issues

@@ -1,125 +1,82 @@
from z3 import *
from mythril.analysis.ops import *
from mythril.analysis import solver
from mythril.analysis.report import Issue
from mythril.analysis.swc_data import UNPROTECTED_ETHER_WITHDRAWAL
from mythril.exceptions import UnsatError
import re
import logging
'''
"""
MODULE DESCRIPTION:
Check for CALLs that send >0 Ether to either the transaction sender, or to an address provided as a function argument.
If msg.sender is checked against a value in storage, check whether that storage index is tainted (i.e. there's an unconstrained write
to that index).
'''
"""
def execute(statespace):
def execute(state_space):
logging.debug("Executing module: ETHER_SEND")
issues = []
for call in statespace.calls:
for k in state_space.nodes:
node = state_space.nodes[k]
state = call.state
address = state.get_current_instruction()['address']
for state in node.states:
issues += _analyze_state(state, node)
if "callvalue" in str(call.value):
logging.debug("[ETHER_SEND] Skipping refund function")
continue
# We're only interested in calls that send Ether
if call.value.type == VarType.CONCRETE and call.value.val == 0:
continue
interesting = False
description = "In the function `" + call.node.function_name + "` "
if re.search(r'caller', str(call.to)):
description += "a non-zero amount of Ether is sent to msg.sender.\n"
interesting = True
elif re.search(r'calldata', str(call.to)):
description += "a non-zero amount of Ether is sent to an address taken from function arguments.\n"
interesting = True
else:
m = re.search(r'storage_([a-z0-9_&^]+)', str(call.to))
if m:
idx = m.group(1)
description += "a non-zero amount of Ether is sent to an address taken from storage slot " + str(idx) + ".\n"
func = statespace.find_storage_write(state.environment.active_account.address, idx)
if func:
description += "There is a check on storage index " + str(idx) + ". This storage slot can be written to by calling the function `" + func + "`.\n"
interesting = True
else:
logging.debug("[ETHER_SEND] No storage writes to index " + str(idx))
if interesting:
node = call.node
can_solve = True
constrained = False
index = 0
while can_solve and index < len(node.constraints):
constraint = node.constraints[index]
index += 1
logging.debug("[ETHER_SEND] Constraint: " + str(constraint))
m = re.search(r'storage_([a-z0-9_&^]+)', str(constraint))
if m:
constrained = True
idx = m.group(1)
func = statespace.find_storage_write(state.environment.active_account.address, idx)
return issues
if func:
description += "\nThere is a check on storage index " + str(idx) + ". This storage slot can be written to by calling the function `" + func + "`."
else:
logging.debug("[ETHER_SEND] No storage writes to index " + str(idx))
can_solve = False
break
# CALLER may also be constrained to hardcoded address. I.e. 'caller' and some integer
def _analyze_state(state, node):
issues = []
instruction = state.get_current_instruction()
elif re.search(r"caller", str(constraint)) and re.search(r'[0-9]{20}', str(constraint)):
constrained = True
can_solve = False
break
if instruction["opcode"] != "CALL":
return []
if not constrained:
description += "It seems that this function can be called without restrictions."
call_value = state.mstate.stack[-3]
target = state.mstate.stack[-2]
if can_solve:
not_creator_constraints = []
if len(state.world_state.transaction_sequence) > 1:
creator = state.world_state.transaction_sequence[0].caller
for transaction in state.world_state.transaction_sequence[1:]:
not_creator_constraints.append(
Not(Extract(159, 0, transaction.caller) == Extract(159, 0, creator))
)
not_creator_constraints.append(
Not(Extract(159, 0, transaction.caller) == 0)
)
try:
model = solver.get_model(node.constraints)
for d in model.decls():
logging.debug("[ETHER_SEND] main model: %s = 0x%x" % (d.name(), model[d].as_long()))
debug = "SOLVER OUTPUT:\n" + solver.pretty_print_model(model)
issue = Issue(call.node.contract_name, call.node.function_name, address, "Ether send", "Warning",
description, debug)
model = solver.get_model(
node.constraints + not_creator_constraints + [call_value > 0]
)
debug = "Transaction Sequence: " + str(
solver.get_transaction_sequence(
state, node.constraints + not_creator_constraints + [call_value > 0]
)
)
issue = Issue(
contract=node.contract_name,
function_name=node.function_name,
address=instruction["address"],
swc_id=UNPROTECTED_ETHER_WITHDRAWAL,
title="Ether send",
_type="Warning",
bytecode=state.environment.code.bytecode,
description="It seems that an attacker is able to execute a call instruction,"
" this can mean that the attacker is able to extract funds "
"out of the contract.".format(target),
debug=debug,
)
issues.append(issue)
except UnsatError:
logging.debug("[ETHER_SEND] no model found")
logging.debug("[UNCHECKED_SUICIDE] no model found")
return issues
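
The not_creator_constraints assembled above are ordinary z3 constraints. A small self-contained sketch (illustrative variable names, not part of the module) of what they assert and that they are satisfiable on their own:

from z3 import BitVec, Extract, Not, Solver

creator = BitVec("creator", 256)    # caller of the contract-creation transaction
caller_1 = BitVec("caller_1", 256)  # caller of a later message call

s = Solver()
# Same shape as the constraints appended per transaction in the module:
# the caller's 160-bit address differs from the creator's and is non-zero.
s.add(Not(Extract(159, 0, caller_1) == Extract(159, 0, creator)))
s.add(Not(Extract(159, 0, caller_1) == 0))
print(s.check())  # sat: a non-creator, non-zero caller exists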

@@ -1,15 +1,16 @@
from mythril.analysis.report import Issue
from mythril.analysis.swc_data import ASSERT_VIOLATION
from mythril.exceptions import UnsatError
from mythril.analysis import solver
import logging
'''
"""
MODULE DESCRIPTION:
Checks whether any exception states are reachable.
'''
"""
def execute(statespace):
@@ -24,19 +25,38 @@ def execute(statespace):
for state in node.states:
instruction = state.get_current_instruction()
if(instruction['opcode'] == "ASSERT_FAIL"):
if instruction["opcode"] == "ASSERT_FAIL":
try:
model = solver.get_model(node.constraints)
address = state.get_current_instruction()['address']
description = "A reachable exception (opcode 0xfe) has been detected. This can be caused by type errors, division by zero, out-of-bounds array access, or assert violations. "
description += "This is acceptable in most situations. Note however that `assert()` should only be used to check invariants. Use `require()` for regular input checking. "
debug = "The exception is triggered under the following conditions:\n\n"
debug += solver.pretty_print_model(model)
issues.append(Issue(node.contract_name, node.function_name, address, "Exception state", "Informational", description, debug))
address = state.get_current_instruction()["address"]
description = (
"A reachable exception (opcode 0xfe) has been detected. "
"This can be caused by type errors, division by zero, "
"out-of-bounds array access, or assert violations. "
)
description += (
"Note that explicit `assert()` should only be used to check invariants. "
"Use `require()` for regular input checking. "
)
debug = "Transaction Sequence: " + str(
solver.get_transaction_sequence(state, node.constraints)
)
issues.append(
Issue(
contract=node.contract_name,
function_name=node.function_name,
address=address,
swc_id=ASSERT_VIOLATION,
title="Exception state",
_type="Informational",
description=description,
bytecode=state.environment.code.bytecode,
debug=debug,
)
)
except UnsatError:
logging.debug("[EXCEPTIONS] no model found")

@@ -2,42 +2,50 @@ from z3 import *
from mythril.analysis.ops import *
from mythril.analysis.report import Issue
from mythril.analysis import solver
from mythril.analysis.swc_data import REENTRANCY
import re
import logging
from mythril.laser.ethereum.cfg import JumpType
'''
"""
MODULE DESCRIPTION:
Check for call.value()() to external addresses
'''
"""
MAX_SEARCH_DEPTH = 64
def search_children(statespace, node, start_index=0, depth=0, results=[]):
def search_children(
statespace, node, transaction_id, start_index=0, depth=0, results=None
):
if results is None:
results = []
logging.debug("SEARCHING NODE %d", node.uid)
if(depth < MAX_SEARCH_DEPTH):
if depth < MAX_SEARCH_DEPTH:
n_states = len(node.states)
if n_states > start_index:
for j in range(start_index, n_states):
if node.states[j].get_current_instruction()['opcode'] == 'SSTORE':
results.append(node.states[j].get_current_instruction()['address'])
if (
node.states[j].get_current_instruction()["opcode"] == "SSTORE"
and node.states[j].current_transaction.id == transaction_id
):
results.append(node.states[j].get_current_instruction()["address"])
children = []
for edge in statespace.edges:
if edge.node_from == node.uid:
if edge.node_from == node.uid and edge.type != JumpType.Transaction:
children.append(statespace.nodes[edge.node_to])
if (len(children)):
if len(children):
for node in children:
return search_children(statespace, node, depth=depth + 1, results=results)
results += search_children(
statespace, node, transaction_id, depth=depth + 1, results=results
)
return results
@@ -52,71 +60,133 @@ def execute(statespace):
for call in statespace.calls:
state = call.state
address = state.get_current_instruction()['address']
address = state.get_current_instruction()["address"]
if (call.type == "CALL"):
if call.type == "CALL":
logging.info("[EXTERNAL_CALLS] Call to: %s, value = %s, gas = %s" % (str(call.to), str(call.value), str(call.gas)))
logging.info(
"[EXTERNAL_CALLS] Call to: %s, value = %s, gas = %s"
% (str(call.to), str(call.value), str(call.gas))
)
if (call.to.type == VarType.SYMBOLIC and (call.gas.type == VarType.CONCRETE and call.gas.val > 2300) or (call.gas.type == VarType.SYMBOLIC and "2300" not in str(call.gas))):
if (
call.to.type == VarType.SYMBOLIC
and (call.gas.type == VarType.CONCRETE and call.gas.val > 2300)
or (call.gas.type == VarType.SYMBOLIC and "2300" not in str(call.gas))
):
description = "This contract executes a message call to "
target = str(call.to)
user_supplied = False
if ("calldata" in target or "caller" in target):
if "calldata" in target or "caller" in target:
if ("calldata" in target):
if "calldata" in target:
description += "an address provided as a function argument. "
else:
description += "the address of the transaction sender. "
user_supplied = True
else:
m = re.search(r'storage_([a-z0-9_&^]+)', str(call.to))
m = re.search(r"storage_([a-z0-9_&^]+)", str(call.to))
if (m):
if m:
idx = m.group(1)
func = statespace.find_storage_write(state.environment.active_account.address, idx)
func = statespace.find_storage_write(
state.environment.active_account.address, idx
)
if func:
description += \
"an address found at storage slot " + str(idx) + ". " + \
"This storage slot can be written to by calling the function `" + func + "`. "
description += (
"an address found at storage slot "
+ str(idx)
+ ". "
+ "This storage slot can be written to by calling the function `"
+ func
+ "`. "
)
user_supplied = True
if user_supplied:
description += "Generally, it is not recommended to call user-supplied addresses using Solidity's call() construct. Note that attackers might leverage reentrancy attacks to exploit race conditions or manipulate this contract's state."
issue = Issue(call.node.contract_name, call.node.function_name, address, "Message call to external contract", "Warning", description)
description += (
"Generally, it is not recommended to call user-supplied addresses using Solidity's call() construct. "
"Note that attackers might leverage reentrancy attacks to exploit race conditions or manipulate this contract's state."
)
issue = Issue(
contract=call.node.contract_name,
function_name=call.node.function_name,
address=address,
title="Message call to external contract",
_type="Warning",
description=description,
bytecode=state.environment.code.bytecode,
swc_id=REENTRANCY,
)
else:
description += "to another contract. Make sure that the called contract is trusted and does not execute user-supplied code."
issue = Issue(call.node.contract_name, call.node.function_name, address, "Message call to external contract", "Informational", description)
issue = Issue(
contract=call.node.contract_name,
function_name=call.node.function_name,
address=address,
title="Message call to external contract",
_type="Informational",
description=description,
bytecode=state.environment.code.bytecode,
swc_id=REENTRANCY,
)
issues.append(issue)
if address not in calls_visited:
calls_visited.append(address)
logging.debug("[EXTERNAL_CALLS] Checking for state changes starting from " + call.node.function_name)
logging.debug(
"[EXTERNAL_CALLS] Checking for state changes starting from "
+ call.node.function_name
)
# Check for SSTORE in remaining instructions in current node & nodes down the CFG
state_change_addresses = search_children(statespace, call.node, call.state_index + 1, depth=0, results=[])
logging.debug("[EXTERNAL_CALLS] Detected state changes at addresses: " + str(state_change_addresses))
if (len(state_change_addresses)):
state_change_addresses = search_children(
statespace,
call.node,
call.state.current_transaction.id,
call.state_index + 1,
depth=0,
results=[],
)
logging.debug(
"[EXTERNAL_CALLS] Detected state changes at addresses: "
+ str(state_change_addresses)
)
if len(state_change_addresses):
for address in state_change_addresses:
description = "The contract account state is changed after an external call. Consider that the called contract could re-enter the function before this state change takes place. This can lead to business logic vulnerabilities."
issue = Issue(call.node.contract_name, call.node.function_name, address, "State change after external call", "Warning", description)
description = (
"The contract account state is changed after an external call. "
"Consider that the called contract could re-enter the function before this "
"state change takes place. This can lead to business logic vulnerabilities."
)
issue = Issue(
contract=call.node.contract_name,
function_name=call.node.function_name,
address=address,
title="State change after external call",
_type="Warning",
description=description,
bytecode=state.environment.code.bytecode,
swc_id=REENTRANCY,
)
issues.append(issue)
return issues
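
search_children() above is a bounded recursive walk over statespace edges that collects SSTORE addresses. A simplified standalone sketch of the same traversal idea, using toy node and edge types rather than mythril's:

from dataclasses import dataclass, field
from typing import Dict, List, Optional

@dataclass
class Node:
    uid: int
    sstore_addresses: List[int] = field(default_factory=list)

@dataclass
class Edge:
    node_from: int
    node_to: int

MAX_SEARCH_DEPTH = 64

def collect_sstores(nodes: Dict[int, Node], edges: List[Edge], node: Node,
                    depth: int = 0, results: Optional[List[int]] = None) -> List[int]:
    # Gather SSTORE addresses in this node, then recurse into its CFG children.
    if results is None:
        results = []
    if depth >= MAX_SEARCH_DEPTH:
        return results
    results += node.sstore_addresses
    for edge in edges:
        if edge.node_from == node.uid:
            collect_sstores(nodes, edges, nodes[edge.node_to], depth + 1, results)
    return results

nodes = {0: Node(0), 1: Node(1, [42]), 2: Node(2, [7])}
edges = [Edge(0, 1), Edge(1, 2)]
print(collect_sstores(nodes, edges, nodes[0]))  # [42, 7]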

@@ -2,19 +2,21 @@ from z3 import *
from mythril.analysis import solver
from mythril.analysis.ops import *
from mythril.analysis.report import Issue
from mythril.analysis.swc_data import INTEGER_OVERFLOW_AND_UNDERFLOW
from mythril.exceptions import UnsatError
from mythril.laser.ethereum.taint_analysis import TaintRunner
import re
import copy
import logging
'''
"""
MODULE DESCRIPTION:
Check for integer underflows.
For every SUB instruction, check if there's a possible state where op1 > op0.
For every ADD, MUL instruction, check if there's a possible state where op1 + op0 > 2^256 - 1
'''
"""
def execute(statespace):
"""
@@ -48,7 +50,7 @@ def _check_integer_overflow(statespace, state, node):
# Check the instruction
instruction = state.get_current_instruction()
if instruction['opcode'] not in ("ADD", "MUL"):
if instruction["opcode"] not in ("ADD", "MUL"):
return issues
# Formulate overflow constraints
@@ -68,39 +70,55 @@ def _check_integer_overflow(statespace, state, node):
op1 = BitVecVal(op1, 256)
# Formulate expression
if instruction['opcode'] == "ADD":
if instruction["opcode"] == "ADD":
expr = op0 + op1
# constraint = Not(BVAddNoOverflow(op0, op1, signed=False))
else:
expr = op1 * op0
# constraint = Not(BVMulNoOverflow(op0, op1, signed=False))
# Check satisfiable
constraint = Or(And(ULT(expr, op0), op1 != 0), And(ULT(expr, op1), op0 != 0))
# Check satisfiable
model = _try_constraints(node.constraints, [constraint])
if model is None:
logging.debug("[INTEGER_OVERFLOW] no model found")
return issues
if not _verify_integer_overflow(statespace, node, expr, state, model, constraint, op0, op1):
if not _verify_integer_overflow(
statespace, node, expr, state, model, constraint, op0, op1
):
return issues
# Build issue
issue = Issue(node.contract_name, node.function_name, instruction['address'], "Integer Overflow", "Warning")
issue.description = "A possible integer overflow exists in the function `{}`.\n" \
"The addition or multiplication may result in a value higher than the maximum representable integer.".format(
node.function_name)
issue.debug = solver.pretty_print_model(model)
issue = Issue(
contract=node.contract_name,
function_name=node.function_name,
address=instruction["address"],
swc_id=INTEGER_OVERFLOW_AND_UNDERFLOW,
bytecode=state.environment.code.bytecode,
title="Integer Overflow",
_type="Warning",
)
issue.description = "The arithmetic operation can result in integer overflow.\n"
issue.debug = "Transaction Sequence: " + str(
solver.get_transaction_sequence(state, node.constraints)
)
issues.append(issue)
return issues
def _verify_integer_overflow(statespace, node, expr, state, model, constraint, op0, op1):
def _verify_integer_overflow(
statespace, node, expr, state, model, constraint, op0, op1
):
""" Verifies existence of integer overflow """
# If we get to this point then there has been an integer overflow
# Find out if the overflowed value is actually used
interesting_usages = _search_children(statespace, node, expr, constraint=[constraint], index=node.states.index(state))
interesting_usages = _search_children(
statespace, node, expr, constraint=[constraint], index=node.states.index(state)
)
# Stop if it isn't
if len(interesting_usages) == 0:
@@ -108,16 +126,14 @@ def _verify_integer_overflow(statespace, node, expr, state, model, constraint, o
return _try_constraints(node.constraints, [Not(constraint)]) is not None
def _try_constraints(constraints, new_constraints):
"""
Tries new constraints
:return Model if satisfiable otherwise None
"""
_constraints = copy.deepcopy(constraints)
for constraint in new_constraints:
_constraints.append(copy.deepcopy(constraint))
try:
model = solver.get_model(_constraints)
model = solver.get_model(constraints + new_constraints)
return model
except UnsatError:
return None
@@ -132,7 +148,7 @@ def _check_integer_underflow(statespace, state, node):
"""
issues = []
instruction = state.get_current_instruction()
if instruction['opcode'] == "SUB":
if instruction["opcode"] == "SUB":
stack = state.mstate.stack
@@ -147,15 +163,22 @@ def _check_integer_underflow(statespace, state, node):
# Pattern 2: (256*If(1 & storage_0 == 0, 1, 0)) - 1, this would underflow if storage_0 = 0
if type(op0) == int and type(op1) == int:
return []
if re.search(r'calldatasize_', str(op0)):
if re.search(r"calldatasize_", str(op0)):
return []
if re.search(r'256\*.*If\(1', str(op0), re.DOTALL) or re.search(r'256\*.*If\(1', str(op1), re.DOTALL):
if re.search(r"256\*.*If\(1", str(op0), re.DOTALL) or re.search(
r"256\*.*If\(1", str(op1), re.DOTALL
):
return []
if re.search(r'32 \+.*calldata', str(op0), re.DOTALL) or re.search(r'32 \+.*calldata', str(op1), re.DOTALL):
if re.search(r"32 \+.*calldata", str(op0), re.DOTALL) or re.search(
r"32 \+.*calldata", str(op1), re.DOTALL
):
return []
logging.debug("[INTEGER_UNDERFLOW] Checking SUB {0}, {1} at address {2}".format(str(op0), str(op1),
str(instruction['address'])))
logging.debug(
"[INTEGER_UNDERFLOW] Checking SUB {0}, {1} at address {2}".format(
str(op0), str(op1), str(instruction["address"])
)
)
allowed_types = [int, BitVecRef, BitVecNumRef]
if type(op0) in allowed_types and type(op1) in allowed_types:
@@ -167,19 +190,31 @@ def _check_integer_underflow(statespace, state, node):
# If we get to this point then there has been an integer overflow
# Find out if the overflowed value is actually used
interesting_usages = _search_children(statespace, node, (op0 - op1), index=node.states.index(state))
interesting_usages = _search_children(
statespace, node, (op0 - op1), index=node.states.index(state)
)
# Stop if it isn't
if len(interesting_usages) == 0:
return issues
issue = Issue(node.contract_name, node.function_name, instruction['address'], "Integer Underflow",
"Warning")
issue.description = "A possible integer underflow exists in the function `" + node.function_name + "`.\n" \
"The subtraction may result in a value < 0."
issue.debug = solver.pretty_print_model(model)
issue = Issue(
contract=node.contract_name,
function_name=node.function_name,
address=instruction["address"],
swc_id=INTEGER_OVERFLOW_AND_UNDERFLOW,
bytecode=state.environment.code.bytecode,
title="Integer Underflow",
_type="Warning",
)
issue.description = (
"The subtraction can result in an integer underflow.\n"
)
issue.debug = "Transaction Sequence: " + str(
solver.get_transaction_sequence(state, node.constraints)
)
issues.append(issue)
except UnsatError:
@@ -189,29 +224,39 @@ def _check_integer_underflow(statespace, state, node):
def _check_usage(state, taint_result):
"""Delegates checks to _check_{instruction_name}()"""
opcode = state.get_current_instruction()['opcode']
opcode = state.get_current_instruction()["opcode"]
if opcode == 'JUMPI':
if opcode == "JUMPI":
if _check_jumpi(state, taint_result):
return [state]
elif opcode == 'SSTORE':
elif opcode == "SSTORE":
if _check_sstore(state, taint_result):
return [state]
return []
def _check_jumpi(state, taint_result):
""" Check if conditional jump is dependent on the result of expression"""
assert state.get_current_instruction()['opcode'] == 'JUMPI'
assert state.get_current_instruction()["opcode"] == "JUMPI"
return taint_result.check(state, -2)
def _check_sstore(state, taint_result):
""" Check if store operation is dependent on the result of expression"""
assert state.get_current_instruction()['opcode'] == 'SSTORE'
assert state.get_current_instruction()["opcode"] == "SSTORE"
return taint_result.check(state, -2)
def _search_children(statespace, node, expression, taint_result=None, constraint=[], index=0, depth=0, max_depth=64):
def _search_children(
statespace,
node,
expression,
taint_result=None,
constraint=None,
index=0,
depth=0,
max_depth=64,
):
"""
Checks the statespace for children states, with JUMPI or SSTORE instuctions,
for dependency on expression
@@ -224,13 +269,18 @@ def _search_children(statespace, node, expression, taint_result=None, constraint
:param max_depth: Max depth to explore
:return: List of states that match the opcodes and are dependent on expression
"""
if constraint is None:
constraint = []
logging.debug("SEARCHING NODE for usage of an overflowed variable %d", node.uid)
if taint_result is None:
state = node.states[index]
taint_stack = [False for _ in state.mstate.stack]
taint_stack[-1] = True
taint_result = TaintRunner.execute(statespace, node, state, initial_stack=taint_stack)
taint_result = TaintRunner.execute(
statespace, node, state, initial_stack=taint_stack
)
results = []
@@ -241,17 +291,14 @@ def _search_children(statespace, node, expression, taint_result=None, constraint
for j in range(index, len(node.states)):
current_state = node.states[j]
current_instruction = current_state.get_current_instruction()
if current_instruction['opcode'] in ('JUMPI', 'SSTORE'):
if current_instruction["opcode"] in ("JUMPI", "SSTORE"):
element = _check_usage(current_state, taint_result)
if len(element) < 1:
continue
if _check_requires(element[0], node, statespace, constraint):
continue
results += element
# Recursively search children
children = \
[
children = [
statespace.nodes[edge.node_to]
for edge in statespace.edges
if edge.node_from == node.uid
@@ -259,27 +306,13 @@ def _search_children(statespace, node, expression, taint_result=None, constraint
]
for child in children:
results += _search_children(statespace, child, expression, taint_result, depth=depth + 1, max_depth=max_depth)
results += _search_children(
statespace,
child,
expression,
taint_result,
depth=depth + 1,
max_depth=max_depth,
)
return results
def _check_requires(state, node, statespace, constraint):
"""Checks if usage of overflowed statement results in a revert statement"""
instruction = state.get_current_instruction()
if instruction['opcode'] is not "JUMPI":
return False
children = [
statespace.nodes[edge.node_to]
for edge in statespace.edges
if edge.node_from == node.uid
]
for child in children:
opcodes = [s.get_current_instruction()['opcode'] for s in child.states]
if "REVERT" in opcodes or "ASSERT_FAIL" in opcodes:
return True
# I added the following case, bc of false positives if the max depth is not high enough
if len(children) == 0:
return True
return False
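
The overflow condition used in _check_integer_overflow above can also be checked in isolation with z3. A minimal sketch with illustrative operand values:

from z3 import BitVec, BitVecVal, ULT, And, Or, Solver

op0 = BitVec("op0", 256)
op1 = BitVecVal(1, 256)
expr = op0 + op1  # 256-bit addition wraps modulo 2**256

# Unsigned overflow occurred iff the result wrapped below one of the operands.
constraint = Or(And(ULT(expr, op0), op1 != 0), And(ULT(expr, op1), op0 != 0))

s = Solver()
s.add(constraint)
print(s.check())       # sat
print(s.model()[op0])  # 2**256 - 1: adding 1 wraps the result to 0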

@@ -1,5 +1,7 @@
from mythril.analysis.report import Issue
from mythril.analysis.swc_data import *
from mythril.laser.ethereum.cfg import JumpType
"""
MODULE DESCRIPTION:
@@ -20,17 +22,26 @@ def execute(statespace):
if len(findings) > 0:
node = call.node
instruction = call.state.get_current_instruction()
issue = Issue(node.contract_name, node.function_name, instruction['address'],
"Multiple Calls",
"Informational")
issue = Issue(
contract=node.contract_name,
function_name=node.function_name,
address=instruction["address"],
swc_id=MULTIPLE_SENDS,
bytecode=call.state.environment.code.bytecode,
title="Multiple Calls",
_type="Informational",
)
issue.description = \
"Multiple sends exist in one transaction, try to isolate each external call into its own transaction." \
" As external calls can fail accidentally or deliberately.\nConsecutive calls: \n"
issue.description = (
"Multiple sends are executed in a single transaction. "
"Try to isolate each external call into its own transaction,"
" as external calls can fail accidentally or deliberately.\nConsecutive calls: \n"
)
for finding in findings:
issue.description += \
"Call at address: {}\n".format(finding.state.get_current_instruction()['address'])
issue.description += "Call at address: {}\n".format(
finding.state.get_current_instruction()["address"]
)
issues.append(issue)
return issues
@@ -38,21 +49,28 @@ def execute(statespace):
def _explore_nodes(call, statespace):
children = _child_nodes(statespace, call.node)
sending_children = list(filter(lambda call: call.node in children, statespace.calls))
sending_children = list(filter(lambda c: c.node in children, statespace.calls))
return sending_children
def _explore_states(call, statespace):
other_calls = list(
filter(lambda other: other.node == call.node and other.state_index > call.state_index, statespace.calls)
filter(
lambda other: other.node == call.node
and other.state_index > call.state_index,
statespace.calls,
)
)
return other_calls
def _child_nodes(statespace, node):
result = []
children = [statespace.nodes[edge.node_to] for edge in statespace.edges if edge.node_from == node.uid
and edge.type != JumpType.Transaction]
children = [
statespace.nodes[edge.node_to]
for edge in statespace.edges
if edge.node_from == node.uid and edge.type != JumpType.Transaction
]
for child in children:
result.append(child)

@ -1,16 +1,17 @@
from mythril.analysis import solver
from mythril.analysis.ops import *
from mythril.analysis.report import Issue
from mythril.analysis.swc_data import UNPROTECTED_SELFDESTRUCT
from mythril.exceptions import UnsatError
import logging
'''
"""
MODULE DESCRIPTION:
Check for SUICIDE instructions that either can be reached by anyone, or where msg.sender is checked against a tainted storage index
(i.e. there is a write to that index that is unconstrained by msg.sender).
'''
"""
def execute(state_space):
@ -32,13 +33,14 @@ def _analyze_state(state, node):
issues = []
instruction = state.get_current_instruction()
if instruction['opcode'] != "SUICIDE":
if instruction["opcode"] != "SUICIDE":
return []
to = state.mstate.stack[-1]
logging.debug("[UNCHECKED_SUICIDE] suicide in function " + node.function_name)
description = "The function `" + node.function_name + "` executes the SUICIDE instruction. "
description = "A reachable SUICIDE instruction was detected. "
if "caller" in str(to):
description += "The remaining Ether is sent to the caller's address.\n"
@ -55,15 +57,33 @@ def _analyze_state(state, node):
if len(state.world_state.transaction_sequence) > 1:
creator = state.world_state.transaction_sequence[0].caller
for transaction in state.world_state.transaction_sequence[1:]:
not_creator_constraints.append(Not(Extract(159, 0, transaction.caller) == Extract(159, 0, creator)))
not_creator_constraints.append(Not(Extract(159, 0, transaction.caller) == 0))
not_creator_constraints.append(
Not(Extract(159, 0, transaction.caller) == Extract(159, 0, creator))
)
not_creator_constraints.append(
Not(Extract(159, 0, transaction.caller) == 0)
)
try:
model = solver.get_model(node.constraints + not_creator_constraints)
debug = "SOLVER OUTPUT:\n" + solver.pretty_print_model(model)
issue = Issue(node.contract_name, node.function_name, instruction['address'], "Unchecked SUICIDE", "Warning", description, debug)
debug = "Transaction Sequence: " + str(
solver.get_transaction_sequence(
state, node.constraints + not_creator_constraints
)
)
issue = Issue(
contract=node.contract_name,
function_name=node.function_name,
address=instruction["address"],
swc_id=UNPROTECTED_SELFDESTRUCT,
bytecode=state.environment.code.bytecode,
title="Unchecked SUICIDE",
_type="Warning",
description=description,
debug=debug,
)
issues.append(issue)
except UnsatError:
logging.debug("[UNCHECKED_SUICIDE] no model found")

@ -1,17 +1,19 @@
import logging
import re
import copy
from mythril.analysis import solver
from mythril.analysis.ops import *
from mythril.analysis.report import Issue
from mythril.analysis.swc_data import TX_ORDER_DEPENDENCE
from mythril.exceptions import UnsatError
'''
"""
MODULE DESCRIPTION:
This module finds the existence of transaction order dependence vulnerabilities.
The following webpage contains an extensive description of the vulnerability:
https://consensys.github.io/smart-contract-best-practices/known_attacks/#transaction-ordering-dependence-tod-front-running
'''
"""
def execute(statespace):
@ -23,20 +25,28 @@ def execute(statespace):
for call in statespace.calls:
# Do analysis
interesting_storages = list(_get_influencing_storages(call))
changing_sstores = list(_get_influencing_sstores(statespace, interesting_storages))
changing_sstores = list(
_get_influencing_sstores(statespace, interesting_storages)
)
# Build issue if necessary
if len(changing_sstores) > 0:
node = call.node
instruction = call.state.get_current_instruction()
issue = Issue(node.contract_name, node.function_name, instruction['address'],
"Transaction order dependence",
"Warning")
issue.description = \
"A possible transaction order independence vulnerability exists in function {}. The value or " \
"direction of the call statement is determined from a tainted storage location"\
.format(node.function_name)
issue = Issue(
contract=node.contract_name,
function_name=node.function_name,
address=instruction["address"],
title="Transaction order dependence",
bytecode=call.state.environment.code.bytecode,
swc_id=TX_ORDER_DEPENDENCE,
_type="Warning",
)
issue.description = (
"Possible transaction order dependence vulnerability: The value or "
"direction of the call statement is determined from a tainted storage location"
)
issues.append(issue)
return issues
@ -65,7 +75,7 @@ def _get_storage_variable(storage, state):
:param state: state to retrieve the variable from
:return: z3 object representing storage
"""
index = int(re.search('[0-9]+', storage).group())
index = int(re.search("[0-9]+", storage).group())
try:
return state.environment.active_account.storage[index]
except KeyError:
@ -85,6 +95,7 @@ def _can_change(constraints, variable):
except AttributeError:
return False
def _get_influencing_storages(call):
""" Examines a Call object and returns an iterator of all storages that influence the call value or direction"""
state = call.state
@ -108,11 +119,11 @@ def _get_influencing_storages(call):
def _get_influencing_sstores(statespace, interesting_storages):
""" Gets sstore (state, node) tuples that write to interesting_storages"""
for sstore_state, node in _get_states_with_opcode(statespace, 'SSTORE'):
for sstore_state, node in _get_states_with_opcode(statespace, "SSTORE"):
index, value = sstore_state.mstate.stack[-1], sstore_state.mstate.stack[-2]
try:
index = util.get_concrete_int(index)
except AttributeError:
except TypeError:
index = str(index)
if "storage_{}".format(index) not in interesting_storages:
continue

@ -1,10 +1,12 @@
from mythril.analysis.report import Issue
from mythril.analysis.swc_data import UNCHECKED_RET_VAL
from mythril.laser.ethereum.svm import NodeFlags
import logging
import re
'''
"""
MODULE DESCRIPTION:
Test whether CALL return value is checked.
@ -20,7 +22,7 @@ For low-level-calls this check is omitted. E.g.:
c.call.value(0)(bytes4(sha3("ping(uint256)")),1);
'''
"""
def execute(statespace):
@ -41,17 +43,28 @@ def execute(statespace):
instr = state.get_current_instruction()
if (instr['opcode'] == 'ISZERO' and re.search(r'retval', str(state.mstate.stack[-1]))):
if instr["opcode"] == "ISZERO" and re.search(
r"retval", str(state.mstate.stack[-1])
):
retval_checked = True
break
if not retval_checked:
address = state.get_current_instruction()['address']
issue = Issue(node.contract_name, node.function_name, address, "Unchecked CALL return value")
issue.description = \
"The return value of an external call is not checked. Note that execution continue even if the called contract throws."
address = state.get_current_instruction()["address"]
issue = Issue(
contract=node.contract_name,
function_name=node.function_name,
address=address,
bytecode=state.environment.code.bytecode,
title="Unchecked CALL return value",
swc_id=UNCHECKED_RET_VAL,
)
issue.description = (
"The return value of an external call is not checked. "
"Note that execution continue even if the called contract throws."
)
issues.append(issue)
@ -59,12 +72,14 @@ def execute(statespace):
n_states = len(node.states)
for idx in range(0, n_states - 1): # Ignore CALLs at last position in a node
for idx in range(
0, n_states - 1
): # Ignore CALLs at last position in a node
state = node.states[idx]
instr = state.get_current_instruction()
if (instr['opcode'] == 'CALL'):
if instr["opcode"] == "CALL":
retval_checked = False
@ -74,7 +89,9 @@ def execute(statespace):
_state = node.states[_idx]
_instr = _state.get_current_instruction()
if (_instr['opcode'] == 'ISZERO' and re.search(r'retval', str(_state .mstate.stack[-1]))):
if _instr["opcode"] == "ISZERO" and re.search(
r"retval", str(_state.mstate.stack[-1])
):
retval_checked = True
break
@ -83,11 +100,20 @@ def execute(statespace):
if not retval_checked:
address = instr['address']
issue = Issue(node.contract_name, node.function_name, address, "Unchecked CALL return value")
issue.description = \
"The return value of an external call is not checked. Note that execution continue even if the called contract throws."
address = instr["address"]
issue = Issue(
contract=node.contract_name,
function_name=node.function_name,
bytecode=state.environment.code.bytecode,
address=address,
title="Unchecked CALL return value",
swc_id=UNCHECKED_RET_VAL,
)
issue.description = (
"The return value of an external call is not checked. "
"Note that execution continue even if the called contract throws."
)
issues.append(issue)

@ -9,7 +9,6 @@ class VarType(Enum):
class Variable:
def __init__(self, val, _type):
self.val = val
self.type = _type
@ -21,12 +20,11 @@ class Variable:
def get_variable(i):
try:
return Variable(util.get_concrete_int(i), VarType.CONCRETE)
except AttributeError:
except TypeError:
return Variable(simplify(i), VarType.SYMBOLIC)
class Op:
def __init__(self, node, state, state_index):
self.node = node
self.state = state
@ -34,8 +32,17 @@ class Op:
class Call(Op):
def __init__(self, node, state, state_index, _type, to, gas, value=Variable(0, VarType.CONCRETE), data=None):
def __init__(
self,
node,
state,
state_index,
_type,
to,
gas,
value=Variable(0, VarType.CONCRETE),
data=None,
):
super().__init__(node, state, state_index)
self.to = to
@ -46,7 +53,6 @@ class Call(Op):
class SStore(Op):
def __init__(self, node, state, state_index, value):
super().__init__(node, state, state_index)
self.value = value

@ -1,47 +1,84 @@
import hashlib
import logging
import json
import operator
from jinja2 import PackageLoader, Environment
import _pysha3 as sha3
import hashlib
class Issue:
def __init__(self, contract, function, address, title, _type="Informational", description="", debug=""):
class Issue:
def __init__(
self,
contract,
function_name,
address,
swc_id,
title,
bytecode,
_type="Informational",
description="",
debug="",
):
self.title = title
self.contract = contract
self.function = function
self.function = function_name
self.address = address
self.description = description
self.type = _type
self.debug = debug
self.swc_id = swc_id
self.filename = None
self.code = None
self.lineno = None
try:
keccak = sha3.keccak_256()
keccak.update(bytes.fromhex(bytecode))
self.bytecode_hash = "0x" + keccak.hexdigest()
except ValueError:
logging.debug(
"Unable to change the bytecode to bytes. Bytecode: {}".format(bytecode)
)
self.bytecode_hash = ""
@property
def as_dict(self):
issue = {'title': self.title, 'contract': self.contract, 'description': self.description, 'function': self.function, 'type': self.type, 'address': self.address, 'debug': self.debug}
issue = {
"title": self.title,
"swc_id": self.swc_id,
"contract": self.contract,
"description": self.description,
"function": self.function,
"type": self.type,
"address": self.address,
"debug": self.debug,
}
if self.filename and self.lineno:
issue['filename'] = self.filename
issue['lineno'] = self.lineno
issue["filename"] = self.filename
issue["lineno"] = self.lineno
if self.code:
issue['code'] = self.code
issue["code"] = self.code
return issue
def add_code_info(self, contract):
if self.address:
codeinfo = contract.get_source_info(self.address, constructor=(self.function == 'constructor'))
codeinfo = contract.get_source_info(
self.address, constructor=(self.function == "constructor")
)
self.filename = codeinfo.filename
self.code = codeinfo.code
self.lineno = codeinfo.lineno
class Report:
environment = Environment(loader=PackageLoader('mythril.analysis'), trim_blocks=True)
environment = Environment(
loader=PackageLoader("mythril.analysis"), trim_blocks=True
)
def __init__(self, verbose=False):
self.issues = {}
@ -50,26 +87,44 @@ class Report:
def sorted_issues(self):
issue_list = [issue.as_dict for key, issue in self.issues.items()]
return sorted(issue_list, key=operator.itemgetter('address', 'title'))
return sorted(issue_list, key=operator.itemgetter("address", "title"))
def append_issue(self, issue):
m = hashlib.md5()
m.update((issue.contract + str(issue.address) + issue.title).encode('utf-8'))
m.update((issue.contract + str(issue.address) + issue.title).encode("utf-8"))
self.issues[m.digest()] = issue
def as_text(self):
name = self._file_name()
template = Report.environment.get_template('report_as_text.jinja2')
return template.render(filename=name, issues=self.sorted_issues(), verbose=self.verbose)
template = Report.environment.get_template("report_as_text.jinja2")
return template.render(
filename=name, issues=self.sorted_issues(), verbose=self.verbose
)
def as_json(self):
result = {'success': True, 'error': None, 'issues': self.sorted_issues()}
result = {"success": True, "error": None, "issues": self.sorted_issues()}
return json.dumps(result, sort_keys=True)
def as_swc_standard_format(self):
""" Format defined for integration and correlation"""
result = {
"issues": [
{
"swc-id": "SWC-{}".format(issue.swc_id),
"bytecodeOffset": issue.address,
"codeHash": issue.bytecode_hash,
}
for issue in self.issues.values()
]
}
return json.dumps(result, sort_keys=True)
def as_markdown(self):
filename = self._file_name()
template = Report.environment.get_template('report_as_markdown.jinja2')
return template.render(filename=filename, issues=self.sorted_issues(), verbose=self.verbose)
template = Report.environment.get_template("report_as_markdown.jinja2")
return template.render(
filename=filename, issues=self.sorted_issues(), verbose=self.verbose
)
def _file_name(self):
if len(self.issues.values()) > 0:

@ -1,5 +1,8 @@
from z3 import Solver, simplify, sat, unknown
from z3 import Solver, simplify, sat, unknown, FuncInterp, UGE
from mythril.exceptions import UnsatError
from mythril.laser.ethereum.transaction.transaction_models import (
ContractCreationTransaction,
)
import logging
@ -22,12 +25,88 @@ def pretty_print_model(model):
ret = ""
for d in model.decls():
if type(model[d]) == FuncInterp:
condition = model[d].as_list()
ret += "%s: %s\n" % (d.name(), condition)
continue
try:
condition = "0x%x" % model[d].as_long()
except:
condition = str(simplify(model[d]))
ret += ("%s: %s\n" % (d.name(), condition))
ret += "%s: %s\n" % (d.name(), condition)
return ret
def get_transaction_sequence(global_state, constraints):
"""
Generate concrete transaction sequence
:param global_state: GlobalState to generate transaction sequence for
:param constraints: list of constraints used to generate transaction sequence
"""
transaction_sequence = global_state.world_state.transaction_sequence
# gaslimit & gasprice don't exist yet
tx_template = {
"calldata": None,
"call_value": None,
"caller": "0xCA11EDEADBEEF37E636E6CA11EDEADBEEFCA11ED",
}
txs = {}
creation_tx_ids = []
tx_constraints = constraints.copy()
for transaction in transaction_sequence:
tx_id = str(transaction.id)
if not isinstance(transaction, ContractCreationTransaction):
# Constrain calldatasize
max_calldatasize = 5000
if max_calldatasize is not None:
tx_constraints.append(
UGE(max_calldatasize, transaction.call_data.calldatasize)
)
txs[tx_id] = tx_template.copy()
else:
creation_tx_ids.append(tx_id)
model = get_model(tx_constraints)
for transaction in transaction_sequence:
if not isinstance(transaction, ContractCreationTransaction):
tx_id = str(transaction.id)
txs[tx_id]["calldata"] = "0x" + "".join(
[
hex(b)[2:] if len(hex(b)) % 2 == 0 else "0" + hex(b)[2:]
for b in transaction.call_data.concretized(model)
]
)
for d in model.decls():
name = d.name()
if "call_value" in name:
tx_id = name.replace("call_value", "")
if tx_id not in creation_tx_ids:
call_value = "0x%x" % model[d].as_long()
txs[tx_id]["call_value"] = call_value
if "caller" in name:
# caller is 'creator' for creation transactions
tx_id = name.replace("caller", "")
caller = "0x" + ("%x" % model[d].as_long()).zfill(64)
txs[tx_id]["caller"] = caller
return txs
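For orientation, the mapping returned above keys each transaction by its id and holds the concretized inputs; a result might look roughly like this (values are illustrative, not taken from a real run):
# Sketch of a get_transaction_sequence() result
# {
#     "2": {
#         "calldata": "0xddf252ad",
#         "call_value": "0x0",
#         "caller": "0xCA11EDEADBEEF37E636E6CA11EDEADBEEFCA11ED",
#     }
# }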

@ -0,0 +1,27 @@
DEFAULT_FUNCTION_VISIBILITY = "100"
INTEGER_OVERFLOW_AND_UNDERFLOW = "101"
OUTDATED_COMPILER_VERSION = "102"
FLOATING_PRAGMA = "103"
UNCHECKED_RET_VAL = "104"
UNPROTECTED_ETHER_WITHDRAWAL = "105"
UNPROTECTED_SELFDESTRUCT = "106"
REENTRANCY = "107"
DEFAULT_STATE_VARIABLE_VISIBILITY = "108"
UNINITIALIZED_STORAGE_POINTER = "109"
ASSERT_VIOLATION = "110"
DEPRICATED_FUNCTIONS_USAGE = "111"
DELEGATECALL_TO_UNTRUSTED_CONTRACT = "112"
MULTIPLE_SENDS = "113"
TX_ORDER_DEPENDENCE = "114"
TX_ORIGIN_USAGE = "115"
TIMESTAMP_DEPENDENCE = "116"
# TODO: SWC ID 117 is missing; add it once it appears in https://github.com/SmartContractSecurity/SWC-registry
INCORRECT_CONSTRUCTOR_NAME = "118"
SHADOWING_STATE_VARIABLES = "119"
WEAK_RANDOMNESS = "120"
SIGNATURE_REPLAY = "121"
IMPROPER_VERIFICATION_BASED_ON_MSG_SENDER = "122"
PREDICTABLE_VARS_DEPENDENCE = (
"N/A"
) # TODO: Add the swc id when this is added to the SWC Registry
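As a quick illustration of how these constants flow into the new SWC-standard report output, a minimal sketch (the contract name, function name, address and bytecode below are made up):

from mythril.analysis.report import Issue, Report
from mythril.analysis.swc_data import UNCHECKED_RET_VAL

issue = Issue(
    contract="Example",                     # made-up contract name
    function_name="withdraw",               # made-up function name
    address=1234,                           # made-up bytecode offset
    swc_id=UNCHECKED_RET_VAL,               # "104"
    bytecode="6080604052600080fd",          # made-up runtime bytecode
    title="Unchecked CALL return value",
)

report = Report()
report.append_issue(issue)
# Prints {"issues": [{"bytecodeOffset": 1234, "codeHash": "0x<keccak of bytecode>", "swc-id": "SWC-104"}]}
print(report.as_swc_standard_format())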

@ -4,36 +4,66 @@ from mythril.ether.soliditycontract import SolidityContract
import copy
import logging
from .ops import get_variable, SStore, Call, VarType
from mythril.laser.ethereum.strategy.basic import DepthFirstSearchStrategy, BreadthFirstSearchStrategy
from mythril.laser.ethereum.strategy.basic import (
DepthFirstSearchStrategy,
BreadthFirstSearchStrategy,
ReturnRandomNaivelyStrategy,
ReturnWeightedRandomStrategy,
)
class SymExecWrapper:
'''
"""
Wrapper class for the LASER Symbolic virtual machine. Symbolically executes the code and does a bit of pre-analysis for convenience.
'''
def __init__(self, contract, address, strategy, dynloader=None, max_depth=22,
execution_timeout=None, create_timeout=None):
s_strategy = None
if strategy == 'dfs':
"""
def __init__(
self,
contract,
address,
strategy,
dynloader=None,
max_depth=22,
execution_timeout=None,
create_timeout=None,
max_transaction_count=3,
):
if strategy == "dfs":
s_strategy = DepthFirstSearchStrategy
elif strategy == 'bfs':
elif strategy == "bfs":
s_strategy = BreadthFirstSearchStrategy
elif strategy == "naive-random":
s_strategy = ReturnRandomNaivelyStrategy
elif strategy == "weighted-random":
s_strategy = ReturnWeightedRandomStrategy
else:
raise ValueError("Invalid strategy argument supplied")
account = Account(address, contract.disassembly, dynamic_loader=dynloader, contract_name=contract.name)
account = Account(
address,
contract.disassembly,
dynamic_loader=dynloader,
contract_name=contract.name,
)
self.accounts = {address: account}
self.laser = svm.LaserEVM(self.accounts, dynamic_loader=dynloader, max_depth=max_depth,
execution_timeout=execution_timeout, strategy=s_strategy,
create_timeout=create_timeout)
self.laser = svm.LaserEVM(
self.accounts,
dynamic_loader=dynloader,
max_depth=max_depth,
execution_timeout=execution_timeout,
strategy=s_strategy,
create_timeout=create_timeout,
max_transaction_count=max_transaction_count,
)
if isinstance(contract, SolidityContract):
self.laser.sym_exec(creation_code=contract.creation_code, contract_name=contract.name)
self.laser.sym_exec(
creation_code=contract.creation_code, contract_name=contract.name
)
else:
self.laser.sym_exec(address)
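A hedged end-to-end sketch of driving the wrapper and one detection module (assumes a local contract.sol exists and solc is on PATH; the analysis address is arbitrary):

from mythril.ether.soliditycontract import SolidityContract
from mythril.analysis.symbolic import SymExecWrapper
from mythril.analysis.report import Report
from mythril.analysis.modules import suicide

# Symbolically execute a local contract with the depth-first strategy
contract = SolidityContract("contract.sol")
sym = SymExecWrapper(
    contract,
    address="0x0000000000000000000000000000000000000000",  # arbitrary analysis address
    strategy="dfs",  # or "bfs", "naive-random", "weighted-random"
    max_transaction_count=3,
)

# Run a single detection module against the resulting statespace
report = Report()
for issue in suicide.execute(sym):
    report.append_issue(issue)
print(report.as_text())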
@ -53,31 +83,72 @@ class SymExecWrapper:
instruction = state.get_current_instruction()
op = instruction['opcode']
op = instruction["opcode"]
if op in ('CALL', 'CALLCODE', 'DELEGATECALL', 'STATICCALL'):
if op in ("CALL", "CALLCODE", "DELEGATECALL", "STATICCALL"):
stack = state.mstate.stack
if op in ('CALL', 'CALLCODE'):
gas, to, value, meminstart, meminsz, memoutstart, memoutsz = \
get_variable(stack[-1]), get_variable(stack[-2]), get_variable(stack[-3]), get_variable(stack[-4]), get_variable(stack[-5]), get_variable(stack[-6]), get_variable(stack[-7])
if op in ("CALL", "CALLCODE"):
gas, to, value, meminstart, meminsz, memoutstart, memoutsz = (
get_variable(stack[-1]),
get_variable(stack[-2]),
get_variable(stack[-3]),
get_variable(stack[-4]),
get_variable(stack[-5]),
get_variable(stack[-6]),
get_variable(stack[-7]),
)
if to.type == VarType.CONCRETE and to.val < 5:
# ignore prebuilts
continue
if (meminstart.type == VarType.CONCRETE and meminsz.type == VarType.CONCRETE):
self.calls.append(Call(self.nodes[key], state, state_index, op, to, gas, value, state.mstate.memory[meminstart.val:meminsz.val * 4]))
if (
meminstart.type == VarType.CONCRETE
and meminsz.type == VarType.CONCRETE
):
self.calls.append(
Call(
self.nodes[key],
state,
state_index,
op,
to,
gas,
value,
state.mstate.memory[
meminstart.val : meminsz.val * 4
],
)
)
else:
self.calls.append(Call(self.nodes[key], state, state_index, op, to, gas, value))
self.calls.append(
Call(
self.nodes[key],
state,
state_index,
op,
to,
gas,
value,
)
)
else:
gas, to, meminstart, meminsz, memoutstart, memoutsz = \
get_variable(stack[-1]), get_variable(stack[-2]), get_variable(stack[-3]), get_variable(stack[-4]), get_variable(stack[-5]), get_variable(stack[-6])
self.calls.append(Call(self.nodes[key], state, state_index, op, to, gas))
elif op == 'SSTORE':
gas, to, meminstart, meminsz, memoutstart, memoutsz = (
get_variable(stack[-1]),
get_variable(stack[-2]),
get_variable(stack[-3]),
get_variable(stack[-4]),
get_variable(stack[-5]),
get_variable(stack[-6]),
)
self.calls.append(
Call(self.nodes[key], state, state_index, op, to, gas)
)
elif op == "SSTORE":
stack = copy.deepcopy(state.mstate.stack)
address = state.environment.active_account.address
@ -89,9 +160,13 @@ class SymExecWrapper:
self.sstors[address] = {}
try:
self.sstors[address][str(index)].append(SStore(self.nodes[key], state, state_index, value))
self.sstors[address][str(index)].append(
SStore(self.nodes[key], state, state_index, value)
)
except KeyError:
self.sstors[address][str(index)] = [SStore(self.nodes[key], state, state_index, value)]
self.sstors[address][str(index)] = [
SStore(self.nodes[key], state, state_index, value)
]
state_index += 1
@ -105,7 +180,7 @@ class SymExecWrapper:
taint = True
for constraint in s.node.constraints:
if ("caller" in str(constraint)):
if "caller" in str(constraint):
taint = False
break

@ -1,7 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title> Laser - Call Graph</title>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -3,7 +3,7 @@
{% for issue in issues %}
## {{ issue.title }}
- SWC ID: {{ issue.swc_id }}
- Type: {{ issue.type }}
- Contract: {{ issue.contract | default("Unknown") }}
- Function name: `{{ issue.function }}`

@ -1,6 +1,7 @@
{% if issues %}
{% for issue in issues %}
==== {{ issue.title }} ====
SWC ID: {{ issue.swc_id }}
Type: {{ issue.type }}
Contract: {{ issue.contract | default("Unknown") }}
Function name: {{ issue.function }}

@ -3,18 +3,50 @@ from mythril.laser.ethereum.svm import NodeFlags
import re
colors = [
{'border': '#26996f', 'background': '#2f7e5b', 'highlight': {'border': '#fff', 'background': '#28a16f'}},
{'border': '#9e42b3', 'background': '#842899', 'highlight': {'border': '#fff', 'background': '#933da6'}},
{'border': '#b82323', 'background': '#991d1d', 'highlight': {'border': '#fff', 'background': '#a61f1f'}},
{'border': '#4753bf', 'background': '#3b46a1', 'highlight': {'border': '#fff', 'background': '#424db3'}},
{'border': '#26996f', 'background': '#2f7e5b', 'highlight': {'border': '#fff', 'background': '#28a16f'}},
{'border': '#9e42b3', 'background': '#842899', 'highlight': {'border': '#fff', 'background': '#933da6'}},
{'border': '#b82323', 'background': '#991d1d', 'highlight': {'border': '#fff', 'background': '#a61f1f'}},
{'border': '#4753bf', 'background': '#3b46a1', 'highlight': {'border': '#fff', 'background': '#424db3'}},
{
"border": "#26996f",
"background": "#2f7e5b",
"highlight": {"border": "#fff", "background": "#28a16f"},
},
{
"border": "#9e42b3",
"background": "#842899",
"highlight": {"border": "#fff", "background": "#933da6"},
},
{
"border": "#b82323",
"background": "#991d1d",
"highlight": {"border": "#fff", "background": "#a61f1f"},
},
{
"border": "#4753bf",
"background": "#3b46a1",
"highlight": {"border": "#fff", "background": "#424db3"},
},
{
"border": "#26996f",
"background": "#2f7e5b",
"highlight": {"border": "#fff", "background": "#28a16f"},
},
{
"border": "#9e42b3",
"background": "#842899",
"highlight": {"border": "#fff", "background": "#933da6"},
},
{
"border": "#b82323",
"background": "#991d1d",
"highlight": {"border": "#fff", "background": "#a61f1f"},
},
{
"border": "#4753bf",
"background": "#3b46a1",
"highlight": {"border": "#fff", "background": "#424db3"},
},
]
def get_serializable_statespace(statespace):
def get_serializable_statespace(statespace):
nodes = []
edges = []
@ -28,7 +60,7 @@ def get_serializable_statespace(statespace):
node = statespace.nodes[node_key]
code = node.get_cfg_dict()['code']
code = node.get_cfg_dict()["code"]
code = re.sub("([0-9a-f]{8})[0-9a-f]+", lambda m: m.group(1) + "(...)", code)
if NodeFlags.FUNC_ENTRY in node.flags:
@ -36,52 +68,56 @@ def get_serializable_statespace(statespace):
code_split = code.split("\\n")
truncated_code = code if (len(code_split) < 7) else "\\n".join(code_split[:6]) + "\\n(click to expand +)"
truncated_code = (
code
if (len(code_split) < 7)
else "\\n".join(code_split[:6]) + "\\n(click to expand +)"
)
color = color_map[node.get_cfg_dict()['contract_name']]
color = color_map[node.get_cfg_dict()["contract_name"]]
def get_state_accounts(state):
def get_state_accounts(node_state):
state_accounts = []
for key in state.accounts:
account = state.accounts[key].as_dict
account.pop('code', None)
account['balance'] = str(account['balance'])
for key in node_state.accounts:
account = node_state.accounts[key].as_dict
account.pop("code", None)
account["balance"] = str(account["balance"])
storage = {}
for storage_key in account['storage']:
storage[str(storage_key)] = str(account['storage'][storage_key])
for storage_key in account["storage"]:
storage[str(storage_key)] = str(account["storage"][storage_key])
state_accounts.append({
'address': key,
'storage': storage
})
state_accounts.append({"address": key, "storage": storage})
return state_accounts
states = [{'machine': x.mstate.as_dict, 'accounts': get_state_accounts(x)} for x in node.states]
states = [
{"machine": x.mstate.as_dict, "accounts": get_state_accounts(x)}
for x in node.states
]
for state in states:
state['machine']['stack'] = [str(s) for s in state['machine']['stack']]
state['machine']['memory'] = [str(m) for m in state['machine']['memory']]
state["machine"]["stack"] = [str(s) for s in state["machine"]["stack"]]
state["machine"]["memory"] = [str(m) for m in state["machine"]["memory"]]
truncated_code = truncated_code.replace('\\n', '\n')
code = code.replace('\\n', '\n')
truncated_code = truncated_code.replace("\\n", "\n")
code = code.replace("\\n", "\n")
s_node = {
'id': str(node_key),
'func': str(node.function_name),
'label': truncated_code,
'code': code,
'truncated': truncated_code,
'states': states,
'color': color,
'instructions': code.split('\n')
"id": str(node_key),
"func": str(node.function_name),
"label": truncated_code,
"code": code,
"truncated": truncated_code,
"states": states,
"color": color,
"instructions": code.split("\n"),
}
nodes.append(s_node)
for edge in statespace.edges:
if (edge.condition is None):
if edge.condition is None:
label = ""
else:
@ -90,20 +126,19 @@ def get_serializable_statespace(statespace):
except Z3Exception:
label = str(edge.condition).replace("\n", "")
label = re.sub("([^_])([\d]{2}\d+)", lambda m: m.group(1) + hex(int(m.group(2))), label)
label = re.sub(
"([^_])([\d]{2}\d+)", lambda m: m.group(1) + hex(int(m.group(2))), label
)
code = re.sub("([0-9a-f]{8})[0-9a-f]+", lambda m: m.group(1) + "(...)", code)
s_edge = {
'from': str(edge.as_dict['from']),
'to': str(edge.as_dict['to']),
'arrows': 'to',
'label': label,
'smooth': { 'type': "cubicBezier" }
"from": str(edge.as_dict["from"]),
"to": str(edge.as_dict["to"]),
"arrows": "to",
"label": label,
"smooth": {"type": "cubicBezier"},
}
edges.append(s_edge)
return {
'edges': edges,
'nodes': nodes
}
return {"edges": edges, "nodes": nodes}

@ -0,0 +1,107 @@
import re
from collections.abc import Generator
from ethereum.opcodes import opcodes
regex_PUSH = re.compile("^PUSH(\d*)$")
# Additional mnemonic to catch failed assertions
opcodes[254] = ["ASSERT_FAIL", 0, 0, 0]
class EvmInstruction:
""" Model to hold the information of the disassembly """
def __init__(self, address, op_code, argument=None):
self.address = address
self.op_code = op_code
self.argument = argument
def to_dict(self) -> dict:
result = {"address": self.address, "opcode": self.op_code}
if self.argument:
result["argument"] = self.argument
return result
def instruction_list_to_easm(instruction_list: list) -> str:
result = ""
for instruction in instruction_list:
result += "{} {}".format(instruction["address"], instruction["opcode"])
if "argument" in instruction:
result += " " + instruction["argument"]
result += "\n"
return result
def get_opcode_from_name(operation_name: str) -> int:
for op_code, value in opcodes.items():
if operation_name == value[0]:
return op_code
raise RuntimeError("Unknown opcode")
def find_op_code_sequence(pattern: list, instruction_list: list) -> Generator:
"""
Returns all indices in instruction_list that point to instruction sequences following a pattern
:param pattern: The pattern to look for.
Example: [["PUSH1", "PUSH2"], ["EQ"]] where ["PUSH1", "EQ"] satisfies the pattern
:param instruction_list: List of instructions to look in
:return: Indices to the instruction sequences
"""
for i in range(0, len(instruction_list) - len(pattern) + 1):
if is_sequence_match(pattern, instruction_list, i):
yield i
def is_sequence_match(pattern: list, instruction_list: list, index: int) -> bool:
"""
Checks if the instructions starting at index follow a pattern
:param pattern: List of lists describing a pattern.
Example: [["PUSH1", "PUSH2"], ["EQ"]] where ["PUSH1", "EQ"] satisfies the pattern
:param instruction_list: List of instructions
:param index: Index to check for
:return: Pattern matched
"""
for index, pattern_slot in enumerate(pattern, start=index):
try:
if not instruction_list[index]["opcode"] in pattern_slot:
return False
except IndexError:
return False
return True
def disassemble(bytecode: str) -> list:
"""Disassembles evm bytecode and returns a list of instructions"""
instruction_list = []
address = 0
length = len(bytecode)
if "bzzr" in str(bytecode[-43:]):
# ignore swarm hash
length -= 43
while address < length:
try:
op_code = opcodes[bytecode[address]]
except KeyError:
instruction_list.append(EvmInstruction(address, "INVALID"))
address += 1
continue
op_code_name = op_code[0]
current_instruction = EvmInstruction(address, op_code_name)
match = re.search(regex_PUSH, op_code_name)
if match:
argument_bytes = bytecode[address + 1 : address + 1 + int(match.group(1))]
current_instruction.argument = "0x" + argument_bytes.hex()
address += int(match.group(1))
instruction_list.append(current_instruction)
address += 1
# We use a to_dict() here for compatibility reasons
return [element.to_dict() for element in instruction_list]
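A small usage sketch for the helpers above (the bytecode is a made-up fragment, just long enough to disassemble):

from mythril.disassembler import asm

# PUSH1 0x80, PUSH1 0x40, MSTORE, PUSH1 0x00, DUP1, REVERT
bytecode = bytes.fromhex("6080604052600080fd")

instructions = asm.disassemble(bytecode)
print(asm.instruction_list_to_easm(instructions))

# Locate PUSHn/EQ pairs, as Disassembly does when parsing the jump table
pattern = [("PUSH1", "PUSH2", "PUSH3", "PUSH4"), ("EQ",)]
for start in asm.find_op_code_sequence(pattern, instructions):
    print("jump-table entry candidate at instruction", start)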

@ -1,54 +1,96 @@
from mythril.ether import asm, util
from mythril.ether import util
from mythril.disassembler import asm
from mythril.support.signatures import SignatureDb
import logging
class Disassembly(object):
"""
Disassembly class
def __init__(self, code, enable_online_lookup=True):
Stores bytecode and its disassembly.
Additionally, it gathers the following information about the functions in the disassembled code:
- function hashes
- function name to entry point mapping
- function entry point to function name mapping
"""
def __init__(self, code: str, enable_online_lookup: bool = False):
self.bytecode = code
self.instruction_list = asm.disassemble(util.safe_decode(code))
self.func_hashes = []
self.func_to_addr = {}
self.addr_to_func = {}
self.bytecode = code
self.function_name_to_address = {}
self.address_to_function_name = {}
signatures = SignatureDb(enable_online_lookup=enable_online_lookup) # control if you want to have online sighash lookups
signatures = SignatureDb(
enable_online_lookup=enable_online_lookup
) # control if you want to have online signature hash lookups
try:
signatures.open() # open from default locations
except FileNotFoundError:
logging.info("Missing function signature file. Resolving of function names from signature file disabled.")
# Parse jump table & resolve function names
logging.info(
"Missing function signature file. Resolving of function names from signature file disabled."
)
jmptable_indices = asm.find_opcode_sequence(["PUSH4", "EQ"], self.instruction_list)
# Match PUSH1 through PUSH4 because solc may strip leading zero bytes from the selector when optimizing
jump_table_indices = asm.find_op_code_sequence(
[("PUSH1", "PUSH2", "PUSH3", "PUSH4"), ("EQ",)], self.instruction_list
)
for i in jmptable_indices:
func_hash = self.instruction_list[i]['argument']
self.func_hashes.append(func_hash)
try:
# tries local cache, file and optional online lookup
# may return more than one function signature. since we cannot probe for the correct one we'll use the first
func_names = signatures.get(func_hash)
if len(func_names) > 1:
# ambigious result
func_name = "**ambiguous** %s" % func_names[0] # return first hit but note that result was ambiguous
else:
# only one item
func_name = func_names[0]
except KeyError:
func_name = "_function_" + func_hash
for index in jump_table_indices:
function_hash, jump_target, function_name = get_function_info(
index, self.instruction_list, signatures
)
self.func_hashes.append(function_hash)
try:
offset = self.instruction_list[i + 2]['argument']
jump_target = int(offset, 16)
self.func_to_addr[func_name] = jump_target
self.addr_to_func[jump_target] = func_name
except:
continue
if jump_target is not None and function_name is not None:
self.function_name_to_address[function_name] = jump_target
self.address_to_function_name[jump_target] = function_name
signatures.write() # store resolved signatures (potentially resolved online)
def get_easm(self):
# todo: tintinweb - print funcsig resolved data from self.addr_to_func?
return asm.instruction_list_to_easm(self.instruction_list)
def get_function_info(
index: int, instruction_list: list, signature_database: SignatureDb
) -> (str, int, str):
"""
Finds the function information for a call table entry
Solidity uses the first 4 bytes of the calldata to indicate which function the message call should execute
The generated code that directs execution to the correct function looks like this:
- PUSH function_hash
- EQ
- PUSH entry_point
- JUMPI
This function takes an index that points to the first instruction, and from that finds out the function hash,
function entry and the function name.
:param index: Start of the entry pattern
:param instruction_list: Instruction list for the contract that is being analyzed
:param signature_database: Database used to map function hashes to their respective function names
:return: function hash, function entry point, function name
"""
# Pad the function hash with the leading 0s that solc may have stripped
function_hash = "0x" + instruction_list[index]["argument"][2:].rjust(8, "0")
function_names = signature_database.get(function_hash)
if len(function_names) > 1:
# In this case there was an ambiguous result
function_name = "**ambiguous** {}".format(function_names[0])
elif len(function_names) == 1:
function_name = function_names[0]
else:
function_name = "_function_" + function_hash
try:
offset = instruction_list[index + 2]["argument"]
entry_point = int(offset, 16)
except (KeyError, IndexError):
return function_hash, None, None
return function_hash, entry_point, function_name
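To make the dispatch pattern concrete, a minimal sketch with a stubbed signature database (the selector and jump target are invented):

from mythril.disassembler.disassembly import get_function_info

class StubSignatureDb:
    """Stand-in with the same .get() shape as SignatureDb, for illustration only."""
    def get(self, function_hash):
        return []  # no known names -> falls back to "_function_<hash>"

# One solc jump-table entry: PUSH4 <selector>, EQ, PUSH2 <entry point>, JUMPI
instruction_list = [
    {"address": 0, "opcode": "PUSH4", "argument": "0xddf252ad"},
    {"address": 5, "opcode": "EQ"},
    {"address": 6, "opcode": "PUSH2", "argument": "0x004d"},
    {"address": 9, "opcode": "JUMPI"},
]

function_hash, entry_point, name = get_function_info(0, instruction_list, StubSignatureDb())
print(function_hash, hex(entry_point), name)  # 0xddf252ad 0x4d _function_0xddf252ad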

@ -1,156 +0,0 @@
import sys
import re
from ethereum.opcodes import opcodes
from mythril.ether import util
regex_PUSH = re.compile('^PUSH(\d*)$')
# Additional mnemonic to catch failed assertions
opcodes[254] = ['ASSERT_FAIL', 0, 0, 0]
def instruction_list_to_easm(instruction_list):
easm = ""
for instruction in instruction_list:
easm += str(instruction['address']) + " " + instruction['opcode']
if 'argument' in instruction:
easm += " " + instruction['argument']
easm += "\n"
return easm
def easm_to_instruction_list(easm):
regex_CODELINE = re.compile('^([A-Z0-9]+)(?:\s+([0-9a-fA-Fx]+))?$')
instruction_list = []
codelines = easm.split('\n')
for line in codelines:
m = re.search(regex_CODELINE, line)
if not m:
# Invalid code line
continue
instruction = {}
instruction['opcode'] = m.group(1)
if m.group(2):
instruction['argument'] = m.group(2)[2:]
instruction_list.append(instruction)
return instruction_list
def get_opcode_from_name(name):
for opcode, value in opcodes.items():
if name == value[0]:
return opcode
raise RuntimeError("Unknown opcode")
def find_opcode_sequence(pattern, instruction_list):
match_indexes = []
pattern_length = len(pattern)
for i in range(0, len(instruction_list) - pattern_length + 1):
if instruction_list[i]['opcode'] == pattern[0]:
matched = True
for j in range(1, len(pattern)):
if not (instruction_list[i + j]['opcode'] == pattern[j]):
matched = False
break
if (matched):
match_indexes.append(i)
return match_indexes
def disassemble(bytecode):
instruction_list = []
addr = 0
length = len(bytecode)
if "bzzr" in str(bytecode[-43:]):
# ignore swarm hash
length -= 43
while addr < length:
instruction = {}
instruction['address'] = addr
try:
if (sys.version_info > (3, 0)):
opcode = opcodes[bytecode[addr]]
else:
opcode = opcodes[ord(bytecode[addr])]
except KeyError:
# invalid opcode
instruction_list.append({'address': addr, 'opcode': "INVALID"})
addr += 1
continue
instruction['opcode'] = opcode[0]
m = re.search(regex_PUSH, opcode[0])
if m:
argument = bytecode[addr+1:addr+1+int(m.group(1))]
instruction['argument'] = "0x" + argument.hex()
addr += int(m.group(1))
instruction_list.append(instruction)
addr += 1
return instruction_list
def assemble(instruction_list):
bytecode = b""
for instruction in instruction_list:
try:
opcode = get_opcode_from_name(instruction['opcode'])
except RuntimeError:
opcode = 0xbb
bytecode += opcode.to_bytes(1, byteorder='big')
if 'argument' in instruction:
bytecode += util.safe_decode(instruction['argument'])
return bytecode

@ -5,29 +5,33 @@ import re
class ETHContract(persistent.Persistent):
def __init__(self, code, creation_code="", name="Unknown", enable_online_lookup=True):
self.creation_code = creation_code
self.name = name
def __init__(
self, code, creation_code="", name="Unknown", enable_online_lookup=False
):
# Workaround: We currently do not support compile-time linking.
# Dynamic contract addresses of the format __[contract-name]_____________ are replaced with a generic address
# Apply this for creation_code & code
code = re.sub(r'(_+.*_+)', 'aa' * 20, code)
creation_code = re.sub(r"(_{2}.{38})", "aa" * 20, creation_code)
code = re.sub(r"(_{2}.{38})", "aa" * 20, code)
self.creation_code = creation_code
self.name = name
self.code = code
self.disassembly = Disassembly(code, enable_online_lookup=enable_online_lookup)
self.creation_disassembly = Disassembly(creation_code, enable_online_lookup=enable_online_lookup)
self.creation_disassembly = Disassembly(
creation_code, enable_online_lookup=enable_online_lookup
)
def as_dict(self):
return {
'address': self.address,
'name': self.name,
'code': self.code,
'creation_code': self.creation_code,
'disassembly': self.disassembly
"address": self.address,
"name": self.name,
"code": self.code,
"creation_code": self.creation_code,
"disassembly": self.disassembly,
}
def get_easm(self):
@ -36,7 +40,7 @@ class ETHContract(persistent.Persistent):
def matches_expression(self, expression):
str_eval = ''
str_eval = ""
easm_code = None
tokens = re.split("\s+(and|or|not)\s+", expression, re.IGNORECASE)
@ -47,23 +51,23 @@ class ETHContract(persistent.Persistent):
str_eval += " " + token + " "
continue
m = re.match(r'^code#([a-zA-Z0-9\s,\[\]]+)#', token)
m = re.match(r"^code#([a-zA-Z0-9\s,\[\]]+)#", token)
if (m):
if m:
if easm_code is None:
easm_code = self.get_easm()
code = m.group(1).replace(",", "\\n")
str_eval += "\"" + code + "\" in easm_code"
str_eval += '"' + code + '" in easm_code'
continue
m = re.match(r'^func#([a-zA-Z0-9\s_,(\\)\[\]]+)#$', token)
m = re.match(r"^func#([a-zA-Z0-9\s_,(\\)\[\]]+)#$", token)
if (m):
if m:
sign_hash = "0x" + utils.sha3(m.group(1))[:4].hex()
str_eval += "\"" + sign_hash + "\" in self.disassembly.func_hashes"
str_eval += '"' + sign_hash + '" in self.disassembly.func_hashes'
continue
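For context, a sketch of the search-expression syntax handled here (the bytecode and expression are illustrative):

from mythril.ether.ethcontract import ETHContract

contract = ETHContract(code="0x6080604052600080fd", name="Example")

# True only if the contract exposes withdraw() and its EASM contains DELEGATECALL
expression = "func#withdraw()# and code#DELEGATECALL#"
print(contract.matches_expression(expression))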

@ -8,9 +8,12 @@ import re
def trace(code, calldata=""):
log_handlers = ['eth.vm.op', 'eth.vm.op.stack', 'eth.vm.op.memory', 'eth.vm.op.storage']
log_handlers = [
"eth.vm.op",
"eth.vm.op.stack",
"eth.vm.op.memory",
"eth.vm.op.storage",
]
output = StringIO()
stream_handler = StreamHandler(output)
@ -19,57 +22,44 @@ def trace(code, calldata = ""):
log_vm_op.setLevel("TRACE")
log_vm_op.addHandler(stream_handler)
addr = bytes.fromhex('0123456789ABCDEF0123456789ABCDEF01234567')
addr = bytes.fromhex("0123456789ABCDEF0123456789ABCDEF01234567")
state = State()
ext = messages.VMExt(state, transactions.Transaction(0, 0, 21000, addr, 0, addr))
message = vm.Message(addr, addr, 0, 21000, calldata)
res, gas, dat = vm.vm_execute(ext, message, util.safe_decode(code))
vm.vm_execute(ext, message, util.safe_decode(code))
stream_handler.flush()
ret = output.getvalue()
lines = ret.split("\n")
trace = []
state_trace = []
for line in lines:
m = re.search(r'pc=b\'(\d+)\'.*op=([A-Z0-9]+)', line)
m = re.search(r"pc=b\'(\d+)\'.*op=([A-Z0-9]+)", line)
if m:
pc = m.group(1)
op = m.group(2)
m = re.match(r".*stack=(\[.*?\])", line)
m = re.match(r'.*stack=(\[.*?\])', line)
if (m):
stackitems = re.findall(r'b\'(\d+)\'', m.group(1))
stack = "[";
if (len(stackitems)):
if m:
stackitems = re.findall(r"b\'(\d+)\'", m.group(1))
stack = "["
if len(stackitems):
for i in range(0, len(stackitems) - 1):
stack += hex(int(stackitems[i])) + ", "
stack += hex(int(stackitems[-1]))
stack += "]"
else:
stack = "[]"
if (re.match(r'^PUSH.*', op)):
val = re.search(r'pushvalue=(\d+)', line).group(1)
if re.match(r"^PUSH.*", op):
val = re.search(r"pushvalue=(\d+)", line).group(1)
pushvalue = hex(int(val))
trace.append({'pc': pc, 'op': op, 'stack': stack, 'pushvalue': pushvalue})
state_trace.append(
{"pc": pc, "op": op, "stack": stack, "pushvalue": pushvalue}
)
else:
trace.append({'pc': pc, 'op': op, 'stack': stack})
state_trace.append({"pc": pc, "op": op, "stack": stack})
return trace
return state_trace

@ -5,7 +5,6 @@ from mythril.exceptions import NoContractFoundError
class SourceMapping:
def __init__(self, solidity_file_idx, offset, length, lineno):
self.solidity_file_idx = solidity_file_idx
self.offset = offset
@ -14,38 +13,40 @@ class SourceMapping:
class SolidityFile:
def __init__(self, filename, data):
self.filename = filename
self.data = data
class SourceCodeInfo:
def __init__(self, filename, lineno, code):
self.filename = filename
self.lineno = lineno
self.code = code
def get_contracts_from_file(input_file, solc_args=None):
data = get_solc_json(input_file, solc_args=solc_args)
for key, contract in data['contracts'].items():
def get_contracts_from_file(input_file, solc_args=None, solc_binary="solc"):
data = get_solc_json(input_file, solc_args=solc_args, solc_binary=solc_binary)
for key, contract in data["contracts"].items():
filename, name = key.split(":")
if filename == input_file and len(contract['bin-runtime']):
yield SolidityContract(input_file, name, solc_args)
if filename == input_file and len(contract["bin-runtime"]):
yield SolidityContract(
input_file=input_file,
name=name,
solc_args=solc_args,
solc_binary=solc_binary,
)
class SolidityContract(ETHContract):
def __init__(self, input_file, name=None, solc_args=None, solc_binary="solc"):
def __init__(self, input_file, name=None, solc_args=None):
data = get_solc_json(input_file, solc_args=solc_args)
data = get_solc_json(input_file, solc_args=solc_args, solc_binary=solc_binary)
self.solidity_files = []
for filename in data['sourceList']:
with open(filename, 'r', encoding='utf-8') as file:
for filename in data["sourceList"]:
with open(filename, "r", encoding="utf-8") as file:
code = file.read()
self.solidity_files.append(SolidityFile(filename, code))
@ -55,28 +56,32 @@ class SolidityContract(ETHContract):
srcmap_constructor = []
srcmap = []
if name:
for key, contract in sorted(data['contracts'].items()):
for key, contract in sorted(data["contracts"].items()):
filename, _name = key.split(":")
if filename == input_file and name == _name and len(contract['bin-runtime']):
code = contract['bin-runtime']
creation_code = contract['bin']
srcmap = contract['srcmap-runtime'].split(";")
srcmap_constructor = contract['srcmap'].split(";")
if (
filename == input_file
and name == _name
and len(contract["bin-runtime"])
):
code = contract["bin-runtime"]
creation_code = contract["bin"]
srcmap = contract["srcmap-runtime"].split(";")
srcmap_constructor = contract["srcmap"].split(";")
has_contract = True
break
# If no contract name is specified, get the last bytecode entry for the input file
else:
for key, contract in sorted(data['contracts'].items()):
for key, contract in sorted(data["contracts"].items()):
filename, name = key.split(":")
if filename == input_file and len(contract['bin-runtime']):
code = contract['bin-runtime']
creation_code = contract['bin']
srcmap = contract['srcmap-runtime'].split(";")
srcmap_constructor = contract['srcmap'].split(";")
if filename == input_file and len(contract["bin-runtime"]):
code = contract["bin-runtime"]
creation_code = contract["bin"]
srcmap = contract["srcmap-runtime"].split(";")
srcmap_constructor = contract["srcmap"].split(";")
has_contract = True
if not has_contract:
@ -102,7 +107,9 @@ class SolidityContract(ETHContract):
offset = mappings[index].offset
length = mappings[index].length
code = solidity_file.data.encode('utf-8')[offset:offset + length].decode('utf-8', errors="ignore")
code = solidity_file.data.encode("utf-8")[offset : offset + length].decode(
"utf-8", errors="ignore"
)
lineno = mappings[index].lineno
return SourceCodeInfo(filename, lineno, code)
@ -120,6 +127,11 @@ class SolidityContract(ETHContract):
if len(mapping) > 2 and len(mapping[2]) > 0:
idx = int(mapping[2])
lineno = self.solidity_files[idx].data.encode('utf-8')[0:offset].count('\n'.encode('utf-8')) + 1
lineno = (
self.solidity_files[idx]
.data.encode("utf-8")[0:offset]
.count("\n".encode("utf-8"))
+ 1
)
mappings.append(SourceMapping(idx, offset, length, lineno))

@ -10,7 +10,7 @@ import json
def safe_decode(hex_encoded_string):
if (hex_encoded_string.startswith("0x")):
if hex_encoded_string.startswith("0x"):
return bytes.fromhex(hex_encoded_string[2:])
else:
return bytes.fromhex(hex_encoded_string)
@ -18,10 +18,17 @@ def safe_decode(hex_encoded_string):
def get_solc_json(file, solc_binary="solc", solc_args=None):
cmd = [solc_binary, "--combined-json", "bin,bin-runtime,srcmap,srcmap-runtime", '--allow-paths', "."]
cmd = [solc_binary, "--combined-json", "bin,bin-runtime,srcmap,srcmap-runtime"]
if solc_args:
cmd.extend(solc_args.split(" "))
cmd.extend(solc_args.split())
if not "--allow-paths" in cmd:
cmd.extend(["--allow-paths", "."])
else:
for i, arg in enumerate(cmd):
if arg == "--allow-paths":
cmd[i + 1] += ",."
cmd.append(file)
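To illustrate the --allow-paths handling above (the file name and path are made up):

# solc_args=None:
#   ["solc", "--combined-json", "bin,bin-runtime,srcmap,srcmap-runtime",
#    "--allow-paths", ".", "contract.sol"]
# solc_args="--allow-paths /srv/contracts":
#   ["solc", "--combined-json", "bin,bin-runtime,srcmap,srcmap-runtime",
#    "--allow-paths", "/srv/contracts,.", "contract.sol"]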
@ -32,9 +39,14 @@ def get_solc_json(file, solc_binary="solc", solc_args=None):
ret = p.returncode
if ret != 0:
raise CompilerError("Solc experienced a fatal error (code %d).\n\n%s" % (ret, stderr.decode('UTF-8')))
raise CompilerError(
"Solc experienced a fatal error (code %d).\n\n%s"
% (ret, stderr.decode("UTF-8"))
)
except FileNotFoundError:
raise CompilerError("Compiler not found. Make sure that solc is installed and in PATH, or set the SOLC environment variable.")
raise CompilerError(
"Compiler not found. Make sure that solc is installed and in PATH, or set the SOLC environment variable."
)
out = stdout.decode("UTF-8")
@ -52,7 +64,7 @@ def encode_calldata(func_name, arg_types, args):
def get_random_address():
return binascii.b2a_hex(os.urandom(20)).decode('UTF-8')
return binascii.b2a_hex(os.urandom(20)).decode("UTF-8")
def get_indexed_address(index):
@ -60,7 +72,9 @@ def get_indexed_address(index):
def solc_exists(version):
solc_binary = os.path.join(os.environ['HOME'], ".py-solc/solc-v" + version, "bin/solc")
solc_binary = os.path.join(
os.environ["HOME"], ".py-solc/solc-v" + version, "bin/solc"
)
if os.path.exists(solc_binary):
return True
else:

@ -34,25 +34,25 @@ class CountableList(object):
class ReceiptForStorage(rlp.Serializable):
'''
"""
Receipt format stored in levelDB
'''
"""
fields = [
('state_root', binary),
('cumulative_gas_used', big_endian_int),
('bloom', int256),
('tx_hash', hash32),
('contractAddress', address),
('logs', CountableList(Log)),
('gas_used', big_endian_int)
("state_root", binary),
("cumulative_gas_used", big_endian_int),
("bloom", int256),
("tx_hash", hash32),
("contractAddress", address),
("logs", CountableList(Log)),
("gas_used", big_endian_int),
]
class AccountIndexer(object):
'''
"""
Updates address index
'''
"""
def __init__(self, ethDB):
self.db = ethDB
@ -62,32 +62,35 @@ class AccountIndexer(object):
self.updateIfNeeded()
def get_contract_by_hash(self, contract_hash):
'''
get mapped address by its hash, if not found try indexing
'''
address = self.db.reader._get_address_by_hash(contract_hash)
if address is not None:
return address
"""
get mapped contract_address by its hash, if not found try indexing
"""
contract_address = self.db.reader._get_address_by_hash(contract_hash)
if contract_address is not None:
return contract_address
else:
raise AddressNotFoundError
return self.db.reader._get_address_by_hash(contract_hash)
def _process(self, startblock):
'''
"""
Process a batch of blocks and collect the contract addresses found in their receipts
'''
logging.debug("Processing blocks %d to %d" % (startblock, startblock + BATCH_SIZE))
"""
logging.debug(
"Processing blocks %d to %d" % (startblock, startblock + BATCH_SIZE)
)
addresses = []
for blockNum in range(startblock, startblock + BATCH_SIZE):
hash = self.db.reader._get_block_hash(blockNum)
if hash is not None:
receipts = self.db.reader._get_block_receipts(hash, blockNum)
block_hash = self.db.reader._get_block_hash(blockNum)
if block_hash is not None:
receipts = self.db.reader._get_block_receipts(block_hash, blockNum)
for receipt in receipts:
if receipt.contractAddress is not None and not all(b == 0 for b in receipt.contractAddress):
if receipt.contractAddress is not None and not all(
b == 0 for b in receipt.contractAddress
):
addresses.append(receipt.contractAddress)
else:
if len(addresses) == 0:
@ -96,9 +99,9 @@ class AccountIndexer(object):
return addresses
def updateIfNeeded(self):
'''
"""
update address index
'''
"""
headBlock = self.db.reader._get_head_block()
if headBlock is not None:
# avoid restarting search if head block is same & we already initialized
@ -113,22 +116,28 @@ class AccountIndexer(object):
# in fast sync the head block is at 0, so we can't use it to determine the chain length
if self.lastBlock is not None and self.lastBlock == 0:
self.lastBlock = 2e+9
self.lastBlock = 2e9
if self.lastBlock is None or (self.lastProcessedBlock is not None and self.lastBlock <= self.lastProcessedBlock):
if self.lastBlock is None or (
self.lastProcessedBlock is not None
and self.lastBlock <= self.lastProcessedBlock
):
return
blockNum = 0
if self.lastProcessedBlock is not None:
blockNum = self.lastProcessedBlock + 1
print("Updating hash-to-address index from block " + str(self.lastProcessedBlock))
print(
"Updating hash-to-address index from block "
+ str(self.lastProcessedBlock)
)
else:
print("Starting hash-to-address index")
count = 0
processed = 0
while (blockNum <= self.lastBlock):
while blockNum <= self.lastBlock:
# leveldb cannot be accessed on multiple processes (not even readonly)
# multithread version performs significantly worse than serial
try:
@ -148,7 +157,10 @@ class AccountIndexer(object):
blockNum = min(blockNum + BATCH_SIZE, self.lastBlock + 1)
cost_time = time.time() - ether.start_time
print("%d blocks processed (in %d seconds), %d unique addresses found, next block: %d" % (processed, cost_time, count, min(self.lastBlock, blockNum)))
print(
"%d blocks processed (in %d seconds), %d unique addresses found, next block: %d"
% (processed, cost_time, count, min(self.lastBlock, blockNum))
)
self.lastProcessedBlock = blockNum - 1
self.db.writer._set_last_indexed_number(self.lastProcessedBlock)

@ -1,48 +1,53 @@
import binascii
import rlp
from mythril.leveldb.accountindexing import CountableList
from mythril.leveldb.accountindexing import ReceiptForStorage, AccountIndexer
from mythril.ethereum.interface.leveldb.accountindexing import CountableList
from mythril.ethereum.interface.leveldb.accountindexing import (
ReceiptForStorage,
AccountIndexer,
)
import logging
from ethereum import utils
from ethereum.block import BlockHeader, Block
from mythril.leveldb.state import State
from mythril.leveldb.eth_db import ETH_DB
from mythril.ethereum.interface.leveldb.state import State
from mythril.ethereum.interface.leveldb.eth_db import ETH_DB
from mythril.ether.ethcontract import ETHContract
from mythril.exceptions import AddressNotFoundError
# Per https://github.com/ethereum/go-ethereum/blob/master/core/rawdb/schema.go
# prefixes and suffixes for keys in geth
header_prefix = b'h' # header_prefix + num (uint64 big endian) + hash -> header
body_prefix = b'b' # body_prefix + num (uint64 big endian) + hash -> block body
num_suffix = b'n' # header_prefix + num (uint64 big endian) + num_suffix -> hash
block_hash_prefix = b'H' # block_hash_prefix + hash -> num (uint64 big endian)
block_receipts_prefix = b'r' # block_receipts_prefix + num (uint64 big endian) + hash -> block receipts
header_prefix = b"h" # header_prefix + num (uint64 big endian) + hash -> header
body_prefix = b"b" # body_prefix + num (uint64 big endian) + hash -> block body
num_suffix = b"n" # header_prefix + num (uint64 big endian) + num_suffix -> hash
block_hash_prefix = b"H" # block_hash_prefix + hash -> num (uint64 big endian)
block_receipts_prefix = (
b"r"
) # block_receipts_prefix + num (uint64 big endian) + hash -> block receipts
# known geth keys
head_header_key = b'LastBlock' # head (latest) header hash
head_header_key = b"LastBlock" # head (latest) header hash
# custom prefixes
address_prefix = b'AM' # address_prefix + hash -> address
address_prefix = b"AM" # address_prefix + hash -> address
# custom keys
address_mapping_head_key = b'accountMapping' # head (latest) number of indexed block
address_mapping_head_key = b"accountMapping" # head (latest) number of indexed block
def _format_block_number(number):
'''
"""
formats block number to uint64 big endian
'''
"""
return utils.zpad(utils.int_to_big_endian(number), 8)
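For example (uint64 big-endian left-padding via pyethereum's utils):

# _format_block_number(4096) == b"\x00\x00\x00\x00\x00\x00\x10\x00"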
def _encode_hex(v):
'''
"""
encodes hash as hex
'''
return '0x' + utils.encode_hex(v)
"""
return "0x" + utils.encode_hex(v)
class LevelDBReader(object):
'''
"""
level db reading interface, can be used with snapshot
'''
"""
def __init__(self, db):
self.db = db
@ -50,125 +55,119 @@ class LevelDBReader(object):
self.head_state = None
def _get_head_state(self):
'''
"""
gets head state
'''
"""
if not self.head_state:
root = self._get_head_block().state_root
self.head_state = State(self.db, root)
return self.head_state
def _get_account(self, address):
'''
"""
gets account by address
'''
"""
state = self._get_head_state()
account_address = binascii.a2b_hex(utils.remove_0x_head(address))
return state.get_and_cache_account(account_address)
def _get_block_hash(self, number):
'''
"""
gets block hash by block number
'''
"""
num = _format_block_number(number)
hash_key = header_prefix + num + num_suffix
return self.db.get(hash_key)
def _get_head_block(self):
'''
"""
gets head block header
'''
"""
if not self.head_block_header:
hash = self.db.get(head_header_key)
num = self._get_block_number(hash)
self.head_block_header = self._get_block_header(hash, num)
block_hash = self.db.get(head_header_key)
num = self._get_block_number(block_hash)
self.head_block_header = self._get_block_header(block_hash, num)
# find header with valid state
while not self.db.get(self.head_block_header.state_root) and self.head_block_header.prevhash is not None:
hash = self.head_block_header.prevhash
num = self._get_block_number(hash)
self.head_block_header = self._get_block_header(hash, num)
while (
not self.db.get(self.head_block_header.state_root)
and self.head_block_header.prevhash is not None
):
block_hash = self.head_block_header.prevhash
num = self._get_block_number(block_hash)
self.head_block_header = self._get_block_header(block_hash, num)
return self.head_block_header
def _get_block_number(self, hash):
'''
gets block number by hash
'''
number_key = block_hash_prefix + hash
def _get_block_number(self, block_hash):
"""Get block number by its hash"""
number_key = block_hash_prefix + block_hash
return self.db.get(number_key)
def _get_block_header(self, hash, num):
'''
get block header by block header hash & number
'''
header_key = header_prefix + num + hash
def _get_block_header(self, block_hash, num):
"""Get block header by block header hash & number"""
header_key = header_prefix + num + block_hash
block_header_data = self.db.get(header_key)
header = rlp.decode(block_header_data, sedes=BlockHeader)
return header
def _get_address_by_hash(self, hash):
'''
get mapped address by its hash
'''
address_key = address_prefix + hash
def _get_address_by_hash(self, address_hash):
"""Get mapped address by its hash"""
address_key = address_prefix + address_hash
return self.db.get(address_key)
def _get_last_indexed_number(self):
'''
latest indexed block number
'''
"""Get latest indexed block number"""
return self.db.get(address_mapping_head_key)
def _get_block_receipts(self, hash, num):
'''
get block transaction receipts by block header hash & number
'''
def _get_block_receipts(self, block_hash, num):
"""Get block transaction receipts by block header hash & number"""
number = _format_block_number(num)
receipts_key = block_receipts_prefix + number + hash
receipts_key = block_receipts_prefix + number + block_hash
receipts_data = self.db.get(receipts_key)
receipts = rlp.decode(receipts_data, sedes=CountableList(ReceiptForStorage))
return receipts
class LevelDBWriter(object):
'''
"""
level db writing interface
'''
"""
def __init__(self, db):
self.db = db
self.wb = None
def _set_last_indexed_number(self, number):
'''
"""
sets latest indexed block number
'''
"""
return self.db.put(address_mapping_head_key, _format_block_number(number))
def _start_writing(self):
'''
"""
start writing a batch
'''
"""
self.wb = self.db.write_batch()
def _commit_batch(self):
'''
"""
commit batch
'''
"""
self.wb.write()
def _store_account_address(self, address):
'''
"""
stores the address keyed by its hash (address_prefix + sha3(address) -> address)
'''
"""
address_key = address_prefix + utils.sha3(address)
self.wb.put(address_key, address)
class EthLevelDB(object):
'''
"""
Go-Ethereum LevelDB client class
'''
"""
def __init__(self, path):
self.path = path
@ -177,9 +176,9 @@ class EthLevelDB(object):
self.writer = LevelDBWriter(self.db)
def get_contracts(self):
'''
"""
iterate through all contracts
'''
"""
for account in self.reader._get_head_state().get_all_accounts():
if account.code is not None:
code = _encode_hex(account.code)
@ -188,9 +187,9 @@ class EthLevelDB(object):
yield contract, account.address, account.balance
def search(self, expression, callback_func):
'''
"""
searches through all contract accounts
'''
"""
cnt = 0
indexer = AccountIndexer(self)
@ -201,11 +200,11 @@ class EthLevelDB(object):
try:
address = _encode_hex(indexer.get_contract_by_hash(address_hash))
except AddressNotFoundError:
'''
"""
The hash->address mapping does not exist in our index. If the index is up-to-date, this likely means
that the contract was created by an internal transaction. Skip this contract as right now we don't
have a good solution for this.
'''
"""
continue
@ -216,28 +215,26 @@ class EthLevelDB(object):
if not cnt % 1000:
logging.info("Searched %d contracts" % cnt)
def contract_hash_to_address(self, hash):
'''
tries to find corresponding account address
'''
def contract_hash_to_address(self, contract_hash):
"""Tries to find corresponding account address"""
address_hash = binascii.a2b_hex(utils.remove_0x_head(hash))
address_hash = binascii.a2b_hex(utils.remove_0x_head(contract_hash))
indexer = AccountIndexer(self)
return _encode_hex(indexer.get_contract_by_hash(address_hash))
def eth_getBlockHeaderByNumber(self, number):
'''
"""
gets block header by block number
'''
hash = self.reader._get_block_hash(number)
"""
block_hash = self.reader._get_block_hash(number)
block_number = _format_block_number(number)
return self.reader._get_block_header(hash, block_number)
return self.reader._get_block_header(block_hash, block_number)
def eth_getBlockByNumber(self, number):
'''
"""
gets block body by block number
'''
"""
block_hash = self.reader._get_block_hash(number)
block_number = _format_block_number(number)
body_key = body_prefix + block_number + block_hash
@ -246,22 +243,24 @@ class EthLevelDB(object):
return body
def eth_getCode(self, address):
'''
"""
gets account code
'''
"""
account = self.reader._get_account(address)
return _encode_hex(account.code)
def eth_getBalance(self, address):
'''
"""
gets account balance
'''
"""
account = self.reader._get_account(address)
return account.balance
def eth_getStorageAt(self, address, position):
'''
"""
gets account storage data at position
'''
"""
account = self.reader._get_account(address)
return _encode_hex(utils.zpad(utils.encode_int(account.get_storage_data(position)), 32))
return _encode_hex(
utils.zpad(utils.encode_int(account.get_storage_data(position)), 32)
)
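A rough usage sketch of the class above, assuming plyvel is installed and the path (a placeholder here) points at a synced go-ethereum chaindata directory:

from mythril.ethereum.interface.leveldb.client import EthLevelDB

eth_db = EthLevelDB("/home/user/.ethereum/geth/chaindata")  # placeholder path

# iterate over contract accounts found in the head state
for contract, address, balance in eth_db.get_contracts():
    print(address, balance)
    break  # one contract is enough for this sketch

# look up a header by height (the height is arbitrary)
header = eth_db.eth_getBlockHeaderByNumber(1000000)
print(header.number, header.timestamp)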

@ -3,27 +3,27 @@ from ethereum.db import BaseDB
class ETH_DB(BaseDB):
'''
"""
adapts the pyethereum BaseDB interface using plyvel
'''
"""
def __init__(self, path):
self.db = plyvel.DB(path)
def get(self, key):
'''
"""
gets value for key
'''
"""
return self.db.get(key)
def put(self, key, value):
'''
"""
puts value for key
'''
"""
self.db.put(key, value)
def write_batch(self):
'''
"""
start writing a batch
'''
"""
return self.db.write_batch()

@ -1,24 +1,39 @@
import rlp
import binascii
from ethereum.utils import normalize_address, hash32, trie_root, \
big_endian_int, address, int256, encode_hex, encode_int, \
big_endian_to_int, int_to_addr, zpad, parse_as_bin, parse_as_int, \
decode_hex, sha3, is_string, is_numeric
from ethereum.utils import (
normalize_address,
hash32,
trie_root,
big_endian_int,
address,
int256,
encode_hex,
encode_int,
big_endian_to_int,
int_to_addr,
zpad,
parse_as_bin,
parse_as_int,
decode_hex,
sha3,
is_string,
is_numeric,
)
from rlp.sedes import big_endian_int, Binary, binary, CountableList
from ethereum import utils
from ethereum import trie
from ethereum.trie import Trie
from ethereum.securetrie import SecureTrie
BLANK_HASH = utils.sha3(b'')
BLANK_ROOT = utils.sha3rlp(b'')
BLANK_HASH = utils.sha3(b"")
BLANK_ROOT = utils.sha3rlp(b"")
STATE_DEFAULTS = {
"txindex": 0,
"gas_used": 0,
"gas_limit": 3141592,
"block_number": 0,
"block_coinbase": '\x00' * 20,
"block_coinbase": "\x00" * 20,
"block_difficulty": 1,
"timestamp": 0,
"logs": [],
@ -32,20 +47,20 @@ STATE_DEFAULTS = {
class Account(rlp.Serializable):
'''
"""
adjusted account from ethereum.state
'''
"""
fields = [
('nonce', big_endian_int),
('balance', big_endian_int),
('storage', trie_root),
('code_hash', hash32)
("nonce", big_endian_int),
("balance", big_endian_int),
("storage", trie_root),
("code_hash", hash32),
]
def __init__(self, nonce, balance, storage, code_hash, db, address):
def __init__(self, nonce, balance, storage, code_hash, db, addr):
self.db = db
self.address = address
self.address = addr
super(Account, self).__init__(nonce, balance, storage, code_hash)
self.storage_cache = {}
self.storage_trie = SecureTrie(Trie(self.db))
@ -57,41 +72,43 @@ class Account(rlp.Serializable):
@property
def code(self):
'''
"""
code rlp data
'''
"""
return self.db.get(self.code_hash)
def get_storage_data(self, key):
'''
"""
get storage data
'''
"""
if key not in self.storage_cache:
v = self.storage_trie.get(utils.encode_int32(key))
self.storage_cache[key] = utils.big_endian_to_int(
rlp.decode(v) if v else b'')
rlp.decode(v) if v else b""
)
return self.storage_cache[key]
@classmethod
def blank_account(cls, db, address, initial_nonce=0):
'''
def blank_account(cls, db, addr, initial_nonce=0):
"""
creates a blank account
'''
db.put(BLANK_HASH, b'')
o = cls(initial_nonce, 0, trie.BLANK_ROOT, BLANK_HASH, db, address)
"""
db.put(BLANK_HASH, b"")
o = cls(initial_nonce, 0, trie.BLANK_ROOT, BLANK_HASH, db, addr)
o.existent_at_start = False
return o
def is_blank(self):
'''
"""
checks if is a blank account
'''
"""
return self.nonce == 0 and self.balance == 0 and self.code_hash == BLANK_HASH
class State():
'''
class State:
"""
adjusted state from ethereum.state
'''
"""
def __init__(self, db, root):
self.db = db
@ -100,29 +117,30 @@ class State():
self.journal = []
self.cache = {}
def get_and_cache_account(self, address):
'''
gets and caches an account for an address, creates a blank one if not found
'''
if address in self.cache:
return self.cache[address]
rlpdata = self.secure_trie.get(address)
if rlpdata == trie.BLANK_NODE and len(address) == 32: # support for hashed addresses
rlpdata = self.trie.get(address)
def get_and_cache_account(self, addr):
"""Gets and caches an account for an addres, creates blank if not found"""
if addr in self.cache:
return self.cache[addr]
rlpdata = self.secure_trie.get(addr)
if (
rlpdata == trie.BLANK_NODE and len(addr) == 32
): # support for hashed addresses
rlpdata = self.trie.get(addr)
if rlpdata != trie.BLANK_NODE:
o = rlp.decode(rlpdata, Account, db=self.db, address=address)
o = rlp.decode(rlpdata, Account, db=self.db, address=addr)
else:
o = Account.blank_account(
self.db, address, 0)
self.cache[address] = o
o = Account.blank_account(self.db, addr, 0)
self.cache[addr] = o
o._mutable = True
o._cached_rlp = None
return o
def get_all_accounts(self):
'''
"""
iterates through the trie and yields non-blank leaves as accounts
'''
"""
for address_hash, rlpdata in self.secure_trie.trie.iter_branch():
if rlpdata != trie.BLANK_NODE:
yield rlp.decode(rlpdata, Account, db=self.db, address=address_hash)

@ -1,4 +1,4 @@
from abc import (abstractmethod)
from abc import abstractmethod
from .constants import BLOCK_TAGS, BLOCK_TAG_LATEST
from .utils import hex_to_dec, validate_block
@ -8,76 +8,77 @@ ETH_DEFAULT_RPC_PORT = 8545
PARITY_DEFAULT_RPC_PORT = 8545
PYETHAPP_DEFAULT_RPC_PORT = 4000
MAX_RETRIES = 3
JSON_MEDIA_TYPE = 'application/json'
JSON_MEDIA_TYPE = "application/json"
'''
"""
This code is adapted from: https://github.com/ConsenSys/ethjsonrpc
'''
"""
class BaseClient(object):
class BaseClient(object):
@abstractmethod
def _call(self, method, params=None, _id=1):
pass
def eth_coinbase(self):
'''
"""
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_coinbase
TESTED
'''
return self._call('eth_coinbase')
"""
return self._call("eth_coinbase")
def eth_blockNumber(self):
'''
"""
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_blocknumber
TESTED
'''
return hex_to_dec(self._call('eth_blockNumber'))
"""
return hex_to_dec(self._call("eth_blockNumber"))
def eth_getBalance(self, address=None, block=BLOCK_TAG_LATEST):
'''
"""
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getbalance
TESTED
'''
"""
address = address or self.eth_coinbase()
block = validate_block(block)
return hex_to_dec(self._call('eth_getBalance', [address, block]))
return hex_to_dec(self._call("eth_getBalance", [address, block]))
def eth_getStorageAt(self, address=None, position=0, block=BLOCK_TAG_LATEST):
'''
"""
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getstorageat
TESTED
'''
"""
block = validate_block(block)
return self._call('eth_getStorageAt', [address, hex(position), block])
return self._call("eth_getStorageAt", [address, hex(position), block])
def eth_getCode(self, address, default_block=BLOCK_TAG_LATEST):
'''
"""
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getcode
NEEDS TESTING
'''
"""
if isinstance(default_block, str):
if default_block not in BLOCK_TAGS:
raise ValueError
return self._call('eth_getCode', [address, default_block])
return self._call("eth_getCode", [address, default_block])
def eth_getBlockByNumber(self, block=BLOCK_TAG_LATEST, tx_objects=True):
'''
"""
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber
TESTED
'''
"""
block = validate_block(block)
return self._call('eth_getBlockByNumber', [block, tx_objects])
return self._call("eth_getBlockByNumber", [block, tx_objects])
def eth_getTransactionReceipt(self, tx_hash):
'''
"""
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionreceipt
TESTED
'''
return self._call('eth_getTransactionReceipt', [tx_hash])
"""
return self._call("eth_getTransactionReceipt", [tx_hash])

@ -3,7 +3,12 @@ import logging
import requests
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError as RequestsConnectionError
from .exceptions import (ConnectionError, BadStatusCodeError, BadJsonError, BadResponseError)
from .exceptions import (
ConnectionError,
BadStatusCodeError,
BadJsonError,
BadResponseError,
)
from .base_client import BaseClient
GETH_DEFAULT_RPC_PORT = 8545
@ -11,17 +16,19 @@ ETH_DEFAULT_RPC_PORT = 8545
PARITY_DEFAULT_RPC_PORT = 8545
PYETHAPP_DEFAULT_RPC_PORT = 4000
MAX_RETRIES = 3
JSON_MEDIA_TYPE = 'application/json'
JSON_MEDIA_TYPE = "application/json"
'''
"""
This code is adapted from: https://github.com/ConsenSys/ethjsonrpc
'''
"""
class EthJsonRpc(BaseClient):
'''
"""
Ethereum JSON-RPC client class
'''
"""
def __init__(self, host='localhost', port=GETH_DEFAULT_RPC_PORT, tls=False):
def __init__(self, host="localhost", port=GETH_DEFAULT_RPC_PORT, tls=False):
self.host = host
self.port = port
self.tls = tls
@ -31,17 +38,12 @@ class EthJsonRpc(BaseClient):
def _call(self, method, params=None, _id=1):
params = params or []
data = {
'jsonrpc': '2.0',
'method': method,
'params': params,
'id': _id,
}
scheme = 'http'
data = {"jsonrpc": "2.0", "method": method, "params": params, "id": _id}
scheme = "http"
if self.tls:
scheme += 's'
url = '{}://{}:{}'.format(scheme, self.host, self.port)
headers = {'Content-Type': JSON_MEDIA_TYPE}
scheme += "s"
url = "{}://{}:{}".format(scheme, self.host, self.port)
headers = {"Content-Type": JSON_MEDIA_TYPE}
logging.debug("rpc send: %s" % json.dumps(data))
try:
r = self.session.post(url, headers=headers, data=json.dumps(data))
@ -55,7 +57,7 @@ class EthJsonRpc(BaseClient):
except ValueError:
raise BadJsonError(r.text)
try:
return response['result']
return response["result"]
except KeyError:
raise BadResponseError(response)
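A minimal sketch of driving the JSON-RPC client above, assuming a node is listening on localhost:8545:

from mythril.ethereum.interface.rpc.client import EthJsonRpc

client = EthJsonRpc(host="localhost", port=8545, tls=False)

print("chain head:", client.eth_blockNumber())
block = client.eth_getBlockByNumber("latest", tx_objects=False)
print("block hash:", block["hash"])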

@ -0,0 +1,4 @@
BLOCK_TAG_EARLIEST = "earliest"
BLOCK_TAG_LATEST = "latest"
BLOCK_TAG_PENDING = "pending"
BLOCK_TAGS = (BLOCK_TAG_EARLIEST, BLOCK_TAG_LATEST, BLOCK_TAG_PENDING)

@ -2,37 +2,38 @@ from .constants import BLOCK_TAGS
def hex_to_dec(x):
'''
"""
Convert hex to decimal
'''
"""
return int(x, 16)
def clean_hex(d):
'''
"""
Convert decimal to hex and remove the "L" suffix that is appended to large
numbers
'''
return hex(d).rstrip('L')
"""
return hex(d).rstrip("L")
def validate_block(block):
if isinstance(block, str):
if block not in BLOCK_TAGS:
raise ValueError('invalid block tag')
raise ValueError("invalid block tag")
if isinstance(block, int):
block = hex(block)
return block
def wei_to_ether(wei):
'''
"""
Convert wei to ether
'''
"""
return 1.0 * wei / 10 ** 18
def ether_to_wei(ether):
'''
"""
Convert ether to wei
'''
"""
return ether * 10 ** 18

@ -5,8 +5,9 @@
http://www.github.com/ConsenSys/mythril
"""
import logging
import logging, coloredlogs
import json
import os
import sys
import argparse
@ -15,97 +16,244 @@ import argparse
from mythril.exceptions import CriticalError, AddressNotFoundError
from mythril.mythril import Mythril
from mythril.version import VERSION
import mythril.support.signatures as sigs
def exit_with_error(format, message):
if format == 'text' or format == 'markdown':
def exit_with_error(format_, message):
if format_ == "text" or format_ == "markdown":
print(message)
else:
result = {'success': False, 'error': str(message), 'issues': []}
result = {"success": False, "error": str(message), "issues": []}
print(json.dumps(result))
sys.exit()
def main():
parser = argparse.ArgumentParser(description='Security analysis of Ethereum smart contracts')
parser.add_argument("solidity_file", nargs='*')
commands = parser.add_argument_group('commands')
commands.add_argument('-g', '--graph', help='generate a control flow graph')
commands.add_argument('-V', '--version', action='store_true',
help='print the Mythril version number and exit')
commands.add_argument('-x', '--fire-lasers', action='store_true',
help='detect vulnerabilities, use with -c, -a or solidity file(s)')
commands.add_argument('-t', '--truffle', action='store_true',
help='analyze a truffle project (run from project dir)')
commands.add_argument('-d', '--disassemble', action='store_true', help='print disassembly')
commands.add_argument('-j', '--statespace-json', help='dumps the statespace json', metavar='OUTPUT_FILE')
inputs = parser.add_argument_group('input arguments')
inputs.add_argument('-c', '--code', help='hex-encoded bytecode string ("6060604052...")', metavar='BYTECODE')
inputs.add_argument('-f', '--codefile', help='file containing hex-encoded bytecode string',
metavar='BYTECODEFILE', type=argparse.FileType('r'))
inputs.add_argument('-a', '--address', help='pull contract from the blockchain', metavar='CONTRACT_ADDRESS')
inputs.add_argument('-l', '--dynld', action='store_true', help='auto-load dependencies from the blockchain')
outputs = parser.add_argument_group('output formats')
outputs.add_argument('-o', '--outform', choices=['text', 'markdown', 'json'], default='text',
help='report output format', metavar='<text/markdown/json>')
outputs.add_argument('--verbose-report', action='store_true', help='Include debugging information in report')
database = parser.add_argument_group('local contracts database')
database.add_argument('-s', '--search', help='search the contract database', metavar='EXPRESSION')
database.add_argument('--leveldb-dir', help='specify leveldb directory for search or direct access operations', metavar='LEVELDB_PATH')
utilities = parser.add_argument_group('utilities')
utilities.add_argument('--hash', help='calculate function signature hash', metavar='SIGNATURE')
utilities.add_argument('--storage', help='read state variables from storage index, use with -a',
metavar='INDEX,NUM_SLOTS,[array] / mapping,INDEX,[KEY1, KEY2...]')
utilities.add_argument('--solv',
help='specify solidity compiler version. If not present, will try to install it (Experimental)',
metavar='SOLV')
utilities.add_argument('--contract-hash-to-address', help='returns corresponding address for a contract address hash', metavar='SHA3_TO_LOOK_FOR')
options = parser.add_argument_group('options')
options.add_argument('-m', '--modules', help='Comma-separated list of security analysis modules', metavar='MODULES')
options.add_argument('--max-depth', type=int, default=22, help='Maximum recursion depth for symbolic execution')
options.add_argument('--strategy', choices=['dfs', 'bfs'], default='dfs', help='Symbolic execution strategy')
options.add_argument('--execution-timeout', type=int, default=600, help="The amount of seconds to spend on symbolic execution")
options.add_argument('--create-timeout', type=int, default=10, help="The amount of seconds to spend on "
"the initial contract creation")
options.add_argument('--solc-args', help='Extra arguments for solc')
options.add_argument('--phrack', action='store_true', help='Phrack-style call graph')
options.add_argument('--enable-physics', action='store_true', help='enable graph physics simulation')
options.add_argument('-v', type=int, help='log level (0-2)', metavar='LOG_LEVEL')
rpc = parser.add_argument_group('RPC options')
rpc.add_argument('-i', action='store_true', help='Preset: Infura Node service (Mainnet)')
rpc.add_argument('--rpc', help='custom RPC settings', metavar='HOST:PORT / ganache / infura-[network_name]')
rpc.add_argument('--rpctls', type=bool, default=False, help='RPC connection over TLS')
parser = argparse.ArgumentParser(
description="Security analysis of Ethereum smart contracts"
)
parser.add_argument("solidity_file", nargs="*")
commands = parser.add_argument_group("commands")
commands.add_argument("-g", "--graph", help="generate a control flow graph")
commands.add_argument(
"-V",
"--version",
action="store_true",
help="print the Mythril version number and exit",
)
commands.add_argument(
"-x",
"--fire-lasers",
action="store_true",
help="detect vulnerabilities, use with -c, -a or solidity file(s)",
)
commands.add_argument(
"-t",
"--truffle",
action="store_true",
help="analyze a truffle project (run from project dir)",
)
commands.add_argument(
"-d", "--disassemble", action="store_true", help="print disassembly"
)
commands.add_argument(
"-j",
"--statespace-json",
help="dumps the statespace json",
metavar="OUTPUT_FILE",
)
inputs = parser.add_argument_group("input arguments")
inputs.add_argument(
"-c",
"--code",
help='hex-encoded bytecode string ("6060604052...")',
metavar="BYTECODE",
)
inputs.add_argument(
"-f",
"--codefile",
help="file containing hex-encoded bytecode string",
metavar="BYTECODEFILE",
type=argparse.FileType("r"),
)
inputs.add_argument(
"-a",
"--address",
help="pull contract from the blockchain",
metavar="CONTRACT_ADDRESS",
)
inputs.add_argument(
"-l",
"--dynld",
action="store_true",
help="auto-load dependencies from the blockchain",
)
outputs = parser.add_argument_group("output formats")
outputs.add_argument(
"-o",
"--outform",
choices=["text", "markdown", "json"],
default="text",
help="report output format",
metavar="<text/markdown/json>",
)
outputs.add_argument(
"--verbose-report",
action="store_true",
help="Include debugging information in report",
)
database = parser.add_argument_group("local contracts database")
database.add_argument(
"-s", "--search", help="search the contract database", metavar="EXPRESSION"
)
database.add_argument(
"--leveldb-dir",
help="specify leveldb directory for search or direct access operations",
metavar="LEVELDB_PATH",
)
utilities = parser.add_argument_group("utilities")
utilities.add_argument(
"--hash", help="calculate function signature hash", metavar="SIGNATURE"
)
utilities.add_argument(
"--storage",
help="read state variables from storage index, use with -a",
metavar="INDEX,NUM_SLOTS,[array] / mapping,INDEX,[KEY1, KEY2...]",
)
utilities.add_argument(
"--solv",
help="specify solidity compiler version. If not present, will try to install it (Experimental)",
metavar="SOLV",
)
utilities.add_argument(
"--contract-hash-to-address",
help="returns corresponding address for a contract address hash",
metavar="SHA3_TO_LOOK_FOR",
)
options = parser.add_argument_group("options")
options.add_argument(
"-m",
"--modules",
help="Comma-separated list of security analysis modules",
metavar="MODULES",
)
options.add_argument(
"--max-depth",
type=int,
default=22,
help="Maximum recursion depth for symbolic execution",
)
options.add_argument(
"--strategy",
choices=["dfs", "bfs", "naive-random", "weighted-random"],
default="dfs",
help="Symbolic execution strategy",
)
options.add_argument(
"--max-transaction-count",
type=int,
default=1,
help="Maximum number of transactions issued by laser",
)
options.add_argument(
"--execution-timeout",
type=int,
default=600,
help="The amount of seconds to spend on symbolic execution",
)
options.add_argument(
"--create-timeout",
type=int,
default=10,
help="The amount of seconds to spend on " "the initial contract creation",
)
options.add_argument("--solc-args", help="Extra arguments for solc")
options.add_argument(
"--phrack", action="store_true", help="Phrack-style call graph"
)
options.add_argument(
"--enable-physics", action="store_true", help="enable graph physics simulation"
)
options.add_argument("-v", type=int, help="log level (0-2)", metavar="LOG_LEVEL")
options.add_argument(
"-q",
"--query-signature",
action="store_true",
help="Lookup function signatures through www.4byte.directory",
)
rpc = parser.add_argument_group("RPC options")
rpc.add_argument(
"-i", action="store_true", help="Preset: Infura Node service (Mainnet)"
)
rpc.add_argument(
"--rpc",
help="custom RPC settings",
metavar="HOST:PORT / ganache / infura-[network_name]",
)
rpc.add_argument(
"--rpctls", type=bool, default=False, help="RPC connection over TLS"
)
parser.add_argument("--epic", action="store_true", help=argparse.SUPPRESS)
# Get config values
args = parser.parse_args()
if args.epic:
path = os.path.dirname(os.path.realpath(__file__))
sys.argv.remove("--epic")
os.system(" ".join(sys.argv) + " | python3 " + path + "/epic.py")
sys.exit()
if args.version:
if args.outform == 'json':
print(json.dumps({'version_str': VERSION}))
if args.outform == "json":
print(json.dumps({"version_str": VERSION}))
else:
print("Mythril version {}".format(VERSION))
sys.exit()
# Parse cmdline args
if not (args.search or args.hash or args.disassemble or args.graph or args.fire_lasers
or args.storage or args.truffle or args.statespace_json or args.contract_hash_to_address):
if not (
args.search
or args.hash
or args.disassemble
or args.graph
or args.fire_lasers
or args.storage
or args.truffle
or args.statespace_json
or args.contract_hash_to_address
):
parser.print_help()
sys.exit()
if args.v:
if 0 <= args.v < 3:
logging.basicConfig(level=[logging.NOTSET, logging.INFO, logging.DEBUG][args.v])
coloredlogs.install(
fmt="%(name)s[%(process)d] %(levelname)s %(message)s",
level=[logging.NOTSET, logging.INFO, logging.DEBUG][args.v],
)
else:
exit_with_error(args.outform, "Invalid -v value, you can find valid values in usage")
exit_with_error(
args.outform, "Invalid -v value, you can find valid values in usage"
)
if args.query_signature:
if sigs.ethereum_input_decoder is None:
exit_with_error(
args.outform,
"The --query-signature function requires the python package ethereum-input-decoder",
)
# -- commands --
if args.hash:
@ -117,8 +265,12 @@ def main():
# infura = None, rpc = None, rpctls = None
# solc_args = None, dynld = None, max_recursion_depth = 12):
mythril = Mythril(solv=args.solv, dynld=args.dynld,
solc_args=args.solc_args)
mythril = Mythril(
solv=args.solv,
dynld=args.dynld,
solc_args=args.solc_args,
enable_online_lookup=args.query_signature,
)
if args.dynld and not (args.rpc or args.i):
mythril.set_api_from_config_path()
@ -132,7 +284,9 @@ def main():
mythril.set_api_rpc_localhost()
elif args.search or args.contract_hash_to_address:
# Open LevelDB if necessary
mythril.set_api_leveldb(mythril.leveldb_dir if not args.leveldb_dir else args.leveldb_dir)
mythril.set_api_leveldb(
mythril.leveldb_dir if not args.leveldb_dir else args.leveldb_dir
)
if args.search:
# Database search ops
@ -154,7 +308,8 @@ def main():
mythril.analyze_truffle_project(args)
except FileNotFoundError:
print(
"Build directory not found. Make sure that you start the analysis from the project root, and that 'truffle compile' has executed successfully.")
"Build directory not found. Make sure that you start the analysis from the project root, and that 'truffle compile' has executed successfully."
)
sys.exit()
# Load / compile input contracts
@ -164,7 +319,7 @@ def main():
# Load from bytecode
address, _ = mythril.load_from_bytecode(args.code)
elif args.codefile:
bytecode = ''.join([l.strip() for l in args.codefile if len(l.strip()) > 0])
bytecode = "".join([l.strip() for l in args.codefile if len(l.strip()) > 0])
address, _ = mythril.load_from_bytecode(bytecode)
elif args.address:
# Get bytecode from a contract address
@ -172,37 +327,55 @@ def main():
elif args.solidity_file:
# Compile Solidity source file(s)
if args.graph and len(args.solidity_file) > 1:
exit_with_error(args.outform,
"Cannot generate call graphs from multiple input files. Please do it one at a time.")
exit_with_error(
args.outform,
"Cannot generate call graphs from multiple input files. Please do it one at a time.",
)
address, _ = mythril.load_from_solidity(args.solidity_file) # list of files
else:
exit_with_error(args.outform,
"No input bytecode. Please provide EVM code via -c BYTECODE, -a ADDRESS, or -i SOLIDITY_FILES")
exit_with_error(
args.outform,
"No input bytecode. Please provide EVM code via -c BYTECODE, -a ADDRESS, or -i SOLIDITY_FILES",
)
# Commands
if args.storage:
if not args.address:
exit_with_error(args.outform,
"To read storage, provide the address of a deployed contract with the -a option.")
storage = mythril.get_state_variable_from_storage(address=address,
params=[a.strip() for a in args.storage.strip().split(",")])
exit_with_error(
args.outform,
"To read storage, provide the address of a deployed contract with the -a option.",
)
storage = mythril.get_state_variable_from_storage(
address=address,
params=[a.strip() for a in args.storage.strip().split(",")],
)
print(storage)
elif args.disassemble:
easm_text = mythril.contracts[0].get_easm() # or mythril.disassemble(mythril.contracts[0])
easm_text = mythril.contracts[
0
].get_easm() # or mythril.disassemble(mythril.contracts[0])
sys.stdout.write(easm_text)
elif args.graph or args.fire_lasers:
if not mythril.contracts:
exit_with_error(args.outform, "input files do not contain any valid contracts")
exit_with_error(
args.outform, "input files do not contain any valid contracts"
)
if args.graph:
html = mythril.graph_html(strategy=args.strategy, contract=mythril.contracts[0], address=address,
enable_physics=args.enable_physics, phrackify=args.phrack,
max_depth=args.max_depth, execution_timeout=args.execution_timeout,
create_timeout=args.create_timeout)
html = mythril.graph_html(
strategy=args.strategy,
contract=mythril.contracts[0],
address=address,
enable_physics=args.enable_physics,
phrackify=args.phrack,
max_depth=args.max_depth,
execution_timeout=args.execution_timeout,
create_timeout=args.create_timeout,
)
try:
with open(args.graph, "w") as f:
@ -211,26 +384,40 @@ def main():
exit_with_error(args.outform, "Error saving graph: " + str(e))
else:
report = mythril.fire_lasers(strategy=args.strategy, address=address,
modules=[m.strip() for m in args.modules.strip().split(",")] if args.modules else [],
report = mythril.fire_lasers(
strategy=args.strategy,
address=address,
modules=[m.strip() for m in args.modules.strip().split(",")]
if args.modules
else [],
verbose_report=args.verbose_report,
max_depth=args.max_depth, execution_timeout=args.execution_timeout,
create_timeout=args.create_timeout)
max_depth=args.max_depth,
execution_timeout=args.execution_timeout,
create_timeout=args.create_timeout,
max_transaction_count=args.max_transaction_count,
)
outputs = {
'json': report.as_json(),
'text': report.as_text(),
'markdown': report.as_markdown()
"json": report.as_json(),
"text": report.as_text(),
"markdown": report.as_markdown(),
}
print(outputs[args.outform])
elif args.statespace_json:
if not mythril.contracts:
exit_with_error(args.outform, "input files do not contain any valid contracts")
statespace = mythril.dump_statespace(strategy=args.strategy, contract=mythril.contracts[0], address=address,
max_depth=args.max_depth, execution_timeout=args.execution_timeout,
create_timeout=args.create_timeout)
exit_with_error(
args.outform, "input files do not contain any valid contracts"
)
statespace = mythril.dump_statespace(
strategy=args.strategy,
contract=mythril.contracts[0],
address=address,
max_depth=args.max_depth,
execution_timeout=args.execution_timeout,
create_timeout=args.create_timeout,
)
try:
with open(args.statespace_json, "w") as f:

@ -0,0 +1,249 @@
#!/usr/bin/env python
#
# "THE BEER-WARE LICENSE" (Revision 43~maze)
#
# <maze@pyth0n.org> wrote these files. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return.
# https://github.com/tehmaze/lolcat
import atexit
import math
import os
import random
import re
import sys
import time
PY3 = sys.version_info >= (3,)
# Reset terminal colors at exit
def reset():
sys.stdout.write("\x1b[0m")
sys.stdout.flush()
atexit.register(reset)
STRIP_ANSI = re.compile(r"\x1b\[(\d+)(;\d+)?(;\d+)?[m|K]")
COLOR_ANSI = (
(0x00, 0x00, 0x00),
(0xCD, 0x00, 0x00),
(0x00, 0xCD, 0x00),
(0xCD, 0xCD, 0x00),
(0x00, 0x00, 0xEE),
(0xCD, 0x00, 0xCD),
(0x00, 0xCD, 0xCD),
(0xE5, 0xE5, 0xE5),
(0x7F, 0x7F, 0x7F),
(0xFF, 0x00, 0x00),
(0x00, 0xFF, 0x00),
(0xFF, 0xFF, 0x00),
(0x5C, 0x5C, 0xFF),
(0xFF, 0x00, 0xFF),
(0x00, 0xFF, 0xFF),
(0xFF, 0xFF, 0xFF),
)
class LolCat(object):
def __init__(self, mode=256, output=sys.stdout):
self.mode = mode
self.output = output
def _distance(self, rgb1, rgb2):
return sum(map(lambda c: (c[0] - c[1]) ** 2, zip(rgb1, rgb2)))
def ansi(self, rgb):
r, g, b = rgb
if self.mode in (8, 16):
colors = COLOR_ANSI[: self.mode]
matches = [
(self._distance(c, map(int, rgb)), i) for i, c in enumerate(colors)
]
matches.sort()
color = matches[0][1]
return "3%d" % (color,)
else:
gray_possible = True
sep = 2.5
while gray_possible:
if r < sep or g < sep or b < sep:
gray = r < sep and g < sep and b < sep
gray_possible = False
sep += 42.5
if gray:
color = 232 + int(float(sum(rgb) / 33.0))
else:
color = sum(
[16]
+ [
int(6 * float(val) / 256) * mod
for val, mod in zip(rgb, [36, 6, 1])
]
)
return "38;5;%d" % (color,)
def wrap(self, *codes):
return "\x1b[%sm" % ("".join(codes),)
def rainbow(self, freq, i):
r = math.sin(freq * i) * 127 + 128
g = math.sin(freq * i + 2 * math.pi / 3) * 127 + 128
b = math.sin(freq * i + 4 * math.pi / 3) * 127 + 128
return [r, g, b]
def cat(self, fd, options):
if options.animate:
self.output.write("\x1b[?25l")
for line in fd:
options.os += 1
self.println(line, options)
if options.animate:
self.output.write("\x1b[?25h")
def println(self, s, options):
s = s.rstrip()
if options.force or self.output.isatty():
s = STRIP_ANSI.sub("", s)
if options.animate:
self.println_ani(s, options)
else:
self.println_plain(s, options)
self.output.write("\n")
self.output.flush()
def println_ani(self, s, options):
if not s:
return
for i in range(1, options.duration):
self.output.write("\x1b[%dD" % (len(s),))
self.output.flush()
options.os += options.spread
self.println_plain(s, options)
time.sleep(1.0 / options.speed)
def println_plain(self, s, options):
for i, c in enumerate(s if PY3 else s.decode(options.charset_py2, "replace")):
rgb = self.rainbow(options.freq, options.os + i / options.spread)
self.output.write(
"".join(
[
self.wrap(self.ansi(rgb)),
c if PY3 else c.encode(options.charset_py2, "replace"),
]
)
)
def detect_mode(term_hint="xterm-256color"):
"""
Poor man's color mode detection.
"""
if "ANSICON" in os.environ:
return 16
elif os.environ.get("ConEmuANSI", "OFF") == "ON":
return 256
else:
term = os.environ.get("TERM", term_hint)
if term.endswith("-256color") or term in ("xterm", "screen"):
return 256
elif term.endswith("-color") or term in ("rxvt",):
return 16
else:
return 256 # optimistic default
def run():
"""Main entry point."""
import optparse
parser = optparse.OptionParser(usage=r"%prog [<options>] [file ...]")
parser.add_option(
"-p", "--spread", type="float", default=3.0, help="Rainbow spread"
)
parser.add_option(
"-F", "--freq", type="float", default=0.1, help="Rainbow frequency"
)
parser.add_option("-S", "--seed", type="int", default=0, help="Rainbow seed")
parser.add_option(
"-a",
"--animate",
action="store_true",
default=False,
help="Enable psychedelics",
)
parser.add_option(
"-d", "--duration", type="int", default=12, help="Animation duration"
)
parser.add_option(
"-s", "--speed", type="float", default=20.0, help="Animation speed"
)
parser.add_option(
"-f",
"--force",
action="store_true",
default=False,
help="Force colour even when stdout is not a tty",
)
parser.add_option(
"-3", action="store_const", dest="mode", const=8, help="Force 3 bit colour mode"
)
parser.add_option(
"-4",
action="store_const",
dest="mode",
const=16,
help="Force 4 bit colour mode",
)
parser.add_option(
"-8",
action="store_const",
dest="mode",
const=256,
help="Force 8 bit colour mode",
)
parser.add_option(
"-c",
"--charset-py2",
default="utf-8",
help="Manually set a charset to convert from, for python 2.7",
)
options, args = parser.parse_args()
options.os = random.randint(0, 256) if options.seed == 0 else options.seed
options.mode = options.mode or detect_mode()
lolcat = LolCat(mode=options.mode)
if not args:
args = ["-"]
for filename in args:
if filename == "-":
lolcat.cat(sys.stdin, options)
else:
try:
with open(filename, "r") as handle:
lolcat.cat(handle, options)
except IOError as error:
sys.stderr.write(str(error) + "\n")
if __name__ == "__main__":
sys.exit(run())

@ -1,7 +1,7 @@
import logging
from z3 import simplify
from z3 import simplify, Extract
import mythril.laser.ethereum.util as util
from mythril.laser.ethereum.state import Account, CalldataType, GlobalState
from mythril.laser.ethereum.state import Account, CalldataType, GlobalState, Calldata
from mythril.support.loader import DynLoader
import re
@ -11,7 +11,9 @@ to get the necessary elements from the stack and determine the parameters for th
"""
def get_call_parameters(global_state: GlobalState, dynamic_loader: DynLoader, with_value=False):
def get_call_parameters(
global_state: GlobalState, dynamic_loader: DynLoader, with_value=False
):
"""
Gets call parameters from global state
Pops the values from the stack and determines output parameters
@ -22,21 +24,40 @@ def get_call_parameters(global_state: GlobalState, dynamic_loader: DynLoader, wi
"""
gas, to = global_state.mstate.pop(2)
value = global_state.mstate.pop() if with_value else 0
memory_input_offset, memory_input_size, memory_out_offset, memory_out_size = global_state.mstate.pop(4)
memory_input_offset, memory_input_size, memory_out_offset, memory_out_size = global_state.mstate.pop(
4
)
callee_address = get_callee_address(global_state, dynamic_loader, to)
callee_account = None
call_data, call_data_type = get_call_data(global_state, memory_input_offset, memory_input_size, False)
call_data, call_data_type = get_call_data(
global_state, memory_input_offset, memory_input_size, False
)
if int(callee_address, 16) >= 5 or int(callee_address, 16) == 0:
call_data, call_data_type = get_call_data(global_state, memory_input_offset, memory_input_size)
callee_account = get_callee_account(global_state, callee_address, dynamic_loader)
return callee_address, callee_account, call_data, value, call_data_type, gas, memory_out_offset, memory_out_size
def get_callee_address(global_state:GlobalState, dynamic_loader: DynLoader, symbolic_to_address):
call_data, call_data_type = get_call_data(
global_state, memory_input_offset, memory_input_size
)
callee_account = get_callee_account(
global_state, callee_address, dynamic_loader
)
return (
callee_address,
callee_account,
call_data,
value,
call_data_type,
gas,
memory_out_offset,
memory_out_size,
)
def get_callee_address(
global_state: GlobalState, dynamic_loader: DynLoader, symbolic_to_address
):
"""
Gets the address of the callee
:param global_state: state to look in
@ -48,21 +69,24 @@ def get_callee_address(global_state:GlobalState, dynamic_loader: DynLoader, symb
try:
callee_address = hex(util.get_concrete_int(symbolic_to_address))
except AttributeError:
logging.info("Symbolic call encountered")
except TypeError:
logging.debug("Symbolic call encountered")
match = re.search(r'storage_(\d+)', str(simplify(symbolic_to_address)))
match = re.search(r"storage_(\d+)", str(simplify(symbolic_to_address)))
logging.debug("CALL to: " + str(simplify(symbolic_to_address)))
if match is None or dynamic_loader is None:
raise ValueError()
index = int(match.group(1))
logging.info("Dynamic contract address at storage index {}".format(index))
logging.debug("Dynamic contract address at storage index {}".format(index))
# attempt to read the contract address from instance storage
try:
callee_address = dynamic_loader.read_storage(environment.active_account.address, index)
callee_address = dynamic_loader.read_storage(
environment.active_account.address, index
)
# TODO: verify whether this happens or not
except:
logging.debug("Error accessing contract storage.")
raise ValueError
@ -89,30 +113,31 @@ def get_callee_account(global_state, callee_address, dynamic_loader):
return global_state.accounts[callee_address]
except KeyError:
# We have a valid call address, but contract is not in the modules list
logging.info("Module with address " + callee_address + " not loaded.")
logging.debug("Module with address " + callee_address + " not loaded.")
if dynamic_loader is None:
raise ValueError()
logging.info("Attempting to load dependency")
logging.debug("Attempting to load dependency")
try:
code = dynamic_loader.dynld(environment.active_account.address, callee_address)
except Exception as e:
logging.info("Unable to execute dynamic loader.")
except Exception:
logging.debug("Unable to execute dynamic loader.")
raise ValueError()
if code is None:
logging.info("No code returned, not a contract account?")
logging.debug("No code returned, not a contract account?")
raise ValueError()
logging.info("Dependency loaded: " + callee_address)
logging.debug("Dependency loaded: " + callee_address)
callee_account = Account(callee_address, code, callee_address, dynamic_loader=dynamic_loader)
callee_account = Account(
callee_address, code, callee_address, dynamic_loader=dynamic_loader
)
accounts[callee_address] = callee_account
return callee_account
def get_call_data(global_state, memory_start, memory_size, pad=True):
"""
Gets call_data from the global_state
@ -122,17 +147,33 @@ def get_call_data(global_state, memory_start, memory_size, pad=True):
:return: Tuple containing: call_data array from memory or empty array if symbolic, type found
"""
state = global_state.mstate
transaction_id = "{}_internalcall".format(global_state.current_transaction.id)
try:
# TODO: This only allows for either fully concrete or fully symbolic calldata.
# Improve management of memory and calldata to support a mix of both types.
call_data = state.memory[util.get_concrete_int(memory_start):util.get_concrete_int(memory_start + memory_size)]
if len(call_data) < 32 and pad:
call_data += [0] * (32 - len(call_data))
calldata_from_mem = state.memory[
util.get_concrete_int(memory_start) : util.get_concrete_int(
memory_start + memory_size
)
]
i = 0
starting_calldata = []
while i < len(calldata_from_mem):
elem = calldata_from_mem[i]
if isinstance(elem, int):
starting_calldata.append(elem)
i += 1
else: # BitVec
for j in range(0, elem.size(), 8):
starting_calldata.append(Extract(j + 7, j, elem))
i += 1
call_data = Calldata(transaction_id, starting_calldata)
call_data_type = CalldataType.CONCRETE
logging.debug("Calldata: " + str(call_data))
except AttributeError:
except TypeError:
logging.info("Unsupported symbolic calldata offset")
call_data_type = CalldataType.SYMBOLIC
call_data = []
call_data = Calldata("{}_internalcall".format(transaction_id))
return call_data, call_data_type
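The loop above flattens memory that may mix plain integers with symbolic bit-vectors into per-byte values. The following standalone sketch shows the same Extract trick applied to a single 256-bit word; the variable name is illustrative:

from z3 import BitVec, Extract, simplify

word = BitVec("mem_word", 256)

# split the 256-bit word into 32 symbolic bytes, least-significant byte first,
# mirroring the Extract(j + 7, j, elem) loop above
byte_exprs = [Extract(j + 7, j, word) for j in range(0, word.size(), 8)]

print(len(byte_exprs))           # 32
print(simplify(byte_exprs[0]))   # Extract(7, 0, mem_word)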

@ -3,6 +3,7 @@ from enum import Enum
gbl_next_uid = 0 # node counter
class JumpType(Enum):
CONDITIONAL = 1
UNCONDITIONAL = 2
@ -41,18 +42,24 @@ class Node:
instruction = state.get_current_instruction()
code += str(instruction['address']) + " " + instruction['opcode']
if instruction['opcode'].startswith("PUSH"):
code += " " + instruction['argument']
code += str(instruction["address"]) + " " + instruction["opcode"]
if instruction["opcode"].startswith("PUSH"):
code += " " + instruction["argument"]
code += "\\n"
return dict(contract_name=self.contract_name, start_addr=self.start_addr, function_name=self.function_name,
code=code)
return dict(
contract_name=self.contract_name,
start_addr=self.start_addr,
function_name=self.function_name,
code=code,
)
class Edge:
def __init__(self, node_from, node_to, edge_type=JumpType.UNCONDITIONAL, condition=None):
def __init__(
self, node_from, node_to, edge_type=JumpType.UNCONDITIONAL, condition=None
):
self.node_from = node_from
self.node_to = node_to
self.type = edge_type
@ -63,4 +70,4 @@ class Edge:
@property
def as_dict(self):
return {"from": self.node_from, 'to': self.node_to}
return {"from": self.node_from, "to": self.node_to}

@ -12,3 +12,7 @@ class StackOverflowException(VmException):
class InvalidJumpDestination(VmException):
pass
class InvalidInstruction(VmException):
pass

File diff suppressed because it is too large

@ -1,5 +1,6 @@
from z3 import ExprRef
class KeccakFunctionManager:
def __init__(self):
self.keccak_expression_mapping = {}

@ -18,7 +18,7 @@ class NativeContractException(Exception):
def int_to_32bytes(i): # used because int can't fit as bytes function's input
o = [0] * 32
for x in range(32):
o[31 - x] = i & 0xff
o[31 - x] = i & 0xFF
i >>= 8
return bytes(o)
@ -41,7 +41,7 @@ def ecrecover(data):
except TypeError:
raise NativeContractException
message = b''.join([ALL_BYTES[x] for x in data[0:32]])
message = b"".join([ALL_BYTES[x] for x in data[0:32]])
if r >= secp256k1n or s >= secp256k1n or v < 27 or v > 28:
return []
try:
@ -66,7 +66,7 @@ def ripemd160(data):
data = bytes(data)
except TypeError:
raise NativeContractException
return 12*[0]+[i for i in hashlib.new('ripemd160', data).digest()]
return 12 * [0] + [i for i in hashlib.new("ripemd160", data).digest()]
def identity(data):
@ -79,4 +79,4 @@ def native_contracts(address, data):
"""
functions = (ecrecover, sha256, ripemd160, identity)
return functions[address-1](data)
return functions[address - 1](data.starting_calldata)

@ -1,10 +1,31 @@
from z3 import BitVec, BitVecVal
from z3 import (
BitVec,
BitVecVal,
BitVecRef,
BitVecNumRef,
BitVecSort,
Solver,
ExprRef,
Concat,
sat,
simplify,
Array,
ForAll,
Solver,
UGT,
Implies,
)
from z3.z3types import Z3Exception
from mythril.disassembler.disassembly import Disassembly
from copy import copy, deepcopy
from enum import Enum
from random import randint
from mythril.laser.ethereum.util import get_concrete_int
from mythril.laser.ethereum.evm_exceptions import StackOverflowException, StackUnderflowException
from mythril.laser.ethereum.evm_exceptions import (
StackOverflowException,
StackUnderflowException,
)
class CalldataType(Enum):
@ -12,10 +33,92 @@ class CalldataType(Enum):
SYMBOLIC = 2
class Calldata:
"""
Calldata class representing the calldata of a transaction
"""
def __init__(self, tx_id, starting_calldata=None):
"""
Constructor for Calldata
:param tx_id: unique value representing the transaction the calldata is for
:param starting_calldata: byte array representing the concrete calldata of a transaction
"""
self.tx_id = tx_id
if starting_calldata:
self._calldata = []
self.calldatasize = BitVecVal(len(starting_calldata), 256)
self.concrete = True
else:
self._calldata = Array(
"{}_calldata".format(self.tx_id), BitVecSort(256), BitVecSort(8)
)
self.calldatasize = BitVec("{}_calldatasize".format(self.tx_id), 256)
self.concrete = False
self.starting_calldata = starting_calldata or []
@property
def constraints(self):
constraints = []
if self.concrete:
for calldata_byte in self.starting_calldata:
if type(calldata_byte) == int:
self._calldata.append(BitVecVal(calldata_byte, 8))
else:
self._calldata.append(calldata_byte)
constraints.append(self.calldatasize == len(self.starting_calldata))
else:
x = BitVec("x", 256)
constraints.append(
ForAll(x, Implies(self[x] != 0, UGT(self.calldatasize, x)))
)
return constraints
def concretized(self, model):
result = []
for i in range(
get_concrete_int(model.eval(self.calldatasize, model_completion=True))
):
result.append(get_concrete_int(model.eval(self[i], model_completion=True)))
return result
def get_word_at(self, index: int):
return self[index : index + 32]
def __getitem__(self, item):
if isinstance(item, slice):
try:
current_index = (
item.start
if isinstance(item.start, BitVecRef)
else BitVecVal(item.start, 256)
)
dataparts = []
while simplify(current_index != item.stop):
dataparts.append(self[current_index])
current_index = simplify(current_index + 1)
except Z3Exception:
raise IndexError("Invalid Calldata Slice")
return simplify(Concat(dataparts))
if self.concrete:
try:
return self._calldata[get_concrete_int(item)]
except IndexError:
return BitVecVal(0, 8)
else:
return self._calldata[item]
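As a rough sketch of the two modes of the Calldata class above (the transaction ids and byte values are arbitrary), concrete calldata indexes into a fixed byte list while symbolic calldata reads from an uninterpreted z3 array and carries a size constraint:

from mythril.laser.ethereum.state import Calldata

# concrete calldata: five fixed bytes (a 4-byte selector plus one argument byte)
concrete = Calldata("1", [0xAA, 0xBB, 0xCC, 0xDD, 0x01])
constraints = concrete.constraints    # also materialises the internal byte list
print(concrete[0])                    # 8-bit bit-vector with value 170
print(concrete.get_word_at(0))        # 32 bytes starting at offset 0, zero padded

# symbolic calldata: bytes come from an uninterpreted z3 array
symbolic = Calldata("2")
print(symbolic[0])                    # a select on the 2_calldata array
print(symbolic.constraints)           # ties calldatasize to every non-zero byte index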
class Storage:
"""
Storage class represents the storage of an Account
"""
def __init__(self, concrete=False, address=None, dynamic_loader=None):
"""
Constructor for Storage
@ -32,7 +135,12 @@ class Storage:
except KeyError:
if self.address and int(self.address[2:], 16) != 0 and self.dynld:
try:
self._storage[item] = int(self.dynld.read_storage(contract_address=self.address, index=int(item)), 16)
self._storage[item] = int(
self.dynld.read_storage(
contract_address=self.address, index=int(item)
),
16,
)
return self._storage[item]
except ValueError:
pass
@ -47,12 +155,21 @@ class Storage:
def keys(self):
return self._storage.keys()
class Account:
"""
Account class representing ethereum accounts
"""
def __init__(self, address, code=None, contract_name="unknown", balance=None, concrete_storage=False,
dynamic_loader=None):
def __init__(
self,
address,
code=None,
contract_name="unknown",
balance=None,
concrete_storage=False,
dynamic_loader=None,
):
"""
Constructor for account
:param address: Address of the account
@ -64,7 +181,9 @@ class Account:
self.nonce = 0
self.code = code or Disassembly("")
self.balance = balance if balance else BitVec("balance", 256)
self.storage = Storage(concrete_storage, address=address, dynamic_loader=dynamic_loader)
self.storage = Storage(
concrete_storage, address=address, dynamic_loader=dynamic_loader
)
# Metadata
self.address = address
@ -83,13 +202,19 @@ class Account:
@property
def as_dict(self):
return {'nonce': self.nonce, 'code': self.code, 'balance': self.balance, 'storage': self.storage}
return {
"nonce": self.nonce,
"code": self.code,
"balance": self.balance,
"storage": self.storage,
}
class Environment:
"""
The environment class represents the current execution environment for the symbolic executor
"""
def __init__(
self,
active_account,
@ -121,12 +246,17 @@ class Environment:
def __str__(self):
return str(self.as_dict)
@property
def as_dict(self):
return dict(active_account=self.active_account, sender=self.sender, calldata=self.calldata,
gasprice=self.gasprice, callvalue=self.callvalue, origin=self.origin,
calldata_type=self.calldata_type)
return dict(
active_account=self.active_account,
sender=self.sender,
calldata=self.calldata,
gasprice=self.gasprice,
callvalue=self.callvalue,
origin=self.origin,
calldata_type=self.calldata_type,
)
class Constraints(list):
@ -169,9 +299,12 @@ class MachineStack(list):
"""
Defines EVM stack, overrides the default list to handle overflows
"""
STACK_LIMIT = 1024
def __init__(self, default_list=None):
if default_list is None:
default_list = []
super(MachineStack, self).__init__(default_list)
def append(self, element):
@ -180,8 +313,10 @@ class MachineStack(list):
:function: appends the element to the list if the size is less than STACK_LIMIT, else raises an exception
"""
if super(MachineStack, self).__len__() >= self.STACK_LIMIT:
raise StackOverflowException("Reached the EVM stack limit of {}, you can't append more "
"elements".format(self.STACK_LIMIT))
raise StackOverflowException(
"Reached the EVM stack limit of {}, you can't append more "
"elements".format(self.STACK_LIMIT)
)
super(MachineStack, self).append(element)
def pop(self, index=-1):
@ -200,19 +335,21 @@ class MachineStack(list):
try:
return super(MachineStack, self).__getitem__(item)
except IndexError:
raise StackUnderflowException("Trying to access a stack element which doesn't exist")
raise StackUnderflowException(
"Trying to access a stack element which doesn't exist"
)
def __add__(self, other):
"""
Implement list concatenation if needed
"""
raise NotImplementedError('Implement this if needed')
raise NotImplementedError("Implement this if needed")
def __iadd__(self, other):
"""
Implement list concatenation if needed
"""
raise NotImplementedError('Implement this if needed')
raise NotImplementedError("Implement this if needed")
class MachineState:
@ -236,7 +373,7 @@ class MachineState:
"""
if self.memory_size > start + size:
return
m_extend = (start + size - self.memory_size)
m_extend = start + size - self.memory_size
self.memory.extend(bytearray(m_extend))
def memory_write(self, offset, data):
@ -266,14 +403,29 @@ class MachineState:
@property
def as_dict(self):
return dict(pc=self.pc, stack=self.stack, memory=self.memory, memsize=self.memory_size, gas=self.gas)
return dict(
pc=self.pc,
stack=self.stack,
memory=self.memory,
memsize=self.memory_size,
gas=self.gas,
)
class GlobalState:
"""
GlobalState represents the current global state
"""
def __init__(self, world_state, environment, node, machine_state=None, transaction_stack=None, last_return_data=None):
def __init__(
self,
world_state,
environment,
node,
machine_state=None,
transaction_stack=None,
last_return_data=None,
):
""" Constructor for GlobalState"""
self.node = node
self.world_state = world_state
@ -288,8 +440,14 @@ class GlobalState:
environment = copy(self.environment)
mstate = deepcopy(self.mstate)
transaction_stack = copy(self.transaction_stack)
return GlobalState(world_state, environment, self.node, mstate, transaction_stack=transaction_stack,
last_return_data=self.last_return_data)
return GlobalState(
world_state,
environment,
self.node,
mstate,
transaction_stack=transaction_stack,
last_return_data=self.last_return_data,
)
@property
def accounts(self):
@ -299,7 +457,6 @@ class GlobalState:
def get_current_instruction(self):
""" Gets the current instruction for this GlobalState"""
instructions = self.environment.code.instruction_list
return instructions[self.mstate.pc]
@ -316,7 +473,6 @@ class GlobalState:
def new_bitvec(self, name, size=256):
transaction_id = self.current_transaction.id
node_id = self.node.uid
return BitVec("{}_{}".format(transaction_id, name), size)
@ -325,6 +481,7 @@ class WorldState:
"""
The WorldState class represents the world state as described in the yellow paper
"""
def __init__(self, transaction_sequence=None):
"""
Constructor for the world state. Initializes the accounts record
@ -347,7 +504,9 @@ class WorldState:
new_world_state.node = self.node
return new_world_state
def create_account(self, balance=0, address=None, concrete_storage=False, dynamic_loader=None):
def create_account(
self, balance=0, address=None, concrete_storage=False, dynamic_loader=None
):
"""
Create non-contract account
:param address: The account's address
@ -357,7 +516,12 @@ class WorldState:
:return: The new account
"""
address = address if address else self._generate_new_address()
new_account = Account(address, balance=balance, dynamic_loader=dynamic_loader, concrete_storage=concrete_storage)
new_account = Account(
address,
balance=balance,
dynamic_loader=dynamic_loader,
concrete_storage=concrete_storage,
)
self._put_account(new_account)
return new_account
@ -369,14 +533,16 @@ class WorldState:
:param storage: Initial storage for the contract
:return: The new account
"""
new_account = Account(self._generate_new_address(), code=contract_code, balance=0)
new_account = Account(
self._generate_new_address(), code=contract_code, balance=0
)
new_account.storage = storage
self._put_account(new_account)
def _generate_new_address(self):
""" Generates a new address for the global state"""
while True:
address = '0x' + ''.join([str(hex(randint(0, 16)))[-1] for _ in range(20)])
address = "0x" + "".join([str(hex(randint(0, 16)))[-1] for _ in range(20)])
if address not in self.accounts.keys():
return address

@ -0,0 +1,25 @@
from abc import ABC, abstractmethod
class BasicSearchStrategy(ABC):
__slots__ = "work_list", "max_depth"
def __init__(self, work_list, max_depth):
self.work_list = work_list
self.max_depth = max_depth
def __iter__(self):
return self
@abstractmethod
def get_strategic_global_state(self):
raise NotImplementedError("Must be implemented by a subclass")
def __next__(self):
try:
global_state = self.get_strategic_global_state()
if global_state.mstate.depth >= self.max_depth:
return self.__next__()
return global_state
except IndexError:
raise StopIteration

@ -1,54 +1,75 @@
"""
This module implements basic symbolic execution search strategies
"""
from random import randrange
from . import BasicSearchStrategy
try:
from random import choices
except ImportError:
# This is for supporting python versions < 3.6
from itertools import accumulate
from random import random
from bisect import bisect
class DepthFirstSearchStrategy:
def choices(population, weights=None):
"""
Returns a random element out of the population based on weight.
If the relative weights or cumulative weights are not specified,
the selections are made with equal probability.
"""
if weights is None:
return [population[int(random() * len(population))]]
cum_weights = list(accumulate(weights))
return [
population[
bisect(cum_weights, random() * cum_weights[-1], 0, len(population) - 1)
]
]
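For clarity, a small standalone sketch of the weighted selection the fallback above performs, with the cumulative weights materialised as a list; the weights and labels are arbitrary:

from collections import Counter
from itertools import accumulate
from bisect import bisect
from random import random

def weighted_choice(population, weights):
    # same selection rule as the fallback above
    cum_weights = list(accumulate(weights))
    return population[
        bisect(cum_weights, random() * cum_weights[-1], 0, len(population) - 1)
    ]

# with weights 3:1 the first element should be drawn roughly 75% of the time
counts = Counter(weighted_choice(["shallow", "deep"], [3, 1]) for _ in range(10000))
print(counts)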
class DepthFirstSearchStrategy(BasicSearchStrategy):
"""
Implements a depth first search strategy
I.E. Follow one path to a leaf, and then continue to the next one
"""
def __init__(self, work_list, max_depth):
self.work_list = work_list
self.max_depth = max_depth
def __iter__(self):
return self
def __next__(self):
""" Picks the next state to execute """
try:
# This strategy assumes that new states are appended at the end of the work_list
# By taking the last element we effectively pick the "newest" state, which amounts to DFS
global_state = self.work_list.pop()
if global_state.mstate.depth >= self.max_depth:
return self.__next__()
return global_state
except IndexError:
raise StopIteration()
def get_strategic_global_state(self):
return self.work_list.pop()
class BreadthFirstSearchStrategy:
class BreadthFirstSearchStrategy(BasicSearchStrategy):
"""
Implements a breadth first search strategy
I.E. Execute all states of a "level" before continuing
"""
def __init__(self, work_list, max_depth):
self.work_list = work_list
self.max_depth = max_depth
def __iter__(self):
return self
def get_strategic_global_state(self):
return self.work_list.pop(0)
def __next__(self):
""" Picks the next state to execute """
try:
# This strategy assumes that new states are appended at the end of the work_list
# By taking the first element we effectively pick the "oldest" state, which amounts to BFS
global_state = self.work_list.pop(0)
if global_state.mstate.depth >= self.max_depth:
return self.__next__()
return global_state
except IndexError:
raise StopIteration()
class ReturnRandomNaivelyStrategy(BasicSearchStrategy):
"""
Chooses a random state from the work list with equal likelihood.
"""
def get_strategic_global_state(self):
if len(self.work_list) > 0:
return self.work_list.pop(randrange(len(self.work_list)))
else:
raise IndexError
class ReturnWeightedRandomStrategy(BasicSearchStrategy):
"""
Chooses a random state from the work list with likelihood inversely proportional to its depth.
"""
def get_strategic_global_state(self):
probability_distribution = [
1 / (global_state.mstate.depth + 1) for global_state in self.work_list
]
return self.work_list.pop(
choices(range(len(self.work_list)), probability_distribution)[0]
)
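For example, three states at depths 0, 2 and 4 get weights 1, 1/3 and 1/5, so the shallowest state is chosen with probability 1 / (1 + 1/3 + 1/5) ≈ 0.65. A self-contained sketch of the same selection using only the standard library (the depth values are made up for illustration):

from random import choices  # Python >= 3.6; the fallback above covers older versions

depths = [0, 2, 4]                                   # stand-ins for global_state.mstate.depth
weights = [1 / (depth + 1) for depth in depths]      # inverse-depth weighting
chosen_index = choices(range(len(depths)), weights)[0]
print("next state has depth", depths[chosen_index])  # shallow states are favoured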

@ -1,14 +1,21 @@
import logging
from mythril.disassembler.disassembly import Disassembly
from mythril.laser.ethereum.state import WorldState
from mythril.laser.ethereum.transaction import TransactionStartSignal, TransactionEndSignal, \
ContractCreationTransaction
from mythril.laser.ethereum.transaction import (
TransactionStartSignal,
TransactionEndSignal,
ContractCreationTransaction,
)
from mythril.laser.ethereum.evm_exceptions import StackUnderflowException
from mythril.laser.ethereum.instructions import Instruction
from mythril.laser.ethereum.cfg import NodeFlags, Node, Edge, JumpType
from mythril.laser.ethereum.strategy.basic import DepthFirstSearchStrategy
from datetime import datetime, timedelta
from copy import copy
from mythril.laser.ethereum.transaction import execute_contract_creation, execute_message_call
from mythril.laser.ethereum.transaction import (
execute_contract_creation,
execute_message_call,
)
from functools import reduce
from mythril.laser.ethereum.evm_exceptions import VmException
@ -17,9 +24,9 @@ class SVMError(Exception):
pass
'''
"""
Main symbolic execution engine.
'''
"""
class LaserEVM:
@ -27,8 +34,16 @@ class LaserEVM:
Laser EVM class
"""
def __init__(self, accounts, dynamic_loader=None, max_depth=float('inf'), execution_timeout=60, create_timeout=10,
strategy=DepthFirstSearchStrategy):
def __init__(
self,
accounts,
dynamic_loader=None,
max_depth=float("inf"),
execution_timeout=60,
create_timeout=10,
strategy=DepthFirstSearchStrategy,
max_transaction_count=3,
):
world_state = WorldState()
world_state.accounts = accounts
# this sets the initial world state
@ -45,6 +60,7 @@ class LaserEVM:
self.work_list = []
self.strategy = strategy(self.work_list, max_depth)
self.max_depth = max_depth
self.max_transaction_count = max_transaction_count
self.execution_timeout = execution_timeout
self.create_timeout = create_timeout
@ -54,7 +70,9 @@ class LaserEVM:
self.pre_hooks = {}
self.post_hooks = {}
logging.info("LASER EVM initialized with dynamic loader: " + str(dynamic_loader))
logging.info(
"LASER EVM initialized with dynamic loader: " + str(dynamic_loader)
)
@property
def accounts(self):
@ -69,31 +87,66 @@ class LaserEVM:
execute_message_call(self, main_address)
elif creation_code:
logging.info("Starting contract creation transaction")
created_account = execute_contract_creation(self, creation_code, contract_name)
logging.info("Finished contract creation, found {} open states".format(len(self.open_states)))
created_account = execute_contract_creation(
self, creation_code, contract_name
)
logging.info(
"Finished contract creation, found {} open states".format(
len(self.open_states)
)
)
if len(self.open_states) == 0:
logging.warning("No contract was created during the execution of contract creation "
"Increase the resources for creation execution (--max-depth or --create_timeout)")
logging.warning(
"No contract was created during the execution of contract creation "
"Increase the resources for creation execution (--max-depth or --create-timeout)"
)
# Reset code coverage
self.coverage = {}
self.time = datetime.now()
logging.info("Starting message call transaction")
execute_message_call(self, created_account.address)
for i in range(self.max_transaction_count):
initial_coverage = self._get_covered_instructions()
self.time = datetime.now()
logging.info(
"Starting message call transaction, iteration: {}".format(i)
)
execute_message_call(self, created_account.address)
end_coverage = self._get_covered_instructions()
if end_coverage == initial_coverage:
break
logging.info("Finished symbolic execution")
logging.info("%d nodes, %d edges, %d total states", len(self.nodes), len(self.edges), self.total_states)
logging.info(
"%d nodes, %d edges, %d total states",
len(self.nodes),
len(self.edges),
self.total_states,
)
for code, coverage in self.coverage.items():
cov = reduce(lambda sum_, val: sum_ + 1 if val else sum_, coverage[1]) / float(coverage[0]) * 100
cov = (
reduce(lambda sum_, val: sum_ + 1 if val else sum_, coverage[1])
/ float(coverage[0])
* 100
)
logging.info("Achieved {} coverage for code: {}".format(cov, code))
def _get_covered_instructions(self) -> int:
""" Gets the total number of covered instructions for all accounts in the svm"""
total_covered_instructions = 0
for _, cv in self.coverage.items():
total_covered_instructions += reduce(
lambda sum_, val: sum_ + 1 if val else sum_, cv[1]
)
return total_covered_instructions
def exec(self, create=False):
for global_state in self.strategy:
if self.execution_timeout and not create:
if self.time + timedelta(seconds=self.execution_timeout) <= datetime.now():
if (
self.time + timedelta(seconds=self.execution_timeout)
<= datetime.now()
):
return
elif self.create_timeout and create:
if self.time + timedelta(seconds=self.create_timeout) <= datetime.now():
@ -112,7 +165,7 @@ class LaserEVM:
def execute_state(self, global_state):
instructions = global_state.environment.code.instruction_list
try:
op_code = instructions[global_state.mstate.pc]['opcode']
op_code = instructions[global_state.mstate.pc]["opcode"]
except IndexError:
self.open_states.append(global_state.world_state)
return [], None
@ -120,53 +173,97 @@ class LaserEVM:
self._execute_pre_hook(op_code, global_state)
try:
self._measure_coverage(global_state)
new_global_states = Instruction(op_code, self.dynamic_loader).evaluate(global_state)
new_global_states = Instruction(op_code, self.dynamic_loader).evaluate(
global_state
)
except VmException as e:
logging.debug("Encountered a VmException, ending path: `{}`".format(str(e)))
new_global_states = []
transaction, return_global_state = global_state.transaction_stack.pop()
except TransactionStartSignal as e:
if return_global_state is None:
# In this case we don't put an unmodified world state in the open_states list, since in the case of an
# exceptional halt all changes should be discarded and this world state would not provide us with a
# previously unseen world state.
logging.debug(
"Encountered a VmException, ending path: `{}`".format(str(e))
)
new_global_states = []
else:
# First execute the post hook for the transaction ending instruction
self._execute_post_hook(op_code, [global_state])
new_global_states = self._end_message_call(
return_global_state,
global_state,
revert_changes=True,
return_data=None,
)
except TransactionStartSignal as start_signal:
# Setup new global state
new_global_state = e.transaction.initial_global_state()
new_global_state = start_signal.transaction.initial_global_state()
new_global_state.transaction_stack = copy(global_state.transaction_stack) + [(e.transaction, global_state)]
new_global_state.transaction_stack = copy(
global_state.transaction_stack
) + [(start_signal.transaction, global_state)]
new_global_state.node = global_state.node
new_global_state.mstate.constraints = global_state.mstate.constraints
return [new_global_state], op_code
except TransactionEndSignal as e:
transaction, return_global_state = e.global_state.transaction_stack.pop()
except TransactionEndSignal as end_signal:
transaction, return_global_state = (
end_signal.global_state.transaction_stack.pop()
)
if return_global_state is None:
if not isinstance(transaction, ContractCreationTransaction) or transaction.return_data:
e.global_state.world_state.node = global_state.node
self.open_states.append(e.global_state.world_state)
if (
not isinstance(transaction, ContractCreationTransaction)
or transaction.return_data
) and not end_signal.revert:
end_signal.global_state.world_state.node = global_state.node
self.open_states.append(end_signal.global_state.world_state)
new_global_states = []
else:
# First execute the post hook for the transaction ending instruction
self._execute_post_hook(op_code, [e.global_state])
self._execute_post_hook(op_code, [end_signal.global_state])
new_global_states = self._end_message_call(
return_global_state,
global_state,
revert_changes=end_signal.revert,
return_data=transaction.return_data,
)
self._execute_post_hook(op_code, new_global_states)
return new_global_states, op_code
def _end_message_call(
self, return_global_state, global_state, revert_changes=False, return_data=None
):
# Resume execution of the transaction initializing instruction
op_code = return_global_state.environment.code.instruction_list[return_global_state.mstate.pc]['opcode']
op_code = return_global_state.environment.code.instruction_list[
return_global_state.mstate.pc
]["opcode"]
# Set execution result in the return_state
return_global_state.last_return_data = transaction.return_data
return_global_state.last_return_data = return_data
if not revert_changes:
return_global_state.world_state = copy(global_state.world_state)
return_global_state.environment.active_account = \
global_state.accounts[return_global_state.environment.active_account.address]
return_global_state.environment.active_account = global_state.accounts[
return_global_state.environment.active_account.address
]
# Execute the post instruction handler
new_global_states = Instruction(op_code, self.dynamic_loader).evaluate(return_global_state, True)
new_global_states = Instruction(op_code, self.dynamic_loader).evaluate(
return_global_state, True
)
# In order to get a nice call graph we need to set the nodes here
for state in new_global_states:
state.node = global_state.node
self._execute_post_hook(op_code, new_global_states)
return new_global_states, op_code
return new_global_states
def _measure_coverage(self, global_state):
code = global_state.environment.code.bytecode
@ -174,7 +271,10 @@ class LaserEVM:
instruction_index = global_state.mstate.pc
if code not in self.coverage.keys():
self.coverage[code] = [number_of_instructions, [False]*number_of_instructions]
self.coverage[code] = [
number_of_instructions,
[False] * number_of_instructions,
]
self.coverage[code][1][instruction_index] = True
@ -185,19 +285,27 @@ class LaserEVM:
self._new_node_state(state)
elif opcode == "JUMPI":
for state in new_states:
self._new_node_state(state, JumpType.CONDITIONAL, state.mstate.constraints[-1])
self._new_node_state(
state, JumpType.CONDITIONAL, state.mstate.constraints[-1]
)
elif opcode in ("SLOAD", "SSTORE") and len(new_states) > 1:
for state in new_states:
self._new_node_state(state, JumpType.CONDITIONAL, state.mstate.constraints[-1])
self._new_node_state(
state, JumpType.CONDITIONAL, state.mstate.constraints[-1]
)
elif opcode in ("CALL", 'CALLCODE', 'DELEGATECALL', 'STATICCALL'):
elif opcode in ("CALL", "CALLCODE", "DELEGATECALL", "STATICCALL"):
assert len(new_states) <= 1
for state in new_states:
self._new_node_state(state, JumpType.CALL)
# Keep track of added contracts so the graph can be generated properly
if state.environment.active_account.contract_name not in self.world_state.accounts.keys():
if (
state.environment.active_account.contract_name
not in self.world_state.accounts.keys()
):
self.world_state.accounts[
state.environment.active_account.address] = state.environment.active_account
state.environment.active_account.address
] = state.environment.active_account
elif opcode == "RETURN":
for state in new_states:
self._new_node_state(state, JumpType.RETURN)
@ -211,30 +319,37 @@ class LaserEVM:
state.node = new_node
new_node.constraints = state.mstate.constraints
self.nodes[new_node.uid] = new_node
self.edges.append(Edge(old_node.uid, new_node.uid, edge_type=edge_type, condition=condition))
self.edges.append(
Edge(old_node.uid, new_node.uid, edge_type=edge_type, condition=condition)
)
if edge_type == JumpType.RETURN:
new_node.flags |= NodeFlags.CALL_RETURN
elif edge_type == JumpType.CALL:
try:
if 'retval' in str(state.mstate.stack[-1]):
if "retval" in str(state.mstate.stack[-1]):
new_node.flags |= NodeFlags.CALL_RETURN
else:
new_node.flags |= NodeFlags.FUNC_ENTRY
except StackUnderflowException:
new_node.flags |= NodeFlags.FUNC_ENTRY
address = state.environment.code.instruction_list[state.mstate.pc]['address']
address = state.environment.code.instruction_list[state.mstate.pc]["address"]
environment = state.environment
disassembly = environment.code
if address in state.environment.code.addr_to_func:
if address in disassembly.address_to_function_name:
# Enter a new function
environment.active_function_name = disassembly.addr_to_func[address]
environment.active_function_name = disassembly.address_to_function_name[
address
]
new_node.flags |= NodeFlags.FUNC_ENTRY
logging.info(
"- Entering function " + environment.active_account.contract_name + ":" + new_node.function_name)
logging.debug(
"- Entering function "
+ environment.active_account.contract_name
+ ":"
+ new_node.function_name
)
elif address == 0:
environment.active_function_name = "fallback"
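The transaction loop above stops as soon as an extra symbolic message call covers no new instructions; a self-contained sketch of that fixed-point check, with the transaction execution and coverage measurement stubbed out as hypothetical callables:

def run_until_coverage_plateaus(execute_transaction, covered_instruction_count, max_transaction_count=3):
    """Illustrative sketch of the loop above: keep issuing message calls until coverage stops growing."""
    for _ in range(max_transaction_count):
        before = covered_instruction_count()
        execute_transaction()
        if covered_instruction_count() == before:
            break  # fixed point: the extra transaction reached no new instructions


# Toy demonstration with fake coverage numbers (purely illustrative).
coverage_steps = iter([10, 25, 30, 30])
state = {"covered": 0}
run_until_coverage_plateaus(
    execute_transaction=lambda: state.update(covered=next(coverage_steps)),
    covered_instruction_count=lambda: state["covered"],
    max_transaction_count=5,
)
print(state["covered"])  # 30 -- the fourth call added nothing, so the loop stopped early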

@ -2,6 +2,7 @@ import logging, copy
import mythril.laser.ethereum.util as helper
from mythril.laser.ethereum.cfg import JumpType
class TaintRecord:
"""
TaintRecord contains tainting information for a specific (state, node)
@ -82,7 +83,7 @@ class TaintRunner:
"""
@staticmethod
def execute(statespace, node, state, initial_stack=[]):
def execute(statespace, node, state, initial_stack=None):
"""
Runs taint analysis on the statespace
:param statespace: symbolic statespace to run taint analysis on
@ -91,6 +92,8 @@ class TaintRunner:
:param stack_indexes: stack indexes to introduce taint
:return: TaintResult object containing analysis results
"""
if initial_stack is None:
initial_stack = []
result = TaintResult()
transaction_stack_length = len(node.states[0].transaction_stack)
# Build initial current_node
@ -107,21 +110,36 @@ class TaintRunner:
records = TaintRunner.execute_node(node, record, index)
result.add_records(records)
children = TaintRunner.children(node, statespace, environment, transaction_stack_length)
if len(records) == 0: # continue if there is no record to work on
continue
children = TaintRunner.children(
node, statespace, environment, transaction_stack_length
)
for child in children:
current_nodes.append((child, records[-1], 0))
return result
@staticmethod
def children(node, statespace, environment, transaction_stack_length):
direct_children = [statespace.nodes[edge.node_to] for edge in statespace.edges if edge.node_from == node.uid and edge.type != JumpType.Transaction]
direct_children = [
statespace.nodes[edge.node_to]
for edge in statespace.edges
if edge.node_from == node.uid and edge.type != JumpType.Transaction
]
children = []
for child in direct_children:
if all(len(state.transaction_stack) == transaction_stack_length for state in child.states):
if all(
len(state.transaction_stack) == transaction_stack_length
for state in child.states
):
children.append(child)
elif all(len(state.transaction_stack) > transaction_stack_length for state in child.states):
children += TaintRunner.children(child, statespace, environment, transaction_stack_length)
elif all(
len(state.transaction_stack) > transaction_stack_length
for state in child.states
):
children += TaintRunner.children(
child, statespace, environment, transaction_stack_length
)
return children
@staticmethod
@ -147,7 +165,7 @@ class TaintRunner:
new_record = record.clone()
# Apply Change
op = state.get_current_instruction()['opcode']
op = state.get_current_instruction()["opcode"]
if op in TaintRunner.stack_taint_table.keys():
mutator = TaintRunner.stack_taint_table[op]
@ -168,7 +186,7 @@ class TaintRunner:
TaintRunner.mutate_sstore(new_record, state.mstate.stack[-1])
elif op.startswith("LOG"):
TaintRunner.mutate_log(new_record, op)
elif op in ('CALL', 'CALLCODE', 'DELEGATECALL', 'STATICCALL'):
elif op in ("CALL", "CALLCODE", "DELEGATECALL", "STATICCALL"):
TaintRunner.mutate_call(new_record, op)
else:
logging.debug("Unknown operation encountered: {}".format(op))
@ -210,7 +228,7 @@ class TaintRunner:
_ = record.stack.pop()
try:
index = helper.get_concrete_int(op0)
except AttributeError:
except TypeError:
logging.debug("Can't MLOAD taint track symbolically")
record.stack.append(False)
return
@ -222,7 +240,7 @@ class TaintRunner:
_, value_taint = record.stack.pop(), record.stack.pop()
try:
index = helper.get_concrete_int(op0)
except AttributeError:
except TypeError:
logging.debug("Can't mstore taint track symbolically")
return
@ -233,7 +251,7 @@ class TaintRunner:
_ = record.stack.pop()
try:
index = helper.get_concrete_int(op0)
except AttributeError:
except TypeError:
logging.debug("Can't MLOAD taint track symbolically")
record.stack.append(False)
return
@ -245,7 +263,7 @@ class TaintRunner:
_, value_taint = record.stack.pop(), record.stack.pop()
try:
index = helper.get_concrete_int(op0)
except AttributeError:
except TypeError:
logging.debug("Can't mstore taint track symbolically")
return
@ -260,7 +278,7 @@ class TaintRunner:
@staticmethod
def mutate_call(record, op):
pops = 6
if op in ('CALL', 'CALLCODE'):
if op in ("CALL", "CALLCODE"):
pops += 1
for _ in range(pops):
record.stack.pop()
@ -269,55 +287,55 @@ class TaintRunner:
stack_taint_table = {
# instruction: (taint source, taint target)
'POP': (1, 0),
'ADD': (2, 1),
'MUL': (2, 1),
'SUB': (2, 1),
'AND': (2, 1),
'OR': (2, 1),
'XOR': (2, 1),
'NOT': (1, 1),
'BYTE': (2, 1),
'DIV': (2, 1),
'MOD': (2, 1),
'SDIV': (2, 1),
'SMOD': (2, 1),
'ADDMOD': (3, 1),
'MULMOD': (3, 1),
'EXP': (2, 1),
'SIGNEXTEND': (2, 1),
'LT': (2, 1),
'GT': (2, 1),
'SLT': (2, 1),
'SGT': (2, 1),
'EQ': (2, 1),
'ISZERO': (1, 1),
'CALLVALUE': (0, 1),
'CALLDATALOAD': (1, 1),
'CALLDATACOPY': (3, 0), #todo
'CALLDATASIZE': (0, 1),
'ADDRESS': (0, 1),
'BALANCE': (1, 1),
'ORIGIN': (0, 1),
'CALLER': (0, 1),
'CODESIZE': (0, 1),
'SHA3': (2, 1),
'GASPRICE': (0, 1),
'CODECOPY': (3, 0),
'EXTCODESIZE': (1, 1),
'EXTCODECOPY': (4, 0),
'RETURNDATASIZE': (0, 1),
'BLOCKHASH': (1, 1),
'COINBASE': (0, 1),
'TIMESTAMP': (0, 1),
'NUMBER': (0, 1),
'DIFFICULTY': (0, 1),
'GASLIMIT': (0, 1),
'JUMP': (1, 0),
'JUMPI': (2, 0),
'PC': (0, 1),
'MSIZE': (0, 1),
'GAS': (0, 1),
'CREATE': (3, 1),
'RETURN': (2, 0)
"POP": (1, 0),
"ADD": (2, 1),
"MUL": (2, 1),
"SUB": (2, 1),
"AND": (2, 1),
"OR": (2, 1),
"XOR": (2, 1),
"NOT": (1, 1),
"BYTE": (2, 1),
"DIV": (2, 1),
"MOD": (2, 1),
"SDIV": (2, 1),
"SMOD": (2, 1),
"ADDMOD": (3, 1),
"MULMOD": (3, 1),
"EXP": (2, 1),
"SIGNEXTEND": (2, 1),
"LT": (2, 1),
"GT": (2, 1),
"SLT": (2, 1),
"SGT": (2, 1),
"EQ": (2, 1),
"ISZERO": (1, 1),
"CALLVALUE": (0, 1),
"CALLDATALOAD": (1, 1),
"CALLDATACOPY": (3, 0), # todo
"CALLDATASIZE": (0, 1),
"ADDRESS": (0, 1),
"BALANCE": (1, 1),
"ORIGIN": (0, 1),
"CALLER": (0, 1),
"CODESIZE": (0, 1),
"SHA3": (2, 1),
"GASPRICE": (0, 1),
"CODECOPY": (3, 0),
"EXTCODESIZE": (1, 1),
"EXTCODECOPY": (4, 0),
"RETURNDATASIZE": (0, 1),
"BLOCKHASH": (1, 1),
"COINBASE": (0, 1),
"TIMESTAMP": (0, 1),
"NUMBER": (0, 1),
"DIFFICULTY": (0, 1),
"GASLIMIT": (0, 1),
"JUMP": (1, 0),
"JUMPI": (2, 0),
"PC": (0, 1),
"MSIZE": (0, 1),
"GAS": (0, 1),
"CREATE": (3, 1),
"RETURN": (2, 0),
}
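Each stack_taint_table entry is a (number of stack values consumed, number of stack values produced) pair that drives a generic stack mutator; one plausible reading of that mechanism, in a standalone sketch (the real mutator sits outside this hunk, so apply_stack_mutator is illustrative):

def apply_stack_mutator(taint_stack, mutator):
    """Illustrative sketch: propagate taint for an instruction described by (pops, pushes)."""
    pops, pushes = mutator
    consumed = [taint_stack.pop() for _ in range(pops)]
    tainted = any(consumed)              # the result is tainted if any consumed operand was
    taint_stack.extend([tainted] * pushes)


stack = [False, True]                    # bottom -> top; the top operand carries taint
apply_stack_mutator(stack, (2, 1))       # "ADD": (2, 1) consumes two values, pushes one
print(stack)                             # [True] -- the result inherits the taint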

@ -1,2 +1,5 @@
from mythril.laser.ethereum.transaction.transaction_models import *
from mythril.laser.ethereum.transaction.symbolic import execute_message_call, execute_contract_creation
from mythril.laser.ethereum.transaction.symbolic import (
execute_message_call,
execute_contract_creation,
)

@ -1,27 +1,49 @@
from mythril.laser.ethereum.transaction.transaction_models import MessageCallTransaction, ContractCreationTransaction, get_next_transaction_id
from mythril.laser.ethereum.transaction.transaction_models import (
MessageCallTransaction,
ContractCreationTransaction,
get_next_transaction_id,
)
from z3 import BitVec
from mythril.laser.ethereum.state import GlobalState, Environment, CalldataType, Account, WorldState
from mythril.laser.ethereum.state import (
GlobalState,
Environment,
CalldataType,
Account,
WorldState,
Calldata,
)
from mythril.disassembler.disassembly import Disassembly
from mythril.laser.ethereum.cfg import Node, Edge, JumpType
def execute_message_call(laser_evm, callee_address, caller_address, origin_address, code, data, gas, gas_price, value):
def execute_message_call(
laser_evm,
callee_address,
caller_address,
origin_address,
code,
data,
gas,
gas_price,
value,
):
""" Executes a message call transaction from all open states """
open_states = laser_evm.open_states[:]
del laser_evm.open_states[:]
for open_world_state in open_states:
next_transaction_id = get_next_transaction_id()
transaction = MessageCallTransaction(
identifier=get_next_transaction_id(),
identifier=next_transaction_id,
world_state=open_world_state,
callee_account=open_world_state[callee_address],
caller=caller_address,
call_data=data,
call_data=Calldata(next_transaction_id, data),
gas_price=gas_price,
call_value=value,
origin=origin_address,
call_data_type=CalldataType.SYMBOLIC,
code=Disassembly(code)
code=Disassembly(code),
)
_setup_global_state_for_execution(laser_evm, transaction)
@ -38,8 +60,14 @@ def _setup_global_state_for_execution(laser_evm, transaction):
laser_evm.nodes[new_node.uid] = new_node
if transaction.world_state.node:
laser_evm.edges.append(Edge(transaction.world_state.node.uid, new_node.uid, edge_type=JumpType.Transaction,
condition=None))
laser_evm.edges.append(
Edge(
transaction.world_state.node.uid,
new_node.uid,
edge_type=JumpType.Transaction,
condition=None,
)
)
global_state.node = new_node
new_node.states.append(global_state)
laser_evm.work_list.append(global_state)

@ -3,9 +3,13 @@ from logging import debug
from mythril.disassembler.disassembly import Disassembly
from mythril.laser.ethereum.cfg import Node, Edge, JumpType
from mythril.laser.ethereum.state import CalldataType
from mythril.laser.ethereum.transaction.transaction_models import MessageCallTransaction, ContractCreationTransaction,\
get_next_transaction_id
from mythril.laser.ethereum.state import CalldataType, Calldata
from mythril.laser.ethereum.transaction.transaction_models import (
MessageCallTransaction,
ContractCreationTransaction,
get_next_transaction_id,
)
def execute_message_call(laser_evm, callee_address):
""" Executes a message call transaction from all open states """
@ -23,7 +27,7 @@ def execute_message_call(laser_evm, callee_address):
callee_account=open_world_state[callee_address],
caller=BitVec("caller{}".format(next_transaction_id), 256),
identifier=next_transaction_id,
call_data=[],
call_data=Calldata(next_transaction_id),
gas_price=BitVec("gas_price{}".format(next_transaction_id), 256),
call_value=BitVec("call_value{}".format(next_transaction_id), 256),
origin=BitVec("origin{}".format(next_transaction_id), 256),
@ -34,12 +38,16 @@ def execute_message_call(laser_evm, callee_address):
laser_evm.exec()
def execute_contract_creation(laser_evm, contract_initialization_code, contract_name=None):
def execute_contract_creation(
laser_evm, contract_initialization_code, contract_name=None
):
""" Executes a contract creation transaction from all open states"""
open_states = laser_evm.open_states[:]
del laser_evm.open_states[:]
new_account = laser_evm.world_state.create_account(0, concrete_storage=True, dynamic_loader=None)
new_account = laser_evm.world_state.create_account(
0, concrete_storage=True, dynamic_loader=None
)
if contract_name:
new_account.contract_name = contract_name
@ -55,7 +63,7 @@ def execute_contract_creation(laser_evm, contract_initialization_code, contract_
BitVec("gas_price{}".format(next_transaction_id), 256),
BitVec("call_value{}".format(next_transaction_id), 256),
BitVec("origin{}".format(next_transaction_id), 256),
CalldataType.SYMBOLIC
CalldataType.SYMBOLIC,
)
_setup_global_state_for_execution(laser_evm, transaction)
laser_evm.exec(True)
@ -72,10 +80,16 @@ def _setup_global_state_for_execution(laser_evm, transaction):
laser_evm.nodes[new_node.uid] = new_node
if transaction.world_state.node:
laser_evm.edges.append(Edge(transaction.world_state.node.uid, new_node.uid, edge_type=JumpType.Transaction,
condition=None))
laser_evm.edges.append(
Edge(
transaction.world_state.node.uid,
new_node.uid,
edge_type=JumpType.Transaction,
condition=None,
)
)
global_state.mstate.constraints = transaction.world_state.node.constraints
global_state.mstate.constraints += transaction.world_state.node.constraints
new_node.constraints = global_state.mstate.constraints
global_state.world_state.transaction_sequence.append(transaction)

@ -1,6 +1,6 @@
import logging
from mythril.disassembler.disassembly import Disassembly
from mythril.laser.ethereum.state import GlobalState, Environment, WorldState
from mythril.laser.ethereum.state import GlobalState, Environment, WorldState, Calldata
from z3 import BitVec
import array
@ -12,14 +12,18 @@ def get_next_transaction_id():
_next_transaction_id += 1
return _next_transaction_id
class TransactionEndSignal(Exception):
""" Exception raised when a transaction is finalized"""
def __init__(self, global_state):
def __init__(self, global_state, revert=False):
self.global_state = global_state
self.revert = revert
class TransactionStartSignal(Exception):
""" Exception raised when a new transaction is started"""
def __init__(self, transaction, op_code):
self.transaction = transaction
self.op_code = op_code
@ -27,28 +31,48 @@ class TransactionStartSignal(Exception):
class MessageCallTransaction:
""" Transaction object models an transaction"""
def __init__(self,
def __init__(
self,
world_state,
callee_account,
caller,
call_data=(),
call_data=None,
identifier=None,
gas_price=None,
call_value=None,
origin=None,
call_data_type=None,
code=None
code=None,
):
assert isinstance(world_state, WorldState)
self.id = identifier or get_next_transaction_id()
self.world_state = world_state
self.callee_account = callee_account
self.caller = caller
self.call_data = call_data
self.gas_price = BitVec("gasprice{}".format(identifier), 256) if gas_price is None else gas_price
self.call_value = BitVec("callvalue{}".format(identifier), 256) if call_value is None else call_value
self.origin = BitVec("origin{}".format(identifier), 256) if origin is None else origin
self.call_data_type = BitVec("call_data_type{}".format(identifier), 256) if call_data_type is None else call_data_type
self.call_data = (
Calldata(self.id, call_data)
if not isinstance(call_data, Calldata)
else call_data
)
self.gas_price = (
BitVec("gasprice{}".format(identifier), 256)
if gas_price is None
else gas_price
)
self.call_value = (
BitVec("callvalue{}".format(identifier), 256)
if call_value is None
else call_value
)
self.origin = (
BitVec("origin{}".format(identifier), 256) if origin is None else origin
)
self.call_data_type = (
BitVec("call_data_type{}".format(identifier), 256)
if call_data_type is None
else call_data_type
)
self.code = code
self.return_data = None
@ -66,24 +90,29 @@ class MessageCallTransaction:
)
global_state = GlobalState(self.world_state, environment, None)
global_state.environment.active_function_name = 'fallback'
global_state.environment.active_function_name = "fallback"
global_state.mstate.constraints.extend(
global_state.environment.calldata.constraints
)
return global_state
def end(self, global_state, return_data=None):
def end(self, global_state, return_data=None, revert=False):
self.return_data = return_data
raise TransactionEndSignal(global_state)
raise TransactionEndSignal(global_state, revert)
class ContractCreationTransaction:
""" Transaction object models an transaction"""
def __init__(self,
def __init__(
self,
world_state,
caller,
identifier=None,
callee_account=None,
code=None,
call_data=(),
call_data=None,
gas_price=None,
call_value=None,
origin=None,
@ -93,16 +122,38 @@ class ContractCreationTransaction:
self.id = identifier or get_next_transaction_id()
self.world_state = world_state
# TODO: set correct balance for new account
self.callee_account = callee_account if callee_account else world_state.create_account(0, concrete_storage=True)
self.callee_account = (
callee_account
if callee_account
else world_state.create_account(0, concrete_storage=True)
)
self.caller = caller
self.gas_price = BitVec("gasprice{}".format(identifier), 256) if gas_price is None else gas_price
self.call_value = BitVec("callvalue{}".format(identifier), 256) if call_value is None else call_value
self.origin = BitVec("origin{}".format(identifier), 256) if origin is None else origin
self.call_data_type = BitVec("call_data_type{}".format(identifier), 256) if call_data_type is None else call_data_type
self.gas_price = (
BitVec("gasprice{}".format(identifier), 256)
if gas_price is None
else gas_price
)
self.call_value = (
BitVec("callvalue{}".format(identifier), 256)
if call_value is None
else call_value
)
self.origin = (
BitVec("origin{}".format(identifier), 256) if origin is None else origin
)
self.call_data_type = (
BitVec("call_data_type{}".format(identifier), 256)
if call_data_type is None
else call_data_type
)
self.call_data = call_data
self.call_data = (
Calldata(self.id, call_data)
if not isinstance(call_data, Calldata)
else call_data
)
self.origin = origin
self.code = code
self.return_data = None
@ -121,19 +172,26 @@ class ContractCreationTransaction:
)
global_state = GlobalState(self.world_state, environment, None)
global_state.environment.active_function_name = 'constructor'
global_state.environment.active_function_name = "constructor"
global_state.mstate.constraints.extend(
global_state.environment.calldata.constraints
)
return global_state
def end(self, global_state, return_data=None):
def end(self, global_state, return_data=None, revert=False):
if not all([isinstance(element, int) for element in return_data]):
if (
not all([isinstance(element, int) for element in return_data])
or len(return_data) == 0
):
self.return_data = None
raise TransactionEndSignal(global_state)
contract_code = bytes.hex(array.array('B', return_data).tostring())
contract_code = bytes.hex(array.array("B", return_data).tostring())
global_state.environment.active_account.code = Disassembly(contract_code)
self.return_data = global_state.environment.active_account.address
assert global_state.environment.active_account.code.instruction_list != []
raise TransactionEndSignal(global_state)
raise TransactionEndSignal(global_state, revert=revert)
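Every transaction field left unspecified above falls back to a fresh 256-bit symbolic variable named after the transaction, rather than a concrete default; the same pattern in a self-contained sketch (symbolic_or_concrete is an illustrative helper, not part of this module):

from z3 import BitVec


def symbolic_or_concrete(name, identifier, value=None):
    """Illustrative helper mirroring the `BitVec(...) if value is None else value` pattern above."""
    return BitVec("{}{}".format(name, identifier), 256) if value is None else value


call_value = symbolic_or_concrete("callvalue", 7)        # fresh symbolic variable 'callvalue7'
gas_price = symbolic_or_concrete("gasprice", 7, 10)      # caller-supplied concrete value
print(call_value, gas_price)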

@ -16,7 +16,7 @@ def sha3(seed):
def safe_decode(hex_encoded_string):
if (hex_encoded_string.startswith("0x")):
if hex_encoded_string.startswith("0x"):
return bytes.fromhex(hex_encoded_string[2:])
else:
return bytes.fromhex(hex_encoded_string)
@ -31,7 +31,7 @@ def get_instruction_index(instruction_list, address):
index = 0
for instr in instruction_list:
if instr['address'] == address:
if instr["address"] == address:
return index
index += 1
@ -46,7 +46,7 @@ def get_trace_line(instr, state):
# stack = re.sub("(\d+)", lambda m: hex(int(m.group(1))), stack)
stack = re.sub("\n", "", stack)
return str(instr['address']) + " " + instr['opcode'] + "\tSTACK: " + stack
return str(instr["address"]) + " " + instr["opcode"] + "\tSTACK: " + stack
def pop_bitvec(state):
@ -80,9 +80,12 @@ def get_concrete_int(item):
elif is_true(simplified):
return 1
else:
raise ValueError("Symbolic boolref encountered")
raise TypeError("Symbolic boolref encountered")
try:
return simplify(item).as_long()
except AttributeError:
raise TypeError("Got a symbolic BitVecRef")
def concrete_int_from_bytes(_bytes, start_index):
@ -90,7 +93,7 @@ def concrete_int_from_bytes(_bytes, start_index):
# logging.debug("-- concrete_int_from_bytes: " + str(_bytes[start_index:start_index+32]))
b = _bytes[start_index : start_index + 32]
val = int.from_bytes(b, byteorder='big')
val = int.from_bytes(b, byteorder="big")
return val
@ -99,10 +102,10 @@ def concrete_int_to_bytes(val):
# logging.debug("concrete_int_to_bytes " + str(val))
if (type(val) == int):
return val.to_bytes(32, byteorder='big')
if type(val) == int:
return val.to_bytes(32, byteorder="big")
return (simplify(val).as_long()).to_bytes(32, byteorder='big')
return (simplify(val).as_long()).to_bytes(32, byteorder="big")
def bytearray_to_int(arr):
@ -110,4 +113,3 @@ def bytearray_to_int(arr):
for a in arr:
o = (o << 8) + a
return o
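With get_concrete_int now raising TypeError for symbolic values (instead of letting z3's AttributeError escape), callers such as the MLOAD/MSTORE taint handlers above catch TypeError and fall back to a conservative result; a minimal caller-side sketch, assuming the patched helper is importable:

from z3 import BitVec
import mythril.laser.ethereum.util as helper

index_expr = BitVec("mem_index", 256)        # a symbolic memory index
try:
    index = helper.get_concrete_int(index_expr)
except TypeError:
    index = None                             # symbolic: skip concrete tracking, as the handlers above do
print(index)                                 # None for symbolic input, an int otherwise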

@ -20,8 +20,8 @@ import platform
from mythril.ether import util
from mythril.ether.ethcontract import ETHContract
from mythril.ether.soliditycontract import SolidityContract, get_contracts_from_file
from mythril.rpc.client import EthJsonRpc
from mythril.rpc.exceptions import ConnectionError
from mythril.ethereum.interface.rpc.client import EthJsonRpc
from mythril.ethereum.interface.rpc.exceptions import ConnectionError
from mythril.support import signatures
from mythril.support.truffle import analyze_truffle_project
from mythril.support.loader import DynLoader
@ -31,10 +31,11 @@ from mythril.analysis.callgraph import generate_graph
from mythril.analysis.traceexplore import get_serializable_statespace
from mythril.analysis.security import fire_lasers
from mythril.analysis.report import Report
from mythril.leveldb.client import EthLevelDB
from mythril.ethereum.interface.leveldb.client import EthLevelDB
# logging.basicConfig(level=logging.DEBUG)
class Mythril(object):
"""
Mythril main interface class.
@ -75,27 +76,40 @@ class Mythril(object):
mythril.get_state_variable_from_storage(args)
"""
def __init__(self, solv=None,
solc_args=None, dynld=False):
def __init__(
self, solv=None, solc_args=None, dynld=False, enable_online_lookup=False
):
self.solv = solv
self.solc_args = solc_args
self.dynld = dynld
self.enable_online_lookup = enable_online_lookup
self.mythril_dir = self._init_mythril_dir()
self.sigs = signatures.SignatureDb()
self.sigs = signatures.SignatureDb(
enable_online_lookup=self.enable_online_lookup
)
try:
self.sigs.open() # tries mythril_dir/signatures.json by default (provide path= arg to make this configurable)
except FileNotFoundError as fnfe:
except FileNotFoundError:
logging.info(
"No signature database found. Creating database if sigs are loaded in: " + self.sigs.signatures_file + "\n" +
"Consider replacing it with the pre-initialized database at https://raw.githubusercontent.com/ConsenSys/mythril/master/signatures.json")
"No signature database found. Creating database if sigs are loaded in: "
+ self.sigs.signatures_file
+ "\n"
+ "Consider replacing it with the pre-initialized database at https://raw.githubusercontent.com/ConsenSys/mythril/master/signatures.json"
)
except json.JSONDecodeError as jde:
raise CriticalError("Invalid JSON in signatures file " + self.sigs.signatures_file + "\n" + str(jde))
raise CriticalError(
"Invalid JSON in signatures file "
+ self.sigs.signatures_file
+ "\n"
+ str(jde)
)
self.solc_binary = self._init_solc_binary(solv)
self.config_path = os.path.join(self.mythril_dir, 'config.ini')
self.config_path = os.path.join(self.mythril_dir, "config.ini")
self.leveldb_dir = self._init_config()
self.eth = None # ethereum API client
@ -103,11 +117,12 @@ class Mythril(object):
self.contracts = [] # loaded contracts
def _init_mythril_dir(self):
@staticmethod
def _init_mythril_dir():
try:
mythril_dir = os.environ['MYTHRIL_DIR']
mythril_dir = os.environ["MYTHRIL_DIR"]
except KeyError:
mythril_dir = os.path.join(os.path.expanduser('~'), ".mythril")
mythril_dir = os.path.join(os.path.expanduser("~"), ".mythril")
# Initialize data directory and signature database
@ -125,88 +140,109 @@ class Mythril(object):
"""
system = platform.system().lower()
leveldb_fallback_dir = os.path.expanduser('~')
leveldb_fallback_dir = os.path.expanduser("~")
if system.startswith("darwin"):
leveldb_fallback_dir = os.path.join(leveldb_fallback_dir, "Library", "Ethereum")
leveldb_fallback_dir = os.path.join(
leveldb_fallback_dir, "Library", "Ethereum"
)
elif system.startswith("windows"):
leveldb_fallback_dir = os.path.join(leveldb_fallback_dir, "AppData", "Roaming", "Ethereum")
leveldb_fallback_dir = os.path.join(
leveldb_fallback_dir, "AppData", "Roaming", "Ethereum"
)
else:
leveldb_fallback_dir = os.path.join(leveldb_fallback_dir, ".ethereum")
leveldb_fallback_dir = os.path.join(leveldb_fallback_dir, "geth", "chaindata")
if not os.path.exists(self.config_path):
logging.info("No config file found. Creating default: " + self.config_path)
open(self.config_path, 'a').close()
open(self.config_path, "a").close()
config = ConfigParser(allow_no_value=True)
config.optionxform = str
config.read(self.config_path, 'utf-8')
if 'defaults' not in config.sections():
config.read(self.config_path, "utf-8")
if "defaults" not in config.sections():
self._add_default_options(config)
if not config.has_option('defaults', 'leveldb_dir'):
if not config.has_option("defaults", "leveldb_dir"):
self._add_leveldb_option(config, leveldb_fallback_dir)
if not config.has_option('defaults', 'dynamic_loading'):
if not config.has_option("defaults", "dynamic_loading"):
self._add_dynamic_loading_option(config)
with codecs.open(self.config_path, 'w', 'utf-8') as fp:
with codecs.open(self.config_path, "w", "utf-8") as fp:
config.write(fp)
leveldb_dir = config.get('defaults', 'leveldb_dir', fallback=leveldb_fallback_dir)
leveldb_dir = config.get(
"defaults", "leveldb_dir", fallback=leveldb_fallback_dir
)
return os.path.expanduser(leveldb_dir)
@staticmethod
def _add_default_options(config):
config.add_section('defaults')
config.add_section("defaults")
@staticmethod
def _add_leveldb_option(config, leveldb_fallback_dir):
config.set('defaults', "#Default chaindata locations:")
config.set('defaults', "#– Mac: ~/Library/Ethereum/geth/chaindata")
config.set('defaults', "#– Linux: ~/.ethereum/geth/chaindata")
config.set('defaults', "#– Windows: %USERPROFILE%\\AppData\\Roaming\\Ethereum\\geth\\chaindata")
config.set('defaults', 'leveldb_dir', leveldb_fallback_dir)
config.set("defaults", "#Default chaindata locations:")
config.set("defaults", "#– Mac: ~/Library/Ethereum/geth/chaindata")
config.set("defaults", "#– Linux: ~/.ethereum/geth/chaindata")
config.set(
"defaults",
"#– Windows: %USERPROFILE%\\AppData\\Roaming\\Ethereum\\geth\\chaindata",
)
config.set("defaults", "leveldb_dir", leveldb_fallback_dir)
@staticmethod
def _add_dynamic_loading_option(config):
config.set('defaults', '#– To connect to Infura use dynamic_loading: infura')
config.set('defaults', '#– To connect to Rpc use '
'dynamic_loading: HOST:PORT / ganache / infura-[network_name]')
config.set('defaults', '#– To connect to local host use dynamic_loading: localhost')
config.set('defaults', 'dynamic_loading', 'infura')
config.set("defaults", "#– To connect to Infura use dynamic_loading: infura")
config.set(
"defaults",
"#– To connect to Rpc use "
"dynamic_loading: HOST:PORT / ganache / infura-[network_name]",
)
config.set(
"defaults", "#– To connect to local host use dynamic_loading: localhost"
)
config.set("defaults", "dynamic_loading", "infura")
def analyze_truffle_project(self, *args, **kwargs):
return analyze_truffle_project(self.sigs, *args, **kwargs) # just passthru by passing signatures for now
return analyze_truffle_project(
self.sigs, *args, **kwargs
) # just passthru by passing signatures for now
def _init_solc_binary(self, version):
@staticmethod
def _init_solc_binary(version):
# Figure out solc binary and version
# Only proper versions are supported. No nightlies, commits etc (such as available in remix)
if version:
# tried converting input to semver, seemed not necessary so just slicing for now
if version == str(solc.main.get_solc_version())[:6]:
logging.info('Given version matches installed version')
logging.info("Given version matches installed version")
try:
solc_binary = os.environ['SOLC']
solc_binary = os.environ["SOLC"]
except KeyError:
solc_binary = 'solc'
solc_binary = "solc"
else:
if util.solc_exists(version):
logging.info('Given version is already installed')
logging.info("Given version is already installed")
else:
try:
solc.install_solc('v' + version)
solc.install_solc("v" + version)
except SolcError:
raise CriticalError("There was an error when trying to install the specified solc version")
raise CriticalError(
"There was an error when trying to install the specified solc version"
)
solc_binary = os.path.join(os.environ['HOME'], ".py-solc/solc-v" + version, "bin/solc")
solc_binary = os.path.join(
os.environ["HOME"], ".py-solc/solc-v" + version, "bin/solc"
)
logging.info("Setting the compiler to " + str(solc_binary))
else:
try:
solc_binary = os.environ['SOLC']
solc_binary = os.environ["SOLC"]
except KeyError:
solc_binary = 'solc'
solc_binary = "solc"
return solc_binary
def set_api_leveldb(self, leveldb):
@ -215,22 +251,24 @@ class Mythril(object):
return self.eth
def set_api_rpc_infura(self):
self.eth = EthJsonRpc('mainnet.infura.io', 443, True)
self.eth = EthJsonRpc("mainnet.infura.io", 443, True)
logging.info("Using INFURA for RPC queries")
def set_api_rpc(self, rpc=None, rpctls=False):
if rpc == 'ganache':
rpcconfig = ('localhost', 8545, False)
if rpc == "ganache":
rpcconfig = ("localhost", 8545, False)
else:
m = re.match(r'infura-(.*)', rpc)
if m and m.group(1) in ['mainnet', 'rinkeby', 'kovan', 'ropsten']:
rpcconfig = (m.group(1) + '.infura.io', 443, True)
m = re.match(r"infura-(.*)", rpc)
if m and m.group(1) in ["mainnet", "rinkeby", "kovan", "ropsten"]:
rpcconfig = (m.group(1) + ".infura.io", 443, True)
else:
try:
host, port = rpc.split(":")
rpcconfig = (host, int(port), rpctls)
except ValueError:
raise CriticalError("Invalid RPC argument, use 'ganache', 'infura-[network]' or 'HOST:PORT'")
raise CriticalError(
"Invalid RPC argument, use 'ganache', 'infura-[network]' or 'HOST:PORT'"
)
if rpcconfig:
self.eth = EthJsonRpc(rpcconfig[0], int(rpcconfig[1]), rpcconfig[2])
@ -239,28 +277,26 @@ class Mythril(object):
raise CriticalError("Invalid RPC settings, check help for details.")
def set_api_rpc_localhost(self):
self.eth = EthJsonRpc('localhost', 8545)
self.eth = EthJsonRpc("localhost", 8545)
logging.info("Using default RPC settings: http://localhost:8545")
def set_api_from_config_path(self):
config = ConfigParser(allow_no_value=False)
config.optionxform = str
config.read(self.config_path, 'utf-8')
if config.has_option('defaults', 'dynamic_loading'):
dynamic_loading = config.get('defaults', 'dynamic_loading')
config.read(self.config_path, "utf-8")
if config.has_option("defaults", "dynamic_loading"):
dynamic_loading = config.get("defaults", "dynamic_loading")
else:
dynamic_loading = 'infura'
if dynamic_loading == 'infura':
dynamic_loading = "infura"
if dynamic_loading == "infura":
self.set_api_rpc_infura()
elif dynamic_loading == 'localhost':
elif dynamic_loading == "localhost":
self.set_api_rpc_localhost()
else:
self.set_api_rpc(dynamic_loading)
def search_db(self, search):
def search_callback(contract, address, balance):
def search_callback(_, address, balance):
print("Address: " + address + ", balance: " + str(balance))
try:
@ -270,33 +306,47 @@ class Mythril(object):
raise CriticalError("Syntax error in search expression.")
def contract_hash_to_address(self, hash):
if not re.match(r'0x[a-fA-F0-9]{64}', hash):
if not re.match(r"0x[a-fA-F0-9]{64}", hash):
raise CriticalError("Invalid address hash. Expected format is '0x...'.")
print(self.eth_db.contract_hash_to_address(hash))
def load_from_bytecode(self, code):
address = util.get_indexed_address(0)
self.contracts.append(ETHContract(code, name="MAIN"))
self.contracts.append(
ETHContract(
code, name="MAIN", enable_online_lookup=self.enable_online_lookup
)
)
return address, self.contracts[-1] # return address and contract object
def load_from_address(self, address):
if not re.match(r'0x[a-fA-F0-9]{40}', address):
if not re.match(r"0x[a-fA-F0-9]{40}", address):
raise CriticalError("Invalid contract address. Expected format is '0x...'.")
try:
code = self.eth.eth_getCode(address)
except FileNotFoundError as e:
raise CriticalError("IPC error: " + str(e))
except ConnectionError as e:
raise CriticalError("Could not connect to RPC server. Make sure that your node is running and that RPC parameters are set correctly.")
except ConnectionError:
raise CriticalError(
"Could not connect to RPC server. Make sure that your node is running and that RPC parameters are set correctly."
)
except Exception as e:
raise CriticalError("IPC / RPC error: " + str(e))
else:
if code == "0x" or code == "0x0":
raise CriticalError("Received an empty response from eth_getCode. Check the contract address and verify that you are on the correct chain.")
raise CriticalError(
"Received an empty response from eth_getCode. Check the contract address and verify that you are on the correct chain."
)
else:
self.contracts.append(ETHContract(code, name=address))
self.contracts.append(
ETHContract(
code,
name=address,
enable_online_lookup=self.enable_online_lookup,
)
)
return address, self.contracts[-1] # return address and contract object
def load_from_solidity(self, solidity_files):
@ -317,55 +367,110 @@ class Mythril(object):
try:
# import signatures from solidity source
self.sigs.import_from_solidity_source(file)
self.sigs.import_from_solidity_source(
file, solc_binary=self.solc_binary, solc_args=self.solc_args
)
# Save updated function signatures
self.sigs.write() # dump signatures to disk (previously opened file or default location)
if contract_name is not None:
contract = SolidityContract(file, contract_name, solc_args=self.solc_args)
contract = SolidityContract(
input_file=file,
name=contract_name,
solc_args=self.solc_args,
solc_binary=self.solc_binary,
)
self.contracts.append(contract)
contracts.append(contract)
else:
for contract in get_contracts_from_file(file, solc_args=self.solc_args):
for contract in get_contracts_from_file(
input_file=file,
solc_args=self.solc_args,
solc_binary=self.solc_binary,
):
self.contracts.append(contract)
contracts.append(contract)
except FileNotFoundError:
raise CriticalError("Input file not found: " + file)
except CompilerError as e:
raise CriticalError(e)
except NoContractFoundError:
logging.info("The file " + file + " does not contain a compilable contract.")
logging.info(
"The file " + file + " does not contain a compilable contract."
)
return address, contracts
def dump_statespace(self, strategy, contract, address=None, max_depth=None,
execution_timeout=None, create_timeout=None):
sym = SymExecWrapper(contract, address, strategy,
def dump_statespace(
self,
strategy,
contract,
address=None,
max_depth=None,
execution_timeout=None,
create_timeout=None,
):
sym = SymExecWrapper(
contract,
address,
strategy,
dynloader=DynLoader(self.eth) if self.dynld else None,
max_depth=max_depth, execution_timeout=execution_timeout, create_timeout=create_timeout)
max_depth=max_depth,
execution_timeout=execution_timeout,
create_timeout=create_timeout,
)
return get_serializable_statespace(sym)
def graph_html(self, strategy, contract, address, max_depth=None, enable_physics=False,
phrackify=False, execution_timeout=None, create_timeout=None):
sym = SymExecWrapper(contract, address, strategy,
def graph_html(
self,
strategy,
contract,
address,
max_depth=None,
enable_physics=False,
phrackify=False,
execution_timeout=None,
create_timeout=None,
):
sym = SymExecWrapper(
contract,
address,
strategy,
dynloader=DynLoader(self.eth) if self.dynld else None,
max_depth=max_depth, execution_timeout=execution_timeout, create_timeout=create_timeout)
max_depth=max_depth,
execution_timeout=execution_timeout,
create_timeout=create_timeout,
)
return generate_graph(sym, physics=enable_physics, phrackify=phrackify)
def fire_lasers(self, strategy, contracts=None, address=None,
modules=None, verbose_report=False, max_depth=None, execution_timeout=None, create_timeout=None):
def fire_lasers(
self,
strategy,
contracts=None,
address=None,
modules=None,
verbose_report=False,
max_depth=None,
execution_timeout=None,
create_timeout=None,
max_transaction_count=None,
):
all_issues = []
for contract in (contracts or self.contracts):
sym = SymExecWrapper(contract, address, strategy,
for contract in contracts or self.contracts:
sym = SymExecWrapper(
contract,
address,
strategy,
dynloader=DynLoader(self.eth) if self.dynld else None,
max_depth=max_depth, execution_timeout=execution_timeout,
create_timeout=create_timeout)
max_depth=max_depth,
execution_timeout=execution_timeout,
create_timeout=create_timeout,
max_transaction_count=max_transaction_count,
)
issues = fire_lasers(sym, modules)
@ -382,7 +487,9 @@ class Mythril(object):
return report
def get_state_variable_from_storage(self, address, params=[]):
def get_state_variable_from_storage(self, address, params=None):
if params is None:
params = []
(position, length, mappings) = (0, 1, [])
try:
if params[0] == "mapping":
@ -391,9 +498,14 @@ class Mythril(object):
position = int(params[1])
position_formatted = utils.zpad(utils.int_to_big_endian(position), 32)
for i in range(2, len(params)):
key = bytes(params[i], 'utf8')
key = bytes(params[i], "utf8")
key_formatted = utils.rzpad(key, 32)
mappings.append(int.from_bytes(utils.sha3(key_formatted + position_formatted), byteorder='big'))
mappings.append(
int.from_bytes(
utils.sha3(key_formatted + position_formatted),
byteorder="big",
)
)
length = len(mappings)
if length == 1:
@ -408,32 +520,54 @@ class Mythril(object):
if len(params) >= 2:
length = int(params[1])
if len(params) == 3 and params[2] == "array":
position_formatted = utils.zpad(utils.int_to_big_endian(position), 32)
position = int.from_bytes(utils.sha3(position_formatted), byteorder='big')
position_formatted = utils.zpad(
utils.int_to_big_endian(position), 32
)
position = int.from_bytes(
utils.sha3(position_formatted), byteorder="big"
)
except ValueError:
raise CriticalError("Invalid storage index. Please provide a numeric value.")
raise CriticalError(
"Invalid storage index. Please provide a numeric value."
)
outtxt = []
try:
if length == 1:
outtxt.append("{}: {}".format(position, self.eth.eth_getStorageAt(address, position)))
outtxt.append(
"{}: {}".format(
position, self.eth.eth_getStorageAt(address, position)
)
)
else:
if len(mappings) > 0:
for i in range(0, len(mappings)):
position = mappings[i]
outtxt.append("{}: {}".format(hex(position), self.eth.eth_getStorageAt(address, position)))
outtxt.append(
"{}: {}".format(
hex(position),
self.eth.eth_getStorageAt(address, position),
)
)
else:
for i in range(position, position + length):
outtxt.append("{}: {}".format(hex(i), self.eth.eth_getStorageAt(address, i)))
outtxt.append(
"{}: {}".format(
hex(i), self.eth.eth_getStorageAt(address, i)
)
)
except FileNotFoundError as e:
raise CriticalError("IPC error: " + str(e))
except ConnectionError as e:
raise CriticalError("Could not connect to RPC server. Make sure that your node is running and that RPC parameters are set correctly.")
return '\n'.join(outtxt)
except ConnectionError:
raise CriticalError(
"Could not connect to RPC server. Make sure that your node is running and that RPC parameters are set correctly."
)
return "\n".join(outtxt)
def disassemble(self, contract):
@staticmethod
def disassemble(contract):
return contract.get_easm()
@staticmethod
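Taken together, the usual flow through this class is construct, load, then analyse; a hedged usage sketch (the strategy string and the source file name are illustrative assumptions, not values taken from this diff):

from mythril.mythril import Mythril

mythril = Mythril(solv=None, solc_args=None, dynld=False, enable_online_lookup=False)
address, contracts = mythril.load_from_solidity(["token.sol"])   # hypothetical source file
report = mythril.fire_lasers(
    strategy="dfs",                  # assumed to name one of the supported search strategies
    contracts=contracts,
    address=address,
    max_depth=22,
    max_transaction_count=2,
)
# The returned Report aggregates the issues found across the loaded contracts.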

@ -1,8 +0,0 @@
BLOCK_TAG_EARLIEST = 'earliest'
BLOCK_TAG_LATEST = 'latest'
BLOCK_TAG_PENDING = 'pending'
BLOCK_TAGS = (
BLOCK_TAG_EARLIEST,
BLOCK_TAG_LATEST,
BLOCK_TAG_PENDING,
)

@ -4,7 +4,6 @@ import re
class DynLoader:
def __init__(self, eth):
self.eth = eth
self.storage_cache = {}
@ -19,13 +18,17 @@ class DynLoader:
self.storage_cache[contract_address] = {}
data = self.eth.eth_getStorageAt(contract_address, position=index, block='latest')
data = self.eth.eth_getStorageAt(
contract_address, position=index, block="latest"
)
self.storage_cache[contract_address][index] = data
except IndexError:
data = self.eth.eth_getStorageAt(contract_address, position=index, block='latest')
data = self.eth.eth_getStorageAt(
contract_address, position=index, block="latest"
)
self.storage_cache[contract_address][index] = data
@ -33,11 +36,13 @@ class DynLoader:
def dynld(self, contract_address, dependency_address):
logging.info("Dynld at contract " + contract_address + ": " + dependency_address)
logging.info(
"Dynld at contract " + contract_address + ": " + dependency_address
)
m = re.match(r'^(0x[0-9a-fA-F]{40})$', dependency_address)
m = re.match(r"^(0x[0-9a-fA-F]{40})$", dependency_address)
if (m):
if m:
dependency_address = m.group(1)
else:
@ -47,7 +52,7 @@ class DynLoader:
code = self.eth.eth_getCode(dependency_address)
if (code == "0x"):
if code == "0x":
return None
else:
return Disassembly(code)

@ -11,7 +11,6 @@ from subprocess import Popen, PIPE
from mythril.exceptions import CompilerError
# todo: tintinweb - make this a normal requirement? (deps: eth-abi and requests, both already required by mythril)
try:
# load if available but do not fail
import ethereum_input_decoder
@ -27,14 +26,15 @@ try:
import fcntl
def lock_file(f, exclusive=False):
if f.mode == 'r' and exclusive:
raise Exception('Please use non exclusive mode for reading')
if f.mode == "r" and exclusive:
raise Exception("Please use non exclusive mode for reading")
flag = fcntl.LOCK_EX if exclusive else fcntl.LOCK_SH
fcntl.lockf(f, flag)
def unlock_file(f):
return
except ImportError:
# Windows file locking
# TODO: confirm the existence or non existence of shared locks in windows msvcrt and make changes based on that
@ -44,8 +44,8 @@ except ImportError:
return os.path.getsize(os.path.realpath(f.name))
def lock_file(f, exclusive=False):
if f.mode == 'r' and exclusive:
raise Exception('Please use non exclusive mode for reading')
if f.mode == "r" and exclusive:
raise Exception("Please use non exclusive mode for reading")
msvcrt.locking(f.fileno(), msvcrt.LK_RLCK, file_size(f))
def unlock_file(f):
@ -53,17 +53,22 @@ except ImportError:
class SignatureDb(object):
def __init__(self, enable_online_lookup=True):
def __init__(self, enable_online_lookup=False):
"""
Constructor
:param enable_online_lookup: enable online signature hash lookup
"""
self.signatures = {} # signatures in-mem cache
self.signatures_file = None
self.enable_online_lookup = enable_online_lookup # enable online funcsig resolving
self.online_lookup_miss = set() # temporarily track misses from onlinedb to avoid requesting the same non-existent sighash multiple times
self.online_directory_unavailable_until = 0 # flag the online directory as unavailable for some time
self.enable_online_lookup = (
enable_online_lookup
) # enable online funcsig resolving
self.online_lookup_miss = (
set()
) # temporarily track misses from onlinedb to avoid requesting the same non-existent sighash multiple times
self.online_directory_unavailable_until = (
0
) # flag the online directory as unavailable for some time
def open(self, path=None):
"""
@ -75,15 +80,19 @@ class SignatureDb(object):
if not path:
# try default locations
try:
mythril_dir = os.environ['MYTHRIL_DIR']
mythril_dir = os.environ["MYTHRIL_DIR"]
except KeyError:
mythril_dir = os.path.join(os.path.expanduser('~'), ".mythril")
path = os.path.join(mythril_dir, 'signatures.json')
mythril_dir = os.path.join(os.path.expanduser("~"), ".mythril")
path = os.path.join(mythril_dir, "signatures.json")
self.signatures_file = path # store early to allow error handling to access the place we tried to load the file
self.signatures_file = (
path
) # store early to allow error handling to access the place we tried to load the file
if not os.path.exists(path):
logging.debug("Signatures: file not found: %s" % path)
raise FileNotFoundError("Missing function signature file. Resolving of function names disabled.")
raise FileNotFoundError(
"Missing function signature file. Resolving of function names disabled."
)
with open(path, "r") as f:
lock_file(f)
@ -111,6 +120,8 @@ class SignatureDb(object):
:return: self
"""
path = path or self.signatures_file
directory = os.path.split(path)[0]
if sync and os.path.exists(path):
# reload and save if file exists
with open(path, "r") as f:
@ -120,9 +131,14 @@ class SignatureDb(object):
finally:
unlock_file(f)
sigs.update(self.signatures) # reload file and merge cached sigs into what we load from file
sigs.update(
self.signatures
) # reload file and merge cached sigs into what we load from file
self.signatures = sigs
if directory and not os.path.exists(directory):
os.makedirs(directory)  # create folder structure if it does not exist
if not os.path.exists(path): # creates signatures.json file if it doesn't exist
open(path, "w").close()
@ -146,11 +162,20 @@ class SignatureDb(object):
"""
if not sighash.startswith("0x"):
sighash = "0x%s" % sighash # normalize sighash format
if self.enable_online_lookup and not self.signatures.get(sighash) and sighash not in self.online_lookup_miss and time.time() > self.online_directory_unavailable_until:
if (
self.enable_online_lookup
and not self.signatures.get(sighash)
and sighash not in self.online_lookup_miss
and time.time() > self.online_directory_unavailable_until
):
# online lookup enabled, and signature not in cache, sighash was not a miss earlier, and online directory not down
logging.debug("Signatures: performing online lookup for sighash %r" % sighash)
logging.debug(
"Signatures: performing online lookup for sighash %r" % sighash
)
try:
funcsigs = SignatureDb.lookup_online(sighash, timeout=timeout) # might return multiple sigs
funcsigs = SignatureDb.lookup_online(
sighash, timeout=timeout
) # might return multiple sigs
if funcsigs:
# only store if we get at least one result
self.signatures[sighash] = funcsigs
@ -158,11 +183,19 @@ class SignatureDb(object):
# miss
self.online_lookup_miss.add(sighash)
except FourByteDirectoryOnlineLookupError as fbdole:
self.online_directory_unavailable_until = time.time() + 2 * 60 # wait at least 2 mins to try again
logging.warning("online function signature lookup not available. will not try to lookup hash for the next 2 minutes. exception: %r" % fbdole)
self.online_directory_unavailable_until = (
time.time() + 2 * 60
) # wait at least 2 mins to try again
logging.warning(
"online function signature lookup not available. will not try to lookup hash for the next 2 minutes. exception: %r"
% fbdole
)
if sighash not in self.signatures:
return []
if type(self.signatures[sighash]) != list:
return [self.signatures[sighash]]
return self.signatures[sighash] # raise keyerror
return self.signatures[sighash]
def __getitem__(self, item):
"""
@ -172,13 +205,19 @@ class SignatureDb(object):
"""
return self.get(sighash=item)
def import_from_solidity_source(self, file_path):
def import_from_solidity_source(
self, file_path, solc_binary="solc", solc_args=None
):
"""
Import function signatures from a Solidity source file
:param file_path: Solidity source code file path
:param solc_binary: solc binary to use (defaults to "solc")
:param solc_args: additional command-line arguments passed to solc
:return: self
"""
self.signatures.update(SignatureDb.get_sigs_from_file(file_path))
self.signatures.update(
SignatureDb.get_sigs_from_file(
file_path, solc_binary=solc_binary, solc_args=solc_args
)
)
return self
@staticmethod
@ -196,31 +235,41 @@ class SignatureDb(object):
"""
if not ethereum_input_decoder:
return None
return list(ethereum_input_decoder.decoder.FourByteDirectory.lookup_signatures(sighash,
timeout=timeout,
proxies=proxies))
return list(
ethereum_input_decoder.decoder.FourByteDirectory.lookup_signatures(
sighash, timeout=timeout, proxies=proxies
)
)
@staticmethod
def get_sigs_from_file(file_name):
def get_sigs_from_file(file_name, solc_binary="solc", solc_args=None):
"""
:param file_name: path to a Solidity source file
:param solc_binary: solc binary to use (defaults to "solc")
:param solc_args: additional command-line arguments passed to solc
:return: a dict mapping signature hashes to function signatures
"""
sigs = {}
cmd = ["solc", "--hashes", file_name]
cmd = [solc_binary, "--hashes", file_name]
if solc_args:
cmd.extend(solc_args.split())
try:
p = Popen(cmd, stdout=PIPE, stderr=PIPE)
stdout, stderr = p.communicate()
ret = p.returncode
if ret != 0:
raise CompilerError("Solc experienced a fatal error (code %d).\n\n%s" % (ret, stderr.decode('UTF-8')))
raise CompilerError(
"Solc experienced a fatal error (code %d).\n\n%s"
% (ret, stderr.decode("UTF-8"))
)
except FileNotFoundError:
raise CompilerError(
"Compiler not found. Make sure that solc is installed and in PATH, or set the SOLC environment variable.")
stdout = stdout.decode('unicode_escape').split('\n')
"Compiler not found. Make sure that solc is installed and in PATH, or set the SOLC environment variable."
)
stdout = stdout.decode("unicode_escape").split("\n")
for line in stdout:
if '(' in line and ')' in line and ":" in line: # the ':' need not be checked but just to be sure
sigs["0x"+line.split(':')[0]] = [line.split(":")[1].strip()]
if (
"(" in line and ")" in line and ":" in line
): # the ':' check is redundant, but kept as a safeguard
sigs["0x" + line.split(":")[0]] = [line.split(":")[1].strip()]
logging.debug("Signatures: found %d signatures after parsing" % len(sigs))
return sigs
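
For orientation, here is a minimal usage sketch of the SignatureDb API as changed above: online lookup is now opt-in (enable_online_lookup defaults to False), write() creates the signatures file and its directory on demand, and import_from_solidity_source()/get_sigs_from_file() accept a solc binary and extra solc arguments. This assumes the module lives at mythril.support.signatures; the contract path and solc flags are hypothetical, and the selector 0x13af4035 ("setOwner(address)") is taken from the CLI tests further down.

# Illustrative sketch only; exercises the SignatureDb changes shown above.
from mythril.support.signatures import SignatureDb

sigs = SignatureDb(enable_online_lookup=False)  # online lookup is now opt-in
try:
    sigs.open()  # defaults to $MYTHRIL_DIR or ~/.mythril/signatures.json
except FileNotFoundError:
    pass  # no local signature file yet; write() below will create it

# Hypothetical source path and solc flags, shown only to illustrate the new parameters.
sigs.import_from_solidity_source(
    "contracts/MetaCoin.sol", solc_binary="solc", solc_args="--allow-paths ."
)
sigs.write()  # creates the directory and signatures.json if they do not exist

print(sigs.get("0x13af4035"))  # e.g. ["setOwner(address)"]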

@ -25,36 +25,49 @@ def analyze_truffle_project(sigs, args):
for filename in files:
if re.match(r'.*\.json$', filename) and filename != "Migrations.json":
if re.match(r".*\.json$", filename) and filename != "Migrations.json":
with open(os.path.join(build_dir, filename)) as cf:
contractdata = json.load(cf)
try:
name = contractdata['contractName']
bytecode = contractdata['deployedBytecode']
filename = PurePath(contractdata['sourcePath']).name
name = contractdata["contractName"]
bytecode = contractdata["deployedBytecode"]
filename = PurePath(contractdata["sourcePath"]).name
except KeyError:
print("Unable to parse contract data. Please use Truffle 4 to compile your project.")
print(
"Unable to parse contract data. Please use Truffle 4 to compile your project."
)
sys.exit()
if len(bytecode) < 4:
continue
sigs.import_from_solidity_source(contractdata['sourcePath'])
sigs.import_from_solidity_source(
contractdata["sourcePath"], solc_args=args.solc_args
)
sigs.write()
ethcontract = ETHContract(bytecode, name=name)
address = util.get_indexed_address(0)
sym = SymExecWrapper(ethcontract, address, args.strategy, max_depth=args.max_depth,
create_timeout=args.create_timeout, execution_timeout=args.execution_timeout)
sym = SymExecWrapper(
ethcontract,
address,
args.strategy,
max_depth=args.max_depth,
create_timeout=args.create_timeout,
execution_timeout=args.execution_timeout,
)
issues = fire_lasers(sym)
if not len(issues):
if args.outform == 'text' or args.outform == 'markdown':
if args.outform == "text" or args.outform == "markdown":
print("# Analysis result for " + name + "\n\nNo issues found.")
else:
result = {'contract': name, 'result': {'success': True, 'error': None, 'issues': []}}
result = {
"contract": name,
"result": {"success": True, "error": None, "issues": []},
}
print(json.dumps(result))
else:
@ -62,9 +75,9 @@ def analyze_truffle_project(sigs, args):
# augment with source code
disassembly = ethcontract.disassembly
source = contractdata['source']
source = contractdata["source"]
deployed_source_map = contractdata['deployedSourceMap'].split(";")
deployed_source_map = contractdata["deployedSourceMap"].split(";")
mappings = []
@ -80,13 +93,17 @@ def analyze_truffle_project(sigs, args):
if len(mapping) > 2 and len(mapping[2]) > 0:
idx = int(mapping[2])
lineno = source.encode('utf-8')[0:offset].count('\n'.encode('utf-8')) + 1
lineno = (
source.encode("utf-8")[0:offset].count("\n".encode("utf-8")) + 1
)
mappings.append(SourceMapping(idx, offset, length, lineno))
for issue in issues:
index = get_instruction_index(disassembly.instruction_list, issue.address)
index = get_instruction_index(
disassembly.instruction_list, issue.address
)
if index:
try:
@ -94,20 +111,31 @@ def analyze_truffle_project(sigs, args):
length = mappings[index].length
issue.filename = filename
issue.code = source.encode('utf-8')[offset:offset + length].decode('utf-8')
issue.code = source.encode("utf-8")[
offset : offset + length
].decode("utf-8")
issue.lineno = mappings[index].lineno
except IndexError:
logging.debug("No code mapping at index %d", index)
report.append_issue(issue)
if args.outform == 'json':
if args.outform == "json":
result = {'contract': name, 'result': {'success': True, 'error': None, 'issues': list(map(lambda x: x.as_dict, issues))}}
result = {
"contract": name,
"result": {
"success": True,
"error": None,
"issues": list(map(lambda x: x.as_dict, issues)),
},
}
print(json.dumps(result))
else:
if args.outform == 'text':
print("# Analysis result for " + name + ":\n\n" + report.as_text())
elif args.outform == 'markdown':
if args.outform == "text":
print(
"# Analysis result for " + name + ":\n\n" + report.as_text()
)
elif args.outform == "markdown":
print(report.as_markdown())
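
The Truffle hunk above turns each deployedSourceMap entry into a source line number by counting newlines in the UTF-8 encoded source up to the entry's byte offset, and slices the source with offset and length to attach the offending code to an issue. Below is a small sketch of that computation on a made-up two-line contract; note that real deployedSourceMap entries use Solidity's compressed, relative format, while absolute "offset:length:fileIndex" values are used here purely for illustration.

# Illustrative sketch of the offset -> line number mapping used above.
source = "contract A {\n    function f() {}\n}"
deployed_source_map = "0:34:0;13:18:0"  # hypothetical absolute entries

for entry in deployed_source_map.split(";"):
    mapping = entry.split(":")
    offset, length = int(mapping[0]), int(mapping[1])
    lineno = source.encode("utf-8")[0:offset].count("\n".encode("utf-8")) + 1
    code = source.encode("utf-8")[offset : offset + length].decode("utf-8")
    print(lineno, repr(code))  # 1 for the whole contract, 2 for '    function f() {'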

@ -1,3 +1,3 @@
# This file is suitable for sourcing inside POSIX shell, e.g. bash as
# well as for importing into Python
VERSION="v0.18.12" # NOQA
VERSION = "v0.19.3" # NOQA

@ -1,8 +1,10 @@
coloredlogs>=10.0
configparser>=3.5.0
coverage
eth_abi>=1.0.0
eth-account>=0.1.0a2
ethereum>=2.3.2
ethereum-input-decoder>=0.2.2
eth-hash>=0.1.0
eth-keyfile>=0.5.1
eth-keys>=0.2.0b3
@ -22,4 +24,5 @@ pytest_mock
requests
rlp>=1.0.1
transaction>=2.2.1
z3-solver>=4.5
z3-solver==4.5.1.0.post2
pysha3

@ -22,19 +22,22 @@ VERSION = None
# Package version (vX.Y.Z). It must match the git tag used for CircleCI
# deployment; otherwise the build will fail.
version_path = (Path(__file__).parent / 'mythril' / 'version.py').absolute()
exec(open(str(version_path), 'r').read())
version_path = (Path(__file__).parent / "mythril" / "version.py").absolute()
exec(open(str(version_path), "r").read())
class VerifyVersionCommand(install):
"""Custom command to verify that the git tag matches our version"""
description = 'verify that the git tag matches our version'
description = "verify that the git tag matches our version"
def run(self):
tag = os.getenv('CIRCLE_TAG')
tag = os.getenv("CIRCLE_TAG")
if tag != VERSION:
info = "Git tag: {0} does not match the version of this app: {1}".format(tag, VERSION)
info = "Git tag: {0} does not match the version of this app: {1}".format(
tag, VERSION
)
sys.exit(info)
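
For completeness, the verify command above simply compares the CIRCLE_TAG environment variable against the VERSION string exec'd from mythril/version.py (bumped to v0.19.3 above). A stand-alone sketch of the same check outside setup.py:

# Stand-alone sketch of the tag check performed by `python setup.py verify`.
import os
import sys

VERSION = "v0.19.3"  # value exec'd from mythril/version.py
tag = os.getenv("CIRCLE_TAG")  # set by CircleCI on tag builds
if tag != VERSION:
    sys.exit(
        "Git tag: {0} does not match the version of this app: {1}".format(tag, VERSION)
    )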
@ -44,90 +47,62 @@ def read_file(fname):
:param fname: path relative to setup.py
:return: file contents
"""
with open(os.path.join(os.path.dirname(__file__), fname), 'r') as fd:
with open(os.path.join(os.path.dirname(__file__), fname), "r") as fd:
return fd.read()
setup(
name='mythril',
name="mythril",
version=VERSION[1:],
description='Security analysis tool for Ethereum smart contracts',
description="Security analysis tool for Ethereum smart contracts",
long_description=read_file("README.md") if os.path.isfile("README.md") else "",
long_description_content_type='text/markdown', # requires twine and recent setuptools
url='https://github.com/b-mueller/mythril',
author='Bernhard Mueller',
author_email='bernhard.mueller11@gmail.com',
license='MIT',
long_description_content_type="text/markdown", # requires twine and recent setuptools
url="https://github.com/b-mueller/mythril",
author="Bernhard Mueller",
author_email="bernhard.mueller11@gmail.com",
license="MIT",
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Topic :: Software Development :: Disassemblers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"Development Status :: 3 - Alpha",
"Intended Audience :: Science/Research",
"Topic :: Software Development :: Disassemblers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
keywords='hacking disassembler security ethereum',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
keywords="hacking disassembler security ethereum",
packages=find_packages(exclude=["contrib", "docs", "tests"]),
install_requires=[
'ethereum>=2.3.2',
'z3-solver>=4.5',
'requests',
'py-solc',
'plyvel',
'eth_abi>=1.0.0',
'eth-utils>=1.0.1',
'eth-account>=0.1.0a2',
'eth-hash>=0.1.0',
'eth-keyfile>=0.5.1',
'eth-keys>=0.2.0b3',
'eth-rlp>=0.1.0',
'eth-tester>=0.1.0b21',
'eth-typing>=1.3.0,<2.0.0',
'coverage',
'jinja2>=2.9',
'rlp>=1.0.1',
'transaction>=2.2.1',
'py-flags',
'mock',
'configparser>=3.5.0',
'persistent>=4.2.0'
"coloredlogs>=10.0",
"ethereum>=2.3.2",
"z3-solver==4.5.1.0.post2",
"requests",
"py-solc",
"plyvel",
"eth_abi>=1.0.0",
"eth-utils>=1.0.1",
"eth-account>=0.1.0a2",
"eth-hash>=0.1.0",
"eth-keyfile>=0.5.1",
"eth-keys>=0.2.0b3",
"eth-rlp>=0.1.0",
"eth-tester>=0.1.0b21",
"eth-typing>=1.3.0,<2.0.0",
"coverage",
"jinja2>=2.9",
"rlp>=1.0.1",
"transaction>=2.2.1",
"py-flags",
"mock",
"configparser>=3.5.0",
"persistent>=4.2.0",
"ethereum-input-decoder>=0.2.2",
],
tests_require=[
'pytest>=3.6.0',
'pytest_mock',
'pytest-cov'
],
python_requires='>=3.5',
extras_require={
},
package_data={
'mythril.analysis.templates': ['*']
},
tests_require=["pytest>=3.6.0", "pytest_mock", "pytest-cov"],
python_requires=">=3.5",
extras_require={},
package_data={"mythril.analysis.templates": ["*"]},
include_package_data=True,
entry_points={
'console_scripts': ["myth=mythril.interfaces.cli:main"],
},
cmdclass={
'verify': VerifyVersionCommand,
}
entry_points={"console_scripts": ["myth=mythril.interfaces.cli:main"]},
cmdclass={"verify": VerifyVersionCommand},
)

@ -26,7 +26,7 @@ contract Rubixi {
Participant[] private participants;
//Fallback function
function() {
function() payable {
init();
}

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<style type="text/css">
#mynetwork {
background-color: #232625;

@ -1,6 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<style type="text/css">
#mynetwork {

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<style type="text/css">
#mynetwork {
background-color: #232625;

@ -16,15 +16,17 @@ MYTHRIL_DIR = TESTS_DIR / "mythril_dir"
class BaseTestCase(TestCase):
def setUp(self):
self.changed_files = []
self.ori_mythril_dir = getattr(os.environ, 'MYTHRIL_DIR', '')
os.environ['MYTHRIL_DIR'] = str(MYTHRIL_DIR)
shutil.copyfile(str(MYTHRIL_DIR / "signatures.json.example"), str(MYTHRIL_DIR / "signatures.json"))
self.ori_mythril_dir = os.environ.get("MYTHRIL_DIR", "")
os.environ["MYTHRIL_DIR"] = str(MYTHRIL_DIR)
shutil.copyfile(
str(MYTHRIL_DIR / "signatures.json.example"),
str(MYTHRIL_DIR / "signatures.json"),
)
def tearDown(self):
os.environ['MYTHRIL_DIR'] = self.ori_mythril_dir
os.environ["MYTHRIL_DIR"] = self.ori_mythril_dir
os.remove(str(MYTHRIL_DIR / "signatures.json"))
def compare_files_error_message(self):
@ -41,4 +43,6 @@ class BaseTestCase(TestCase):
self.changed_files.append((input_file, output_expected, output_current))
def assert_and_show_changed_files(self):
self.assertEqual(0, len(self.changed_files), msg=self.compare_files_error_message())
self.assertEqual(
0, len(self.changed_files), msg=self.compare_files_error_message()
)

@ -1,4 +1,8 @@
from mythril.analysis.modules.delegatecall import execute, _concrete_call, _symbolic_call
from mythril.analysis.modules.delegatecall import (
execute,
_concrete_call,
_symbolic_call,
)
from mythril.analysis.ops import Call, Variable, VarType
from mythril.analysis.symbolic import SymExecWrapper
from mythril.laser.ethereum.cfg import Node
@ -6,13 +10,17 @@ from mythril.laser.ethereum.state import GlobalState, Environment, Account
import pytest
from unittest.mock import MagicMock, patch
import pytest_mock
from mythril.disassembler.disassembly import Disassembly
def test_concrete_call():
# arrange
address = "0x10"
active_account = Account(address)
active_account.code = Disassembly("00")
environment = Environment(active_account, None, None, None, None, None)
state = GlobalState(None, None, None)
state = GlobalState(None, environment, None)
state.mstate.memory = ["placeholder", "calldata_bling_0"]
node = Node("example")
@ -32,18 +40,24 @@ def test_concrete_call():
assert issue.contract == node.contract_name
assert issue.function == node.function_name
assert issue.title == "Call data forwarded with delegatecall()"
assert issue.type == 'Informational'
assert issue.description == "This contract forwards its call data via DELEGATECALL in its fallback function." \
" This means that any function in the called contract can be executed." \
" Note that the callee contract will have access to the storage of the " \
assert issue.type == "Informational"
assert (
issue.description
== "This contract forwards its call data via DELEGATECALL in its fallback function."
" This means that any function in the called contract can be executed."
" Note that the callee contract will have access to the storage of the "
"calling contract.\n DELEGATECALL target: 0x1"
)
def test_concrete_call_symbolic_to():
# arrange
address = "0x10"
state = GlobalState(None, None, None)
active_account = Account(address)
active_account.code = Disassembly("00")
environment = Environment(active_account, None, None, None, None, None)
state = GlobalState(None, environment, None)
state.mstate.memory = ["placeholder", "calldata_bling_0"]
node = Node("example")
@ -63,11 +77,14 @@ def test_concrete_call_symbolic_to():
assert issue.contract == node.contract_name
assert issue.function == node.function_name
assert issue.title == "Call data forwarded with delegatecall()"
assert issue.type == 'Informational'
assert issue.description == "This contract forwards its call data via DELEGATECALL in its fallback function." \
" This means that any function in the called contract can be executed." \
" Note that the callee contract will have access to the storage of the " \
assert issue.type == "Informational"
assert (
issue.description
== "This contract forwards its call data via DELEGATECALL in its fallback function."
" This means that any function in the called contract can be executed."
" Note that the callee contract will have access to the storage of the "
"calling contract.\n DELEGATECALL target: calldata_3"
)
def test_concrete_call_not_calldata():
@ -88,11 +105,11 @@ def test_symbolic_call_storage_to(mocker):
address = "0x10"
active_account = Account(address)
active_account.code = Disassembly("00")
environment = Environment(active_account, None, None, None, None, None)
state = GlobalState(None, environment, None)
state.mstate.memory = ["placeholder", "calldata_bling_0"]
node = Node("example")
node.contract_name = "the contract name"
node.function_name = "the function name"
@ -100,14 +117,12 @@ def test_symbolic_call_storage_to(mocker):
to = Variable("storage_1", VarType.SYMBOLIC)
call = Call(node, state, None, "Type: ", to, None)
mocker.patch.object(SymExecWrapper, "__init__", lambda x, y: None)
statespace = SymExecWrapper(1)
mocker.patch.object(statespace, 'find_storage_write')
mocker.patch.object(statespace, "find_storage_write")
statespace.find_storage_write.return_value = "Function name"
# act
issues = _symbolic_call(call, state, address, statespace)
@ -116,21 +131,26 @@ def test_symbolic_call_storage_to(mocker):
assert issue.address == address
assert issue.contract == node.contract_name
assert issue.function == node.function_name
assert issue.title == 'Type: to a user-supplied address'
assert issue.type == 'Informational'
assert issue.description == 'This contract delegates execution to a contract address in storage slot 1.' \
' This storage slot can be written to by calling the function `Function name`. ' \
'Be aware that the called contract gets unrestricted access to this contract\'s state.'
assert issue.title == "Type: to a user-supplied address"
assert issue.type == "Informational"
assert (
issue.description
== "This contract delegates execution to a contract address in storage slot 1."
" This storage slot can be written to by calling the function `Function name`. "
"Be aware that the called contract gets unrestricted access to this contract's state."
)
def test_symbolic_call_calldata_to(mocker):
# arrange
address = "0x10"
state = GlobalState(None, None, None)
active_account = Account(address)
active_account.code = Disassembly("00")
environment = Environment(active_account, None, None, None, None, None)
state = GlobalState(None, environment, None)
state.mstate.memory = ["placeholder", "calldata_bling_0"]
node = Node("example")
node.contract_name = "the contract name"
node.function_name = "the function name"
@ -138,14 +158,12 @@ def test_symbolic_call_calldata_to(mocker):
to = Variable("calldata", VarType.SYMBOLIC)
call = Call(node, state, None, "Type: ", to, None)
mocker.patch.object(SymExecWrapper, "__init__", lambda x, y: None)
statespace = SymExecWrapper(1)
mocker.patch.object(statespace, 'find_storage_write')
mocker.patch.object(statespace, "find_storage_write")
statespace.find_storage_write.return_value = "Function name"
# act
issues = _symbolic_call(call, state, address, statespace)
@ -154,29 +172,34 @@ def test_symbolic_call_calldata_to(mocker):
assert issue.address == address
assert issue.contract == node.contract_name
assert issue.function == node.function_name
assert issue.title == 'Type: to a user-supplied address'
assert issue.type == 'Informational'
assert issue.description == 'This contract delegates execution to a contract address obtained from calldata. ' \
'Be aware that the called contract gets unrestricted access to this contract\'s state.'
@patch('mythril.laser.ethereum.state.GlobalState.get_current_instruction')
@patch('mythril.analysis.modules.delegatecall._concrete_call')
@patch('mythril.analysis.modules.delegatecall._symbolic_call')
assert issue.title == "Type: to a user-supplied address"
assert issue.type == "Informational"
assert (
issue.description
== "This contract delegates execution to a contract address obtained from calldata. "
"Be aware that the called contract gets unrestricted access to this contract's state."
)
@patch("mythril.laser.ethereum.state.GlobalState.get_current_instruction")
@patch("mythril.analysis.modules.delegatecall._concrete_call")
@patch("mythril.analysis.modules.delegatecall._symbolic_call")
def test_delegate_call(sym_mock, concrete_mock, curr_instruction):
# arrange
# sym_mock = mocker.patch.object(delegatecall, "_symbolic_call")
# concrete_mock = mocker.patch.object(delegatecall, "_concrete_call")
sym_mock.return_value = []
concrete_mock.return_value = []
curr_instruction.return_value = {'address': '0x10'}
curr_instruction.return_value = {"address": "0x10"}
active_account = Account("0x10")
active_account.code = Disassembly("00")
active_account = Account('0x10')
environment = Environment(active_account, None, None, None, None, None)
state = GlobalState(None, environment, Node)
state.mstate.memory = ["placeholder", "calldata_bling_0"]
state.mstate.stack = [1, 2, 3]
assert state.get_current_instruction() == {'address': '0x10'}
assert state.get_current_instruction() == {"address": "0x10"}
node = Node("example")
node.contract_name = "the contract name"
@ -189,15 +212,15 @@ def test_delegate_call(sym_mock, concrete_mock, curr_instruction):
statespace.calls = [call]
# act
issues = execute(statespace)
execute(statespace)
# assert
assert concrete_mock.call_count == 1
assert sym_mock.call_count == 1
@patch('mythril.analysis.modules.delegatecall._concrete_call')
@patch('mythril.analysis.modules.delegatecall._symbolic_call')
@patch("mythril.analysis.modules.delegatecall._concrete_call")
@patch("mythril.analysis.modules.delegatecall._symbolic_call")
def test_delegate_call_not_delegate(sym_mock, concrete_mock):
# arrange
# sym_mock = mocker.patch.object(delegatecall, "_symbolic_call")
@ -223,8 +246,8 @@ def test_delegate_call_not_delegate(sym_mock, concrete_mock):
assert sym_mock.call_count == 0
@patch('mythril.analysis.modules.delegatecall._concrete_call')
@patch('mythril.analysis.modules.delegatecall._symbolic_call')
@patch("mythril.analysis.modules.delegatecall._concrete_call")
@patch("mythril.analysis.modules.delegatecall._symbolic_call")
def test_delegate_call_not_fallback(sym_mock, concrete_mock):
# arrange
# sym_mock = mocker.patch.object(delegatecall, "_symbolic_call")

@ -3,50 +3,61 @@ from tests import *
MYTH = str(PROJECT_DIR / "myth")
def output_of(command):
return check_output(command, shell=True).decode("UTF-8")
class CommandLineToolTestCase(BaseTestCase):
class CommandLineToolTestCase(BaseTestCase):
def test_disassemble_code_correctly(self):
command = "python3 {} MYTH -d -c 0x5050".format(MYTH)
self.assertEqual('0 POP\n1 POP\n', output_of(command))
self.assertEqual("0 POP\n1 POP\n", output_of(command))
def test_disassemble_solidity_file_correctly(self):
solidity_file = str(TESTDATA / 'input_contracts'/ 'metacoin.sol')
solidity_file = str(TESTDATA / "input_contracts" / "metacoin.sol")
command = "python3 {} -d {}".format(MYTH, solidity_file)
self.assertIn('2 PUSH1 0x40\n4 MSTORE', output_of(command))
self.assertIn("2 PUSH1 0x40\n4 MSTORE", output_of(command))
def test_hash_a_function_correctly(self):
command = "python3 {} --hash 'setOwner(address)'".format(MYTH)
self.assertEqual('0x13af4035\n', output_of(command))
self.assertEqual("0x13af4035\n", output_of(command))
class TruffleTestCase(BaseTestCase):
class TruffleTestCase(BaseTestCase):
def test_analysis_truffle_project(self):
truffle_project_root = str(TESTS_DIR / "truffle_project")
command = "cd {}; truffle compile; python3 {} --truffle".format(truffle_project_root, MYTH)
self.assertIn("In the function `withdrawfunds()` a non-zero amount of Ether is sent to msg.sender.", output_of(command))
command = "cd {}; truffle compile; python3 {} --truffle".format(
truffle_project_root, MYTH
)
self.assertIn("=== Ether send ====", output_of(command))
class InfuraTestCase(BaseTestCase):
class InfuraTestCase(BaseTestCase):
def test_infura_mainnet(self):
command = "python3 {} --rpc infura-mainnet -d -a 0x2a0c0dbecc7e4d658f48e01e3fa353f44050c208".format(MYTH)
command = "python3 {} --rpc infura-mainnet -d -a 0x2a0c0dbecc7e4d658f48e01e3fa353f44050c208".format(
MYTH
)
output = output_of(command)
self.assertIn("0 PUSH1 0x60\n2 PUSH1 0x40\n4 MSTORE", output)
self.assertIn("7278 POP\n7279 POP\n7280 JUMP\n7281 STOP", output)
def test_infura_rinkeby(self):
command = "python3 {} --rpc infura-rinkeby -d -a 0xB6f2bFED892a662bBF26258ceDD443f50Fa307F5".format(MYTH)
command = "python3 {} --rpc infura-rinkeby -d -a 0xB6f2bFED892a662bBF26258ceDD443f50Fa307F5".format(
MYTH
)
output = output_of(command)
self.assertIn("34 JUMPDEST\n35 CALLVALUE", output)
def test_infura_kovan(self):
command = "python3 {} --rpc infura-kovan -d -a 0xE6bBF9B5A3451242F82f8cd458675092617a1235".format(MYTH)
command = "python3 {} --rpc infura-kovan -d -a 0xE6bBF9B5A3451242F82f8cd458675092617a1235".format(
MYTH
)
output = output_of(command)
self.assertIn("9999 PUSH1 0x00\n10001 NOT\n10002 AND\n10003 PUSH1 0x00", output)
def test_infura_ropsten(self):
command = "python3 {} --rpc infura-ropsten -d -a 0x6e0E0e02377Bc1d90E8a7c21f12BA385C2C35f78".format(MYTH)
command = "python3 {} --rpc infura-ropsten -d -a 0x6e0E0e02377Bc1d90E8a7c21f12BA385C2C35f78".format(
MYTH
)
output = output_of(command)
self.assertIn("1821 PUSH1 0x01\n1823 PUSH2 0x070c", output)

@ -0,0 +1,124 @@
from mythril.disassembler.asm import *
import pytest
valid_names = [("PUSH1", 0x60), ("STOP", 0x0), ("RETURN", 0xF3)]
@pytest.mark.parametrize("operation_name, hex_value", valid_names)
def test_get_opcode(operation_name: str, hex_value: int):
# Act
return_value = get_opcode_from_name(operation_name)
# Assert
assert return_value == hex_value
def test_get_unknown_opcode():
operation_name = "definitely unknown"
# Act
with pytest.raises(RuntimeError):
get_opcode_from_name(operation_name)
sequence_match_test_data = [
# Normal no match
(
(["PUSH1"], ["EQ"]),
[{"opcode": "PUSH1"}, {"opcode": "PUSH3"}, {"opcode": "EQ"}],
1,
False,
),
# Normal match
(
(["PUSH1"], ["EQ"]),
[{"opcode": "PUSH1"}, {"opcode": "PUSH1"}, {"opcode": "EQ"}],
1,
True,
),
# Out of bounds pattern
(
(["PUSH1"], ["EQ"]),
[{"opcode": "PUSH1"}, {"opcode": "PUSH3"}, {"opcode": "EQ"}],
3,
False,
),
(
(["PUSH1"], ["EQ"]),
[{"opcode": "PUSH1"}, {"opcode": "PUSH3"}, {"opcode": "EQ"}],
2,
False,
),
# Double option match
(
(["PUSH1", "PUSH3"], ["EQ"]),
[{"opcode": "PUSH1"}, {"opcode": "PUSH1"}, {"opcode": "EQ"}],
1,
True,
),
(
(["PUSH1", "PUSH3"], ["EQ"]),
[{"opcode": "PUSH1"}, {"opcode": "PUSH3"}, {"opcode": "EQ"}],
1,
True,
),
# Double option no match
(
(["PUSH1", "PUSH3"], ["EQ"]),
[{"opcode": "PUSH1"}, {"opcode": "PUSH3"}, {"opcode": "EQ"}],
0,
False,
),
]
@pytest.mark.parametrize(
"pattern, instruction_list, index, expected_result", sequence_match_test_data
)
def test_is_sequence_match(pattern, instruction_list, index, expected_result):
# Act
return_value = is_sequence_match(pattern, instruction_list, index)
# Assert
assert return_value == expected_result
find_sequence_match_test_data = [
# Normal no match
(
(["PUSH1"], ["EQ"]),
[{"opcode": "PUSH1"}, {"opcode": "PUSH3"}, {"opcode": "EQ"}],
[],
),
# Normal match
(
(["PUSH1"], ["EQ"]),
[
{"opcode": "PUSH1"},
{"opcode": "PUSH1"},
{"opcode": "EQ"},
{"opcode": "PUSH1"},
{"opcode": "EQ"},
],
[1, 3],
),
]
@pytest.mark.parametrize(
"pattern, instruction_list, expected_result", find_sequence_match_test_data
)
def test_find_op_code_sequence(pattern, instruction_list, expected_result):
# Act
return_value = list(find_op_code_sequence(pattern, instruction_list))
# Assert
assert return_value == expected_result
def test_disassemble():
# Act
instruction_list = disassemble(b"\x00\x16\x06")
# Assert
assert instruction_list[0]["opcode"] == "STOP"
assert instruction_list[1]["opcode"] == "AND"
assert instruction_list[2]["opcode"] == "MOD"
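
The parametrized cases above pin down the semantics of is_sequence_match: a pattern is a tuple of opcode-option lists, and it matches at a given index when the pattern fits inside the instruction list and each position's opcode is one of the allowed options. A minimal reference sketch of that behaviour, shown only to illustrate the semantics the tests exercise (not the module's actual implementation):

# Reference sketch of the matching semantics exercised by the test data above.
def sequence_matches(pattern, instruction_list, index):
    if index + len(pattern) > len(instruction_list):
        return False  # pattern would run past the end of the instruction list
    return all(
        instruction_list[index + i]["opcode"] in options
        for i, options in enumerate(pattern)
    )

assert sequence_matches(
    (["PUSH1", "PUSH3"], ["EQ"]),
    [{"opcode": "PUSH1"}, {"opcode": "PUSH3"}, {"opcode": "EQ"}],
    1,
)
assert not sequence_matches(
    (["PUSH1"], ["EQ"]),
    [{"opcode": "PUSH1"}, {"opcode": "PUSH3"}, {"opcode": "EQ"}],
    3,
)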

@ -0,0 +1,61 @@
from mythril.disassembler.disassembly import *
instruction_list = [
{"opcode": "PUSH4", "argument": "0x10203040"},
{"opcode": "EQ"},
{"opcode": "PUSH4", "argument": "0x40302010"},
{"opcode": "JUMPI"},
]
def test_get_function_info(mocker):
# Arrange
global instruction_list
signature_database_mock = SignatureDb()
mocker.patch.object(signature_database_mock, "get")
signature_database_mock.get.return_value = ["function_name"]
# Act
function_hash, entry_point, function_name = get_function_info(
0, instruction_list, signature_database_mock
)
# Assert
assert function_hash == "0x10203040"
assert entry_point == 0x40302010
assert function_name == "function_name"
def test_get_function_info_multiple_names(mocker):
# Arrange
global instruction_list
signature_database_mock = SignatureDb()
mocker.patch.object(signature_database_mock, "get")
signature_database_mock.get.return_value = ["function_name", "another_name"]
# Act
function_hash, entry_point, function_name = get_function_info(
0, instruction_list, signature_database_mock
)
# Assert
assert function_name == "**ambiguous** function_name"
def test_get_function_info_no_names(mocker):
# Arrange
global instruction_list
signature_database_mock = SignatureDb()
mocker.patch.object(signature_database_mock, "get")
signature_database_mock.get.return_value = []
# Act
function_hash, entry_point, function_name = get_function_info(
0, instruction_list, signature_database_mock
)
# Assert
assert function_name == "_function_0x10203040"
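
The instruction_list fixture above is the standard Solidity dispatcher shape (PUSH4 <selector>, EQ, PUSH4 <jump destination>, JUMPI): get_function_info reads the selector from the first PUSH4, resolves it through SignatureDb, and takes the second PUSH4 as the function's entry point. A short illustration of how the asserted values follow from the fixture:

# Illustrative walk-through of the dispatcher pattern used by the tests above.
instruction_list = [
    {"opcode": "PUSH4", "argument": "0x10203040"},  # function selector
    {"opcode": "EQ"},
    {"opcode": "PUSH4", "argument": "0x40302010"},  # jump destination
    {"opcode": "JUMPI"},
]
function_hash = instruction_list[0]["argument"]  # "0x10203040"
entry_point = int(instruction_list[2]["argument"], 16)  # 0x40302010
# SignatureDb.get(function_hash) then determines the reported name:
#   ["function_name"]                 -> "function_name"
#   ["function_name", "another_name"] -> "**ambiguous** function_name"
#   []                                -> "_function_0x10203040"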


@ -3,33 +3,45 @@ from mythril.ether.ethcontract import ETHContract
class ETHContractTestCase(unittest.TestCase):
def setUp(self):
self.code = "0x60606040525b603c5b60006010603e565b9050593681016040523660008237602060003683856040603f5a0204f41560545760206000f35bfe5b50565b005b73c3b2ae46792547a96b9f84405e36d0e07edcd05c5b905600a165627a7a7230582062a884f947232ada573f95940cce9c8bfb7e4e14e21df5af4e884941afb55e590029"
self.creation_code = "0x60606040525b603c5b60006010603e565b9050593681016040523660008237602060003683856040603f5a0204f41560545760206000f35bfe5b50565b005b73c3b2ae46792547a96b9f84405e36d0e07edcd05c5b905600a165627a7a7230582062a884f947232ada573f95940cce9c8bfb7e4e14e21df5af4e884941afb55e590029"
class Getinstruction_listTestCase(ETHContractTestCase):
class Getinstruction_listTestCase(ETHContractTestCase):
def runTest(self):
contract = ETHContract(self.code, self.creation_code)
disassembly = contract.disassembly
self.assertEqual(len(disassembly.instruction_list), 53, 'Error disassembling code using ETHContract.get_instruction_list()')
self.assertEqual(
len(disassembly.instruction_list),
53,
"Error disassembling code using ETHContract.get_instruction_list()",
)
class GetEASMTestCase(ETHContractTestCase):
class GetEASMTestCase(ETHContractTestCase):
def runTest(self):
contract = ETHContract(self.code)
instruction_list = contract.get_easm()
self.assertTrue("PUSH1 0x60" in instruction_list, 'Error obtaining EASM code through ETHContract.get_easm()')
self.assertTrue(
"PUSH1 0x60" in instruction_list,
"Error obtaining EASM code through ETHContract.get_easm()",
)
class MatchesExpressionTestCase(ETHContractTestCase):
class MatchesExpressionTestCase(ETHContractTestCase):
def runTest(self):
contract = ETHContract(self.code)
self.assertTrue(contract.matches_expression("code#PUSH1# or code#PUSH1#"), 'Unexpected result in expression matching')
self.assertFalse(contract.matches_expression("func#abcdef#"), 'Unexpected result in expression matching')
self.assertTrue(
contract.matches_expression("code#PUSH1# or code#PUSH1#"),
"Unexpected result in expression matching",
)
self.assertFalse(
contract.matches_expression("func#abcdef#"),
"Unexpected result in expression matching",
)

@ -5,22 +5,30 @@ from mythril.ether.soliditycontract import ETHContract
from tests import *
import re
class GraphTest(BaseTestCase):
class GraphTest(BaseTestCase):
def test_generate_graph(self):
for input_file in TESTDATA_INPUTS.iterdir():
output_expected = TESTDATA_OUTPUTS_EXPECTED / (input_file.name + ".graph.html")
output_current = TESTDATA_OUTPUTS_CURRENT / (input_file.name + ".graph.html")
output_expected = TESTDATA_OUTPUTS_EXPECTED / (
input_file.name + ".graph.html"
)
output_current = TESTDATA_OUTPUTS_CURRENT / (
input_file.name + ".graph.html"
)
contract = ETHContract(input_file.read_text())
sym = SymExecWrapper(contract, address=(util.get_indexed_address(0)), strategy="dfs")
sym = SymExecWrapper(
contract, address=(util.get_indexed_address(0)), strategy="dfs"
)
html = generate_graph(sym)
output_current.write_text(html)
lines_expected = re.findall(r"'label': '.*'", str(output_current.read_text()))
lines_expected = re.findall(
r"'label': '.*'", str(output_expected.read_text())
)
lines_found = re.findall(r"'label': '.*'", str(output_current.read_text()))
if not (lines_expected == lines_found):
self.found_changed_files(input_file, output_expected, output_current)

@ -0,0 +1,52 @@
{
"sha3_0" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_0Filler.json",
"sourceHash" : "843009e4e97d7dd905578ea884db6d80c07f57d58679ec181dc761e1e51ae035"
},
"callcreates" : [
],
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x6000600020600055",
"data" : "0x",
"gas" : "0x174876e800",
"gasPrice" : "0x3b9aca00",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x17487699b9",
"logs" : "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
"out" : "0x",
"post" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x6000600020600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"
}
}
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x6000600020600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}
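
The sha3_0 fixture above executes the bytecode 0x6000600020600055 (PUSH1 0x00, PUSH1 0x00, SHA3, PUSH1 0x00, SSTORE): it hashes zero bytes of memory and stores the digest in storage slot 0, so the expected post-state value is simply keccak-256 of the empty string. It can be reproduced with pysha3, which appears in the requirements above:

# Reproduces the expected post-storage value of the sha3_0 fixture.
import sha3  # provided by the pysha3 package

assert (
    sha3.keccak_256(b"").hexdigest()
    == "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"
)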

@ -0,0 +1,52 @@
{
"sha3_1" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_1Filler.json",
"sourceHash" : "5f786aded76570c96466f5a4c4632a5a0b362ffc1e4124667cdb1e9328d1d81d"
},
"callcreates" : [
],
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x6005600420600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x013850",
"logs" : "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
"out" : "0x",
"post" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x6005600420600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0xc41589e7559804ea4a2080dad19d876a024ccb05117835447d72ce08c1d020ec"
}
}
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x6005600420600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,52 @@
{
"sha3_2" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_2Filler.json",
"sourceHash" : "bb3f59dc995c834eaf315b4819900c30ba4e97ef60163aed05946c70e841691f"
},
"callcreates" : [
],
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x600a600a20600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x013850",
"logs" : "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
"out" : "0x",
"post" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x600a600a20600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x6bd2dd6bd408cbee33429358bf24fdc64612fbf8b1b4db604518f40ffd34b607"
}
}
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x600a600a20600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,37 @@
{
"sha3_3" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_3Filler.json",
"sourceHash" : "1f474f7dac8971615e641354d809db332975d1ea5ca589d855fb02a1da559033"
},
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x620fffff6103e820600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x620fffff6103e820600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,37 @@
{
"sha3_4" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_4Filler.json",
"sourceHash" : "100da75ff0b63159ca86aa4ef7457a956027af5c6c1ed1f0fa894aaa63849887"
},
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x6064640fffffffff20600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x6064640fffffffff20600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,37 @@
{
"sha3_5" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_5Filler.json",
"sourceHash" : "066bcf3a8e9e7b4c15ec2240c8e1bb0d53de0230c76989e21e4b6aaac83f577d"
},
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x640fffffffff61271020600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x640fffffffff61271020600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,37 @@
{
"sha3_6" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_6Filler.json",
"sourceHash" : "c360c6583bf965674d153f11c243c1e0807e95e99bc9bcb684a7ad2c7155dd40"
},
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff20600055",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff20600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,37 @@
{
"sha3_bigOffset" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_bigOffsetFiller.json",
"sourceHash" : "1ae2cdfa2e3ab1cac89d8b3d535c3ee50601ebc6098fdbddadca74980eec6382"
},
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x60027e0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff20600055",
"data" : "0x",
"gas" : "0x010000000000",
"gasPrice" : "0x01",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"code" : "0x60027e0fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff20600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,52 @@
{
"sha3_bigOffset2" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_bigOffset2Filler.json",
"sourceHash" : "2bf8d14886b1001b266c29bd9f9e764f7e6965e851bfe1440e536735fca993dc"
},
"callcreates" : [
],
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x6002630100000020600055",
"data" : "0x",
"gas" : "0x0100000000",
"gasPrice" : "0x01",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
},
"gas" : "0xdfe7a9b0",
"logs" : "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
"out" : "0x",
"post" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"code" : "0x6002630100000020600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0x54a8c0ab653c15bfb48b47fd011ba2b9617af01cb45cab344acd57c924d56798"
}
}
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"code" : "0x6002630100000020600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,37 @@
{
"sha3_bigSize" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_bigSizeFiller.json",
"sourceHash" : "571bfd9a15c6b0e317f96a92f745aee1d800aa4486c1a101b3e016120ffb5415"
},
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x7effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff20600055",
"data" : "0x",
"gas" : "0x010000000000",
"gasPrice" : "0x01",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"code" : "0x7effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff20600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,52 @@
{
"sha3_memSizeNoQuadraticCost31" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_memSizeNoQuadraticCost31Filler.json",
"sourceHash" : "04553284981ef7338bdeac0e029652313a2643169833e386ca34bfa3d5e5942a"
},
"callcreates" : [
],
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x60016103c020600055",
"data" : "0x",
"gas" : "0x0100000000",
"gasPrice" : "0x01",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
},
"gas" : "0xffffb155",
"logs" : "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
"out" : "0x",
"post" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"code" : "0x60016103c020600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0xbc36789e7a1e281436464229828f817d6612f7b477d66591ff96a9e064bcc98a"
}
}
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"code" : "0x60016103c020600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,52 @@
{
"sha3_memSizeQuadraticCost32" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_memSizeQuadraticCost32Filler.json",
"sourceHash" : "70f68e0328222cc2c995bf932f2f8f65f5d4c7e9f040a51bbf4dae3cad9110cf"
},
"callcreates" : [
],
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x60016103e020600055",
"data" : "0x",
"gas" : "0x0100000000",
"gasPrice" : "0x01",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
},
"gas" : "0xffffb151",
"logs" : "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
"out" : "0x",
"post" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"code" : "0x60016103e020600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0xbc36789e7a1e281436464229828f817d6612f7b477d66591ff96a9e064bcc98a"
}
}
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"code" : "0x60016103e020600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@ -0,0 +1,52 @@
{
"sha3_memSizeQuadraticCost32_zeroSize" : {
"_info" : {
"comment" : "",
"filledwith" : "cpp-1.3.0+commit.6e0ce939.Linux.g++",
"lllcversion" : "Version: 0.4.18-develop.2017.9.25+commit.a72237f2.Linux.g++",
"source" : "src/VMTestsFiller/vmSha3Test/sha3_memSizeQuadraticCost32_zeroSizeFiller.json",
"sourceHash" : "96094eee3e3fcd478fe3780ff053e64bf5391616bfdc6c2017bf12dcc5d30366"
},
"callcreates" : [
],
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x600061040020600055",
"data" : "0x",
"gas" : "0x0100000000",
"gasPrice" : "0x01",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
},
"gas" : "0xffffb1b9",
"logs" : "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
"out" : "0x",
"post" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"code" : "0x600061040020600055",
"nonce" : "0x00",
"storage" : {
"0x00" : "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"
}
}
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
"code" : "0x600061040020600055",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

Some files were not shown because too many files have changed in this diff.
