Merge branch 'develop' into master

pull/615/head
Authored by Nathan, committed via GitHub (6 years ago)
commit 7e73a42051
75 files changed (changed line counts in parentheses; BIN = binary file):

   1. Dockerfile (16)
   2. README.md (33)
   3. mythril/analysis/modules/deprecated_ops.py (2)
   4. mythril/analysis/modules/multiple_sends.py (6)
   5. mythril/analysis/modules/transaction_order_dependence.py (2)
   6. mythril/analysis/ops.py (2)
   7. mythril/analysis/symbolic.py (21)
   8. mythril/analysis/templates/callgraph.html (2)
   9. mythril/analysis/traceexplore.py (10)
  10. mythril/disassembler/disassembly.py (112)
  11. mythril/ether/asm.py (8)
  12. mythril/ether/ethcontract.py (15)
  13. mythril/ether/evm.py (115)
  14. mythril/ether/util.py (2)
  15. mythril/ethereum/__init__.py (0)
  16. mythril/ethereum/interface/__init__.py (0)
  17. mythril/ethereum/interface/leveldb/__init__.py (0)
  18. mythril/ethereum/interface/leveldb/accountindexing.py (39)
  19. mythril/ethereum/interface/leveldb/client.py (149)
  20. mythril/ethereum/interface/leveldb/eth_db.py (16)
  21. mythril/ethereum/interface/leveldb/state.py (62)
  22. mythril/ethereum/interface/rpc/__init__.py (0)
  23. mythril/ethereum/interface/rpc/base_client.py (28)
  24. mythril/ethereum/interface/rpc/client.py (4)
  25. mythril/ethereum/interface/rpc/constants.py (0)
  26. mythril/ethereum/interface/rpc/exceptions.py (0)
  27. mythril/ethereum/interface/rpc/utils.py (16)
  28. mythril/interfaces/cli.py (30)
  29. mythril/laser/ethereum/call.py (22)
  30. mythril/laser/ethereum/instructions.py (107)
  31. mythril/laser/ethereum/state.py (2)
  32. mythril/laser/ethereum/strategy/__init__.py (25)
  33. mythril/laser/ethereum/strategy/basic.py (87)
  34. mythril/laser/ethereum/svm.py (55)
  35. mythril/laser/ethereum/taint_analysis.py (11)
  36. mythril/laser/ethereum/transaction/transaction_models.py (17)
  37. mythril/laser/ethereum/util.py (13)
  38. mythril/mythril.py (42)
  39. mythril/support/loader.py (4)
  40. mythril/support/signatures.py (8)
  41. mythril/support/truffle.py (2)
  42. requirements.txt (2)
  43. setup.py (4)
  44. static/Ownable.html (4)
  45. static/assertions.html (5)
  46. static/mythril.html (4)
  47. static/mythril.png (BIN)
  48. static/mythril_new.png (BIN)
  49. tests/analysis/test_delegatecall.py (2)
  50. tests/disassembler/__init__.py (0)
  51. tests/disassembler/disassembly.py (61)
  52. tests/laser/evm_testsuite/VMTests/vmTests/suicide.json (51)
  53. tests/laser/evm_testsuite/evm_test.py (2)
  54. tests/laser/state/mstack_test.py (2)
  55. tests/laser/transaction/symbolic_test.py (2)
  56. tests/native_test.py (12)
  57. tests/report_test.py (2)
  58. tests/rpc_test.py (2)
  59. tests/taint_runner_test.py (3)
  60. tests/testdata/outputs_expected/calls.sol.o.graph.html (2)
  61. tests/testdata/outputs_expected/environments.sol.o.graph.html (2)
  62. tests/testdata/outputs_expected/ether_send.sol.o.graph.html (2)
  63. tests/testdata/outputs_expected/exceptions.sol.o.graph.html (2)
  64. tests/testdata/outputs_expected/kinds_of_calls.sol.o.graph.html (2)
  65. tests/testdata/outputs_expected/metacoin.sol.o.graph.html (2)
  66. tests/testdata/outputs_expected/multi_contracts.sol.o.graph.html (2)
  67. tests/testdata/outputs_expected/nonascii.sol.o.graph.html (2)
  68. tests/testdata/outputs_expected/origin.sol.o.graph.html (2)
  69. tests/testdata/outputs_expected/origin.sol.o.json (2)
  70. tests/testdata/outputs_expected/origin.sol.o.markdown (2)
  71. tests/testdata/outputs_expected/origin.sol.o.text (2)
  72. tests/testdata/outputs_expected/overflow.sol.o.graph.html (2)
  73. tests/testdata/outputs_expected/returnvalue.sol.o.graph.html (2)
  74. tests/testdata/outputs_expected/suicide.sol.o.graph.html (2)
  75. tests/testdata/outputs_expected/underflow.sol.o.graph.html (2)

@@ -1,7 +1,5 @@ Dockerfile
 FROM ubuntu:bionic
-COPY . /opt/mythril
 RUN apt-get update \
     && apt-get install -y \
        build-essential \
@@ -18,14 +16,20 @@ RUN apt-get update \
        python3-dev \
        pandoc \
        git \
-    && ln -s /usr/bin/python3 /usr/local/bin/python \
-    && cd /opt/mythril \
-    && pip3 install -r requirements.txt \
-    && python setup.py install
+    && ln -s /usr/bin/python3 /usr/local/bin/python
+
+COPY ./requirements.txt /opt/mythril/requirements.txt
+
+RUN cd /opt/mythril \
+    && pip3 install -r requirements.txt
 
 RUN locale-gen en_US.UTF-8
 ENV LANG en_US.UTF-8
 ENV LANGUAGE en_US.en
 ENV LC_ALL en_US.UTF-8
 
+COPY . /opt/mythril
+
+RUN cd /opt/mythril \
+    && python setup.py install
 
 ENTRYPOINT ["/usr/local/bin/myth"]

@@ -1,17 +1,21 @@ README.md
-# Mythril OSS [![Tweet](https://img.shields.io/twitter/url/http/shields.io.svg?style=social)](https://twitter.com/intent/tweet?text=Mythril%20-%20Security%20Analyzer%20for%20Ethereum%20Smart%20Contracts&url=https://www.github.com/ConsenSys/mythril)
+# Mythril Classic
+
+<p align="center">
+    <img src="/static/mythril_new.png" height="320px"/>
+</p>
 
 [![Discord](https://img.shields.io/discord/481002907366588416.svg)](https://discord.gg/E3YrVtG)
 [![PyPI](https://badge.fury.io/py/mythril.svg)](https://pypi.python.org/pypi/mythril)
-![Master Build Status](https://img.shields.io/circleci/project/github/ConsenSys/mythril/master.svg)
-[![Waffle.io - Columns and their card count](https://badge.waffle.io/ConsenSys/mythril.svg?columns=all)](https://waffle.io/ConsenSys/mythril)
+![Master Build Status](https://img.shields.io/circleci/project/github/ConsenSys/mythril-classic/master.svg)
+[![Waffle.io - Columns and their card count](https://badge.waffle.io/ConsenSys/mythril-classic.svg?columns=In%20Progress)](https://waffle.io/ConsenSys/mythril-classic/)
 [![Sonarcloud - Maintainability](https://sonarcloud.io/api/project_badges/measure?project=mythril&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=mythril)
-[![PyPI Statistics](https://pypistats.com/badge/mythril.svg)](https://pypistats.com/package/mythril)
 
-<img height="120px" align="right" src="https://github.com/ConsenSys/mythril/raw/master/static/mythril.png" alt="mythril" />
-Mythril OSS is the classic security analysis tool for Ethereum smart contracts. It uses concolic analysis, taint analysis and control flow checking to detect a variety of security vulnerabilities.
+Mythril Classic is an open-source security analysis tool for Ethereum smart contracts. It uses concolic analysis, taint analysis and control flow checking to detect a variety of security vulnerabilities.
 
-Whether you want to contribute, need support, or want to learn what we have cooking for the future, our [Discord server](https://discord.gg/E3YrVtG) will serve your needs!
+Whether you want to contribute, need support, or want to learn what we have cooking for the future, our [Discord server](https://discord.gg/E3YrVtG) will serve your needs.
 
-Oh and by the way, we're now building a whole security tools ecosystem with [Mythril Platform](https://mythril.ai). You should definitely check that out as well.
+Oh and by the way, we're also building an easy-to-use security analysis platform (a.k.a. "the INFURA for smart contract security") that anybody can use to create purpose-built security tools. It's called [Mythril Platform](https://mythril.ai) and you should definitely [check it out](https://media.consensys.net/mythril-platform-api-is-upping-the-smart-contract-security-game-eee1d2642488).
 
 ## Installation and setup

@@ -31,21 +35,10 @@ See the [Wiki](https://github.com/ConsenSys/mythril/wiki/Installation-and-Setup)
 
 ## Usage
 
-Instructions for using the 'myth' tool are found on the [Wiki](https://github.com/ConsenSys/mythril/wiki).
+Instructions for using Mythril Classic are found on the [Wiki](https://github.com/ConsenSys/mythril-classic/wiki).
 
 For support or general discussions please join the Mythril community on [Discord](https://discord.gg/E3YrVtG).
 
 ## Vulnerability Remediation
 
 Visit the [Smart Contract Vulnerability Classification Registry](https://smartcontractsecurity.github.io/SWC-registry/) to find detailed information and remediation guidance for the vulnerabilities reported.
-
-## Presentations, papers and articles
-
-- [Analyzing Ethereum Smart Contracts for Vulnerabilities](https://hackernoon.com/scanning-ethereum-smart-contracts-for-vulnerabilities-b5caefd995df)
-- [What Caused the Parity SUICIDE Vulnerability & How to Detect Similar Bugs](https://hackernoon.com/what-caused-the-latest-100-million-ethereum-bug-and-a-detection-tool-for-similar-bugs-7b80f8ab7279)
-- [Detecting Integer Overflows in Ethereum Smart Contracts](https://media.consensys.net/detecting-batchoverflow-and-similar-flaws-in-ethereum-smart-contracts-93cf5a5aaac8)
-- [How Formal Verification Can Ensure Flawless Smart Contracts](https://media.consensys.net/how-formal-verification-can-ensure-flawless-smart-contracts-cbda8ad99bd1)
-- [Smashing Smart Contracts for Fun and Real Profit](https://hackernoon.com/hitb2018ams-smashing-smart-contracts-for-fun-and-real-profit-720f5e3ac777)
-- [HITBSecConf 2018 - Presentation video](https://www.youtube.com/watch?v=iqf6epACgds)
-- [EDCon Toronto 2018 - Mythril: Find bugs and verify security properties in your contracts](https://www.youtube.com/watch?v=NJ9StJThxZY&feature=youtu.be&t=3h3m18s)

@@ -24,7 +24,7 @@ def execute(statespace): (mythril/analysis/modules/deprecated_ops.py)
 
         instruction = state.get_current_instruction()
 
         if instruction['opcode'] == "ORIGIN":
-            description = "Function %s retrieves the transaction origin (tx.origin) using the ORIGIN opcode. " \
+            description = "The function `{}` retrieves the transaction origin (tx.origin) using the ORIGIN opcode. " \
                           "Use msg.sender instead.\nSee also: " \
                           "https://solidity.readthedocs.io/en/develop/security-considerations.html#tx-origin".format(node.function_name)

@@ -25,8 +25,8 @@ def execute(statespace): (mythril/analysis/modules/multiple_sends.py)
                           swc_id=MULTIPLE_SENDS, title="Multiple Calls", _type="Informational")
 
             issue.description = \
-                "Multiple sends exist in one transaction, try to isolate each external call into its own transaction." \
-                " As external calls can fail accidentally or deliberately.\nConsecutive calls: \n"
+                "Multiple sends exist in one transaction. Try to isolate each external call into its own transaction," \
+                " as external calls can fail accidentally or deliberately.\nConsecutive calls: \n"
 
             for finding in findings:
                 issue.description += \

@@ -38,7 +38,7 @@ def execute(statespace):
 
 def _explore_nodes(call, statespace):
     children = _child_nodes(statespace, call.node)
-    sending_children = list(filter(lambda call: call.node in children, statespace.calls))
+    sending_children = list(filter(lambda c: c.node in children, statespace.calls))
     return sending_children
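Note on the rename in _explore_nodes above: it is purely about shadowing, since the old lambda parameter call hid the function's own call argument. A minimal sketch with invented data (not part of the diff) showing the before and after filter:

    calls = [{"node": 1}, {"node": 2}, {"node": 5}]
    children = {1, 2}

    # Before: the parameter name `call` shadows any outer variable called `call`
    sending_children = list(filter(lambda call: call["node"] in children, calls))

    # After: a distinct name keeps the outer `call` visible and the intent clear
    sending_children = list(filter(lambda c: c["node"] in children, calls))
    print(sending_children)  # [{'node': 1}, {'node': 2}]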

@@ -112,7 +112,7 @@ def _get_influencing_sstores(statespace, interesting_storages): (transaction_order_dependence.py)
         index, value = sstore_state.mstate.stack[-1], sstore_state.mstate.stack[-2]
         try:
             index = util.get_concrete_int(index)
-        except AttributeError:
+        except TypeError:
             index = str(index)
         if "storage_{}".format(index) not in interesting_storages:
             continue

@@ -21,7 +21,7 @@ class Variable: (mythril/analysis/ops.py)
 
 def get_variable(i):
     try:
         return Variable(util.get_concrete_int(i), VarType.CONCRETE)
-    except AttributeError:
+    except TypeError:
         return Variable(simplify(i), VarType.SYMBOLIC)
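Both this hunk and the one in transaction_order_dependence.py switch from catching AttributeError to TypeError around util.get_concrete_int. A minimal sketch of a get_concrete_int-style helper that raises TypeError for symbolic values; this is an assumed illustration, not the actual mythril implementation:

    from z3 import BitVec, BitVecNumRef, BitVecVal, simplify

    def get_concrete_int_sketch(item):
        # Plain Python ints are already concrete
        if isinstance(item, int):
            return item
        simplified = simplify(item)
        # Only fully concrete bit-vectors expose as_long()
        if isinstance(simplified, BitVecNumRef):
            return simplified.as_long()
        # Symbolic expressions cannot be reduced to a single integer
        raise TypeError("Expression is symbolic, cannot convert to a concrete int")

    print(get_concrete_int_sketch(BitVecVal(5, 256)))       # 5
    try:
        get_concrete_int_sketch(BitVec("calldata_0", 256))
    except TypeError as error:
        print(error)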

@@ -4,23 +4,27 @@ from mythril.ether.soliditycontract import SolidityContract (mythril/analysis/symbolic.py)
 import copy
 import logging
 from .ops import get_variable, SStore, Call, VarType
-from mythril.laser.ethereum.strategy.basic import DepthFirstSearchStrategy, BreadthFirstSearchStrategy
+from mythril.laser.ethereum.strategy.basic import DepthFirstSearchStrategy, BreadthFirstSearchStrategy, \
+    ReturnRandomNaivelyStrategy, ReturnWeightedRandomStrategy
 
 
 class SymExecWrapper:
-    '''
+    """
     Wrapper class for the LASER Symbolic virtual machine. Symbolically executes the code and does a bit of pre-analysis for convenience.
-    '''
+    """
 
     def __init__(self, contract, address, strategy, dynloader=None, max_depth=22,
-                 execution_timeout=None, create_timeout=None):
+                 execution_timeout=None, create_timeout=None, max_transaction_count=3):
 
+        s_strategy = None
         if strategy == 'dfs':
             s_strategy = DepthFirstSearchStrategy
         elif strategy == 'bfs':
             s_strategy = BreadthFirstSearchStrategy
+        elif strategy == 'naive-random':
+            s_strategy = ReturnRandomNaivelyStrategy
+        elif strategy == 'weighted-random':
+            s_strategy = ReturnWeightedRandomStrategy
         else:
             raise ValueError("Invalid strategy argument supplied")

@@ -30,7 +34,8 @@ class SymExecWrapper:
 
         self.laser = svm.LaserEVM(self.accounts, dynamic_loader=dynloader, max_depth=max_depth,
                                   execution_timeout=execution_timeout, strategy=s_strategy,
-                                  create_timeout=create_timeout)
+                                  create_timeout=create_timeout,
+                                  max_transaction_count=max_transaction_count)
 
         if isinstance(contract, SolidityContract):
             self.laser.sym_exec(creation_code=contract.creation_code, contract_name=contract.name)

@@ -67,7 +72,7 @@ class SymExecWrapper:
                         # ignore prebuilts
                         continue
 
-                    if (meminstart.type == VarType.CONCRETE and meminsz.type == VarType.CONCRETE):
+                    if meminstart.type == VarType.CONCRETE and meminsz.type == VarType.CONCRETE:
                         self.calls.append(Call(self.nodes[key], state, state_index, op, to, gas, value, state.mstate.memory[meminstart.val:meminsz.val * 4]))
                     else:
                         self.calls.append(Call(self.nodes[key], state, state_index, op, to, gas, value))

@@ -105,7 +110,7 @@ class SymExecWrapper:
                     taint = True
 
                     for constraint in s.node.constraints:
-                        if ("caller" in str(constraint)):
+                        if "caller" in str(constraint):
                             taint = False
                             break
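The two new branches map the strategy names naive-random and weighted-random to classes in mythril/laser/ethereum/strategy/basic.py. A hedged sketch of what a weighted random selection could look like; the method name follows the strategy interface, but the inverse-depth weighting shown here is an assumption, not necessarily LASER's exact scheme:

    import random

    class WeightedRandomStrategySketch:
        """Pick the next global state at random, favoring shallower states."""

        def __init__(self, work_list, max_depth):
            self.work_list = work_list
            self.max_depth = max_depth

        def get_strategic_global_state(self):
            # Assumed weighting: states closer to the root get a higher probability
            weights = [1 / (state.mstate.depth + 1) for state in self.work_list]
            chosen = random.choices(self.work_list, weights=weights, k=1)[0]
            self.work_list.remove(chosen)
            return chosen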

@@ -1,7 +1,7 @@ (mythril/analysis/templates/callgraph.html)
 <!DOCTYPE html>
 <html>
 <head>
-<title> Laser - Call Graph</title>
+<title>Call Graph</title>
 <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
 <script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@@ -13,8 +13,8 @@ colors = [ (mythril/analysis/traceexplore.py)
     {'border': '#4753bf', 'background': '#3b46a1', 'highlight': {'border': '#fff', 'background': '#424db3'}},
 ]
 
-def get_serializable_statespace(statespace):
 
+def get_serializable_statespace(statespace):
     nodes = []
     edges = []

@@ -40,10 +40,10 @@ def get_serializable_statespace(statespace):
 
         color = color_map[node.get_cfg_dict()['contract_name']]
 
-        def get_state_accounts(state):
+        def get_state_accounts(node_state):
             state_accounts = []
-            for key in state.accounts:
-                account = state.accounts[key].as_dict
+            for key in node_state.accounts:
+                account = node_state.accounts[key].as_dict
                 account.pop('code', None)
                 account['balance'] = str(account['balance'])

@@ -81,7 +81,7 @@ def get_serializable_statespace(statespace):
 
     for edge in statespace.edges:
 
-        if (edge.condition is None):
+        if edge.condition is None:
             label = ""
         else:

@@ -4,51 +4,91 @@ import logging (mythril/disassembler/disassembly.py)
 
 
 class Disassembly(object):
+    """
+    Disassembly class
 
-    def __init__(self, code, enable_online_lookup=True):
+    Stores bytecode, and its disassembly.
+    Additionally it will gather the following information on the existing functions in the disassembled code:
+    - function hashes
+    - function name to entry point mapping
+    - function entry point to function name mapping
+    """
+
+    def __init__(self, code: str, enable_online_lookup: bool=False):
+        self.bytecode = code
         self.instruction_list = asm.disassemble(util.safe_decode(code))
         self.func_hashes = []
-        self.func_to_addr = {}
-        self.addr_to_func = {}
-        self.bytecode = code
+        self.function_name_to_address = {}
+        self.address_to_function_name = {}
 
-        signatures = SignatureDb(enable_online_lookup=enable_online_lookup)  # control if you want to have online sighash lookups
+        signatures = SignatureDb(
+            enable_online_lookup=enable_online_lookup
+        )  # control if you want to have online signature hash lookups
         try:
             signatures.open()  # open from default locations
         except FileNotFoundError:
-            logging.info("Missing function signature file. Resolving of function names from signature file disabled.")
+            logging.info(
+                "Missing function signature file. Resolving of function names from signature file disabled."
+            )
 
-        # Parse jump table & resolve function names
-        jmptable_indices = asm.find_opcode_sequence(["PUSH4", "EQ"], self.instruction_list)
+        # Need to take from PUSH1 to PUSH4 because solc seems to remove excess 0s at the beginning for optimizing
+        jump_table_indices = asm.find_opcode_sequence(
+            [("PUSH1", "PUSH2", "PUSH3", "PUSH4"), ("EQ",)], self.instruction_list
+        )
 
-        for i in jmptable_indices:
-            func_hash = self.instruction_list[i]['argument']
-            self.func_hashes.append(func_hash)
-            try:
-                # tries local cache, file and optional online lookup
-                # may return more than one function signature. since we cannot probe for the correct one we'll use the first
-                func_names = signatures.get(func_hash)
-                if len(func_names) > 1:
-                    # ambigious result
-                    func_name = "**ambiguous** %s" % func_names[0]  # return first hit but note that result was ambiguous
-                else:
-                    # only one item
-                    func_name = func_names[0]
-            except KeyError:
-                func_name = "_function_" + func_hash
-
-            try:
-                offset = self.instruction_list[i + 2]['argument']
-                jump_target = int(offset, 16)
-
-                self.func_to_addr[func_name] = jump_target
-                self.addr_to_func[jump_target] = func_name
-            except:
-                continue
+        for index in jump_table_indices:
+            function_hash, jump_target, function_name = get_function_info(
+                index, self.instruction_list, signatures
+            )
+            self.func_hashes.append(function_hash)
+
+            if jump_target is not None and function_name is not None:
+                self.function_name_to_address[function_name] = jump_target
+                self.address_to_function_name[jump_target] = function_name
 
         signatures.write()  # store resolved signatures (potentially resolved online)
 
     def get_easm(self):
-        # todo: tintinweb - print funcsig resolved data from self.addr_to_func?
         return asm.instruction_list_to_easm(self.instruction_list)
+
+
+def get_function_info(index: int, instruction_list: list, signature_database: SignatureDb) -> (str, int, str):
+    """
+    Finds the function information for a call table entry
+    Solidity uses the first 4 bytes of the calldata to indicate which function the message call should execute
+    The generated code that directs execution to the correct function looks like this:
+    - PUSH function_hash
+    - EQ
+    - PUSH entry_point
+    - JUMPI
+
+    This function takes an index that points to the first instruction, and from that finds out the function hash,
+    function entry and the function name.
+
+    :param index: Start of the entry pattern
+    :param instruction_list: Instruction list for the contract that is being analyzed
+    :param signature_database: Database used to map function hashes to their respective function names
+    :return: function hash, function entry point, function name
+    """
+    # Append with missing 0s at the beginning
+    function_hash = "0x" + instruction_list[index]["argument"][2:].rjust(8, "0")
+    function_names = signature_database.get(function_hash)
+
+    if len(function_names) > 1:
+        # In this case there was an ambiguous result
+        function_name = (
+            "**ambiguous** {}".format(function_names[0])
+        )
+    elif len(function_names) == 1:
+        function_name = function_names[0]
+    else:
+        function_name = "_function_" + function_hash
+
+    try:
+        offset = instruction_list[index + 2]["argument"]
+        entry_point = int(offset, 16)
+    except (KeyError, IndexError):
+        return function_hash, None, None
+
+    return function_hash, entry_point, function_name
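The dispatcher pattern documented in get_function_info matches function hashes that are the first four bytes of the keccak-256 hash of the canonical signature. A short illustration using the same utils.sha3 helper that ethcontract.py already relies on; the signature below is the standard ERC-20 transfer, chosen only as an example:

    from ethereum import utils

    def selector(signature: str) -> str:
        # First 4 bytes of keccak256 over the canonical signature string
        return "0x" + utils.sha3(signature)[:4].hex()

    print(selector("transfer(address,uint256)"))  # 0xa9059cbb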

@@ -70,17 +70,17 @@ def find_opcode_sequence(pattern, instruction_list): (mythril/ether/asm.py)
 
     for i in range(0, len(instruction_list) - pattern_length + 1):
-        if instruction_list[i]['opcode'] == pattern[0]:
+        if instruction_list[i]['opcode'] in pattern[0]:
             matched = True
             for j in range(1, len(pattern)):
-                if not (instruction_list[i + j]['opcode'] == pattern[j]):
+                if not (instruction_list[i + j]['opcode'] in pattern[j]):
                     matched = False
                     break
 
-            if (matched):
+            if matched:
                 match_indexes.append(i)
 
     return match_indexes

@@ -102,7 +102,7 @@ def disassemble(bytecode):
         instruction = {'address': addr}
 
         try:
-            if (sys.version_info > (3, 0)):
+            if sys.version_info > (3, 0):
                 opcode = opcodes[bytecode[addr]]
             else:
                 opcode = opcodes[ord(bytecode[addr])]
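With the change above, each position in the pattern is a tuple of acceptable opcodes, so one pattern covers dispatcher entries that use PUSH1 through PUSH4. A self-contained sketch with an invented instruction list:

    def find_opcode_sequence_sketch(pattern, instruction_list):
        # A position matches if the instruction's opcode is in the tuple for that position
        indices = []
        for i in range(len(instruction_list) - len(pattern) + 1):
            if all(instruction_list[i + j]['opcode'] in pattern[j] for j in range(len(pattern))):
                indices.append(i)
        return indices

    instructions = [
        {'opcode': 'PUSH1', 'argument': '0x00'},
        {'opcode': 'PUSH4', 'argument': '0xa9059cbb'},
        {'opcode': 'EQ'},
        {'opcode': 'PUSH2', 'argument': '0x004d'},
        {'opcode': 'JUMPI'},
    ]

    print(find_opcode_sequence_sketch([("PUSH1", "PUSH2", "PUSH3", "PUSH4"), ("EQ",)], instructions))  # [1]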

@@ -6,16 +6,17 @@ import re (mythril/ether/ethcontract.py)
 
 class ETHContract(persistent.Persistent):
 
-    def __init__(self, code, creation_code="", name="Unknown", enable_online_lookup=True):
-        self.creation_code = creation_code
-        self.name = name
+    def __init__(self, code, creation_code="", name="Unknown", enable_online_lookup=False):
 
         # Workaround: We currently do not support compile-time linking.
         # Dynamic contract addresses of the format __[contract-name]_____________ are replaced with a generic address
+        # Apply this for creation_code & code
 
-        code = re.sub(r'(_+.*_+)', 'aa' * 20, code)
+        creation_code = re.sub(r'(_{2}.{38})', 'aa' * 20, creation_code)
+        code = re.sub(r'(_{2}.{38})', 'aa' * 20, code)
 
+        self.creation_code = creation_code
+        self.name = name
         self.code = code
         self.disassembly = Disassembly(code, enable_online_lookup=enable_online_lookup)
         self.creation_disassembly = Disassembly(creation_code, enable_online_lookup=enable_online_lookup)

@@ -49,7 +50,7 @@ class ETHContract(persistent.Persistent):
 
             m = re.match(r'^code#([a-zA-Z0-9\s,\[\]]+)#', token)
 
-            if (m):
+            if m:
                 if easm_code is None:
                     easm_code = self.get_easm()

@@ -59,7 +60,7 @@ class ETHContract(persistent.Persistent):
 
             m = re.match(r'^func#([a-zA-Z0-9\s_,(\\)\[\]]+)#$', token)
 
-            if (m):
+            if m:
                 sign_hash = "0x" + utils.sha3(m.group(1))[:4].hex()
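The tightened pattern r'(_{2}.{38})' targets solc's unresolved library placeholders, which are exactly 40 characters wide (two underscores plus 38 filler characters), and is now applied to the creation code as well. A small sketch with a made-up library name:

    import re

    # Hypothetical unlinked bytecode: solc library placeholders are 40 characters wide
    placeholder = "__MySafeMathLib".ljust(40, "_")
    code = "6060604052" + placeholder + "6000f3"
    linked = re.sub(r'(_{2}.{38})', 'aa' * 20, code)
    print(linked)  # the placeholder is replaced by a 40-character dummy address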

@@ -7,69 +7,52 @@ from io import StringIO (mythril/ether/evm.py)
 import re
 
 
-def trace(code, calldata = ""):
-
+def trace(code, calldata=""):
     log_handlers = ['eth.vm.op', 'eth.vm.op.stack', 'eth.vm.op.memory', 'eth.vm.op.storage']
     output = StringIO()
     stream_handler = StreamHandler(output)
 
     for handler in log_handlers:
         log_vm_op = get_logger(handler)
         log_vm_op.setLevel("TRACE")
         log_vm_op.addHandler(stream_handler)
 
     addr = bytes.fromhex('0123456789ABCDEF0123456789ABCDEF01234567')
     state = State()
 
     ext = messages.VMExt(state, transactions.Transaction(0, 0, 21000, addr, 0, addr))
     message = vm.Message(addr, addr, 0, 21000, calldata)
-    res, gas, dat = vm.vm_execute(ext, message, util.safe_decode(code))
+    vm.vm_execute(ext, message, util.safe_decode(code))
     stream_handler.flush()
     ret = output.getvalue()
     lines = ret.split("\n")
 
-    trace = []
+    state_trace = []
     for line in lines:
         m = re.search(r'pc=b\'(\d+)\'.*op=([A-Z0-9]+)', line)
         if m:
             pc = m.group(1)
             op = m.group(2)
 
             m = re.match(r'.*stack=(\[.*?\])', line)
-            if (m):
+            if m:
                 stackitems = re.findall(r'b\'(\d+)\'', m.group(1))
-                stack = "[";
-                if (len(stackitems)):
+                stack = "["
+                if len(stackitems):
                     for i in range(0, len(stackitems) - 1):
                         stack += hex(int(stackitems[i])) + ", "
                     stack += hex(int(stackitems[-1]))
                 stack += "]"
             else:
                 stack = "[]"
 
-            if (re.match(r'^PUSH.*', op)):
+            if re.match(r'^PUSH.*', op):
                 val = re.search(r'pushvalue=(\d+)', line).group(1)
                 pushvalue = hex(int(val))
-                trace.append({'pc': pc, 'op': op, 'stack': stack, 'pushvalue': pushvalue})
+                state_trace.append({'pc': pc, 'op': op, 'stack': stack, 'pushvalue': pushvalue})
             else:
-                trace.append({'pc': pc, 'op': op, 'stack': stack})
-    return trace
+                state_trace.append({'pc': pc, 'op': op, 'stack': stack})
+    return state_trace
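For reference, a sketch of the log parsing the rewritten trace() performs. The sample line is invented to mimic the pyethereum eth.vm.op output format the regexes expect; it was not captured from a real run:

    import re

    line = "DEBUG:eth.vm.op: vm pc=b'2' op=MSTORE stack=[b'96', b'64']"

    m = re.search(r'pc=b\'(\d+)\'.*op=([A-Z0-9]+)', line)
    pc, op = m.group(1), m.group(2)

    stackitems = re.findall(r'b\'(\d+)\'', re.match(r'.*stack=(\[.*?\])', line).group(1))
    stack = "[" + ", ".join(hex(int(item)) for item in stackitems) + "]"

    print({'pc': pc, 'op': op, 'stack': stack})  # {'pc': '2', 'op': 'MSTORE', 'stack': '[0x60, 0x40]'}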

@@ -10,7 +10,7 @@ import json (mythril/ether/util.py)
 
 def safe_decode(hex_encoded_string):
-    if (hex_encoded_string.startswith("0x")):
+    if hex_encoded_string.startswith("0x"):
         return bytes.fromhex(hex_encoded_string[2:])
     else:
         return bytes.fromhex(hex_encoded_string)

@ -34,9 +34,9 @@ class CountableList(object):
class ReceiptForStorage(rlp.Serializable): class ReceiptForStorage(rlp.Serializable):
''' """
Receipt format stored in levelDB Receipt format stored in levelDB
''' """
fields = [ fields = [
('state_root', binary), ('state_root', binary),
@ -50,9 +50,9 @@ class ReceiptForStorage(rlp.Serializable):
class AccountIndexer(object): class AccountIndexer(object):
''' """
Updates address index Updates address index
''' """
def __init__(self, ethDB): def __init__(self, ethDB):
self.db = ethDB self.db = ethDB
@ -62,29 +62,28 @@ class AccountIndexer(object):
self.updateIfNeeded() self.updateIfNeeded()
def get_contract_by_hash(self, contract_hash): def get_contract_by_hash(self, contract_hash):
''' """
get mapped address by its hash, if not found try indexing get mapped contract_address by its hash, if not found try indexing
''' """
address = self.db.reader._get_address_by_hash(contract_hash) contract_address = self.db.reader._get_address_by_hash(contract_hash)
if address is not None: if contract_address is not None:
return address return contract_address
else: else:
raise AddressNotFoundError raise AddressNotFoundError
return self.db.reader._get_address_by_hash(contract_hash)
def _process(self, startblock): def _process(self, startblock):
''' """
Processesing method Processesing method
''' """
logging.debug("Processing blocks %d to %d" % (startblock, startblock + BATCH_SIZE)) logging.debug("Processing blocks %d to %d" % (startblock, startblock + BATCH_SIZE))
addresses = [] addresses = []
for blockNum in range(startblock, startblock + BATCH_SIZE): for blockNum in range(startblock, startblock + BATCH_SIZE):
hash = self.db.reader._get_block_hash(blockNum) block_hash = self.db.reader._get_block_hash(blockNum)
if hash is not None: if block_hash is not None:
receipts = self.db.reader._get_block_receipts(hash, blockNum) receipts = self.db.reader._get_block_receipts(block_hash, blockNum)
for receipt in receipts: for receipt in receipts:
if receipt.contractAddress is not None and not all(b == 0 for b in receipt.contractAddress): if receipt.contractAddress is not None and not all(b == 0 for b in receipt.contractAddress):
@ -96,9 +95,9 @@ class AccountIndexer(object):
return addresses return addresses
def updateIfNeeded(self): def updateIfNeeded(self):
''' """
update address index update address index
''' """
headBlock = self.db.reader._get_head_block() headBlock = self.db.reader._get_head_block()
if headBlock is not None: if headBlock is not None:
# avoid restarting search if head block is same & we already initialized # avoid restarting search if head block is same & we already initialized
@ -128,7 +127,7 @@ class AccountIndexer(object):
count = 0 count = 0
processed = 0 processed = 0
while (blockNum <= self.lastBlock): while blockNum <= self.lastBlock:
# leveldb cannot be accessed on multiple processes (not even readonly) # leveldb cannot be accessed on multiple processes (not even readonly)
# multithread version performs significantly worse than serial # multithread version performs significantly worse than serial
try: try:

@ -1,12 +1,12 @@
import binascii import binascii
import rlp import rlp
from mythril.leveldb.accountindexing import CountableList from mythril.ethereum.interface.leveldb.accountindexing import CountableList
from mythril.leveldb.accountindexing import ReceiptForStorage, AccountIndexer from mythril.ethereum.interface.leveldb.accountindexing import ReceiptForStorage, AccountIndexer
import logging import logging
from ethereum import utils from ethereum import utils
from ethereum.block import BlockHeader, Block from ethereum.block import BlockHeader, Block
from mythril.leveldb.state import State from mythril.ethereum.interface.leveldb.state import State
from mythril.leveldb.eth_db import ETH_DB from mythril.ethereum.interface.leveldb.eth_db import ETH_DB
from mythril.ether.ethcontract import ETHContract from mythril.ether.ethcontract import ETHContract
from mythril.exceptions import AddressNotFoundError from mythril.exceptions import AddressNotFoundError
@ -26,23 +26,23 @@ address_mapping_head_key = b'accountMapping' # head (latest) number of indexed
def _format_block_number(number): def _format_block_number(number):
''' """
formats block number to uint64 big endian formats block number to uint64 big endian
''' """
return utils.zpad(utils.int_to_big_endian(number), 8) return utils.zpad(utils.int_to_big_endian(number), 8)
def _encode_hex(v): def _encode_hex(v):
''' """
encodes hash as hex encodes hash as hex
''' """
return '0x' + utils.encode_hex(v) return '0x' + utils.encode_hex(v)
class LevelDBReader(object): class LevelDBReader(object):
''' """
level db reading interface, can be used with snapshot level db reading interface, can be used with snapshot
''' """
def __init__(self, db): def __init__(self, db):
self.db = db self.db = db
@ -50,125 +50,116 @@ class LevelDBReader(object):
self.head_state = None self.head_state = None
def _get_head_state(self): def _get_head_state(self):
''' """
gets head state gets head state
''' """
if not self.head_state: if not self.head_state:
root = self._get_head_block().state_root root = self._get_head_block().state_root
self.head_state = State(self.db, root) self.head_state = State(self.db, root)
return self.head_state return self.head_state
def _get_account(self, address): def _get_account(self, address):
''' """
gets account by address gets account by address
''' """
state = self._get_head_state() state = self._get_head_state()
account_address = binascii.a2b_hex(utils.remove_0x_head(address)) account_address = binascii.a2b_hex(utils.remove_0x_head(address))
return state.get_and_cache_account(account_address) return state.get_and_cache_account(account_address)
def _get_block_hash(self, number): def _get_block_hash(self, number):
''' """
gets block hash by block number gets block hash by block number
''' """
num = _format_block_number(number) num = _format_block_number(number)
hash_key = header_prefix + num + num_suffix hash_key = header_prefix + num + num_suffix
return self.db.get(hash_key) return self.db.get(hash_key)
def _get_head_block(self): def _get_head_block(self):
''' """
gets head block header gets head block header
''' """
if not self.head_block_header: if not self.head_block_header:
hash = self.db.get(head_header_key) block_hash = self.db.get(head_header_key)
num = self._get_block_number(hash) num = self._get_block_number(block_hash)
self.head_block_header = self._get_block_header(hash, num) self.head_block_header = self._get_block_header(block_hash, num)
# find header with valid state # find header with valid state
while not self.db.get(self.head_block_header.state_root) and self.head_block_header.prevhash is not None: while not self.db.get(self.head_block_header.state_root) and self.head_block_header.prevhash is not None:
hash = self.head_block_header.prevhash block_hash = self.head_block_header.prevhash
num = self._get_block_number(hash) num = self._get_block_number(block_hash)
self.head_block_header = self._get_block_header(hash, num) self.head_block_header = self._get_block_header(block_hash, num)
return self.head_block_header return self.head_block_header
def _get_block_number(self, hash): def _get_block_number(self, block_hash):
''' """Get block number by its hash"""
gets block number by hash number_key = block_hash_prefix + block_hash
'''
number_key = block_hash_prefix + hash
return self.db.get(number_key) return self.db.get(number_key)
def _get_block_header(self, hash, num): def _get_block_header(self, block_hash, num):
''' """Get block header by block header hash & number"""
get block header by block header hash & number header_key = header_prefix + num + block_hash
'''
header_key = header_prefix + num + hash
block_header_data = self.db.get(header_key) block_header_data = self.db.get(header_key)
header = rlp.decode(block_header_data, sedes=BlockHeader) header = rlp.decode(block_header_data, sedes=BlockHeader)
return header return header
def _get_address_by_hash(self, hash): def _get_address_by_hash(self, block_hash):
''' """Get mapped address by its hash"""
get mapped address by its hash address_key = address_prefix + block_hash
'''
address_key = address_prefix + hash
return self.db.get(address_key) return self.db.get(address_key)
def _get_last_indexed_number(self): def _get_last_indexed_number(self):
''' """Get latest indexed block number"""
latest indexed block number
'''
return self.db.get(address_mapping_head_key) return self.db.get(address_mapping_head_key)
def _get_block_receipts(self, hash, num): def _get_block_receipts(self, block_hash, num):
''' """Get block transaction receipts by block header hash & number"""
get block transaction receipts by block header hash & number
'''
number = _format_block_number(num) number = _format_block_number(num)
receipts_key = block_receipts_prefix + number + hash receipts_key = block_receipts_prefix + number + block_hash
receipts_data = self.db.get(receipts_key) receipts_data = self.db.get(receipts_key)
receipts = rlp.decode(receipts_data, sedes=CountableList(ReceiptForStorage)) receipts = rlp.decode(receipts_data, sedes=CountableList(ReceiptForStorage))
return receipts return receipts
class LevelDBWriter(object): class LevelDBWriter(object):
''' """
level db writing interface level db writing interface
''' """
def __init__(self, db): def __init__(self, db):
self.db = db self.db = db
self.wb = None self.wb = None
def _set_last_indexed_number(self, number): def _set_last_indexed_number(self, number):
''' """
sets latest indexed block number sets latest indexed block number
''' """
return self.db.put(address_mapping_head_key, _format_block_number(number)) return self.db.put(address_mapping_head_key, _format_block_number(number))
def _start_writing(self): def _start_writing(self):
''' """
start writing a batch start writing a batch
''' """
self.wb = self.db.write_batch() self.wb = self.db.write_batch()
def _commit_batch(self): def _commit_batch(self):
''' """
commit batch commit batch
''' """
self.wb.write() self.wb.write()
def _store_account_address(self, address): def _store_account_address(self, address):
''' """
get block transaction receipts by block header hash & number get block transaction receipts by block header hash & number
''' """
address_key = address_prefix + utils.sha3(address) address_key = address_prefix + utils.sha3(address)
self.wb.put(address_key, address) self.wb.put(address_key, address)
class EthLevelDB(object): class EthLevelDB(object):
''' """
Go-Ethereum LevelDB client class Go-Ethereum LevelDB client class
''' """
def __init__(self, path): def __init__(self, path):
self.path = path self.path = path
@ -177,9 +168,9 @@ class EthLevelDB(object):
self.writer = LevelDBWriter(self.db) self.writer = LevelDBWriter(self.db)
def get_contracts(self): def get_contracts(self):
''' """
iterate through all contracts iterate through all contracts
''' """
for account in self.reader._get_head_state().get_all_accounts(): for account in self.reader._get_head_state().get_all_accounts():
if account.code is not None: if account.code is not None:
code = _encode_hex(account.code) code = _encode_hex(account.code)
@ -188,9 +179,9 @@ class EthLevelDB(object):
yield contract, account.address, account.balance yield contract, account.address, account.balance
def search(self, expression, callback_func): def search(self, expression, callback_func):
''' """
searches through all contract accounts searches through all contract accounts
''' """
cnt = 0 cnt = 0
indexer = AccountIndexer(self) indexer = AccountIndexer(self)
@ -216,28 +207,26 @@ class EthLevelDB(object):
if not cnt % 1000: if not cnt % 1000:
logging.info("Searched %d contracts" % cnt) logging.info("Searched %d contracts" % cnt)
def contract_hash_to_address(self, hash): def contract_hash_to_address(self, contract_hash):
''' """Tries to find corresponding account address"""
tries to find corresponding account address
'''
address_hash = binascii.a2b_hex(utils.remove_0x_head(hash)) address_hash = binascii.a2b_hex(utils.remove_0x_head(contract_hash))
indexer = AccountIndexer(self) indexer = AccountIndexer(self)
return _encode_hex(indexer.get_contract_by_hash(address_hash)) return _encode_hex(indexer.get_contract_by_hash(address_hash))
def eth_getBlockHeaderByNumber(self, number): def eth_getBlockHeaderByNumber(self, number):
''' """
gets block header by block number gets block header by block number
''' """
hash = self.reader._get_block_hash(number) block_hash = self.reader._get_block_hash(number)
block_number = _format_block_number(number) block_number = _format_block_number(number)
return self.reader._get_block_header(hash, block_number) return self.reader._get_block_header(block_hash, block_number)
def eth_getBlockByNumber(self, number): def eth_getBlockByNumber(self, number):
''' """
gets block body by block number gets block body by block number
''' """
block_hash = self.reader._get_block_hash(number) block_hash = self.reader._get_block_hash(number)
block_number = _format_block_number(number) block_number = _format_block_number(number)
body_key = body_prefix + block_number + block_hash body_key = body_prefix + block_number + block_hash
@ -246,22 +235,22 @@ class EthLevelDB(object):
return body return body
def eth_getCode(self, address): def eth_getCode(self, address):
''' """
gets account code gets account code
''' """
account = self.reader._get_account(address) account = self.reader._get_account(address)
return _encode_hex(account.code) return _encode_hex(account.code)
def eth_getBalance(self, address): def eth_getBalance(self, address):
''' """
gets account balance gets account balance
''' """
account = self.reader._get_account(address) account = self.reader._get_account(address)
return account.balance return account.balance
def eth_getStorageAt(self, address, position): def eth_getStorageAt(self, address, position):
''' """
gets account storage data at position gets account storage data at position
''' """
account = self.reader._get_account(address) account = self.reader._get_account(address)
return _encode_hex(utils.zpad(utils.encode_int(account.get_storage_data(position)), 32)) return _encode_hex(utils.zpad(utils.encode_int(account.get_storage_data(position)), 32))

@ -3,27 +3,27 @@ from ethereum.db import BaseDB
class ETH_DB(BaseDB): class ETH_DB(BaseDB):
''' """
adopts pythereum BaseDB using plyvel adopts pythereum BaseDB using plyvel
''' """
def __init__(self, path): def __init__(self, path):
self.db = plyvel.DB(path) self.db = plyvel.DB(path)
def get(self, key): def get(self, key):
''' """
gets value for key gets value for key
''' """
return self.db.get(key) return self.db.get(key)
def put(self, key, value): def put(self, key, value):
''' """
puts value for key puts value for key
''' """
self.db.put(key, value) self.db.put(key, value)
def write_batch(self): def write_batch(self):
''' """
start writing a batch start writing a batch
''' """
return self.db.write_batch() return self.db.write_batch()

@ -32,9 +32,9 @@ STATE_DEFAULTS = {
class Account(rlp.Serializable): class Account(rlp.Serializable):
''' """
adjusted account from ethereum.state adjusted account from ethereum.state
''' """
fields = [ fields = [
('nonce', big_endian_int), ('nonce', big_endian_int),
@ -43,9 +43,9 @@ class Account(rlp.Serializable):
('code_hash', hash32) ('code_hash', hash32)
] ]
def __init__(self, nonce, balance, storage, code_hash, db, address): def __init__(self, nonce, balance, storage, code_hash, db, addr):
self.db = db self.db = db
self.address = address self.address = addr
super(Account, self).__init__(nonce, balance, storage, code_hash) super(Account, self).__init__(nonce, balance, storage, code_hash)
self.storage_cache = {} self.storage_cache = {}
self.storage_trie = SecureTrie(Trie(self.db)) self.storage_trie = SecureTrie(Trie(self.db))
@ -57,15 +57,15 @@ class Account(rlp.Serializable):
@property @property
def code(self): def code(self):
''' """
code rlp data code rlp data
''' """
return self.db.get(self.code_hash) return self.db.get(self.code_hash)
def get_storage_data(self, key): def get_storage_data(self, key):
''' """
get storage data get storage data
''' """
if key not in self.storage_cache: if key not in self.storage_cache:
v = self.storage_trie.get(utils.encode_int32(key)) v = self.storage_trie.get(utils.encode_int32(key))
self.storage_cache[key] = utils.big_endian_to_int( self.storage_cache[key] = utils.big_endian_to_int(
@ -73,25 +73,25 @@ class Account(rlp.Serializable):
return self.storage_cache[key] return self.storage_cache[key]
@classmethod @classmethod
def blank_account(cls, db, address, initial_nonce=0): def blank_account(cls, db, addr, initial_nonce=0):
''' """
creates a blank account creates a blank account
''' """
db.put(BLANK_HASH, b'') db.put(BLANK_HASH, b'')
o = cls(initial_nonce, 0, trie.BLANK_ROOT, BLANK_HASH, db, address) o = cls(initial_nonce, 0, trie.BLANK_ROOT, BLANK_HASH, db, addr)
o.existent_at_start = False o.existent_at_start = False
return o return o
def is_blank(self): def is_blank(self):
''' """
checks if is a blank account checks if is a blank account
''' """
return self.nonce == 0 and self.balance == 0 and self.code_hash == BLANK_HASH return self.nonce == 0 and self.balance == 0 and self.code_hash == BLANK_HASH
class State(): class State:
''' """
adjusted state from ethereum.state adjusted state from ethereum.state
''' """
def __init__(self, db, root): def __init__(self, db, root):
self.db = db self.db = db
@ -100,29 +100,29 @@ class State():
self.journal = [] self.journal = []
self.cache = {} self.cache = {}
def get_and_cache_account(self, address): def get_and_cache_account(self, addr):
''' """Gets and caches an account for an addres, creates blank if not found"""
gets and caches an account for an addres, creates blank if not found
''' if addr in self.cache:
if address in self.cache: return self.cache[addr]
return self.cache[address] rlpdata = self.secure_trie.get(addr)
rlpdata = self.secure_trie.get(address) if rlpdata == trie.BLANK_NODE and len(addr) == 32: # support for hashed addresses
if rlpdata == trie.BLANK_NODE and len(address) == 32: # support for hashed addresses rlpdata = self.trie.get(addr)
rlpdata = self.trie.get(address)
if rlpdata != trie.BLANK_NODE: if rlpdata != trie.BLANK_NODE:
o = rlp.decode(rlpdata, Account, db=self.db, address=address) o = rlp.decode(rlpdata, Account, db=self.db, address=addr)
else: else:
o = Account.blank_account( o = Account.blank_account(
self.db, address, 0) self.db, addr, 0)
self.cache[address] = o self.cache[addr] = o
o._mutable = True o._mutable = True
o._cached_rlp = None o._cached_rlp = None
return o return o
def get_all_accounts(self): def get_all_accounts(self):
''' """
iterates through trie to and yields non-blank leafs as accounts iterates through trie to and yields non-blank leafs as accounts
''' """
for address_hash, rlpdata in self.secure_trie.trie.iter_branch(): for address_hash, rlpdata in self.secure_trie.trie.iter_branch():
if rlpdata != trie.BLANK_NODE: if rlpdata != trie.BLANK_NODE:
yield rlp.decode(rlpdata, Account, db=self.db, address=address_hash) yield rlp.decode(rlpdata, Account, db=self.db, address=address_hash)

@ -20,64 +20,64 @@ class BaseClient(object):
pass pass
def eth_coinbase(self): def eth_coinbase(self):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_coinbase https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_coinbase
TESTED TESTED
''' """
return self._call('eth_coinbase') return self._call('eth_coinbase')
def eth_blockNumber(self): def eth_blockNumber(self):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_blocknumber https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_blocknumber
TESTED TESTED
''' """
return hex_to_dec(self._call('eth_blockNumber')) return hex_to_dec(self._call('eth_blockNumber'))
def eth_getBalance(self, address=None, block=BLOCK_TAG_LATEST): def eth_getBalance(self, address=None, block=BLOCK_TAG_LATEST):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getbalance https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getbalance
TESTED TESTED
''' """
address = address or self.eth_coinbase() address = address or self.eth_coinbase()
block = validate_block(block) block = validate_block(block)
return hex_to_dec(self._call('eth_getBalance', [address, block])) return hex_to_dec(self._call('eth_getBalance', [address, block]))
def eth_getStorageAt(self, address=None, position=0, block=BLOCK_TAG_LATEST): def eth_getStorageAt(self, address=None, position=0, block=BLOCK_TAG_LATEST):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getstorageat https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getstorageat
TESTED TESTED
''' """
block = validate_block(block) block = validate_block(block)
return self._call('eth_getStorageAt', [address, hex(position), block]) return self._call('eth_getStorageAt', [address, hex(position), block])
def eth_getCode(self, address, default_block=BLOCK_TAG_LATEST): def eth_getCode(self, address, default_block=BLOCK_TAG_LATEST):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getcode https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getcode
NEEDS TESTING NEEDS TESTING
''' """
if isinstance(default_block, str): if isinstance(default_block, str):
if default_block not in BLOCK_TAGS: if default_block not in BLOCK_TAGS:
raise ValueError raise ValueError
return self._call('eth_getCode', [address, default_block]) return self._call('eth_getCode', [address, default_block])
def eth_getBlockByNumber(self, block=BLOCK_TAG_LATEST, tx_objects=True): def eth_getBlockByNumber(self, block=BLOCK_TAG_LATEST, tx_objects=True):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber
TESTED TESTED
''' """
block = validate_block(block) block = validate_block(block)
return self._call('eth_getBlockByNumber', [block, tx_objects]) return self._call('eth_getBlockByNumber', [block, tx_objects])
def eth_getTransactionReceipt(self, tx_hash): def eth_getTransactionReceipt(self, tx_hash):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionreceipt https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionreceipt
TESTED TESTED
''' """
return self._call('eth_getTransactionReceipt', [tx_hash]) return self._call('eth_getTransactionReceipt', [tx_hash])

@ -17,9 +17,9 @@ JSON_MEDIA_TYPE = 'application/json'
This code is adapted from: https://github.com/ConsenSys/ethjsonrpc This code is adapted from: https://github.com/ConsenSys/ethjsonrpc
''' '''
class EthJsonRpc(BaseClient): class EthJsonRpc(BaseClient):
''' """
Ethereum JSON-RPC client class Ethereum JSON-RPC client class
''' """
def __init__(self, host='localhost', port=GETH_DEFAULT_RPC_PORT, tls=False): def __init__(self, host='localhost', port=GETH_DEFAULT_RPC_PORT, tls=False):
self.host = host self.host = host

@ -2,17 +2,17 @@ from .constants import BLOCK_TAGS
def hex_to_dec(x): def hex_to_dec(x):
''' """
Convert hex to decimal Convert hex to decimal
''' """
return int(x, 16) return int(x, 16)
def clean_hex(d): def clean_hex(d):
''' """
Convert decimal to hex and remove the "L" suffix that is appended to large Convert decimal to hex and remove the "L" suffix that is appended to large
numbers numbers
''' """
return hex(d).rstrip('L') return hex(d).rstrip('L')
def validate_block(block): def validate_block(block):
@ -25,14 +25,14 @@ def validate_block(block):
def wei_to_ether(wei): def wei_to_ether(wei):
''' """
Convert wei to ether Convert wei to ether
''' """
return 1.0 * wei / 10**18 return 1.0 * wei / 10**18
def ether_to_wei(ether): def ether_to_wei(ether):
''' """
Convert ether to wei Convert ether to wei
''' """
return ether * 10**18 return ether * 10**18

@@ -5,7 +5,7 @@ (mythril/interfaces/cli.py)
    http://www.github.com/ConsenSys/mythril
 """
 
-import logging
+import logging, coloredlogs
 import json
 import sys
 import argparse

@@ -15,10 +15,11 @@ import argparse
 
 from mythril.exceptions import CriticalError, AddressNotFoundError
 from mythril.mythril import Mythril
 from mythril.version import VERSION
+import mythril.support.signatures as sigs
 
 
-def exit_with_error(format, message):
-    if format == 'text' or format == 'markdown':
+def exit_with_error(format_, message):
+    if format_ == 'text' or format_ == 'markdown':
         print(message)
     else:
         result = {'success': False, 'error': str(message), 'issues': []}

@@ -69,7 +70,11 @@ def main():
     options = parser.add_argument_group('options')
     options.add_argument('-m', '--modules', help='Comma-separated list of security analysis modules', metavar='MODULES')
     options.add_argument('--max-depth', type=int, default=22, help='Maximum recursion depth for symbolic execution')
-    options.add_argument('--strategy', choices=['dfs', 'bfs'], default='dfs', help='Symbolic execution strategy')
+    options.add_argument('--strategy', choices=['dfs', 'bfs', 'naive-random', 'weighted-random'],
+                         default='dfs', help='Symbolic execution strategy')
+    options.add_argument('--max-transaction-count', type=int, default=3, help='Maximum number of transactions issued by laser')
     options.add_argument('--execution-timeout', type=int, default=600, help="The amount of seconds to spend on symbolic execution")
     options.add_argument('--create-timeout', type=int, default=10, help="The amount of seconds to spend on "
                                                                         "the initial contract creation")

@@ -77,6 +82,7 @@ def main():
     options.add_argument('--phrack', action='store_true', help='Phrack-style call graph')
     options.add_argument('--enable-physics', action='store_true', help='enable graph physics simulation')
     options.add_argument('-v', type=int, help='log level (0-2)', metavar='LOG_LEVEL')
+    options.add_argument('-q', '--query-signature', action='store_true', help='Lookup function signatures through www.4byte.directory')
 
     rpc = parser.add_argument_group('RPC options')
     rpc.add_argument('-i', action='store_true', help='Preset: Infura Node service (Mainnet)')

@@ -101,12 +107,20 @@ def main():
         parser.print_help()
         sys.exit()
 
     if args.v:
         if 0 <= args.v < 3:
-            logging.basicConfig(level=[logging.NOTSET, logging.INFO, logging.DEBUG][args.v])
+            coloredlogs.install(
+                fmt='%(name)s[%(process)d] %(levelname)s %(message)s',
+                level=[logging.NOTSET, logging.INFO, logging.DEBUG][args.v]
+            )
         else:
             exit_with_error(args.outform, "Invalid -v value, you can find valid values in usage")
 
+    if args.query_signature:
+        if sigs.ethereum_input_decoder == None:
+            exit_with_error(args.outform, "The --query-signature function requires the python package ethereum-input-decoder")
+
     # -- commands --
     if args.hash:
         print(Mythril.hash_for_function_signature(args.hash))

@@ -118,7 +132,8 @@ def main():
     # solc_args = None, dynld = None, max_recursion_depth = 12):
 
     mythril = Mythril(solv=args.solv, dynld=args.dynld,
-                      solc_args=args.solc_args)
+                      solc_args=args.solc_args,
+                      enable_online_lookup=args.query_signature)
 
     if args.dynld and not (args.rpc or args.i):
         mythril.set_api_from_config_path()

@@ -215,7 +230,8 @@ def main():
                 modules=[m.strip() for m in args.modules.strip().split(",")] if args.modules else [],
                 verbose_report=args.verbose_report,
                 max_depth=args.max_depth, execution_timeout=args.execution_timeout,
-                create_timeout=args.create_timeout)
+                create_timeout=args.create_timeout,
+                max_transaction_count=args.max_transaction_count)
 
             outputs = {
                 'json': report.as_json(),
                 'text': report.as_text(),

@@ -48,8 +48,8 @@ def get_callee_address(global_state:GlobalState, dynamic_loader: DynLoader, symb
     try:
         callee_address = hex(util.get_concrete_int(symbolic_to_address))
-    except AttributeError:
-        logging.info("Symbolic call encountered")
+    except TypeError:
+        logging.debug("Symbolic call encountered")
         match = re.search(r'storage_(\d+)', str(simplify(symbolic_to_address)))
         logging.debug("CALL to: " + str(simplify(symbolic_to_address)))

@@ -58,11 +58,12 @@ def get_callee_address(global_state:GlobalState, dynamic_loader: DynLoader, symb
             raise ValueError()

         index = int(match.group(1))
-        logging.info("Dynamic contract address at storage index {}".format(index))
+        logging.debug("Dynamic contract address at storage index {}".format(index))

         # attempt to read the contract address from instance storage
         try:
             callee_address = dynamic_loader.read_storage(environment.active_account.address, index)
+        # TODO: verify whether this happens or not
         except:
             logging.debug("Error accessing contract storage.")
             raise ValueError

@@ -89,22 +90,22 @@ def get_callee_account(global_state, callee_address, dynamic_loader):
         return global_state.accounts[callee_address]
     except KeyError:
         # We have a valid call address, but contract is not in the modules list
-        logging.info("Module with address " + callee_address + " not loaded.")
+        logging.debug("Module with address " + callee_address + " not loaded.")

     if dynamic_loader is None:
         raise ValueError()

-    logging.info("Attempting to load dependency")
+    logging.debug("Attempting to load dependency")

     try:
         code = dynamic_loader.dynld(environment.active_account.address, callee_address)
-    except Exception as e:
-        logging.info("Unable to execute dynamic loader.")
+    except Exception:
+        logging.debug("Unable to execute dynamic loader.")
         raise ValueError()
     if code is None:
-        logging.info("No code returned, not a contract account?")
+        logging.debug("No code returned, not a contract account?")
         raise ValueError()
-    logging.info("Dependency loaded: " + callee_address)
+    logging.debug("Dependency loaded: " + callee_address)

     callee_account = Account(callee_address, code, callee_address, dynamic_loader=dynamic_loader)
     accounts[callee_address] = callee_account

@@ -112,7 +113,6 @@ def get_callee_account(global_state, callee_address, dynamic_loader):
     return callee_account

 def get_call_data(global_state, memory_start, memory_size, pad=True):
     """
     Gets call_data from the global_state

@@ -145,7 +145,7 @@ def get_call_data(global_state, memory_start, memory_size, pad=True):
         )
         call_data_type = CalldataType.CONCRETE
         logging.debug("Calldata: " + str(call_data))
-    except AttributeError:
+    except TypeError:
         logging.info("Unsupported symbolic calldata offset")
         call_data_type = CalldataType.SYMBOLIC
         call_data = Calldata('{}_internalcall'.format(transaction_id))

@@ -175,7 +175,7 @@ class Instruction:
                 result = simplify(Concat(BitVecVal(0, 248), Extract(offset + 7, offset, op1)))
             else:
                 result = 0
-        except AttributeError:
+        except TypeError:
             logging.debug("BYTE: Unsupported symbolic byte offset")
             result = global_state.new_bitvec(str(simplify(op1)) + "[" + str(simplify(op0)) + "]", 256)

@@ -265,18 +265,17 @@ class Instruction:
        try:
            s0 = util.get_concrete_int(s0)
            s1 = util.get_concrete_int(s1)
+        except TypeError:
+            return []

        if s0 <= 31:
            testbit = s0 * 8 + 7
            if s1 & (1 << testbit):
                state.stack.append(s1 | (TT256 - (1 << testbit)))
            else:
                state.stack.append(s1 & ((1 << testbit) - 1))
        else:
            state.stack.append(s1)
-        # TODO: broad exception handler
-        except:
-            return []

        return [global_state]

@@ -371,24 +370,22 @@ class Instruction:
        try:
            mstart = util.get_concrete_int(op0)
-        # FIXME: broad exception catch
-        except:
+        except TypeError:
            logging.debug("Unsupported symbolic memory offset in CALLDATACOPY")
            return [global_state]

        dstart_sym = False
        try:
            dstart = util.get_concrete_int(op1)
-        # FIXME: broad exception catch
-        except:
+        except TypeError:
+            logging.debug("Unsupported symbolic calldata offset in CALLDATACOPY")
            dstart = simplify(op1)
            dstart_sym = True

        size_sym = False
        try:
            size = util.get_concrete_int(op2)
-        # FIXME: broad exception catch
-        except:
+        except TypeError:
            logging.debug("Unsupported symbolic size in CALLDATACOPY")
            size = simplify(op2)
            size_sym = True

@@ -403,8 +400,7 @@ class Instruction:
        if size > 0:
            try:
                state.mem_extend(mstart, size)
-            # FIXME: broad exception catch
-            except:
+            except TypeError:
                logging.debug("Memory allocation error: mstart = " + str(mstart) + ", size = " + str(size))
                state.mem_extend(mstart, 1)
                state.memory[mstart] = global_state.new_bitvec(

@@ -422,7 +418,7 @@ class Instruction:
            for i in range(0, len(new_memory), 32):
                state.memory[i+mstart] = simplify(Concat(new_memory[i:i+32]))
-        except:
+        except IndexError:
            logging.debug("Exception copying calldata to memory")
            state.memory[mstart] = global_state.new_bitvec(

@@ -472,13 +468,11 @@ class Instruction:
        global keccak_function_manager

        state = global_state.mstate
-        environment = global_state.environment
        op0, op1 = state.stack.pop(), state.stack.pop()

        try:
            index, length = util.get_concrete_int(op0), util.get_concrete_int(op1)
-        # FIXME: broad exception catch
-        except:
+        except TypeError:
            # Can't access symbolic memory offsets
            if is_expr(op0):
                op0 = simplify(op0)

@@ -490,7 +484,7 @@ class Instruction:
            data = b''.join([util.get_concrete_int(i).to_bytes(1, byteorder='big')
                             for i in state.memory[index: index + length]])
-        except AttributeError:
+        except TypeError:
            argument = str(state.memory[index]).replace(" ", "_")
            result = BitVec("KECCAC[{}]".format(argument), 256)

@@ -515,14 +509,14 @@ class Instruction:
        try:
            concrete_memory_offset = helper.get_concrete_int(memory_offset)
-        except AttributeError:
+        except TypeError:
            logging.debug("Unsupported symbolic memory offset in CODECOPY")
            return [global_state]

        try:
            concrete_size = helper.get_concrete_int(size)
            global_state.mstate.mem_extend(concrete_memory_offset, concrete_size)
-        except:
+        except TypeError:
+            # except both attribute error and Exception
            global_state.mstate.mem_extend(concrete_memory_offset, 1)
            global_state.mstate.memory[concrete_memory_offset] = \

@@ -531,7 +525,7 @@ class Instruction:
        try:
            concrete_code_offset = helper.get_concrete_int(code_offset)
-        except AttributeError:
+        except TypeError:
            logging.debug("Unsupported symbolic code offset in CODECOPY")
            global_state.mstate.mem_extend(concrete_memory_offset, concrete_size)
            for i in range(concrete_size):

@@ -565,7 +559,7 @@ class Instruction:
        environment = global_state.environment
        try:
            addr = hex(helper.get_concrete_int(addr))
-        except AttributeError:
+        except TypeError:
            logging.info("unsupported symbolic address for EXTCODESIZE")
            state.stack.append(global_state.new_bitvec("extcodesize_" + str(addr), 256))
            return [global_state]

@@ -639,7 +633,7 @@ class Instruction:
        try:
            offset = util.get_concrete_int(op0)
-        except AttributeError:
+        except TypeError:
            logging.debug("Can't MLOAD from symbolic index")
            data = global_state.new_bitvec("mem[" + str(simplify(op0)) + "]", 256)
            state.stack.append(data)

@@ -664,7 +658,7 @@ class Instruction:
        try:
            mstart = util.get_concrete_int(op0)
-        except AttributeError:
+        except TypeError:
            logging.debug("MSTORE to symbolic index. Not supported")
            return [global_state]

@@ -678,16 +672,11 @@ class Instruction:
        try:
            # Attempt to concretize value
            _bytes = util.concrete_int_to_bytes(value)
-            i = 0
-            for b in _bytes:
-                state.memory[mstart + i] = _bytes[i]
-                i += 1
+            state.memory[mstart: mstart + len(_bytes)] = _bytes
        except:
            try:
                state.memory[mstart] = value
-            except:
+            except TypeError:
                logging.debug("Invalid memory access")
                return [global_state]

@@ -699,7 +688,7 @@ class Instruction:
        try:
            offset = util.get_concrete_int(op0)
-        except AttributeError:
+        except TypeError:
            logging.debug("MSTORE to symbolic index. Not supported")
            return [global_state]

@@ -720,7 +709,7 @@ class Instruction:
            index = util.get_concrete_int(index)
            return self._sload_helper(global_state, index)
-        except AttributeError:
+        except TypeError:
            if not keccak_function_manager.is_keccak(index):
                return self._sload_helper(global_state, str(index))

@@ -748,7 +737,8 @@ class Instruction:
        return self._sload_helper(global_state, str(index))

-    def _sload_helper(self, global_state, index, constraints=None):
+    @staticmethod
+    def _sload_helper(global_state, index, constraints=None):
        try:
            data = global_state.environment.active_account.storage[index]
        except KeyError:

@@ -761,8 +751,8 @@ class Instruction:
        global_state.mstate.stack.append(data)
        return [global_state]

-    def _get_constraints(self, keccak_keys, this_key, argument):
+    @staticmethod
+    def _get_constraints(keccak_keys, this_key, argument):
        global keccak_function_manager
        for keccak_key in keccak_keys:
            if keccak_key == this_key:

@@ -781,7 +771,7 @@ class Instruction:
        try:
            index = util.get_concrete_int(index)
            return self._sstore_helper(global_state, index, value)
-        except AttributeError:
+        except TypeError:
            is_keccak = keccak_function_manager.is_keccak(index)
            if not is_keccak:
                return self._sstore_helper(global_state, str(index), value)

@@ -812,7 +802,8 @@ class Instruction:
        return self._sstore_helper(global_state, str(index), value)

-    def _sstore_helper(self, global_state, index, value, constraint=None):
+    @staticmethod
+    def _sstore_helper(global_state, index, value, constraint=None):
        try:
            global_state.environment.active_account = deepcopy(global_state.environment.active_account)
            global_state.accounts[

@@ -834,7 +825,7 @@ class Instruction:
        disassembly = global_state.environment.code
        try:
            jump_addr = util.get_concrete_int(state.stack.pop())
-        except AttributeError:
+        except TypeError:
            raise InvalidJumpDestination("Invalid jump argument (symbolic address)")
        except IndexError:
            raise StackUnderflowException()

@@ -864,8 +855,7 @@ class Instruction:
        try:
            jump_addr = util.get_concrete_int(op0)
-        # FIXME: to broad exception handler
-        except:
+        except TypeError:
            logging.debug("Skipping JUMPI to invalid destination.")
            global_state.mstate.pc += 1
            return [global_state]

@@ -925,7 +915,7 @@ class Instruction:
        state = global_state.mstate
        dpth = int(self.op_code[3:])
        state.stack.pop(), state.stack.pop()
-        [state.stack.pop() for x in range(dpth)]
+        [state.stack.pop() for _ in range(dpth)]
        # Not supported
        return [global_state]

@@ -945,7 +935,7 @@ class Instruction:
        return_data = [global_state.new_bitvec("return_data", 256)]
        try:
            return_data = state.memory[util.get_concrete_int(offset):util.get_concrete_int(offset + length)]
-        except AttributeError:
+        except TypeError:
            logging.debug("Return with symbolic length or offset. Not supported")
        global_state.current_transaction.end(global_state, return_data)

@@ -970,7 +960,14 @@ class Instruction:
    @StateTransition()
    def revert_(self, global_state):
-        return []
+        state = global_state.mstate
+        offset, length = state.stack.pop(), state.stack.pop()
+        return_data = [global_state.new_bitvec("return_data", 256)]
+        try:
+            return_data = state.memory[util.get_concrete_int(offset):util.get_concrete_int(offset + length)]
+        except TypeError:
+            logging.debug("Return with symbolic length or offset. Not supported")
+        global_state.current_transaction.end(global_state, return_data=return_data, revert=True)

    @StateTransition()
    def assert_fail_(self, global_state):

@@ -995,7 +992,7 @@ class Instruction:
            callee_address, callee_account, call_data, value, call_data_type, gas, memory_out_offset, memory_out_size = get_call_parameters(
                global_state, self.dynamic_loader, True)
        except ValueError as e:
-            logging.info(
+            logging.debug(
                "Could not determine required parameters for call, putting fresh symbol on the stack. \n{}".format(e)
            )
            # TODO: decide what to do in this case

@@ -1012,7 +1009,7 @@ class Instruction:
        try:
            mem_out_start = helper.get_concrete_int(memory_out_offset)
            mem_out_sz = memory_out_size.as_long()
-        except AttributeError:
+        except TypeError:
            logging.debug("CALL with symbolic start or offset not supported")
            return [global_state]

@@ -1069,7 +1066,7 @@ class Instruction:
        try:
            memory_out_offset = util.get_concrete_int(memory_out_offset) if isinstance(memory_out_offset, ExprRef) else memory_out_offset
            memory_out_size = util.get_concrete_int(memory_out_size) if isinstance(memory_out_size, ExprRef) else memory_out_size
-        except AttributeError:
+        except TypeError:
            global_state.mstate.stack.append(global_state.new_bitvec("retval_" + str(instr['address']), 256))
            return [global_state]

@@ -1137,7 +1134,7 @@ class Instruction:
        try:
            memory_out_offset = util.get_concrete_int(memory_out_offset) if isinstance(memory_out_offset, ExprRef) else memory_out_offset
            memory_out_size = util.get_concrete_int(memory_out_size) if isinstance(memory_out_size, ExprRef) else memory_out_size
-        except AttributeError:
+        except TypeError:
            global_state.mstate.stack.append(global_state.new_bitvec("retval_" + str(instr['address']), 256))
            return [global_state]

@@ -1209,7 +1206,7 @@ class Instruction:
            memory_out_offset = util.get_concrete_int(memory_out_offset) if isinstance(memory_out_offset,
                                                                                       ExprRef) else memory_out_offset
            memory_out_size = util.get_concrete_int(memory_out_size) if isinstance(memory_out_size,
                                                                                   ExprRef) else memory_out_size
-        except AttributeError:
+        except TypeError:
            global_state.mstate.stack.append(global_state.new_bitvec("retval_" + str(instr['address']), 256))
            return [global_state]

@@ -349,10 +349,10 @@ class GlobalState:
    def new_bitvec(self, name, size=256):
        transaction_id = self.current_transaction.id
-        node_id = self.node.uid
        return BitVec("{}_{}".format(transaction_id, name), size)

class WorldState:
    """
    The WorldState class represents the world state as described in the yellow paper

@@ -0,0 +1,25 @@
from abc import ABC, abstractmethod
class BasicSearchStrategy(ABC):
__slots__ = 'work_list', 'max_depth'
def __init__(self, work_list, max_depth):
self.work_list = work_list
self.max_depth = max_depth
def __iter__(self):
return self
@abstractmethod
def get_strategic_global_state(self):
raise NotImplementedError("Must be implemented by a subclass")
def __next__(self):
try:
global_state = self.get_strategic_global_state()
if global_state.mstate.depth >= self.max_depth:
return self.__next__()
return global_state
except IndexError:
raise StopIteration
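
A brief illustration, not part of the diff: the executor consumes a strategy simply by iterating it, which is what the __iter__/__next__ protocol above enables. The sketch below assumes the module path introduced by this PR (mythril.laser.ethereum.strategy); FIFOStrategy and the fake states are illustrative stand-ins only.

    from types import SimpleNamespace
    from mythril.laser.ethereum.strategy import BasicSearchStrategy  # path added in this PR

    class FIFOStrategy(BasicSearchStrategy):
        """Toy strategy for illustration: always take the oldest state."""
        def get_strategic_global_state(self):
            return self.work_list.pop(0)

    # Fake states exposing only the attribute the base class inspects (mstate.depth)
    states = [SimpleNamespace(mstate=SimpleNamespace(depth=d)) for d in (1, 2, 99)]
    strategy = FIFOStrategy(states, max_depth=10)

    for global_state in strategy:  # the state at depth 99 is skipped by the max_depth check
        print("executing state at depth", global_state.mstate.depth)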

@@ -1,54 +1,67 @@
 """
 This module implements basic symbolic execution search strategies
 """
+from random import randrange
+from . import BasicSearchStrategy
+
+try:
+    from random import choices
+except ImportError:
+    # This is for supporting python versions < 3.6
+    from itertools import accumulate
+    from random import random
+    from bisect import bisect
+
+    def choices(population, weights=None):
+        """
+        Returns a random element out of the population based on weight.
+        If the relative weights or cumulative weights are not specified,
+        the selections are made with equal probability.
+        """
+        if weights is None:
+            return [population[int(random() * len(population))]]
+        cum_weights = accumulate(weights)
+        return [population[bisect(cum_weights, random()*cum_weights[-1], 0, len(population)-1)]]

-class DepthFirstSearchStrategy:
+class DepthFirstSearchStrategy(BasicSearchStrategy):
     """
     Implements a depth first search strategy
     I.E. Follow one path to a leaf, and then continue to the next one
     """
-    def __init__(self, work_list, max_depth):
-        self.work_list = work_list
-        self.max_depth = max_depth
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        """ Picks the next state to execute """
-        try:
-            # This strategies assumes that new states are appended at the end of the work_list
-            # By taking the last element we effectively pick the "newest" states, which amounts to dfs
-            global_state = self.work_list.pop()
-            if global_state.mstate.depth >= self.max_depth:
-                return self.__next__()
-            return global_state
-        except IndexError:
-            raise StopIteration()
+    def get_strategic_global_state(self):
+        return self.work_list.pop()

-class BreadthFirstSearchStrategy:
+class BreadthFirstSearchStrategy(BasicSearchStrategy):
     """
     Implements a breadth first search strategy
     I.E. Execute all states of a "level" before continuing
     """
-    def __init__(self, work_list, max_depth):
-        self.work_list = work_list
-        self.max_depth = max_depth
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        """ Picks the next state to execute """
-        try:
-            # This strategies assumes that new states are appended at the end of the work_list
-            # By taking the first element we effectively pick the "oldest" states, which amounts to bfs
-            global_state = self.work_list.pop(0)
-            if global_state.mstate.depth >= self.max_depth:
-                return self.__next__()
-            return global_state
-        except IndexError:
-            raise StopIteration()
+    def get_strategic_global_state(self):
+        return self.work_list.pop(0)
+
+
+class ReturnRandomNaivelyStrategy(BasicSearchStrategy):
+    """
+    chooses a random state from the worklist with equal likelihood
+    """
+    def get_strategic_global_state(self):
+        if len(self.work_list) > 0:
+            return self.work_list.pop(randrange(len(self.work_list)))
+        else:
+            raise IndexError
+
+
+class ReturnWeightedRandomStrategy(BasicSearchStrategy):
+    """
+    chooses a random state from the worklist with likelihood based on inverse proportion to depth
+    """
+    def get_strategic_global_state(self):
+        probability_distribution = [1/(global_state.mstate.depth+1) for global_state in self.work_list]
+        return self.work_list.pop(choices(range(len(self.work_list)), probability_distribution)[0])
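
For clarity, the weighting used by ReturnWeightedRandomStrategy can be reproduced in isolation. A minimal sketch with stand-in states, assuming random.choices is available (Python 3.6+; older versions fall back to the shim defined above):

    from random import choices
    from types import SimpleNamespace

    # Stand-in states at different search depths
    work_list = [SimpleNamespace(mstate=SimpleNamespace(depth=d)) for d in (0, 5, 50)]

    # Same distribution as ReturnWeightedRandomStrategy: weight 1/(depth+1),
    # so shallow states are picked more often than deep ones
    probability_distribution = [1 / (state.mstate.depth + 1) for state in work_list]
    picked = work_list.pop(choices(range(len(work_list)), probability_distribution)[0])
    print("picked a state at depth", picked.mstate.depth)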

@@ -1,4 +1,5 @@
 import logging
+from mythril.disassembler.disassembly import Disassembly
 from mythril.laser.ethereum.state import WorldState
 from mythril.laser.ethereum.transaction import TransactionStartSignal, TransactionEndSignal, \
     ContractCreationTransaction

@@ -28,7 +29,7 @@ class LaserEVM:
    """
    def __init__(self, accounts, dynamic_loader=None, max_depth=float('inf'), execution_timeout=60, create_timeout=10,
-                 strategy=DepthFirstSearchStrategy):
+                 strategy=DepthFirstSearchStrategy, max_transaction_count=3):
        world_state = WorldState()
        world_state.accounts = accounts
        # this sets the initial world state

@@ -45,6 +46,7 @@ class LaserEVM:
        self.work_list = []
        self.strategy = strategy(self.work_list, max_depth)
        self.max_depth = max_depth
+        self.max_transaction_count = max_transaction_count
        self.execution_timeout = execution_timeout
        self.create_timeout = create_timeout

@@ -73,16 +75,20 @@ class LaserEVM:
        logging.info("Finished contract creation, found {} open states".format(len(self.open_states)))
        if len(self.open_states) == 0:
            logging.warning("No contract was created during the execution of contract creation "
-                            "Increase the resources for creation execution (--max-depth or --create_timeout)")
+                            "Increase the resources for creation execution (--max-depth or --create-timeout)")

        # Reset code coverage
        self.coverage = {}

-        self.time = datetime.now()
-        logging.info("Starting message call transaction")
-        execute_message_call(self, created_account.address)
-
-        self.time = datetime.now()
-        execute_message_call(self, created_account.address)
+        for i in range(self.max_transaction_count):
+            initial_coverage = self._get_covered_instructions()
+
+            self.time = datetime.now()
+            logging.info("Starting message call transaction, iteration: {}".format(i))
+            execute_message_call(self, created_account.address)
+
+            end_coverage = self._get_covered_instructions()
+            if end_coverage == initial_coverage:
+                break

        logging.info("Finished symbolic execution")
        logging.info("%d nodes, %d edges, %d total states", len(self.nodes), len(self.edges), self.total_states)

@@ -90,6 +96,13 @@ class LaserEVM:
            cov = reduce(lambda sum_, val: sum_ + 1 if val else sum_, coverage[1]) / float(coverage[0]) * 100
            logging.info("Achieved {} coverage for code: {}".format(cov, code))

+    def _get_covered_instructions(self) -> int:
+        """ Gets the total number of covered instructions for all accounts in the svm"""
+        total_covered_instructions = 0
+        for _, cv in self.coverage.items():
+            total_covered_instructions += reduce(lambda sum_, val: sum_ + 1 if val else sum_, cv[1])
+        return total_covered_instructions
+
    def exec(self, create=False):
        for global_state in self.strategy:
            if self.execution_timeout and not create:

@@ -138,30 +151,31 @@ class LaserEVM:
            new_global_states = self._end_message_call(return_global_state, global_state,
                                                       revert_changes=True, return_data=None)

-        except TransactionStartSignal as e:
+        except TransactionStartSignal as start_signal:
            # Setup new global state
-            new_global_state = e.transaction.initial_global_state()
-            new_global_state.transaction_stack = copy(global_state.transaction_stack) + [(e.transaction, global_state)]
+            new_global_state = start_signal.transaction.initial_global_state()
+            new_global_state.transaction_stack = copy(global_state.transaction_stack) + [(start_signal.transaction, global_state)]
            new_global_state.node = global_state.node
            new_global_state.mstate.constraints = global_state.mstate.constraints

            return [new_global_state], op_code

-        except TransactionEndSignal as e:
-            transaction, return_global_state = e.global_state.transaction_stack.pop()
+        except TransactionEndSignal as end_signal:
+            transaction, return_global_state = end_signal.global_state.transaction_stack.pop()

            if return_global_state is None:
-                if not isinstance(transaction, ContractCreationTransaction) or transaction.return_data:
-                    e.global_state.world_state.node = global_state.node
-                    self.open_states.append(e.global_state.world_state)
+                if (not isinstance(transaction, ContractCreationTransaction) or transaction.return_data) and not end_signal.revert:
+                    end_signal.global_state.world_state.node = global_state.node
+                    self.open_states.append(end_signal.global_state.world_state)
                new_global_states = []
            else:
                # First execute the post hook for the transaction ending instruction
-                self._execute_post_hook(op_code, [e.global_state])
+                self._execute_post_hook(op_code, [end_signal.global_state])

                new_global_states = self._end_message_call(return_global_state, global_state,
-                                                           revert_changes=False, return_data=transaction.return_data)
+                                                           revert_changes=False or end_signal.revert,
+                                                           return_data=transaction.return_data)

        self._execute_post_hook(op_code, new_global_states)

@@ -246,13 +260,12 @@ class LaserEVM:
        environment = state.environment
        disassembly = environment.code
-        if address in state.environment.code.addr_to_func:
+        if address in disassembly.address_to_function_name:
            # Enter a new function
-            environment.active_function_name = disassembly.addr_to_func[address]
+            environment.active_function_name = disassembly.address_to_function_name[address]
            new_node.flags |= NodeFlags.FUNC_ENTRY
-            logging.info(
+            logging.debug(
                "- Entering function " + environment.active_account.contract_name + ":" + new_node.function_name)
        elif address == 0:
            environment.active_function_name = "fallback"
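
The transaction loop added above stops early once coverage plateaus. A simplified sketch of that control flow; run_transaction and covered_instructions are hypothetical stand-ins for execute_message_call and _get_covered_instructions, not the library API:

    def run_until_coverage_plateau(run_transaction, covered_instructions, max_transaction_count=3):
        """Issue symbolic transactions until no new instructions are covered."""
        for i in range(max_transaction_count):
            initial_coverage = covered_instructions()
            run_transaction(i)                       # one symbolic message-call transaction
            if covered_instructions() == initial_coverage:
                break                                # coverage plateaued, stop early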

@@ -109,7 +109,8 @@ class TaintRunner:
            records = TaintRunner.execute_node(node, record, index)

            result.add_records(records)
+            if len(records) == 0:  # continue if there is no record to work on
+                continue

            children = TaintRunner.children(node, statespace, environment, transaction_stack_length)
            for child in children:
                current_nodes.append((child, records[-1], 0))

@@ -212,7 +213,7 @@ class TaintRunner:
        _ = record.stack.pop()
        try:
            index = helper.get_concrete_int(op0)
-        except AttributeError:
+        except TypeError:
            logging.debug("Can't MLOAD taint track symbolically")
            record.stack.append(False)
            return

@@ -224,7 +225,7 @@ class TaintRunner:
        _, value_taint = record.stack.pop(), record.stack.pop()
        try:
            index = helper.get_concrete_int(op0)
-        except AttributeError:
+        except TypeError:
            logging.debug("Can't mstore taint track symbolically")
            return

@@ -235,7 +236,7 @@ class TaintRunner:
        _ = record.stack.pop()
        try:
            index = helper.get_concrete_int(op0)
-        except AttributeError:
+        except TypeError:
            logging.debug("Can't MLOAD taint track symbolically")
            record.stack.append(False)
            return

@@ -247,7 +248,7 @@ class TaintRunner:
        _, value_taint = record.stack.pop(), record.stack.pop()
        try:
            index = helper.get_concrete_int(op0)
-        except AttributeError:
+        except TypeError:
            logging.debug("Can't mstore taint track symbolically")
            return

@@ -12,10 +12,12 @@ def get_next_transaction_id():
    _next_transaction_id += 1
    return _next_transaction_id

class TransactionEndSignal(Exception):
    """ Exception raised when a transaction is finalized"""
-    def __init__(self, global_state):
+    def __init__(self, global_state, revert=False):
        self.global_state = global_state
+        self.revert = revert

class TransactionStartSignal(Exception):

@@ -71,9 +73,9 @@ class MessageCallTransaction:
        return global_state

-    def end(self, global_state, return_data=None):
+    def end(self, global_state, return_data=None, revert=False):
        self.return_data = return_data
-        raise TransactionEndSignal(global_state)
+        raise TransactionEndSignal(global_state, revert)

class ContractCreationTransaction:

@@ -126,9 +128,9 @@ class ContractCreationTransaction:
        return global_state

-    def end(self, global_state, return_data=None):
-        if not all([isinstance(element, int) for element in return_data]):
+    def end(self, global_state, return_data=None, revert=False):
+        if not all([isinstance(element, int) for element in return_data]) or len(return_data) == 0:
            self.return_data = None
            raise TransactionEndSignal(global_state)

@@ -136,5 +138,8 @@ class ContractCreationTransaction:
        global_state.environment.active_account.code = Disassembly(contract_code)
        self.return_data = global_state.environment.active_account.address
+        assert global_state.environment.active_account.code.instruction_list != []

-        raise TransactionEndSignal(global_state)
+        raise TransactionEndSignal(global_state, revert=revert)
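
A rough sketch of how the new revert flag travels: an instruction ends the transaction with revert=True, the exception carries the flag, and the executor uses it to decide whether the resulting world state is kept. Names mirror the classes above, but the executor logic is paraphrased rather than copied:

    class TransactionEndSignal(Exception):
        """Simplified copy of the signal class above."""
        def __init__(self, global_state, revert=False):
            self.global_state = global_state
            self.revert = revert

    def end_transaction(global_state, revert=False):
        raise TransactionEndSignal(global_state, revert)

    try:
        end_transaction(global_state={"storage": {}}, revert=True)   # e.g. a REVERT opcode
    except TransactionEndSignal as end_signal:
        # mirrors svm.py: reverted transactions are not added to the open world states
        persist_world_state = not end_signal.revert
        print("persist world state:", persist_world_state)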

@@ -10,13 +10,15 @@ TT256M1 = 2 ** 256 - 1
TT255 = 2 ** 255

def sha3(seed):
    return _sha3.keccak_256(bytes(seed)).digest()

def safe_decode(hex_encoded_string):
-    if (hex_encoded_string.startswith("0x")):
+    if hex_encoded_string.startswith("0x"):
        return bytes.fromhex(hex_encoded_string[2:])
    else:
        return bytes.fromhex(hex_encoded_string)

@@ -80,9 +82,12 @@ def get_concrete_int(item):
        elif is_true(simplified):
            return 1
        else:
-            raise ValueError("Symbolic boolref encountered")
+            raise TypeError("Symbolic boolref encountered")

-    return simplify(item).as_long()
+    try:
+        return simplify(item).as_long()
+    except AttributeError:
+        raise TypeError("Got a symbolic BitVecRef")

def concrete_int_from_bytes(_bytes, start_index):

@@ -99,7 +104,7 @@ def concrete_int_to_bytes(val):
    # logging.debug("concrete_int_to_bytes " + str(val))
-    if (type(val) == int):
+    if type(val) == int:
        return val.to_bytes(32, byteorder='big')
    return (simplify(val).as_long()).to_bytes(32, byteorder='big')
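
The switch from AttributeError/ValueError to TypeError gives callers a single exception type to catch. A small sketch of the new contract, assuming z3 is installed (the helper below is a simplified stand-in for get_concrete_int, BitVec case only):

    from z3 import BitVec, BitVecVal, simplify

    def get_concrete_int_sketch(item):
        """Simplified version of the helper above."""
        try:
            return simplify(item).as_long()
        except AttributeError:
            raise TypeError("Got a symbolic BitVecRef")

    print(get_concrete_int_sketch(BitVecVal(42, 256)))      # concrete value -> 42
    try:
        get_concrete_int_sketch(BitVec("calldata_0", 256))   # symbolic value
    except TypeError as error:
        print("symbolic:", error)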

@@ -20,8 +20,8 @@ import platform
 from mythril.ether import util
 from mythril.ether.ethcontract import ETHContract
 from mythril.ether.soliditycontract import SolidityContract, get_contracts_from_file
-from mythril.rpc.client import EthJsonRpc
-from mythril.rpc.exceptions import ConnectionError
+from mythril.ethereum.interface.rpc.client import EthJsonRpc
+from mythril.ethereum.interface.rpc.exceptions import ConnectionError
 from mythril.support import signatures
 from mythril.support.truffle import analyze_truffle_project
 from mythril.support.loader import DynLoader

@@ -31,7 +31,7 @@ from mythril.analysis.callgraph import generate_graph
 from mythril.analysis.traceexplore import get_serializable_statespace
 from mythril.analysis.security import fire_lasers
 from mythril.analysis.report import Report
-from mythril.leveldb.client import EthLevelDB
+from mythril.ethereum.interface.leveldb.client import EthLevelDB

 # logging.basicConfig(level=logging.DEBUG)

@@ -76,18 +76,20 @@ class Mythril(object):
    """
    def __init__(self, solv=None,
-                 solc_args=None, dynld=False):
+                 solc_args=None, dynld=False,
+                 enable_online_lookup=False):

        self.solv = solv
        self.solc_args = solc_args
        self.dynld = dynld
+        self.enable_online_lookup = enable_online_lookup

        self.mythril_dir = self._init_mythril_dir()

-        self.sigs = signatures.SignatureDb()
+        self.sigs = signatures.SignatureDb(enable_online_lookup=self.enable_online_lookup)
        try:
            self.sigs.open()  # tries mythril_dir/signatures.json by default (provide path= arg to make this configurable)
-        except FileNotFoundError as fnfe:
+        except FileNotFoundError:
            logging.info(
                "No signature database found. Creating database if sigs are loaded in: " + self.sigs.signatures_file + "\n" +
                "Consider replacing it with the pre-initialized database at https://raw.githubusercontent.com/ConsenSys/mythril/master/signatures.json")

@@ -103,7 +105,8 @@ class Mythril(object):
        self.contracts = []  # loaded contracts

-    def _init_mythril_dir(self):
+    @staticmethod
+    def _init_mythril_dir():
        try:
            mythril_dir = os.environ['MYTHRIL_DIR']
        except KeyError:

@@ -179,7 +182,8 @@ class Mythril(object):
    def analyze_truffle_project(self, *args, **kwargs):
        return analyze_truffle_project(self.sigs, *args, **kwargs)  # just passthru by passing signatures for now

-    def _init_solc_binary(self, version):
+    @staticmethod
+    def _init_solc_binary(version):
        # Figure out solc binary and version
        # Only proper versions are supported. No nightlies, commits etc (such as available in remix)

@@ -259,8 +263,7 @@ class Mythril(object):
    def search_db(self, search):

-        def search_callback(contract, address, balance):
+        def search_callback(_, address, balance):
            print("Address: " + address + ", balance: " + str(balance))

        try:

@@ -277,7 +280,7 @@ class Mythril(object):
    def load_from_bytecode(self, code):
        address = util.get_indexed_address(0)
-        self.contracts.append(ETHContract(code, name="MAIN"))
+        self.contracts.append(ETHContract(code, name="MAIN", enable_online_lookup=self.enable_online_lookup))
        return address, self.contracts[-1]  # return address and contract object

    def load_from_address(self, address):

@@ -288,15 +291,15 @@ class Mythril(object):
            code = self.eth.eth_getCode(address)
        except FileNotFoundError as e:
            raise CriticalError("IPC error: " + str(e))
-        except ConnectionError as e:
+        except ConnectionError:
            raise CriticalError("Could not connect to RPC server. Make sure that your node is running and that RPC parameters are set correctly.")
        except Exception as e:
            raise CriticalError("IPC / RPC error: " + str(e))
        else:
            if code == "0x" or code == "0x0":
                raise CriticalError("Received an empty response from eth_getCode. Check the contract address and verify that you are on the correct chain.")
            else:
-                self.contracts.append(ETHContract(code, name=address))
+                self.contracts.append(ETHContract(code, name=address, enable_online_lookup=self.enable_online_lookup))
                return address, self.contracts[-1]  # return address and contract object

    def load_from_solidity(self, solidity_files):

@@ -358,14 +361,16 @@ class Mythril(object):
        return generate_graph(sym, physics=enable_physics, phrackify=phrackify)

    def fire_lasers(self, strategy, contracts=None, address=None,
-                    modules=None, verbose_report=False, max_depth=None, execution_timeout=None, create_timeout=None):
+                    modules=None, verbose_report=False, max_depth=None, execution_timeout=None, create_timeout=None,
+                    max_transaction_count=None):

        all_issues = []
        for contract in (contracts or self.contracts):
            sym = SymExecWrapper(contract, address, strategy,
                                 dynloader=DynLoader(self.eth) if self.dynld else None,
                                 max_depth=max_depth, execution_timeout=execution_timeout,
-                                 create_timeout=create_timeout)
+                                 create_timeout=create_timeout,
+                                 max_transaction_count=max_transaction_count)

            issues = fire_lasers(sym, modules)

@@ -431,11 +436,12 @@ class Mythril(object):
                outtxt.append("{}: {}".format(hex(i), self.eth.eth_getStorageAt(address, i)))
        except FileNotFoundError as e:
            raise CriticalError("IPC error: " + str(e))
-        except ConnectionError as e:
+        except ConnectionError:
            raise CriticalError("Could not connect to RPC server. Make sure that your node is running and that RPC parameters are set correctly.")
        return '\n'.join(outtxt)

-    def disassemble(self, contract):
+    @staticmethod
+    def disassemble(contract):
        return contract.get_easm()

    @staticmethod

@@ -37,7 +37,7 @@ class DynLoader:
        m = re.match(r'^(0x[0-9a-fA-F]{40})$', dependency_address)

-        if (m):
+        if m:
            dependency_address = m.group(1)
        else:

@@ -47,7 +47,7 @@ class DynLoader:
        code = self.eth.eth_getCode(dependency_address)

-        if (code == "0x"):
+        if code == "0x":
            return None
        else:
            return Disassembly(code)

@@ -11,7 +11,6 @@ from subprocess import Popen, PIPE
 from mythril.exceptions import CompilerError

-# todo: tintinweb - make this a normal requirement? (deps: eth-abi and requests, both already required by mythril)
 try:
     # load if available but do not fail
     import ethereum_input_decoder

@@ -54,7 +53,7 @@ except ImportError:
 class SignatureDb(object):

-    def __init__(self, enable_online_lookup=True):
+    def __init__(self, enable_online_lookup=False):
        """
        Constr
        :param enable_online_lookup: enable onlien signature hash lookup

@@ -165,9 +164,12 @@ class SignatureDb(object):
            except FourByteDirectoryOnlineLookupError as fbdole:
                self.online_directory_unavailable_until = time.time() + 2 * 60  # wait at least 2 mins to try again
                logging.warning("online function signature lookup not available. will not try to lookup hash for the next 2 minutes. exception: %r" % fbdole)

+        if sighash not in self.signatures:
+            return []
        if type(self.signatures[sighash]) != list:
            return [self.signatures[sighash]]
-        return self.signatures[sighash]  # raise keyerror
+        return self.signatures[sighash]

    def __getitem__(self, item):
        """

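With the change above, an unknown selector now yields an empty list instead of raising a KeyError. A hedged usage sketch, assuming a SignatureDb constructed offline with no signatures loaded (the exact lookup call may differ slightly from this illustration):

    from mythril.support.signatures import SignatureDb

    sigs = SignatureDb(enable_online_lookup=False)   # offline only, nothing loaded
    matches = sigs.get("0xdeadbeef")                 # unknown 4-byte selector
    for name in matches:                             # no KeyError handling needed
        print(name)
    print("no matches" if not matches else "found matches")
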
@@ -40,7 +40,7 @@ def analyze_truffle_project(sigs, args):
        if len(bytecode) < 4:
            continue

-        sigs.import_from_solidity_source(contractdata['sourcePath'])
+        sigs.import_from_solidity_source(contractdata['sourcePath'], solc_args=args.solc_args)
        sigs.write()

        ethcontract = ETHContract(bytecode, name=name)

@@ -1,8 +1,10 @@
+coloredlogs>=10.0
 configparser>=3.5.0
 coverage
 eth_abi>=1.0.0
 eth-account>=0.1.0a2
 ethereum>=2.3.2
+ethereum-input-decoder>=0.2.2
 eth-hash>=0.1.0
 eth-keyfile>=0.5.1
 eth-keys>=0.2.0b3

@@ -82,6 +82,7 @@ setup(
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),
    install_requires=[
+        'coloredlogs>=10.0',
        'ethereum>=2.3.2',
        'z3-solver>=4.5',
        'requests',

@@ -103,7 +104,8 @@ setup(
        'py-flags',
        'mock',
        'configparser>=3.5.0',
-        'persistent>=4.2.0'
+        'persistent>=4.2.0',
+        'ethereum-input-decoder>=0.2.2'
    ],
    tests_require=[

@@ -1,5 +1,7 @@
+<!DOCTYPE html>
<html>
<head>
+<title>Call Graph</title>
<style type="text/css">
    #mynetwork {
        background-color: #232625;

@@ -1,6 +1,7 @@
+<!DOCTYPE html>
<html>
<head>
+<title>Call Graph</title>
<style type="text/css">
    #mynetwork {

@@ -1,5 +1,7 @@
+<!DOCTYPE html>
<html>
<head>
+<title>Call Graph</title>
<style type="text/css">
    #mynetwork {
        background-color: #232625;

Binary file not shown (before: 22 KiB).
Binary file not shown (after: 19 KiB).

@@ -189,7 +189,7 @@ def test_delegate_call(sym_mock, concrete_mock, curr_instruction):
    statespace.calls = [call]

    # act
-    issues = execute(statespace)
+    execute(statespace)

    # assert
    assert concrete_mock.call_count == 1

@@ -0,0 +1,61 @@
from mythril.disassembler.disassembly import *
instruction_list = [
{"opcode": "PUSH4", "argument": "0x10203040"},
{"opcode": "EQ"},
{"opcode": "PUSH4", "argument": "0x40302010"},
{"opcode": "JUMPI"},
]
def test_get_function_info(mocker):
# Arrange
global instruction_list
signature_database_mock = SignatureDb()
mocker.patch.object(signature_database_mock, "get")
signature_database_mock.get.return_value = ["function_name"]
# Act
function_hash, entry_point, function_name = get_function_info(
0, instruction_list, signature_database_mock
)
# Assert
assert function_hash == "0x10203040"
assert entry_point == 0x40302010
assert function_name == "function_name"
def test_get_function_info_multiple_names(mocker):
# Arrange
global instruction_list
signature_database_mock = SignatureDb()
mocker.patch.object(signature_database_mock, "get")
signature_database_mock.get.return_value = ["function_name", "another_name"]
# Act
function_hash, entry_point, function_name = get_function_info(
0, instruction_list, signature_database_mock
)
# Assert
assert function_name == "**ambiguous** function_name"
def test_get_function_info_no_names(mocker):
# Arrange
global instruction_list
signature_database_mock = SignatureDb()
mocker.patch.object(signature_database_mock, "get")
signature_database_mock.get.return_value = []
# Act
function_hash, entry_point, function_name = get_function_info(
0, instruction_list, signature_database_mock
)
# Assert
assert function_name == "_function_0x10203040"

@@ -0,0 +1,51 @@
{
"suicide" : {
"_info" : {
"comment" : "",
"filledwith" : "testeth 1.5.0.dev2-52+commit.d419e0a2",
"lllcversion" : "Version: 0.4.26-develop.2018.9.19+commit.785cbf40.Linux.g++",
"source" : "src/VMTestsFiller/vmTests/suicideFiller.json",
"sourceHash" : "4622c577440f9db4b3954a1de60bf2fac55886dcb0ec4ecaf906c25bc77372e7"
},
"callcreates" : [
],
"env" : {
"currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
"currentDifficulty" : "0x0100",
"currentGasLimit" : "0x0f4240",
"currentNumber" : "0x00",
"currentTimestamp" : "0x01"
},
"exec" : {
"address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
"caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"code" : "0x33ff",
"data" : "0x",
"gas" : "0x0186a0",
"gasPrice" : "0x5af3107a4000",
"origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
"value" : "0x0de0b6b3a7640000"
},
"gas" : "0x01869e",
"logs" : "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
"out" : "0x",
"post" : {
"0xcd1722f3947def4cf144679da39c4c32bdc35681" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x",
"nonce" : "0x00",
"storage" : {
}
}
},
"pre" : {
"0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
"balance" : "0x152d02c7e14af6800000",
"code" : "0x33ff",
"nonce" : "0x00",
"storage" : {
}
}
}
}
}

@@ -12,7 +12,7 @@ import pytest
evm_test_dir = Path(__file__).parent / 'VMTests'

-test_types = ['vmArithmeticTest', 'vmBitwiseLogicOperation', 'vmPushDupSwapTest']
+test_types = ['vmArithmeticTest', 'vmBitwiseLogicOperation', 'vmPushDupSwapTest', 'vmTests']

def load_test_data(designations):

@@ -45,7 +45,7 @@ class MachineStackTest(BaseTestCase):
        mstack = MachineStack([0, 1])
        with pytest.raises(NotImplementedError):
-            mstack = mstack + [2]
+            mstack + [2]

    @staticmethod
    def test_mstack_no_support_iadd():

@@ -49,7 +49,7 @@ def test_execute_contract_creation(mocked_setup: MagicMock):
    mocked_setup.side_effect = _is_contract_creation

    # Act
-    new_account = execute_contract_creation(laser_evm, "606000")
+    execute_contract_creation(laser_evm, "606000")

    # Assert
    # mocked_setup.assert_called()

@ -6,13 +6,13 @@ from mythril.laser.ethereum import svm
from tests import *
-SHA256_TEST = [ (0,False) for i in range(6)]
-RIPEMD160_TEST = [ (0,False) for i in range(6)]
-ECRECOVER_TEST = [ (0,False) for i in range(9)]
-IDENTITY_TEST = [ (0, False) for i in range(4)]
+SHA256_TEST = [(0, False) for _ in range(6)]
+RIPEMD160_TEST = [(0, False) for _ in range(6)]
+ECRECOVER_TEST = [(0, False) for _ in range(9)]
+IDENTITY_TEST = [(0, False) for _ in range(4)]
SHA256_TEST[0] = (5555555555555555, True) #These are Random numbers to check whether the 'if condition' is entered or not(True means entered)
SHA256_TEST[1] = (323232325445454546, True)
@ -98,9 +98,9 @@ def _test_natives(laser_info, test_list, test_name):
assert(success == len(test_list))
class NativeTests(BaseTestCase):
-def runTest(self):
+@staticmethod
+def runTest():
disassembly = SolidityContract('./tests/native_tests.sol').disassembly
account = Account("0x0000000000000000000000000000000000000000", disassembly)
accounts = {account.address: account}

@ -23,7 +23,7 @@ def _fix_debug_data(json_str):
def _generate_report(input_file):
-contract = ETHContract(input_file.read_text())
+contract = ETHContract(input_file.read_text(), enable_online_lookup=False)
sym = SymExecWrapper(contract, address=(util.get_indexed_address(0)), strategy="dfs", execution_timeout=30)
issues = fire_lasers(sym)

@ -1,6 +1,6 @@
from unittest import TestCase
-from mythril.rpc.client import EthJsonRpc
+from mythril.ethereum.interface.rpc.client import EthJsonRpc
class RpcTest(TestCase):
client = None

@ -6,6 +6,7 @@ from mythril.laser.ethereum.cfg import Node, Edge
from mythril.laser.ethereum.state import MachineState, Account, Environment, GlobalState
from mythril.laser.ethereum.svm import LaserEVM
def test_execute_state(mocker):
record = TaintRecord()
record.stack = [True, False, True]
@ -54,8 +55,6 @@ def test_execute_node(mocker):
assert state_1 in record.states
def test_execute(mocker):
active_account = Account('0x00')
environment = Environment(active_account, None, None, None, None, None)

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1 +1 @@
{"error": null, "issues": [{"address": 317, "contract": "Unknown", "debug": "<DEBUG-DATA>", "description": "Function %s retrieves the transaction origin (tx.origin) using the ORIGIN opcode. Use msg.sender instead.\nSee also: https://solidity.readthedocs.io/en/develop/security-considerations.html#tx-origin", "function": "transferOwnership(address)", "swc_id": "115", "title": "Use of tx.origin", "type": "Warning"}], "success": true} {"error": null, "issues": [{"address": 317, "contract": "Unknown", "debug": "<DEBUG-DATA>", "description": "The function `transferOwnership(address)` retrieves the transaction origin (tx.origin) using the ORIGIN opcode. Use msg.sender instead.\nSee also: https://solidity.readthedocs.io/en/develop/security-considerations.html#tx-origin", "function": "transferOwnership(address)", "swc_id": "115", "title": "Use of tx.origin", "type": "Warning"}], "success": true}

@ -9,5 +9,5 @@
### Description
-Function %s retrieves the transaction origin (tx.origin) using the ORIGIN opcode. Use msg.sender instead.
+The function `transferOwnership(address)` retrieves the transaction origin (tx.origin) using the ORIGIN opcode. Use msg.sender instead.
See also: https://solidity.readthedocs.io/en/develop/security-considerations.html#tx-origin

@ -4,7 +4,7 @@ Type: Warning
Contract: Unknown
Function name: transferOwnership(address)
PC address: 317
-Function %s retrieves the transaction origin (tx.origin) using the ORIGIN opcode. Use msg.sender instead.
+The function `transferOwnership(address)` retrieves the transaction origin (tx.origin) using the ORIGIN opcode. Use msg.sender instead.
See also: https://solidity.readthedocs.io/en/develop/security-considerations.html#tx-origin
--------------------

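The three expected outputs above all describe the same SWC-115 finding. Below is a rough, assumption-based sketch of the kind of check that produces such an issue record, keyed off the ORIGIN opcode; it is not the deprecated_ops module from this repository, and the state-dict shape is hypothetical.

# Rough illustration only; the field names mirror the expected report above.
DESCRIPTION = (
    "The function `{func}` retrieves the transaction origin (tx.origin) using "
    "the ORIGIN opcode. Use msg.sender instead."
)

def flag_tx_origin(states):
    issues = []
    for state in states:                      # assumed: dicts with opcode/address/function
        if state.get("opcode") == "ORIGIN":
            issues.append({
                "address": state.get("address"),
                "swc_id": "115",
                "title": "Use of tx.origin",
                "type": "Warning",
                "description": DESCRIPTION.format(func=state.get("function", "unknown")),
            })
    return issues

# Hypothetical usage:
# flag_tx_origin([{"opcode": "ORIGIN", "address": 317, "function": "transferOwnership(address)"}])
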
@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>

@ -1,5 +1,7 @@
<!DOCTYPE html>
<html>
<head>
<title>Call Graph</title>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.css" integrity="sha256-iq5ygGJ7021Pi7H5S+QAUXCPUfaBzfqeplbg/KlEssg=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/vis/4.21.0/vis.min.js" integrity="sha256-JuQeAGbk9rG/EoRMixuy5X8syzICcvB0dj3KindZkY0=" crossorigin="anonymous"></script>
