resolve merge conflicts

pull/542/head
Nikhil Parasaram 6 years ago
commit a6c48451b2
  1. README.md (10)
  2. mythril/analysis/modules/external_calls.py (5)
  3. mythril/analysis/modules/integer.py (5)
  4. mythril/analysis/symbolic.py (8)
  5. mythril/analysis/traceexplore.py (2)
  6. mythril/ether/asm.py (12)
  7. mythril/ether/ethcontract.py (13)
  8. mythril/ether/evm.py (8)
  9. mythril/ether/util.py (13)
  10. mythril/ethereum/__init__.py (0)
  11. mythril/ethereum/interface/__init__.py (0)
  12. mythril/ethereum/interface/leveldb/__init__.py (0)
  13. mythril/ethereum/interface/leveldb/accountindexing.py (24)
  14. mythril/ethereum/interface/leveldb/client.py (112)
  15. mythril/ethereum/interface/leveldb/eth_db.py (18)
  16. mythril/ethereum/interface/leveldb/state.py (36)
  17. mythril/ethereum/interface/rpc/__init__.py (0)
  18. mythril/ethereum/interface/rpc/base_client.py (28)
  19. mythril/ethereum/interface/rpc/client.py (4)
  20. mythril/ethereum/interface/rpc/constants.py (0)
  21. mythril/ethereum/interface/rpc/exceptions.py (0)
  22. mythril/ethereum/interface/rpc/utils.py (16)
  23. mythril/interfaces/cli.py (7)
  24. mythril/laser/ethereum/call.py (14)
  25. mythril/laser/ethereum/evm_exceptions.py (4)
  26. mythril/laser/ethereum/instructions.py (33)
  27. mythril/laser/ethereum/state.py (4)
  28. mythril/laser/ethereum/svm.py (94)
  29. mythril/laser/ethereum/taint_analysis.py (7)
  30. mythril/laser/ethereum/transaction/transaction_models.py (14)
  31. mythril/laser/ethereum/util.py (6)
  32. mythril/mythril.py (21)
  33. mythril/support/loader.py (2)
  34. mythril/support/signatures.py (17)
  35. requirements.txt (1)
  36. setup.py (1)
  37. tests/laser/evm_testsuite/VMTests/vmTests/suicide.json (51)
  38. tests/laser/evm_testsuite/evm_test.py (2)
  39. tests/native_test.py (12)
  40. tests/rpc_test.py (2)

@@ -2,16 +2,16 @@
 [![Discord](https://img.shields.io/discord/481002907366588416.svg)](https://discord.gg/E3YrVtG)
 [![PyPI](https://badge.fury.io/py/mythril.svg)](https://pypi.python.org/pypi/mythril)
 ![Master Build Status](https://img.shields.io/circleci/project/github/ConsenSys/mythril/master.svg)
-[![Waffle.io - Columns and their card count](https://badge.waffle.io/ConsenSys/mythril.svg?columns=all)](https://waffle.io/ConsenSys/mythril)
+[![Waffle.io - Columns and their card count](https://badge.waffle.io/ConsenSys/mythril.svg?columns=In%20Progress)](https://waffle.io/ConsenSys/mythril)
 [![Sonarcloud - Maintainability](https://sonarcloud.io/api/project_badges/measure?project=mythril&metric=sqale_rating)](https://sonarcloud.io/dashboard?id=mythril)
+[![PyPI Statistics](https://pypistats.com/badge/mythril.svg)](https://pypistats.com/package/mythril)
 <img height="120px" align="right" src="https://github.com/ConsenSys/mythril/raw/master/static/mythril.png" alt="mythril" />
 Mythril OSS is the classic security analysis tool for Ethereum smart contracts. It uses concolic analysis, taint analysis and control flow checking to detect a variety of security vulnerabilities.
 Whether you want to contribute, need support, or want to learn what we have cooking for the future, our [Discord server](https://discord.gg/E3YrVtG) will serve your needs!
-Oh and by the way, we're now building a whole security tools ecosystem with [Mythril Platform](https://mythril.ai). You should definitely check that out as well.
+Oh and by the way, we're building an easy-to-use SaaS solution and tools ecosystem for Ethereum developers called [Mythril Platform](https://mythril.ai). You should definitely check that out as well.
 ## Installation and setup
@@ -35,6 +35,10 @@ Instructions for using the 'myth' tool are found on the [Wiki](https://github.co
 For support or general discussions please join the Mythril community on [Discord](https://discord.gg/E3YrVtG).
+## Vulnerability Remediation
+Visit the [Smart Contract Vulnerability Classification Registry](https://smartcontractsecurity.github.io/SWC-registry/) to find detailed information and remediation guidance for the vulnerabilities reported.
 ## Presentations, papers and articles
 - [Analyzing Ethereum Smart Contracts for Vulnerabilities](https://hackernoon.com/scanning-ethereum-smart-contracts-for-vulnerabilities-b5caefd995df)

@@ -16,8 +16,9 @@ Check for call.value()() to external addresses
 MAX_SEARCH_DEPTH = 64
-def search_children(statespace, node, start_index=0, depth=0, results=[]):
+def search_children(statespace, node, start_index=0, depth=0, results=None):
+    if results is None:
+        results = []
     logging.debug("SEARCHING NODE %d", node.uid)
     if depth < MAX_SEARCH_DEPTH:
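The `results=[]` change above (and the matching `constraint`, `default_list`, `initial_stack`, and `params` changes later in this commit) all address the same Python pitfall: a mutable default argument is created once and then shared across calls. A minimal standalone illustration — the toy functions `buggy()` and `fixed()` are only for demonstration:

```python
# Default values are evaluated once, at function definition time, so a mutable
# default like [] is shared by every call that relies on it.
def buggy(results=[]):
    results.append(1)
    return results

print(buggy())  # [1]
print(buggy())  # [1, 1]  <- state leaked from the previous call

# The pattern used throughout this commit: default to None, allocate inside.
def fixed(results=None):
    if results is None:
        results = []
    results.append(1)
    return results

print(fixed())  # [1]
print(fixed())  # [1]
```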

@@ -214,7 +214,7 @@ def _check_sstore(state, taint_result):
     return taint_result.check(state, -2)
-def _search_children(statespace, node, expression, taint_result=None, constraint=[], index=0, depth=0, max_depth=64):
+def _search_children(statespace, node, expression, taint_result=None, constraint=None, index=0, depth=0, max_depth=64):
     """
     Checks the statespace for children states, with JUMPI or SSTORE instuctions,
     for dependency on expression
@@ -227,6 +227,9 @@ def _search_children(statespace, node, expression, taint_result=None, constraint
     :param max_depth: Max depth to explore
     :return: List of states that match the opcodes and are dependent on expression
     """
+    if constraint is None:
+        constraint = []
     logging.debug("SEARCHING NODE for usage of an overflowed variable %d", node.uid)
     if taint_result is None:

@ -9,9 +9,9 @@ from mythril.laser.ethereum.strategy.basic import DepthFirstSearchStrategy, Brea
class SymExecWrapper: class SymExecWrapper:
''' """
Wrapper class for the LASER Symbolic virtual machine. Symbolically executes the code and does a bit of pre-analysis for convenience. Wrapper class for the LASER Symbolic virtual machine. Symbolically executes the code and does a bit of pre-analysis for convenience.
''' """
def __init__(self, contract, address, strategy, dynloader=None, max_depth=22, def __init__(self, contract, address, strategy, dynloader=None, max_depth=22,
execution_timeout=None, create_timeout=None): execution_timeout=None, create_timeout=None):
@ -67,7 +67,7 @@ class SymExecWrapper:
# ignore prebuilts # ignore prebuilts
continue continue
if (meminstart.type == VarType.CONCRETE and meminsz.type == VarType.CONCRETE): if meminstart.type == VarType.CONCRETE and meminsz.type == VarType.CONCRETE:
self.calls.append(Call(self.nodes[key], state, state_index, op, to, gas, value, state.mstate.memory[meminstart.val:meminsz.val * 4])) self.calls.append(Call(self.nodes[key], state, state_index, op, to, gas, value, state.mstate.memory[meminstart.val:meminsz.val * 4]))
else: else:
self.calls.append(Call(self.nodes[key], state, state_index, op, to, gas, value)) self.calls.append(Call(self.nodes[key], state, state_index, op, to, gas, value))
@ -105,7 +105,7 @@ class SymExecWrapper:
taint = True taint = True
for constraint in s.node.constraints: for constraint in s.node.constraints:
if ("caller" in str(constraint)): if "caller" in str(constraint):
taint = False taint = False
break break

@@ -81,7 +81,7 @@ def get_serializable_statespace(statespace):
     for edge in statespace.edges:
-        if (edge.condition is None):
+        if edge.condition is None:
             label = ""
         else:

@ -42,9 +42,7 @@ def easm_to_instruction_list(easm):
# Invalid code line # Invalid code line
continue continue
instruction = {} instruction = {'opcode': m.group(1)}
instruction['opcode'] = m.group(1)
if m.group(2): if m.group(2):
instruction['argument'] = m.group(2)[2:] instruction['argument'] = m.group(2)[2:]
@ -82,7 +80,7 @@ def find_opcode_sequence(pattern, instruction_list):
matched = False matched = False
break break
if (matched): if matched:
match_indexes.append(i) match_indexes.append(i)
return match_indexes return match_indexes
@ -101,12 +99,10 @@ def disassemble(bytecode):
while addr < length: while addr < length:
instruction = {} instruction = {'address': addr}
instruction['address'] = addr
try: try:
if (sys.version_info > (3, 0)): if sys.version_info > (3, 0):
opcode = opcodes[bytecode[addr]] opcode = opcodes[bytecode[addr]]
else: else:
opcode = opcodes[ord(bytecode[addr])] opcode = opcodes[ord(bytecode[addr])]

@@ -7,15 +7,16 @@ import re
 class ETHContract(persistent.Persistent):
     def __init__(self, code, creation_code="", name="Unknown", enable_online_lookup=True):
-        self.creation_code = creation_code
-        self.name = name
         # Workaround: We currently do not support compile-time linking.
         # Dynamic contract addresses of the format __[contract-name]_____________ are replaced with a generic address
+        # Apply this for creation_code & code
+        creation_code = re.sub(r'(_+.*_+)', 'aa' * 20, creation_code)
         code = re.sub(r'(_+.*_+)', 'aa' * 20, code)
+        self.creation_code = creation_code
+        self.name = name
         self.code = code
         self.disassembly = Disassembly(code, enable_online_lookup=enable_online_lookup)
         self.creation_disassembly = Disassembly(creation_code, enable_online_lookup=enable_online_lookup)
@@ -49,7 +50,7 @@ class ETHContract(persistent.Persistent):
             m = re.match(r'^code#([a-zA-Z0-9\s,\[\]]+)#', token)
-            if (m):
+            if m:
                 if easm_code is None:
                     easm_code = self.get_easm()
@@ -59,7 +60,7 @@ class ETHContract(persistent.Persistent):
             m = re.match(r'^func#([a-zA-Z0-9\s_,(\\)\[\]]+)#$', token)
-            if (m):
+            if m:
                 sign_hash = "0x" + utils.sha3(m.group(1))[:4].hex()
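The constructor now applies the underscore-placeholder substitution to `creation_code` as well as `code`. A rough illustration of what the regex does — the bytecode fragment and library name below are made up, but the placeholder format (underscores padding an unresolved library name) is what solc emits for unlinked libraries:

```python
import re

# Hypothetical runtime bytecode containing an unlinked library placeholder.
code = "6060604052__MyLibrary______________________________63f8a8fd6d"

# Replace the placeholder with a dummy 20-byte address (40 hex characters),
# mirroring the workaround in ETHContract.__init__.
linked = re.sub(r'(_+.*_+)', 'aa' * 20, code)

print(linked)
# '6060604052' + 'aa' * 20 + '63f8a8fd6d'
```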

@ -47,13 +47,13 @@ def trace(code, calldata = ""):
m = re.match(r'.*stack=(\[.*?\])', line) m = re.match(r'.*stack=(\[.*?\])', line)
if (m): if m:
stackitems = re.findall(r'b\'(\d+)\'', m.group(1)) stackitems = re.findall(r'b\'(\d+)\'', m.group(1))
stack = "["; stack = "["
if (len(stackitems)): if len(stackitems):
for i in range(0, len(stackitems) - 1): for i in range(0, len(stackitems) - 1):
stack += hex(int(stackitems[i])) + ", " stack += hex(int(stackitems[i])) + ", "
@ -65,7 +65,7 @@ def trace(code, calldata = ""):
else: else:
stack = "[]" stack = "[]"
if (re.match(r'^PUSH.*', op)): if re.match(r'^PUSH.*', op):
val = re.search(r'pushvalue=(\d+)', line).group(1) val = re.search(r'pushvalue=(\d+)', line).group(1)
pushvalue = hex(int(val)) pushvalue = hex(int(val))
trace.append({'pc': pc, 'op': op, 'stack': stack, 'pushvalue': pushvalue}) trace.append({'pc': pc, 'op': op, 'stack': stack, 'pushvalue': pushvalue})

@@ -10,7 +10,7 @@ import json
 def safe_decode(hex_encoded_string):
-    if (hex_encoded_string.startswith("0x")):
+    if hex_encoded_string.startswith("0x"):
         return bytes.fromhex(hex_encoded_string[2:])
     else:
         return bytes.fromhex(hex_encoded_string)
@@ -18,10 +18,17 @@ def safe_decode(hex_encoded_string):
 def get_solc_json(file, solc_binary="solc", solc_args=None):
-    cmd = [solc_binary, "--combined-json", "bin,bin-runtime,srcmap,srcmap-runtime", '--allow-paths', "."]
+    cmd = [solc_binary, "--combined-json", "bin,bin-runtime,srcmap,srcmap-runtime"]
     if solc_args:
-        cmd.extend(solc_args.split(" "))
+        cmd.extend(solc_args.split())
+    if not "--allow-paths" in cmd:
+        cmd.extend(["--allow-paths", "."])
+    else:
+        for i, arg in enumerate(cmd):
+            if arg == "--allow-paths":
+                cmd[i + 1] += ",."
     cmd.append(file)
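The new `--allow-paths` handling only adds the default `.` when the caller has not passed their own `--allow-paths` through `solc_args`; otherwise `.` is appended to the existing value. A small sketch of the resulting commands — `build_solc_cmd` is a hypothetical stand-in for just the argument-building part of `get_solc_json`:

```python
def build_solc_cmd(file, solc_binary="solc", solc_args=None):
    # Mirrors the updated argument handling (simplified sketch).
    cmd = [solc_binary, "--combined-json", "bin,bin-runtime,srcmap,srcmap-runtime"]
    if solc_args:
        cmd.extend(solc_args.split())
    if "--allow-paths" not in cmd:
        cmd.extend(["--allow-paths", "."])
    else:
        for i, arg in enumerate(cmd):
            if arg == "--allow-paths":
                cmd[i + 1] += ",."
    cmd.append(file)
    return cmd

print(build_solc_cmd("Token.sol"))
# ['solc', '--combined-json', 'bin,bin-runtime,srcmap,srcmap-runtime', '--allow-paths', '.', 'Token.sol']
print(build_solc_cmd("Token.sol", solc_args="--allow-paths /tmp/libs"))
# ['solc', '--combined-json', 'bin,bin-runtime,srcmap,srcmap-runtime', '--allow-paths', '/tmp/libs,.', 'Token.sol']
```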

@ -34,9 +34,9 @@ class CountableList(object):
class ReceiptForStorage(rlp.Serializable): class ReceiptForStorage(rlp.Serializable):
''' """
Receipt format stored in levelDB Receipt format stored in levelDB
''' """
fields = [ fields = [
('state_root', binary), ('state_root', binary),
@ -50,9 +50,9 @@ class ReceiptForStorage(rlp.Serializable):
class AccountIndexer(object): class AccountIndexer(object):
''' """
Updates address index Updates address index
''' """
def __init__(self, ethDB): def __init__(self, ethDB):
self.db = ethDB self.db = ethDB
@ -62,9 +62,9 @@ class AccountIndexer(object):
self.updateIfNeeded() self.updateIfNeeded()
def get_contract_by_hash(self, contract_hash): def get_contract_by_hash(self, contract_hash):
''' """
get mapped address by its hash, if not found try indexing get mapped address by its hash, if not found try indexing
''' """
address = self.db.reader._get_address_by_hash(contract_hash) address = self.db.reader._get_address_by_hash(contract_hash)
if address is not None: if address is not None:
return address return address
@ -74,9 +74,9 @@ class AccountIndexer(object):
return self.db.reader._get_address_by_hash(contract_hash) return self.db.reader._get_address_by_hash(contract_hash)
def _process(self, startblock): def _process(self, startblock):
''' """
Processesing method Processesing method
''' """
logging.debug("Processing blocks %d to %d" % (startblock, startblock + BATCH_SIZE)) logging.debug("Processing blocks %d to %d" % (startblock, startblock + BATCH_SIZE))
addresses = [] addresses = []
@ -96,9 +96,9 @@ class AccountIndexer(object):
return addresses return addresses
def updateIfNeeded(self): def updateIfNeeded(self):
''' """
update address index update address index
''' """
headBlock = self.db.reader._get_head_block() headBlock = self.db.reader._get_head_block()
if headBlock is not None: if headBlock is not None:
# avoid restarting search if head block is same & we already initialized # avoid restarting search if head block is same & we already initialized
@ -128,7 +128,7 @@ class AccountIndexer(object):
count = 0 count = 0
processed = 0 processed = 0
while (blockNum <= self.lastBlock): while blockNum <= self.lastBlock:
# leveldb cannot be accessed on multiple processes (not even readonly) # leveldb cannot be accessed on multiple processes (not even readonly)
# multithread version performs significantly worse than serial # multithread version performs significantly worse than serial
try: try:
@ -154,4 +154,4 @@ class AccountIndexer(object):
self.db.writer._set_last_indexed_number(self.lastProcessedBlock) self.db.writer._set_last_indexed_number(self.lastProcessedBlock)
print("Finished indexing") print("Finished indexing")
self.lastBlock = self.lastProcessedBlock self.lastBlock = self.lastProcessedBlock

@@ -1,12 +1,12 @@
 import binascii
 import rlp
-from mythril.leveldb.accountindexing import CountableList
-from mythril.leveldb.accountindexing import ReceiptForStorage, AccountIndexer
+from mythril.ethereum.interface.leveldb.accountindexing import CountableList
+from mythril.ethereum.interface.leveldb.accountindexing import ReceiptForStorage, AccountIndexer
 import logging
 from ethereum import utils
 from ethereum.block import BlockHeader, Block
-from mythril.leveldb.state import State
-from mythril.leveldb.eth_db import ETH_DB
+from mythril.ethereum.interface.leveldb.state import State
+from mythril.ethereum.interface.leveldb.eth_db import ETH_DB
 from mythril.ether.ethcontract import ETHContract
 from mythril.exceptions import AddressNotFoundError
@ -26,23 +26,23 @@ address_mapping_head_key = b'accountMapping' # head (latest) number of indexed
def _format_block_number(number): def _format_block_number(number):
''' """
formats block number to uint64 big endian formats block number to uint64 big endian
''' """
return utils.zpad(utils.int_to_big_endian(number), 8) return utils.zpad(utils.int_to_big_endian(number), 8)
def _encode_hex(v): def _encode_hex(v):
''' """
encodes hash as hex encodes hash as hex
''' """
return '0x' + utils.encode_hex(v) return '0x' + utils.encode_hex(v)
class LevelDBReader(object): class LevelDBReader(object):
''' """
level db reading interface, can be used with snapshot level db reading interface, can be used with snapshot
''' """
def __init__(self, db): def __init__(self, db):
self.db = db self.db = db
@ -50,34 +50,34 @@ class LevelDBReader(object):
self.head_state = None self.head_state = None
def _get_head_state(self): def _get_head_state(self):
''' """
gets head state gets head state
''' """
if not self.head_state: if not self.head_state:
root = self._get_head_block().state_root root = self._get_head_block().state_root
self.head_state = State(self.db, root) self.head_state = State(self.db, root)
return self.head_state return self.head_state
def _get_account(self, address): def _get_account(self, address):
''' """
gets account by address gets account by address
''' """
state = self._get_head_state() state = self._get_head_state()
account_address = binascii.a2b_hex(utils.remove_0x_head(address)) account_address = binascii.a2b_hex(utils.remove_0x_head(address))
return state.get_and_cache_account(account_address) return state.get_and_cache_account(account_address)
def _get_block_hash(self, number): def _get_block_hash(self, number):
''' """
gets block hash by block number gets block hash by block number
''' """
num = _format_block_number(number) num = _format_block_number(number)
hash_key = header_prefix + num + num_suffix hash_key = header_prefix + num + num_suffix
return self.db.get(hash_key) return self.db.get(hash_key)
def _get_head_block(self): def _get_head_block(self):
''' """
gets head block header gets head block header
''' """
if not self.head_block_header: if not self.head_block_header:
hash = self.db.get(head_header_key) hash = self.db.get(head_header_key)
num = self._get_block_number(hash) num = self._get_block_number(hash)
@ -91,38 +91,38 @@ class LevelDBReader(object):
return self.head_block_header return self.head_block_header
def _get_block_number(self, hash): def _get_block_number(self, hash):
''' """
gets block number by hash gets block number by hash
''' """
number_key = block_hash_prefix + hash number_key = block_hash_prefix + hash
return self.db.get(number_key) return self.db.get(number_key)
def _get_block_header(self, hash, num): def _get_block_header(self, hash, num):
''' """
get block header by block header hash & number get block header by block header hash & number
''' """
header_key = header_prefix + num + hash header_key = header_prefix + num + hash
block_header_data = self.db.get(header_key) block_header_data = self.db.get(header_key)
header = rlp.decode(block_header_data, sedes=BlockHeader) header = rlp.decode(block_header_data, sedes=BlockHeader)
return header return header
def _get_address_by_hash(self, hash): def _get_address_by_hash(self, hash):
''' """
get mapped address by its hash get mapped address by its hash
''' """
address_key = address_prefix + hash address_key = address_prefix + hash
return self.db.get(address_key) return self.db.get(address_key)
def _get_last_indexed_number(self): def _get_last_indexed_number(self):
''' """
latest indexed block number latest indexed block number
''' """
return self.db.get(address_mapping_head_key) return self.db.get(address_mapping_head_key)
def _get_block_receipts(self, hash, num): def _get_block_receipts(self, hash, num):
''' """
get block transaction receipts by block header hash & number get block transaction receipts by block header hash & number
''' """
number = _format_block_number(num) number = _format_block_number(num)
receipts_key = block_receipts_prefix + number + hash receipts_key = block_receipts_prefix + number + hash
receipts_data = self.db.get(receipts_key) receipts_data = self.db.get(receipts_key)
@ -131,44 +131,44 @@ class LevelDBReader(object):
class LevelDBWriter(object): class LevelDBWriter(object):
''' """
level db writing interface level db writing interface
''' """
def __init__(self, db): def __init__(self, db):
self.db = db self.db = db
self.wb = None self.wb = None
def _set_last_indexed_number(self, number): def _set_last_indexed_number(self, number):
''' """
sets latest indexed block number sets latest indexed block number
''' """
return self.db.put(address_mapping_head_key, _format_block_number(number)) return self.db.put(address_mapping_head_key, _format_block_number(number))
def _start_writing(self): def _start_writing(self):
''' """
start writing a batch start writing a batch
''' """
self.wb = self.db.write_batch() self.wb = self.db.write_batch()
def _commit_batch(self): def _commit_batch(self):
''' """
commit batch commit batch
''' """
self.wb.write() self.wb.write()
def _store_account_address(self, address): def _store_account_address(self, address):
''' """
get block transaction receipts by block header hash & number get block transaction receipts by block header hash & number
''' """
address_key = address_prefix + utils.sha3(address) address_key = address_prefix + utils.sha3(address)
self.wb.put(address_key, address) self.wb.put(address_key, address)
class EthLevelDB(object): class EthLevelDB(object):
''' """
Go-Ethereum LevelDB client class Go-Ethereum LevelDB client class
''' """
def __init__(self, path): def __init__(self, path):
self.path = path self.path = path
@ -177,9 +177,9 @@ class EthLevelDB(object):
self.writer = LevelDBWriter(self.db) self.writer = LevelDBWriter(self.db)
def get_contracts(self): def get_contracts(self):
''' """
iterate through all contracts iterate through all contracts
''' """
for account in self.reader._get_head_state().get_all_accounts(): for account in self.reader._get_head_state().get_all_accounts():
if account.code is not None: if account.code is not None:
code = _encode_hex(account.code) code = _encode_hex(account.code)
@ -188,9 +188,9 @@ class EthLevelDB(object):
yield contract, account.address, account.balance yield contract, account.address, account.balance
def search(self, expression, callback_func): def search(self, expression, callback_func):
''' """
searches through all contract accounts searches through all contract accounts
''' """
cnt = 0 cnt = 0
indexer = AccountIndexer(self) indexer = AccountIndexer(self)
@ -217,9 +217,9 @@ class EthLevelDB(object):
logging.info("Searched %d contracts" % cnt) logging.info("Searched %d contracts" % cnt)
def contract_hash_to_address(self, hash): def contract_hash_to_address(self, hash):
''' """
tries to find corresponding account address tries to find corresponding account address
''' """
address_hash = binascii.a2b_hex(utils.remove_0x_head(hash)) address_hash = binascii.a2b_hex(utils.remove_0x_head(hash))
indexer = AccountIndexer(self) indexer = AccountIndexer(self)
@ -227,17 +227,17 @@ class EthLevelDB(object):
return _encode_hex(indexer.get_contract_by_hash(address_hash)) return _encode_hex(indexer.get_contract_by_hash(address_hash))
def eth_getBlockHeaderByNumber(self, number): def eth_getBlockHeaderByNumber(self, number):
''' """
gets block header by block number gets block header by block number
''' """
hash = self.reader._get_block_hash(number) hash = self.reader._get_block_hash(number)
block_number = _format_block_number(number) block_number = _format_block_number(number)
return self.reader._get_block_header(hash, block_number) return self.reader._get_block_header(hash, block_number)
def eth_getBlockByNumber(self, number): def eth_getBlockByNumber(self, number):
''' """
gets block body by block number gets block body by block number
''' """
block_hash = self.reader._get_block_hash(number) block_hash = self.reader._get_block_hash(number)
block_number = _format_block_number(number) block_number = _format_block_number(number)
body_key = body_prefix + block_number + block_hash body_key = body_prefix + block_number + block_hash
@ -246,22 +246,22 @@ class EthLevelDB(object):
return body return body
def eth_getCode(self, address): def eth_getCode(self, address):
''' """
gets account code gets account code
''' """
account = self.reader._get_account(address) account = self.reader._get_account(address)
return _encode_hex(account.code) return _encode_hex(account.code)
def eth_getBalance(self, address): def eth_getBalance(self, address):
''' """
gets account balance gets account balance
''' """
account = self.reader._get_account(address) account = self.reader._get_account(address)
return account.balance return account.balance
def eth_getStorageAt(self, address, position): def eth_getStorageAt(self, address, position):
''' """
gets account storage data at position gets account storage data at position
''' """
account = self.reader._get_account(address) account = self.reader._get_account(address)
return _encode_hex(utils.zpad(utils.encode_int(account.get_storage_data(position)), 32)) return _encode_hex(utils.zpad(utils.encode_int(account.get_storage_data(position)), 32))

@ -3,27 +3,27 @@ from ethereum.db import BaseDB
class ETH_DB(BaseDB): class ETH_DB(BaseDB):
''' """
adopts pythereum BaseDB using plyvel adopts pythereum BaseDB using plyvel
''' """
def __init__(self, path): def __init__(self, path):
self.db = plyvel.DB(path) self.db = plyvel.DB(path)
def get(self, key): def get(self, key):
''' """
gets value for key gets value for key
''' """
return self.db.get(key) return self.db.get(key)
def put(self, key, value): def put(self, key, value):
''' """
puts value for key puts value for key
''' """
self.db.put(key, value) self.db.put(key, value)
def write_batch(self): def write_batch(self):
''' """
start writing a batch start writing a batch
''' """
return self.db.write_batch() return self.db.write_batch()

@ -32,9 +32,9 @@ STATE_DEFAULTS = {
class Account(rlp.Serializable): class Account(rlp.Serializable):
''' """
adjusted account from ethereum.state adjusted account from ethereum.state
''' """
fields = [ fields = [
('nonce', big_endian_int), ('nonce', big_endian_int),
@ -57,15 +57,15 @@ class Account(rlp.Serializable):
@property @property
def code(self): def code(self):
''' """
code rlp data code rlp data
''' """
return self.db.get(self.code_hash) return self.db.get(self.code_hash)
def get_storage_data(self, key): def get_storage_data(self, key):
''' """
get storage data get storage data
''' """
if key not in self.storage_cache: if key not in self.storage_cache:
v = self.storage_trie.get(utils.encode_int32(key)) v = self.storage_trie.get(utils.encode_int32(key))
self.storage_cache[key] = utils.big_endian_to_int( self.storage_cache[key] = utils.big_endian_to_int(
@ -74,24 +74,24 @@ class Account(rlp.Serializable):
@classmethod @classmethod
def blank_account(cls, db, address, initial_nonce=0): def blank_account(cls, db, address, initial_nonce=0):
''' """
creates a blank account creates a blank account
''' """
db.put(BLANK_HASH, b'') db.put(BLANK_HASH, b'')
o = cls(initial_nonce, 0, trie.BLANK_ROOT, BLANK_HASH, db, address) o = cls(initial_nonce, 0, trie.BLANK_ROOT, BLANK_HASH, db, address)
o.existent_at_start = False o.existent_at_start = False
return o return o
def is_blank(self): def is_blank(self):
''' """
checks if is a blank account checks if is a blank account
''' """
return self.nonce == 0 and self.balance == 0 and self.code_hash == BLANK_HASH return self.nonce == 0 and self.balance == 0 and self.code_hash == BLANK_HASH
class State(): class State:
''' """
adjusted state from ethereum.state adjusted state from ethereum.state
''' """
def __init__(self, db, root): def __init__(self, db, root):
self.db = db self.db = db
@ -101,9 +101,9 @@ class State():
self.cache = {} self.cache = {}
def get_and_cache_account(self, address): def get_and_cache_account(self, address):
''' """
gets and caches an account for an addres, creates blank if not found gets and caches an account for an addres, creates blank if not found
''' """
if address in self.cache: if address in self.cache:
return self.cache[address] return self.cache[address]
rlpdata = self.secure_trie.get(address) rlpdata = self.secure_trie.get(address)
@ -120,9 +120,9 @@ class State():
return o return o
def get_all_accounts(self): def get_all_accounts(self):
''' """
iterates through trie to and yields non-blank leafs as accounts iterates through trie to and yields non-blank leafs as accounts
''' """
for address_hash, rlpdata in self.secure_trie.trie.iter_branch(): for address_hash, rlpdata in self.secure_trie.trie.iter_branch():
if rlpdata != trie.BLANK_NODE: if rlpdata != trie.BLANK_NODE:
yield rlp.decode(rlpdata, Account, db=self.db, address=address_hash) yield rlp.decode(rlpdata, Account, db=self.db, address=address_hash)

@ -20,64 +20,64 @@ class BaseClient(object):
pass pass
def eth_coinbase(self): def eth_coinbase(self):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_coinbase https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_coinbase
TESTED TESTED
''' """
return self._call('eth_coinbase') return self._call('eth_coinbase')
def eth_blockNumber(self): def eth_blockNumber(self):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_blocknumber https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_blocknumber
TESTED TESTED
''' """
return hex_to_dec(self._call('eth_blockNumber')) return hex_to_dec(self._call('eth_blockNumber'))
def eth_getBalance(self, address=None, block=BLOCK_TAG_LATEST): def eth_getBalance(self, address=None, block=BLOCK_TAG_LATEST):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getbalance https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getbalance
TESTED TESTED
''' """
address = address or self.eth_coinbase() address = address or self.eth_coinbase()
block = validate_block(block) block = validate_block(block)
return hex_to_dec(self._call('eth_getBalance', [address, block])) return hex_to_dec(self._call('eth_getBalance', [address, block]))
def eth_getStorageAt(self, address=None, position=0, block=BLOCK_TAG_LATEST): def eth_getStorageAt(self, address=None, position=0, block=BLOCK_TAG_LATEST):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getstorageat https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getstorageat
TESTED TESTED
''' """
block = validate_block(block) block = validate_block(block)
return self._call('eth_getStorageAt', [address, hex(position), block]) return self._call('eth_getStorageAt', [address, hex(position), block])
def eth_getCode(self, address, default_block=BLOCK_TAG_LATEST): def eth_getCode(self, address, default_block=BLOCK_TAG_LATEST):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getcode https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getcode
NEEDS TESTING NEEDS TESTING
''' """
if isinstance(default_block, str): if isinstance(default_block, str):
if default_block not in BLOCK_TAGS: if default_block not in BLOCK_TAGS:
raise ValueError raise ValueError
return self._call('eth_getCode', [address, default_block]) return self._call('eth_getCode', [address, default_block])
def eth_getBlockByNumber(self, block=BLOCK_TAG_LATEST, tx_objects=True): def eth_getBlockByNumber(self, block=BLOCK_TAG_LATEST, tx_objects=True):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber
TESTED TESTED
''' """
block = validate_block(block) block = validate_block(block)
return self._call('eth_getBlockByNumber', [block, tx_objects]) return self._call('eth_getBlockByNumber', [block, tx_objects])
def eth_getTransactionReceipt(self, tx_hash): def eth_getTransactionReceipt(self, tx_hash):
''' """
https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionreceipt https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_gettransactionreceipt
TESTED TESTED
''' """
return self._call('eth_getTransactionReceipt', [tx_hash]) return self._call('eth_getTransactionReceipt', [tx_hash])

@@ -17,9 +17,9 @@ JSON_MEDIA_TYPE = 'application/json'
 This code is adapted from: https://github.com/ConsenSys/ethjsonrpc
 '''
 class EthJsonRpc(BaseClient):
-    '''
+    """
     Ethereum JSON-RPC client class
-    '''
+    """
     def __init__(self, host='localhost', port=GETH_DEFAULT_RPC_PORT, tls=False):
         self.host = host

@ -2,17 +2,17 @@ from .constants import BLOCK_TAGS
def hex_to_dec(x): def hex_to_dec(x):
''' """
Convert hex to decimal Convert hex to decimal
''' """
return int(x, 16) return int(x, 16)
def clean_hex(d): def clean_hex(d):
''' """
Convert decimal to hex and remove the "L" suffix that is appended to large Convert decimal to hex and remove the "L" suffix that is appended to large
numbers numbers
''' """
return hex(d).rstrip('L') return hex(d).rstrip('L')
def validate_block(block): def validate_block(block):
@ -25,14 +25,14 @@ def validate_block(block):
def wei_to_ether(wei): def wei_to_ether(wei):
''' """
Convert wei to ether Convert wei to ether
''' """
return 1.0 * wei / 10**18 return 1.0 * wei / 10**18
def ether_to_wei(ether): def ether_to_wei(ether):
''' """
Convert ether to wei Convert ether to wei
''' """
return ether * 10**18 return ether * 10**18

@@ -5,7 +5,7 @@
 http://www.github.com/ConsenSys/mythril
 """
-import logging
+import logging, coloredlogs
 import json
 import sys
 import argparse
@@ -103,7 +103,10 @@ def main():
     if args.v:
         if 0 <= args.v < 3:
-            logging.basicConfig(level=[logging.NOTSET, logging.INFO, logging.DEBUG][args.v])
+            coloredlogs.install(
+                fmt='%(name)s[%(process)d] %(levelname)s %(message)s',
+                level=[logging.NOTSET, logging.INFO, logging.DEBUG][args.v]
+            )
         else:
             exit_with_error(args.outform, "Invalid -v value, you can find valid values in usage")
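For reference, `coloredlogs.install()` replaces `logging.basicConfig()` here while keeping the same verbosity mapping. A standalone version of the new behaviour, with `verbosity` standing in for the parsed `args.v` value:

```python
import logging
import coloredlogs

verbosity = 2  # stands in for args.v (e.g. -vv on the command line)
coloredlogs.install(
    fmt='%(name)s[%(process)d] %(levelname)s %(message)s',
    level=[logging.NOTSET, logging.INFO, logging.DEBUG][verbosity],
)
logging.getLogger("mythril").debug("debug output is now colorized")
```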

@ -49,7 +49,7 @@ def get_callee_address(global_state:GlobalState, dynamic_loader: DynLoader, symb
try: try:
callee_address = hex(util.get_concrete_int(symbolic_to_address)) callee_address = hex(util.get_concrete_int(symbolic_to_address))
except AttributeError: except AttributeError:
logging.info("Symbolic call encountered") logging.debug("Symbolic call encountered")
match = re.search(r'storage_(\d+)', str(simplify(symbolic_to_address))) match = re.search(r'storage_(\d+)', str(simplify(symbolic_to_address)))
logging.debug("CALL to: " + str(simplify(symbolic_to_address))) logging.debug("CALL to: " + str(simplify(symbolic_to_address)))
@ -58,7 +58,7 @@ def get_callee_address(global_state:GlobalState, dynamic_loader: DynLoader, symb
raise ValueError() raise ValueError()
index = int(match.group(1)) index = int(match.group(1))
logging.info("Dynamic contract address at storage index {}".format(index)) logging.debug("Dynamic contract address at storage index {}".format(index))
# attempt to read the contract address from instance storage # attempt to read the contract address from instance storage
try: try:
@ -90,22 +90,22 @@ def get_callee_account(global_state, callee_address, dynamic_loader):
return global_state.accounts[callee_address] return global_state.accounts[callee_address]
except KeyError: except KeyError:
# We have a valid call address, but contract is not in the modules list # We have a valid call address, but contract is not in the modules list
logging.info("Module with address " + callee_address + " not loaded.") logging.debug("Module with address " + callee_address + " not loaded.")
if dynamic_loader is None: if dynamic_loader is None:
raise ValueError() raise ValueError()
logging.info("Attempting to load dependency") logging.debug("Attempting to load dependency")
try: try:
code = dynamic_loader.dynld(environment.active_account.address, callee_address) code = dynamic_loader.dynld(environment.active_account.address, callee_address)
except Exception as e: except Exception as e:
logging.info("Unable to execute dynamic loader.") logging.debug("Unable to execute dynamic loader.")
raise ValueError() raise ValueError()
if code is None: if code is None:
logging.info("No code returned, not a contract account?") logging.debug("No code returned, not a contract account?")
raise ValueError() raise ValueError()
logging.info("Dependency loaded: " + callee_address) logging.debug("Dependency loaded: " + callee_address)
callee_account = Account(callee_address, code, callee_address, dynamic_loader=dynamic_loader) callee_account = Account(callee_address, code, callee_address, dynamic_loader=dynamic_loader)
accounts[callee_address] = callee_account accounts[callee_address] = callee_account

@@ -12,3 +12,7 @@ class StackOverflowException(VmException):
 class InvalidJumpDestination(VmException):
     pass
+class InvalidInstruction(VmException):
+    pass
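The new `InvalidInstruction` exception slots into the existing `VmException` hierarchy and is raised by the reworked `assert_fail_` / `invalid_` handlers in instructions.py below. A sketch of how it propagates, assuming `VmException` derives directly from `Exception`; `evaluate()` is a hypothetical dispatcher, not the real `Instruction.evaluate`:

```python
class VmException(Exception):
    pass

class InvalidInstruction(VmException):
    pass

def evaluate(opcode):
    # Hypothetical dispatch: the INVALID opcode (0xfe) now raises instead of
    # silently returning an empty list of successor states.
    if opcode == 0xfe:
        raise InvalidInstruction

try:
    evaluate(0xfe)
except VmException as exc:
    # The symbolic VM catches VmException and ends the current execution path.
    print("path ended:", type(exc).__name__)
```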

@ -6,15 +6,16 @@ from ethereum import utils
from z3 import Extract, UDiv, simplify, Concat, ULT, UGT, BitVecNumRef, Not, \ from z3 import Extract, UDiv, simplify, Concat, ULT, UGT, BitVecNumRef, Not, \
is_false, is_expr, ExprRef, URem, SRem, BitVec, Solver, is_true, BitVecVal, If, BoolRef, Or is_false, is_expr, ExprRef, URem, SRem, BitVec, Solver, is_true, BitVecVal, If, BoolRef, Or
import mythril.laser.ethereum.natives as natives
import mythril.laser.ethereum.util as helper import mythril.laser.ethereum.util as helper
from mythril.laser.ethereum import util from mythril.laser.ethereum import util
from mythril.laser.ethereum.call import get_call_parameters from mythril.laser.ethereum.call import get_call_parameters
from mythril.laser.ethereum.evm_exceptions import VmException, StackUnderflowException, InvalidJumpDestination, \
InvalidInstruction
from mythril.laser.ethereum.keccak import KeccakFunctionManager
from mythril.laser.ethereum.state import GlobalState, CalldataType from mythril.laser.ethereum.state import GlobalState, CalldataType
import mythril.laser.ethereum.natives as natives
from mythril.laser.ethereum.transaction import MessageCallTransaction, TransactionStartSignal, \ from mythril.laser.ethereum.transaction import MessageCallTransaction, TransactionStartSignal, \
ContractCreationTransaction ContractCreationTransaction
from mythril.laser.ethereum.evm_exceptions import VmException, StackUnderflowException, InvalidJumpDestination
from mythril.laser.ethereum.keccak import KeccakFunctionManager
TT256 = 2 ** 256 TT256 = 2 ** 256
TT256M1 = 2 ** 256 - 1 TT256M1 = 2 ** 256 - 1
@ -769,7 +770,8 @@ class Instruction:
return self._sload_helper(global_state, str(index)) return self._sload_helper(global_state, str(index))
def _sload_helper(self, global_state, index, constraints=None): @staticmethod
def _sload_helper(global_state, index, constraints=None):
try: try:
data = global_state.environment.active_account.storage[index] data = global_state.environment.active_account.storage[index]
except KeyError: except KeyError:
@ -782,8 +784,8 @@ class Instruction:
global_state.mstate.stack.append(data) global_state.mstate.stack.append(data)
return [global_state] return [global_state]
@staticmethod
def _get_constraints(self, keccak_keys, this_key, argument): def _get_constraints(keccak_keys, this_key, argument):
global keccak_function_manager global keccak_function_manager
for keccak_key in keccak_keys: for keccak_key in keccak_keys:
if keccak_key == this_key: if keccak_key == this_key:
@ -833,7 +835,8 @@ class Instruction:
return self._sstore_helper(global_state, str(index), value) return self._sstore_helper(global_state, str(index), value)
def _sstore_helper(self, global_state, index, value, constraint=None): @staticmethod
def _sstore_helper(global_state, index, value, constraint=None):
try: try:
global_state.environment.active_account = deepcopy(global_state.environment.active_account) global_state.environment.active_account = deepcopy(global_state.environment.active_account)
global_state.accounts[ global_state.accounts[
@ -990,15 +993,23 @@ class Instruction:
@StateTransition() @StateTransition()
def revert_(self, global_state): def revert_(self, global_state):
return [] state = global_state.mstate
offset, length = state.stack.pop(), state.stack.pop()
return_data = [global_state.new_bitvec("return_data", 256)]
try:
return_data = state.memory[util.get_concrete_int(offset):util.get_concrete_int(offset + length)]
except AttributeError:
logging.debug("Return with symbolic length or offset. Not supported")
global_state.current_transaction.end(global_state, return_data=return_data, revert=True)
@StateTransition() @StateTransition()
def assert_fail_(self, global_state): def assert_fail_(self, global_state):
return [] # 0xfe: designated invalid opcode
raise InvalidInstruction
@StateTransition() @StateTransition()
def invalid_(self, global_state): def invalid_(self, global_state):
return [] raise InvalidInstruction
@StateTransition() @StateTransition()
def stop_(self, global_state): def stop_(self, global_state):
@ -1014,7 +1025,7 @@ class Instruction:
callee_address, callee_account, call_data, value, call_data_type, gas, memory_out_offset, memory_out_size = get_call_parameters( callee_address, callee_account, call_data, value, call_data_type, gas, memory_out_offset, memory_out_size = get_call_parameters(
global_state, self.dynamic_loader, True) global_state, self.dynamic_loader, True)
except ValueError as e: except ValueError as e:
logging.info( logging.debug(
"Could not determine required parameters for call, putting fresh symbol on the stack. \n{}".format(e) "Could not determine required parameters for call, putting fresh symbol on the stack. \n{}".format(e)
) )
# TODO: decide what to do in this case # TODO: decide what to do in this case
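The new `revert_` handler reads return data from memory when offset and length are concrete, and falls back to a fresh symbolic value otherwise, before ending the transaction with `revert=True`. A simplified, self-contained sketch of that behaviour; `revert()` and its arguments are plain-Python stand-ins for the LASER state objects:

```python
def revert(stack, memory, end_transaction, new_symbol):
    # Simplified: the real code uses util.get_concrete_int() on z3 expressions.
    offset, length = stack.pop(), stack.pop()
    return_data = [new_symbol("return_data")]
    if isinstance(offset, int) and isinstance(length, int):
        return_data = memory[offset:offset + length]
    end_transaction(return_data=return_data, revert=True)

revert(stack=[3, 0], memory=[0x60, 0x80, 0x52, 0x00],
       end_transaction=lambda **kw: print(kw),
       new_symbol=lambda name: name)
# {'return_data': [96, 128, 82], 'revert': True}
```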

@@ -135,7 +135,9 @@ class MachineStack(list):
     """
     STACK_LIMIT = 1024
-    def __init__(self, default_list=[]):
+    def __init__(self, default_list=None):
+        if default_list is None:
+            default_list = []
         super(MachineStack, self).__init__(default_list)
     def append(self, element):
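Besides the default-argument fix, `MachineStack` exists to enforce the EVM's 1024-item stack limit. A sketch of the idea — `BoundedStack` is a stand-in, and the `append()` body is an assumption since the override's implementation is not part of this diff:

```python
class StackOverflowException(Exception):
    pass

class BoundedStack(list):
    """Stand-in for mythril's MachineStack."""
    STACK_LIMIT = 1024

    def __init__(self, default_list=None):
        if default_list is None:
            default_list = []
        super().__init__(default_list)

    def append(self, element):
        # Assumed behaviour: refuse to grow past the EVM stack limit.
        if len(self) >= self.STACK_LIMIT:
            raise StackOverflowException("EVM stack limit of 1024 reached")
        super().append(element)

stack = BoundedStack(range(1024))
try:
    stack.append(0)
except StackOverflowException as exc:
    print(exc)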

@ -60,7 +60,7 @@ class LaserEVM:
def accounts(self): def accounts(self):
return self.world_state.accounts return self.world_state.accounts
def sym_exec(self, main_address=None, creation_code=None, contract_name=None): def sym_exec(self, main_address=None, creation_code=None, contract_name=None, max_transactions=3):
logging.debug("Starting LASER execution") logging.debug("Starting LASER execution")
self.time = datetime.now() self.time = datetime.now()
@ -77,12 +77,16 @@ class LaserEVM:
# Reset code coverage # Reset code coverage
self.coverage = {} self.coverage = {}
self.time = datetime.now() for i in range(max_transactions):
logging.info("Starting message call transaction") initial_coverage = self._get_covered_instructions()
execute_message_call(self, created_account.address)
self.time = datetime.now() self.time = datetime.now()
execute_message_call(self, created_account.address) logging.info("Starting message call transaction, iteration: {}".format(i))
execute_message_call(self, created_account.address)
end_coverage = self._get_covered_instructions()
if end_coverage == initial_coverage:
break
logging.info("Finished symbolic execution") logging.info("Finished symbolic execution")
logging.info("%d nodes, %d edges, %d total states", len(self.nodes), len(self.edges), self.total_states) logging.info("%d nodes, %d edges, %d total states", len(self.nodes), len(self.edges), self.total_states)
@ -90,6 +94,13 @@ class LaserEVM:
cov = reduce(lambda sum_, val: sum_ + 1 if val else sum_, coverage[1]) / float(coverage[0]) * 100 cov = reduce(lambda sum_, val: sum_ + 1 if val else sum_, coverage[1]) / float(coverage[0]) * 100
logging.info("Achieved {} coverage for code: {}".format(cov, code)) logging.info("Achieved {} coverage for code: {}".format(cov, code))
def _get_covered_instructions(self) -> int:
""" Gets the total number of covered instructions for all accounts in the svm"""
total_covered_instructions = 0
for _, cv in self.coverage.items():
total_covered_instructions += reduce(lambda sum_, val: sum_ + 1 if val else sum_, cv[1])
return total_covered_instructions
def exec(self, create=False): def exec(self, create=False):
for global_state in self.strategy: for global_state in self.strategy:
if self.execution_timeout and not create: if self.execution_timeout and not create:
@ -124,50 +135,69 @@ class LaserEVM:
new_global_states = Instruction(op_code, self.dynamic_loader).evaluate(global_state) new_global_states = Instruction(op_code, self.dynamic_loader).evaluate(global_state)
except VmException as e: except VmException as e:
logging.debug("Encountered a VmException, ending path: `{}`".format(str(e))) transaction, return_global_state = global_state.transaction_stack.pop()
new_global_states = []
if return_global_state is None:
# In this case we don't put an unmodified world state in the open_states list Since in the case of an
# exceptional halt all changes should be discarded, and this world state would not provide us with a
# previously unseen world state
logging.debug("Encountered a VmException, ending path: `{}`".format(str(e)))
new_global_states = []
else:
# First execute the post hook for the transaction ending instruction
self._execute_post_hook(op_code, [global_state])
new_global_states = self._end_message_call(return_global_state, global_state,
revert_changes=True, return_data=None)
except TransactionStartSignal as e: except TransactionStartSignal as start_signal:
# Setup new global state # Setup new global state
new_global_state = e.transaction.initial_global_state() new_global_state = start_signal.transaction.initial_global_state()
new_global_state.transaction_stack = copy(global_state.transaction_stack) + [(e.transaction, global_state)] new_global_state.transaction_stack = copy(global_state.transaction_stack) + [(start_signal.transaction, global_state)]
new_global_state.node = global_state.node new_global_state.node = global_state.node
new_global_state.mstate.constraints = global_state.mstate.constraints new_global_state.mstate.constraints = global_state.mstate.constraints
return [new_global_state], op_code return [new_global_state], op_code
except TransactionEndSignal as e: except TransactionEndSignal as end_signal:
transaction, return_global_state = e.global_state.transaction_stack.pop() transaction, return_global_state = end_signal.global_state.transaction_stack.pop()
if return_global_state is None: if return_global_state is None:
if not isinstance(transaction, ContractCreationTransaction) or transaction.return_data: if (not isinstance(transaction, ContractCreationTransaction) or transaction.return_data) and not end_signal.revert:
e.global_state.world_state.node = global_state.node end_signal.global_state.world_state.node = global_state.node
self.open_states.append(e.global_state.world_state) self.open_states.append(end_signal.global_state.world_state)
new_global_states = [] new_global_states = []
else: else:
# First execute the post hook for the transaction ending instruction # First execute the post hook for the transaction ending instruction
self._execute_post_hook(op_code, [e.global_state]) self._execute_post_hook(op_code, [end_signal.global_state])
# Resume execution of the transaction initializing instruction new_global_states = self._end_message_call(return_global_state, global_state,
op_code = return_global_state.environment.code.instruction_list[return_global_state.mstate.pc]['opcode'] revert_changes=False or end_signal.revert,
return_data=transaction.return_data)
# Set execution result in the return_state self._execute_post_hook(op_code, new_global_states)
return_global_state.last_return_data = transaction.return_data
return_global_state.world_state = copy(global_state.world_state)
return_global_state.environment.active_account = \
global_state.accounts[return_global_state.environment.active_account.address]
# Execute the post instruction handler return new_global_states, op_code
new_global_states = Instruction(op_code, self.dynamic_loader).evaluate(return_global_state, True)
# In order to get a nice call graph we need to set the nodes here def _end_message_call(self, return_global_state, global_state, revert_changes=False, return_data=None):
for state in new_global_states: # Resume execution of the transaction initializing instruction
state.node = global_state.node op_code = return_global_state.environment.code.instruction_list[return_global_state.mstate.pc]['opcode']
self._execute_post_hook(op_code, new_global_states) # Set execution result in the return_state
return_global_state.last_return_data = return_data
if not revert_changes:
return_global_state.world_state = copy(global_state.world_state)
return_global_state.environment.active_account = \
global_state.accounts[return_global_state.environment.active_account.address]
return new_global_states, op_code # Execute the post instruction handler
new_global_states = Instruction(op_code, self.dynamic_loader).evaluate(return_global_state, True)
# In order to get a nice call graph we need to set the nodes here
for state in new_global_states:
state.node = global_state.node
return new_global_states
def _measure_coverage(self, global_state): def _measure_coverage(self, global_state):
code = global_state.environment.code.bytecode code = global_state.environment.code.bytecode
@ -234,7 +264,7 @@ class LaserEVM:
environment.active_function_name = disassembly.addr_to_func[address] environment.active_function_name = disassembly.addr_to_func[address]
new_node.flags |= NodeFlags.FUNC_ENTRY new_node.flags |= NodeFlags.FUNC_ENTRY
logging.info( logging.debug(
"- Entering function " + environment.active_account.contract_name + ":" + new_node.function_name) "- Entering function " + environment.active_account.contract_name + ":" + new_node.function_name)
elif address == 0: elif address == 0:
environment.active_function_name = "fallback" environment.active_function_name = "fallback"
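The reworked `sym_exec()` no longer runs a fixed number of message calls: it keeps firing transactions, up to `max_transactions`, and stops as soon as a round adds no new instruction coverage. A standalone sketch of that loop, with `sym_exec_loop` and the two callables as stand-ins for the LASER machinery:

```python
def sym_exec_loop(execute_message_call, covered_instructions, max_transactions=3):
    for _ in range(max_transactions):
        initial_coverage = covered_instructions()
        execute_message_call()
        if covered_instructions() == initial_coverage:
            break  # the last transaction reached nothing new

# Toy usage: each "transaction" reveals a set of instruction addresses.
coverage = set()
rounds = iter([{0, 2, 4}, {7}, set()])
sym_exec_loop(lambda: coverage.update(next(rounds)), lambda: len(coverage))
print(sorted(coverage))  # [0, 2, 4, 7]
```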

@@ -82,7 +82,7 @@ class TaintRunner:
     """
     @staticmethod
-    def execute(statespace, node, state, initial_stack=[]):
+    def execute(statespace, node, state, initial_stack=None):
         """
         Runs taint analysis on the statespace
         :param statespace: symbolic statespace to run taint analysis on
@@ -91,6 +91,8 @@
         :param stack_indexes: stack indexes to introduce taint
         :return: TaintResult object containing analysis results
         """
+        if initial_stack is None:
+            initial_stack = []
         result = TaintResult()
         transaction_stack_length = len(node.states[0].transaction_stack)
         # Build initial current_node
@@ -107,7 +109,8 @@
             records = TaintRunner.execute_node(node, record, index)
             result.add_records(records)
+            if len(records) == 0:  # continue if there is no record to work on
+                continue
             children = TaintRunner.children(node, statespace, environment, transaction_stack_length)
             for child in children:
                 current_nodes.append((child, records[-1], 0))

@@ -12,10 +12,12 @@ def get_next_transaction_id():
     _next_transaction_id += 1
     return _next_transaction_id
 class TransactionEndSignal(Exception):
     """ Exception raised when a transaction is finalized"""
-    def __init__(self, global_state):
+    def __init__(self, global_state, revert=False):
         self.global_state = global_state
+        self.revert = revert
 class TransactionStartSignal(Exception):
@@ -70,9 +72,9 @@ class MessageCallTransaction:
         return global_state
-    def end(self, global_state, return_data=None):
+    def end(self, global_state, return_data=None, revert=False):
         self.return_data = return_data
-        raise TransactionEndSignal(global_state)
+        raise TransactionEndSignal(global_state, revert)
 class ContractCreationTransaction:
@@ -125,7 +127,7 @@ class ContractCreationTransaction:
         return global_state
-    def end(self, global_state, return_data=None):
+    def end(self, global_state, return_data=None, revert=False):
         if not all([isinstance(element, int) for element in return_data]):
             self.return_data = None
@@ -136,4 +138,6 @@ class ContractCreationTransaction:
         global_state.environment.active_account.code = Disassembly(contract_code)
         self.return_data = global_state.environment.active_account.address
-        raise TransactionEndSignal(global_state)
+        raise TransactionEndSignal(global_state, revert=revert)
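The `revert` flag added here is what lets the VM loop in svm.py discard state changes from reverting transactions. A minimal sketch of the round trip — `end()` is a simplified stand-in for `MessageCallTransaction.end()`, and the dicts stand in for global/world state:

```python
class TransactionEndSignal(Exception):
    def __init__(self, global_state, revert=False):
        self.global_state = global_state
        self.revert = revert

def end(global_state, return_data=None, revert=False):
    raise TransactionEndSignal(global_state, revert)

open_states = []
for op, reverts in (("STOP", False), ("REVERT", True)):
    try:
        end(global_state={"op": op}, revert=reverts)
    except TransactionEndSignal as end_signal:
        # As in LaserEVM.execute_state(): only non-reverting transactions
        # contribute their world state to the next transaction round.
        if not end_signal.revert:
            open_states.append(end_signal.global_state["op"])

print(open_states)  # ['STOP']
```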

@@ -104,11 +104,11 @@ def concrete_int_to_bytes(val):
     # logging.debug("concrete_int_to_bytes " + str(val))
-    try:
-        return (simplify(val).as_long()).to_bytes(32, byteorder='big')
-    except Z3Exception:
+    if type(val) == int:
         return val.to_bytes(32, byteorder='big')
+    return (simplify(val).as_long()).to_bytes(32, byteorder='big')
 def bytearray_to_int(arr):
     o = 0
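The helper now branches on the value's type instead of catching `Z3Exception`: plain ints are converted directly, anything else is treated as a concrete z3 bit-vector. A usage example reproducing the updated logic; the z3 calls (`BitVecVal`, `simplify`, `as_long`) are the standard z3 API:

```python
from z3 import BitVecVal, simplify

def concrete_int_to_bytes(val):
    # Mirrors the updated helper: ints convert directly, everything else is
    # assumed to be a concrete z3 bit-vector expression.
    if type(val) == int:
        return val.to_bytes(32, byteorder='big')
    return (simplify(val).as_long()).to_bytes(32, byteorder='big')

print(concrete_int_to_bytes(255).hex()[-4:])                  # '00ff'
print(concrete_int_to_bytes(BitVecVal(255, 256)).hex()[-4:])  # '00ff'
```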

@@ -20,8 +20,8 @@ import platform
 from mythril.ether import util
 from mythril.ether.ethcontract import ETHContract
 from mythril.ether.soliditycontract import SolidityContract, get_contracts_from_file
-from mythril.rpc.client import EthJsonRpc
-from mythril.rpc.exceptions import ConnectionError
+from mythril.ethereum.interface.rpc.client import EthJsonRpc
+from mythril.ethereum.interface.rpc.exceptions import ConnectionError
 from mythril.support import signatures
 from mythril.support.truffle import analyze_truffle_project
 from mythril.support.loader import DynLoader
@@ -31,7 +31,7 @@ from mythril.analysis.callgraph import generate_graph
 from mythril.analysis.traceexplore import get_serializable_statespace
 from mythril.analysis.security import fire_lasers
 from mythril.analysis.report import Report
-from mythril.leveldb.client import EthLevelDB
+from mythril.ethereum.interface.leveldb.client import EthLevelDB

 # logging.basicConfig(level=logging.DEBUG)
@@ -103,7 +103,8 @@ class Mythril(object):
         self.contracts = []  # loaded contracts

-    def _init_mythril_dir(self):
+    @staticmethod
+    def _init_mythril_dir():
         try:
             mythril_dir = os.environ['MYTHRIL_DIR']
         except KeyError:
@@ -179,7 +180,8 @@ class Mythril(object):
     def analyze_truffle_project(self, *args, **kwargs):
         return analyze_truffle_project(self.sigs, *args, **kwargs)  # just passthru by passing signatures for now

-    def _init_solc_binary(self, version):
+    @staticmethod
+    def _init_solc_binary(version):
         # Figure out solc binary and version
         # Only proper versions are supported. No nightlies, commits etc (such as available in remix)
@@ -317,7 +319,7 @@ class Mythril(object):
         try:
             # import signatures from solidity source
-            self.sigs.import_from_solidity_source(file)
+            self.sigs.import_from_solidity_source(file, solc_binary=self.solc_binary, solc_args=self.solc_args)
             # Save updated function signatures
             self.sigs.write()  # dump signatures to disk (previously opened file or default location)
@@ -382,7 +384,9 @@ class Mythril(object):
         return report

-    def get_state_variable_from_storage(self, address, params=[]):
+    def get_state_variable_from_storage(self, address, params=None):
+        if params is None:
+            params = []
         (position, length, mappings) = (0, 1, [])
         try:
             if params[0] == "mapping":
@@ -433,7 +437,8 @@ class Mythril(object):
             raise CriticalError("Could not connect to RPC server. Make sure that your node is running and that RPC parameters are set correctly.")
         return '\n'.join(outtxt)

-    def disassemble(self, contract):
+    @staticmethod
+    def disassemble(contract):
         return contract.get_easm()

     @staticmethod
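Besides the `mythril.ethereum.interface.*` import moves and the `@staticmethod` conversions, the notable fix in mythril.py is replacing the mutable default `params=[]` with `params=None` plus an in-body guard. A small standalone illustration of why that matters; the function names here are made up for the demo:

```python
def buggy(params=[]):
    # The default list is created once at definition time and shared by
    # every call that omits `params`.
    params.append("x")
    return params


def fixed(params=None):
    if params is None:
        params = []   # fresh list on every call
    params.append("x")
    return params


print(buggy())  # ['x']
print(buggy())  # ['x', 'x']  <- state leaked across calls
print(fixed())  # ['x']
print(fixed())  # ['x']
```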

@@ -37,7 +37,7 @@ class DynLoader:
         m = re.match(r'^(0x[0-9a-fA-F]{40})$', dependency_address)

-        if (m):
+        if m:
             dependency_address = m.group(1)
         else:

@@ -111,6 +111,8 @@ class SignatureDb(object):
         :return: self
         """
         path = path or self.signatures_file
+        directory = os.path.split(path)[0]
         if sync and os.path.exists(path):
             # reload and save if file exists
             with open(path, "r") as f:
@@ -122,7 +124,10 @@ class SignatureDb(object):
             sigs.update(self.signatures)  # reload file and merge cached sigs into what we load from file
             self.signatures = sigs

+        if directory and not os.path.exists(directory):
+            os.makedirs(directory)  # create folder structure if not existS
+
         if not os.path.exists(path):  # creates signatures.json file if it doesn't exist
             open(path, "w").close()
@@ -172,13 +177,13 @@ class SignatureDb(object):
         """
         return self.get(sighash=item)

-    def import_from_solidity_source(self, file_path):
+    def import_from_solidity_source(self, file_path, solc_binary="solc", solc_args=None):
         """
         Import Function Signatures from solidity source files
         :param file_path: solidity source code file path
         :return: self
         """
-        self.signatures.update(SignatureDb.get_sigs_from_file(file_path))
+        self.signatures.update(SignatureDb.get_sigs_from_file(file_path, solc_binary=solc_binary, solc_args=solc_args))
         return self

     @staticmethod
@@ -201,13 +206,15 @@ class SignatureDb(object):
             proxies=proxies))

     @staticmethod
-    def get_sigs_from_file(file_name):
+    def get_sigs_from_file(file_name, solc_binary="solc", solc_args=None):
         """
         :param file_name: accepts a filename
         :return: their signature mappings
         """
         sigs = {}
-        cmd = ["solc", "--hashes", file_name]
+        cmd = [solc_binary, "--hashes", file_name]
+        if solc_args:
+            cmd.extend(solc_args.split())
         try:
             p = Popen(cmd, stdout=PIPE, stderr=PIPE)
             stdout, stderr = p.communicate()
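The signature-database changes make the `solc` binary and extra compiler arguments configurable when harvesting function hashes. A small sketch of the resulting command assembly; the file path and the `--allow-paths` argument are illustrative, not taken from the diff:

```python
def build_hashes_cmd(file_name, solc_binary="solc", solc_args=None):
    # Mirrors the hunk above: start from the configurable binary and
    # append any user-supplied solc arguments.
    cmd = [solc_binary, "--hashes", file_name]
    if solc_args:
        cmd.extend(solc_args.split())
    return cmd


cmd = build_hashes_cmd("contracts/Token.sol", solc_args="--allow-paths .")
print(cmd)  # ['solc', '--hashes', 'contracts/Token.sol', '--allow-paths', '.']

# As in the diff, the command would then be executed with subprocess:
#   from subprocess import PIPE, Popen
#   p = Popen(cmd, stdout=PIPE, stderr=PIPE)
#   stdout, stderr = p.communicate()
```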

@@ -1,3 +1,4 @@
+coloredlogs>=10.0
 configparser>=3.5.0
 coverage
 eth_abi>=1.0.0

@@ -82,6 +82,7 @@ setup(
     packages=find_packages(exclude=['contrib', 'docs', 'tests']),
     install_requires=[
+        'coloredlogs>=10.0',
        'ethereum>=2.3.2',
        'z3-solver>=4.5',
        'requests',
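`coloredlogs` is added to both requirements.txt and setup.py. The diff does not show where it is wired up, so the snippet below is only a minimal, generic usage sketch of the library, not Mythril's actual logging setup:

```python
import logging

import coloredlogs

# install() takes the place of logging.basicConfig() and colorizes records by level.
coloredlogs.install(level=logging.INFO,
                    fmt="%(asctime)s %(name)s %(levelname)s %(message)s")

logging.getLogger("mythril").info("analysis started")
```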

@@ -0,0 +1,51 @@
+{
+    "suicide" : {
+        "_info" : {
+            "comment" : "",
+            "filledwith" : "testeth 1.5.0.dev2-52+commit.d419e0a2",
+            "lllcversion" : "Version: 0.4.26-develop.2018.9.19+commit.785cbf40.Linux.g++",
+            "source" : "src/VMTestsFiller/vmTests/suicideFiller.json",
+            "sourceHash" : "4622c577440f9db4b3954a1de60bf2fac55886dcb0ec4ecaf906c25bc77372e7"
+        },
+        "callcreates" : [
+        ],
+        "env" : {
+            "currentCoinbase" : "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba",
+            "currentDifficulty" : "0x0100",
+            "currentGasLimit" : "0x0f4240",
+            "currentNumber" : "0x00",
+            "currentTimestamp" : "0x01"
+        },
+        "exec" : {
+            "address" : "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",
+            "caller" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
+            "code" : "0x33ff",
+            "data" : "0x",
+            "gas" : "0x0186a0",
+            "gasPrice" : "0x5af3107a4000",
+            "origin" : "0xcd1722f3947def4cf144679da39c4c32bdc35681",
+            "value" : "0x0de0b6b3a7640000"
+        },
+        "gas" : "0x01869e",
+        "logs" : "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
+        "out" : "0x",
+        "post" : {
+            "0xcd1722f3947def4cf144679da39c4c32bdc35681" : {
+                "balance" : "0x152d02c7e14af6800000",
+                "code" : "0x",
+                "nonce" : "0x00",
+                "storage" : {
+                }
+            }
+        },
+        "pre" : {
+            "0x0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6" : {
+                "balance" : "0x152d02c7e14af6800000",
+                "code" : "0x33ff",
+                "nonce" : "0x00",
+                "storage" : {
+                }
+            }
+        }
+    }
+}
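For reference, the fixture's `code` field `0x33ff` is two opcodes, CALLER followed by SELFDESTRUCT (historically named SUICIDE), so the account at `exec.address` destroys itself and credits its balance to the caller, which is why only the caller remains in `post`. A tiny decoding sketch with a hand-written two-entry opcode table (not Mythril's disassembler):

```python
OPCODES = {0x33: "CALLER", 0xFF: "SELFDESTRUCT"}  # 0xff was originally named SUICIDE

code = bytes.fromhex("33ff")
print([OPCODES[byte] for byte in code])  # ['CALLER', 'SELFDESTRUCT']
```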

@@ -12,7 +12,7 @@ import pytest
 evm_test_dir = Path(__file__).parent / 'VMTests'
-test_types = ['vmArithmeticTest', 'vmBitwiseLogicOperation', 'vmPushDupSwapTest']
+test_types = ['vmArithmeticTest', 'vmBitwiseLogicOperation', 'vmPushDupSwapTest', 'vmTests']

 def load_test_data(designations):

@@ -6,13 +6,13 @@ from mythril.laser.ethereum import svm
 from tests import *

-SHA256_TEST = [ (0,False) for i in range(6)]
-RIPEMD160_TEST = [ (0,False) for i in range(6)]
-ECRECOVER_TEST = [ (0,False) for i in range(9)]
-IDENTITY_TEST = [ (0, False) for i in range(4)]
+SHA256_TEST = [(0, False) for _ in range(6)]
+RIPEMD160_TEST = [(0, False) for _ in range(6)]
+ECRECOVER_TEST = [(0, False) for _ in range(9)]
+IDENTITY_TEST = [(0, False) for _ in range(4)]

 SHA256_TEST[0] = (5555555555555555, True) #These are Random numbers to check whether the 'if condition' is entered or not(True means entered)
 SHA256_TEST[1] = (323232325445454546, True)
@@ -98,9 +98,9 @@ def _test_natives(laser_info, test_list, test_name):
     assert(success == len(test_list))


 class NativeTests(BaseTestCase):
-    def runTest(self):
+    @staticmethod
+    def runTest():
         disassembly = SolidityContract('./tests/native_tests.sol').disassembly
         account = Account("0x0000000000000000000000000000000000000000", disassembly)
         accounts = {account.address: account}

@@ -1,6 +1,6 @@
 from unittest import TestCase
-from mythril.rpc.client import EthJsonRpc
+from mythril.ethereum.interface.rpc.client import EthJsonRpc


 class RpcTest(TestCase):
     client = None
