From 70573ffdd018468ad02a9970c163db9469826c09 Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Wed, 13 Jun 2018 11:29:26 +0200 Subject: [PATCH 01/17] Adding base file for solidnotary extension to mythril --- mythril/analysis/modules/build_traces.py | 0 mythril/analysis/modules/dummy.py | 0 mythril/solidnotary.py | 0 mythril/solidnotary/__init__.py | 0 mythril/solidnotary/annotation.py | 0 5 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 mythril/analysis/modules/build_traces.py create mode 100644 mythril/analysis/modules/dummy.py create mode 100644 mythril/solidnotary.py create mode 100644 mythril/solidnotary/__init__.py create mode 100644 mythril/solidnotary/annotation.py diff --git a/mythril/analysis/modules/build_traces.py b/mythril/analysis/modules/build_traces.py new file mode 100644 index 00000000..e69de29b diff --git a/mythril/analysis/modules/dummy.py b/mythril/analysis/modules/dummy.py new file mode 100644 index 00000000..e69de29b diff --git a/mythril/solidnotary.py b/mythril/solidnotary.py new file mode 100644 index 00000000..e69de29b diff --git a/mythril/solidnotary/__init__.py b/mythril/solidnotary/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/mythril/solidnotary/annotation.py b/mythril/solidnotary/annotation.py new file mode 100644 index 00000000..e69de29b From c3131a579dd152b702485146eafd4ecdf1c35a5a Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Wed, 13 Jun 2018 11:30:39 +0200 Subject: [PATCH 02/17] Add annotation files such as the wrapper in front of the analysis and the annotation class structures --- annotationWrapper.py | 69 +++++++++++++++++++++++++++++++ mythril/analysis/modules/dummy.py | 33 +++++++++++++++ mythril/solidnotary/annotation.py | 28 +++++++++++++ 3 files changed, 130 insertions(+) create mode 100644 annotationWrapper.py diff --git a/annotationWrapper.py b/annotationWrapper.py new file mode 100644 index 00000000..02337e84 --- /dev/null +++ b/annotationWrapper.py @@ -0,0 +1,69 @@ +""" + 
Parses the files for annotations to extract the information and do some transformations on the code. + On return the Annotation information is used to change mythrils output regarding the changes code pieces + +""" + +from glob import glob +import re, sys +import json + +newlines = ["\r\n", "\r", "\n"] + +def find_all(a_str, sub): + start = 0 + while True: + start = a_str.find(sub, start) + if start == -1: + return + yield start + start += len(sub) + +def count_elements(source, elements): + ret = 0 + for element in elements: + ret += source.count(element) + return ret + + +def replace_index(text, toReplace, replacement, index): + return text[:index] + replacement + text[(index + len(toReplace)):] + +""" + Here it might be better to split annotations into the containing constraint an the prefix and sufix +""" +def parse_annotation_info(filedata): + annotations = [] + for inv in re.findall(r'//invariant\(([^\)]+)\)(\r\n|\r|\n)', filedata): + match_inv = "//invariant(" + inv[0] + ")" + for pos in find_all(filedata, match_inv + inv[1]): + line = count_elements(filedata[:pos], newlines) + 1 + col = pos - max(map(lambda x: filedata[:pos].rfind(x), newlines)) + annotations.append((pos, line, col, '//invariant(', inv[0], ")", inv[1])) + return set(annotations) + + +def read_write_file(filename): + with open(filename, 'r') as file : + filedata = file.read() + + annotations = parse_annotation_info(filedata) + + annotations = sorted(list(annotations), key=lambda x: x[0], reverse=True) + for annotation in annotations: + filedata = replace_index(filedata, annotation[3] + annotation[4] + annotation[5] + annotation[6], "assert(" + + annotation[4] + ");" + annotation[6], annotation[0]) + # Replace the target string + # filedata = filedata.replace('@ensure', '@invariant') + # filedata = filedata.replace('@invariant', '@ensure') + + with open(filename, 'w') as file: + file.write(filedata) + return annotations + +annot_map = {} + +for sol_file in glob("./*.sol"): + 
annot_map[sol_file] = read_write_file(sol_file) +json.dump(annot_map, sys.stdout) +print("#end annotations#") diff --git a/mythril/analysis/modules/dummy.py b/mythril/analysis/modules/dummy.py index e69de29b..0d877f85 100644 --- a/mythril/analysis/modules/dummy.py +++ b/mythril/analysis/modules/dummy.py @@ -0,0 +1,33 @@ +from mythril.analysis.report import Issue +import logging + + +''' + To print content of the statespace after it was build. +''' + +def print_obj(obj): + print() + print(obj) + # print(dir(obj)) + print() + + +def execute(statespace): + + logging.debug("Executing module: Transaction End") + + issues = [] + + for k in statespace.nodes: + node = statespace.nodes[k] + + for state in node.states: + + instruction = state.get_current_instruction() + + if(instruction['opcode'] == "STOP"): + print_obj(state.environment.active_account.storage) + # print("opc: {}, add: {} {}".format(instruction['opcode'], instruction['address'], instruction['argument'] if 'argument' in instruction else "")) + + return issues diff --git a/mythril/solidnotary/annotation.py b/mythril/solidnotary/annotation.py index e69de29b..cbf95ce1 100644 --- a/mythril/solidnotary/annotation.py +++ b/mythril/solidnotary/annotation.py @@ -0,0 +1,28 @@ +from re import search + +class Annotation: + + def __init__(self, annstring, lineno, fileoffset): + self.annstring = annstring + + annotation = search(r'@(?P[^\{\}]*)(\{(?P.*)\})?', annstring) + if not annotation: + raise SyntaxError("{} is not a correct annotation".format(annstring)) + + self.aname = annotation['aname'] + self.acontent = annotation['acontent'] + self.lineno = lineno + self.length = len(annstring) + self.fileoffset = fileoffset + +class ContractAnnotation(Annotation): + pass + +class MemberAnnotation(Annotation): + pass + +class InlineAnnotation(Annotation): + pass + +class ContractAnnotation(Annotation): + pass From 0f458e7541f6118d64089c403d6bbb3fe1e881b2 Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Wed, 13 Jun 2018 
11:33:30 +0200 Subject: [PATCH 03/17] Add the first code to build transaction traces from the storage state at STOP instruction and a combination strategy to build felxibly large trace chain combinations. Starting from the trace of a constructor and using the traces of the external and public functions of a contract. All possible programmstates to a certain depth can be reached. --- mythril/analysis/modules/build_traces.py | 22 ++++++++++ mythril/solidnotary.py | 27 ++++++++++++ mythril/solidnotary/transactiontrace.py | 54 ++++++++++++++++++++++++ 3 files changed, 103 insertions(+) create mode 100644 mythril/solidnotary/transactiontrace.py diff --git a/mythril/analysis/modules/build_traces.py b/mythril/analysis/modules/build_traces.py index e69de29b..d20dae21 100644 --- a/mythril/analysis/modules/build_traces.py +++ b/mythril/analysis/modules/build_traces.py @@ -0,0 +1,22 @@ +from mythril.analysis.report import Issue +import logging + + +''' + Build execution traces from the statespace +''' + +def print_obj(obj): + print() + print(obj) + # print(dir(obj)) + print() + + +def execute(statespace): + + logging.debug("Executing module: Transaction End") + + traces = [] + + return [] diff --git a/mythril/solidnotary.py b/mythril/solidnotary.py index e69de29b..2207b26f 100644 --- a/mythril/solidnotary.py +++ b/mythril/solidnotary.py @@ -0,0 +1,27 @@ +import logging +from mythril.solidnotary.transactiontrace import TransactionTrace + +class SolidNotary: + + def __init__(self): + # Todo Parse Annotations and store them in an additional structure + # Todo receive a list of files or a file, these are modified for the analysis + pass + + def notarize(self): + # Todo Instantiate an instance of Mythril, analyze and print the result + # Todo Find how they are storing results + pass + + def get_transaction_traces(statespace): + logging.debug("Executing module: Transaction End") + + traces = [] + + for k in statespace.nodes: + node = statespace.nodes[k] + for state in node.states: 
+ instruction = state.get_current_instruction() + if instruction['opcode'] == "STOP": + traces.append(TransactionTrace(state.environment.active_account.storage)) + return traces diff --git a/mythril/solidnotary/transactiontrace.py b/mythril/solidnotary/transactiontrace.py new file mode 100644 index 00000000..cee4b6de --- /dev/null +++ b/mythril/solidnotary/transactiontrace.py @@ -0,0 +1,54 @@ +class TransactionTrace: + + def __init__(self, storage): + self.storage = storage + # Todo Identifiy addional trace information such as blocknumber and more + + """ + Applies the new trace tt on a possibly even changed trace self. + """ + def apply_trace(self, tt): + if tt is None: + return self + # Todo implement application of a trace on a existing trace. + return None + + def apply_traces_parallel(self, traces): + combined_traces = [] + for trace in traces: + combined_traces.append(self.apply_trace(trace)) + return combined_traces + + def apply_exact_trace_levels(self, traces, depth): + # Todo maybe some faster trace build not building one level at a time to e.g. + # Todo reach level 17 but build 2, then 4, then 8 and then 16 then 17 + trace_lvl_n = [self] + for i in range(depth): + trace_lvl_np1 = [] + for trace in trace_lvl_n: + trace_lvl_np1.append(trace.apply_traces_parallel(traces)) + if deep_equals(trace_lvl_np1, trace_lvl_n): # Fixpoint detected, function needs to ignore lists, dicts and objects. 
+ return trace_lvl_n + trace_lvl_n = trace_lvl_np1 + return trace_lvl_n + + def apply_up_to_trace_levels(self, traces, depth): + traces_up_to = [[self]] # elements are trace_levels + for i in range(depth): + trace_lvl_np1 = [] + for trace in traces_up_to[-1]: + trace_lvl_np1.append(trace.apply_traces_parallel(traces)) + for trace_lvl_i in traces_up_to: + # the following might be faster to check when using a content representing hash + if deep_equals(trace_lvl_np1, trace_lvl_i): # cycle in the traces of trace chains detected: levels + # while repeat themselves, function needs to ignore lists, dicts and objects. + return traces_up_to + traces_up_to.append(trace_lvl_np1) + return traces_up_to + + """ + Either do only deep checing here and use the proper trace or storage_slot reduction in the apply function. Or do + both here. + """ + def deep_equals(trace_lvl1, trace_lvl2): + pass From 02aa2952e4336d7c4ebaaca412d84baed8426e79 Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Thu, 14 Jun 2018 14:51:20 +0200 Subject: [PATCH 04/17] Adding first working transacation application purely working on the stack and not considering the combination of constraints --- mythril/analysis/modules/build_traces.py | 8 +++++ mythril/analysis/modules/dummy.py | 6 ++-- mythril/{ => solidnotary}/solidnotary.py | 4 +-- mythril/solidnotary/transactiontrace.py | 42 +++++++++++++++++++----- 4 files changed, 47 insertions(+), 13 deletions(-) rename mythril/{ => solidnotary}/solidnotary.py (92%) diff --git a/mythril/analysis/modules/build_traces.py b/mythril/analysis/modules/build_traces.py index d20dae21..73a39a07 100644 --- a/mythril/analysis/modules/build_traces.py +++ b/mythril/analysis/modules/build_traces.py @@ -1,4 +1,6 @@ from mythril.analysis.report import Issue +from mythril.solidnotary.transactiontrace import TransactionTrace +from mythril.solidnotary.solidnotary import SolidNotary import logging @@ -19,4 +21,10 @@ def execute(statespace): traces = [] + traces = 
SolidNotary().get_transaction_traces(statespace) + + print("==== Second level traces ====") + for trace in traces: + print(trace.apply_traces_parallel(traces)) + return [] diff --git a/mythril/analysis/modules/dummy.py b/mythril/analysis/modules/dummy.py index 0d877f85..f795504c 100644 --- a/mythril/analysis/modules/dummy.py +++ b/mythril/analysis/modules/dummy.py @@ -9,7 +9,8 @@ import logging def print_obj(obj): print() print(obj) - # print(dir(obj)) + print(type(obj)) + print(dir(obj)) print() @@ -27,7 +28,8 @@ def execute(statespace): instruction = state.get_current_instruction() if(instruction['opcode'] == "STOP"): - print_obj(state.environment.active_account.storage) + for k,v in state.environment.active_account.storage.items(): + print_obj(v) # print("opc: {}, add: {} {}".format(instruction['opcode'], instruction['address'], instruction['argument'] if 'argument' in instruction else "")) return issues diff --git a/mythril/solidnotary.py b/mythril/solidnotary/solidnotary.py similarity index 92% rename from mythril/solidnotary.py rename to mythril/solidnotary/solidnotary.py index 2207b26f..dd5f0806 100644 --- a/mythril/solidnotary.py +++ b/mythril/solidnotary/solidnotary.py @@ -13,7 +13,7 @@ class SolidNotary: # Todo Find how they are storing results pass - def get_transaction_traces(statespace): + def get_transaction_traces(self, statespace): logging.debug("Executing module: Transaction End") traces = [] @@ -24,4 +24,4 @@ class SolidNotary: instruction = state.get_current_instruction() if instruction['opcode'] == "STOP": traces.append(TransactionTrace(state.environment.active_account.storage)) - return traces + return traces diff --git a/mythril/solidnotary/transactiontrace.py b/mythril/solidnotary/transactiontrace.py index cee4b6de..d2937d66 100644 --- a/mythril/solidnotary/transactiontrace.py +++ b/mythril/solidnotary/transactiontrace.py @@ -1,16 +1,45 @@ +from z3 import * + class TransactionTrace: def __init__(self, storage): self.storage = storage + # Todo 
the constraints of a trace are probably also important here and have to be somehow aggregated # Todo Identifiy addional trace information such as blocknumber and more + """ + Either do only deep checing here and use the proper trace or storage_slot reduction in the apply function. Or do + both here. + """ + + def deep_equals(trace_lvl1, trace_lvl2): + return set(trace_lvl1) == set(trace_lvl2) # Todo Impelement an ACTUAL deep comparison + + def simplify_storage(self): + for k,v in self.storage.items(): + # Todo explore the arguments of this storage simplification in z3 to find ways to further simplify and to + # sort this expressions for equality comparison + self.storage[k] = simplify(v) + """ Applies the new trace tt on a possibly even changed trace self. """ def apply_trace(self, tt): if tt is None: return self - # Todo implement application of a trace on a existing trace. + subs_map = list(map(lambda x: (BitVec("storage_" + str(x[0]), 256), x[1]), tt.storage.items())) + print("_________") + print(subs_map) + print(self.storage) + print("+++++++++") + for k,v in self.storage.items(): + self.storage[k] = substitute(v, subs_map) + # Todo Add combination of constraints, this might by tricky if we cannot identify which entrancy constraints of + # self can be omitted (e.g. when related storage locations were overwritten) + print(self.storage) + print("=========") + self.simplify_storage() + print(self.storage) return None def apply_traces_parallel(self, traces): @@ -27,7 +56,7 @@ class TransactionTrace: trace_lvl_np1 = [] for trace in trace_lvl_n: trace_lvl_np1.append(trace.apply_traces_parallel(traces)) - if deep_equals(trace_lvl_np1, trace_lvl_n): # Fixpoint detected, function needs to ignore lists, dicts and objects. + if TransactionTrace.deep_equals(trace_lvl_np1, trace_lvl_n): # Fixpoint detected, function needs to ignore lists, dicts and objects. 
return trace_lvl_n trace_lvl_n = trace_lvl_np1 return trace_lvl_n @@ -40,15 +69,10 @@ class TransactionTrace: trace_lvl_np1.append(trace.apply_traces_parallel(traces)) for trace_lvl_i in traces_up_to: # the following might be faster to check when using a content representing hash - if deep_equals(trace_lvl_np1, trace_lvl_i): # cycle in the traces of trace chains detected: levels + if TransactionTrace.deep_equals(trace_lvl_np1, trace_lvl_i): # cycle in the traces of trace chains detected: levels # while repeat themselves, function needs to ignore lists, dicts and objects. return traces_up_to traces_up_to.append(trace_lvl_np1) return traces_up_to - """ - Either do only deep checing here and use the proper trace or storage_slot reduction in the apply function. Or do - both here. - """ - def deep_equals(trace_lvl1, trace_lvl2): - pass + From df6a88e0621b27751e103194a61008cb4f0b91ea Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Thu, 14 Jun 2018 15:52:53 +0200 Subject: [PATCH 05/17] Building transactionchains of up to length 4 for trial, correcting some mistakes when creating transaction and combining them --- mythril/analysis/modules/build_traces.py | 3 ++- mythril/solidnotary/transactiontrace.py | 22 +++++++++++++--------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/mythril/analysis/modules/build_traces.py b/mythril/analysis/modules/build_traces.py index 73a39a07..2e6f7c02 100644 --- a/mythril/analysis/modules/build_traces.py +++ b/mythril/analysis/modules/build_traces.py @@ -25,6 +25,7 @@ def execute(statespace): print("==== Second level traces ====") for trace in traces: - print(trace.apply_traces_parallel(traces)) + print(trace.apply_up_to_trace_levels(traces, 4)) + return [] diff --git a/mythril/solidnotary/transactiontrace.py b/mythril/solidnotary/transactiontrace.py index d2937d66..0c52d86f 100644 --- a/mythril/solidnotary/transactiontrace.py +++ b/mythril/solidnotary/transactiontrace.py @@ -1,4 +1,5 @@ from z3 import * +from copy import 
deepcopy class TransactionTrace: @@ -27,20 +28,23 @@ class TransactionTrace: def apply_trace(self, tt): if tt is None: return self - subs_map = list(map(lambda x: (BitVec("storage_" + str(x[0]), 256), x[1]), tt.storage.items())) + new_trace = deepcopy(tt) + subs_map = list(map(lambda x: (BitVec("storage_" + str(x[0]), 256), x[1]), self.storage.items())) print("_________") print(subs_map) print(self.storage) + print("#########") + print(new_trace.storage) print("+++++++++") - for k,v in self.storage.items(): - self.storage[k] = substitute(v, subs_map) + for k,v in new_trace.storage.items(): + new_trace.storage[k] = substitute(v, subs_map) # Todo Add combination of constraints, this might by tricky if we cannot identify which entrancy constraints of # self can be omitted (e.g. when related storage locations were overwritten) - print(self.storage) + print(new_trace) print("=========") - self.simplify_storage() - print(self.storage) - return None + new_trace.simplify_storage() + print(new_trace.storage) + return new_trace def apply_traces_parallel(self, traces): combined_traces = [] @@ -55,7 +59,7 @@ class TransactionTrace: for i in range(depth): trace_lvl_np1 = [] for trace in trace_lvl_n: - trace_lvl_np1.append(trace.apply_traces_parallel(traces)) + trace_lvl_np1.extend(trace.apply_traces_parallel(traces)) if TransactionTrace.deep_equals(trace_lvl_np1, trace_lvl_n): # Fixpoint detected, function needs to ignore lists, dicts and objects. 
return trace_lvl_n trace_lvl_n = trace_lvl_np1 @@ -66,7 +70,7 @@ class TransactionTrace: for i in range(depth): trace_lvl_np1 = [] for trace in traces_up_to[-1]: - trace_lvl_np1.append(trace.apply_traces_parallel(traces)) + trace_lvl_np1.extend(trace.apply_traces_parallel(traces)) for trace_lvl_i in traces_up_to: # the following might be faster to check when using a content representing hash if TransactionTrace.deep_equals(trace_lvl_np1, trace_lvl_i): # cycle in the traces of trace chains detected: levels From 62ae88b652c07cb76c515bdd5d0b41ca89ead82d Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Wed, 20 Jun 2018 13:03:23 +0200 Subject: [PATCH 06/17] Adding chaining of transactions, using z3 substitution to carry over intertransactional sym. values and combine constraints. Usage of z3 simplification on a (currently only) singular constraint value. and satisfiability checking to consider only valid transactions. --- mythril/analysis/modules/build_traces.py | 33 +++-- mythril/analysis/modules/dummy.py | 20 +-- mythril/solidnotary/solidnotary.py | 48 ++++++-- mythril/solidnotary/transactiontrace.py | 150 ++++++++++++++++++++--- mythril/solidnotary/z3utility.py | 19 +++ 5 files changed, 232 insertions(+), 38 deletions(-) create mode 100644 mythril/solidnotary/z3utility.py diff --git a/mythril/analysis/modules/build_traces.py b/mythril/analysis/modules/build_traces.py index 2e6f7c02..0d6bcd38 100644 --- a/mythril/analysis/modules/build_traces.py +++ b/mythril/analysis/modules/build_traces.py @@ -1,6 +1,7 @@ from mythril.analysis.report import Issue from mythril.solidnotary.transactiontrace import TransactionTrace -from mythril.solidnotary.solidnotary import SolidNotary +from mythril.solidnotary.solidnotary import get_transaction_traces +from mythril.solidnotary.z3utility import are_satisfiable import logging @@ -10,8 +11,12 @@ import logging def print_obj(obj): print() + print(type(obj)) print(obj) - # print(dir(obj)) + print(dir(obj)) + print(obj.decl()) + 
print(obj.params()) + print(obj.children()) print() @@ -21,11 +26,25 @@ def execute(statespace): traces = [] - traces = SolidNotary().get_transaction_traces(statespace) - - print("==== Second level traces ====") + traces = get_transaction_traces(statespace) for trace in traces: - print(trace.apply_up_to_trace_levels(traces, 4)) - + comp_trace_lvls = trace.apply_up_to_trace_levels(traces, 3) + for trace_lvl in comp_trace_lvls: + for t in trace_lvl: + if t.lvl == 4: + t.pp_trace() + +# for trace in traces: +# trace.pp_trace() + + #print("==== Second level traces ====") + #for trace in traces: + # comp_trace_lvls = trace.apply_up_to_trace_levels(traces, 1) + #for trace_lvl in range(len(comp_trace_lvls)): + # print("\nTrace level: " + str(trace_lvl)) + #for comp_trace in comp_trace_lvls[trace_lvl]: + # print(comp_trace.storage) + # for k, s in comp_trace.storage.items(): + # print_obj(s) return [] diff --git a/mythril/analysis/modules/dummy.py b/mythril/analysis/modules/dummy.py index f795504c..44b0879f 100644 --- a/mythril/analysis/modules/dummy.py +++ b/mythril/analysis/modules/dummy.py @@ -1,5 +1,7 @@ from mythril.analysis.report import Issue import logging +from z3 import * +from mythril.solidnotary.z3utility import simplify_constraints, are_satisfiable ''' @@ -20,16 +22,20 @@ def execute(statespace): issues = [] - for k in statespace.nodes: - node = statespace.nodes[k] +# for k in statespace.nodes: +# node = statespace.nodes[k] - for state in node.states: +# for state in node.states: - instruction = state.get_current_instruction() +# instruction = state.get_current_instruction() - if(instruction['opcode'] == "STOP"): - for k,v in state.environment.active_account.storage.items(): - print_obj(v) +# if(instruction['opcode'] == "STOP"): +# print() + #print(state.environment.active_account.storage) + # print(state.mstate.constraints) + #simpl_const = simplify_constraints(state.mstate.constraints) + #print(simpl_const) + #print(are_satisfiable(simpl_const)) # 
print("opc: {}, add: {} {}".format(instruction['opcode'], instruction['address'], instruction['argument'] if 'argument' in instruction else "")) return issues diff --git a/mythril/solidnotary/solidnotary.py b/mythril/solidnotary/solidnotary.py index dd5f0806..07798a45 100644 --- a/mythril/solidnotary/solidnotary.py +++ b/mythril/solidnotary/solidnotary.py @@ -1,5 +1,10 @@ import logging from mythril.solidnotary.transactiontrace import TransactionTrace +from mythril.solidnotary.z3utility import are_satisfiable +from laser.ethereum.svm import Environment, GlobalState, CalldataType +from z3 import BitVec, simplify, is_false, is_bool, is_true, Solver, sat +from copy import deepcopy + class SolidNotary: @@ -13,15 +18,38 @@ class SolidNotary: # Todo Find how they are storing results pass - def get_transaction_traces(self, statespace): - logging.debug("Executing module: Transaction End") +def get_transaction_traces(statespace): + logging.debug("Executing module: Transaction End") + + traces = [] + + for k in statespace.nodes: + node = statespace.nodes[k] + for state in node.states: + instruction = state.get_current_instruction() + if instruction['opcode'] == "STOP": + if are_satisfiable(state.mstate.constraints): + traces.append(TransactionTrace(state.environment.active_account.storage, state.mstate.constraints)) + return traces + +def get_t_indexed_environment(active_account, index): + + # Initialize the execution environment + + environment = Environment( + active_account, + BitVec("caller_"+str(index), 256), + [], + BitVec("gasprice_"+str(index), 256), + BitVec("callvalue_"+str(index), 256), + BitVec("origin_"+str(index), 256), + calldata_type=CalldataType.SYMBOLIC, + ) + + return environment - traces = [] +def get_t_indexed_globstate(active_account, index): + environment = get_t_indexed_environment(active_account, index) + # Todo is this just some set of preset accounts? 
How should we deal with it + return GlobalState(self.accounts, environment) - for k in statespace.nodes: - node = statespace.nodes[k] - for state in node.states: - instruction = state.get_current_instruction() - if instruction['opcode'] == "STOP": - traces.append(TransactionTrace(state.environment.active_account.storage)) - return traces diff --git a/mythril/solidnotary/transactiontrace.py b/mythril/solidnotary/transactiontrace.py index 0c52d86f..e8b4c9a3 100644 --- a/mythril/solidnotary/transactiontrace.py +++ b/mythril/solidnotary/transactiontrace.py @@ -1,13 +1,128 @@ from z3 import * from copy import deepcopy +import re +from mythril.solidnotary.z3utility import are_satisfiable, simplify_constraints + +""" + Returns whether or the specified symbolic string stands for a data value that can be different from transaction to + transaction without the need of an intermediate call to the contract (e.g. a transaction params, blocknumber, ...) +""" + + +def is_t_variable(var): + var = str(var) + if (var.startswith("caller") + or var.startswith("gasprice") + or var.startswith("callvalue") + or var.startswith("origin") + or var.startswith("calldata_") + or var.startswith("calldatasize_") + or var.startswith("balance_at") + or var.startswith("KECCAC_mem_") + or var.startswith("keccac_") + or var.startswith("gasprice") + or var.startswith("extcodesize") + or var.startswith("returndatasize") + # or var.startswith(var, "blockhash_block_") should not change between transactions + or var.startswith("coinbase") + or var.startswith("timestamp") + or var.startswith("block_number") + or var.startswith("block_difficulty") + or var.startswith("block_gaslimit") + or var.startswith("mem_") + or var.startswith("msize") + or var.startswith("gas") + or var.startswith("retval_") + or var.startswith("keccac_")): + return True + else: + return False + + +def filter_for_t_variable_data(sym_vars): + return list(filter(lambda x: is_t_variable(x), sym_vars)) class TransactionTrace: - def 
__init__(self, storage): - self.storage = storage + def __init__(self, storage, constraints, lvl=1): + self.storage = storage # Todo give all non storage symbolic values that can be different every transaction the number one + self.constraints = constraints # Todo eliminate all constraints that are not regarding the beginning of the transaction may not be necessary + # eliminate all constraints that only contain names not in the set of names from storage + self.constraints = simplify_constraints(self.constraints) # Todo simplification of the sum of constraints + self.tran_constraints = deepcopy(self.constraints) + self.lvl = lvl + self.sym_names = self.extract_sym_names_from_storage() + self.sym_names.extend(self.extract_sym_names_from_constraints()) + if lvl == 1: + self.set_transaction_idx() + # Todo the constraints of a trace are probably also important here and have to be somehow aggregated # Todo Identifiy addional trace information such as blocknumber and more + def __str__(self): + return str(self.as_dict()) + + def as_dict(self): + + return {'lvl': self.lvl, 'storage': str(self.storage), 'constraints': str(self.constraints)} + + def pp_trace(self): + print() + print("Trace lvl: {}".format(self.lvl)) + print("Storage: {}".format({k: str(v).replace("\n", " ") for k, v in self.storage.items()})) + print("Constraints: {}".format(list(map(lambda x: str(x).replace("\n", " "), self.constraints)))) + print() + + + def add_transaction_idx(self, offset): + new_names = [] + for name in self.sym_names: + matched_name = re.search(r't([0-9]+)(_.*)', name) + num = int(matched_name.group(1)) + offset + new_names.append("t" + str(num) + matched_name.group(2)) + repl_tup = list(zip(self.sym_names, new_names)) + + self.substitute_bv_names(repl_tup) + + self.sym_names = new_names + + def set_transaction_idx(self): + repl_tup = [] + new_sym_names = [] + for name in self.sym_names: + repl_tup.append((name, "t1_" + name)) + new_sym_names.append("t1_" + name) + self.sym_names = 
new_sym_names + self.substitute_bv_names(repl_tup) + + def substitute_bv_names(self, subs_tuple): + subs_tuples = list(map(lambda name_t: (BitVec(name_t[0], 256), BitVec(name_t[1], 256)), subs_tuple)) + for s_num, slot in self.storage.items(): + self.storage[s_num] = substitute(slot, subs_tuples) + for c_idx in range(len(self.constraints)): + self.constraints[c_idx] = substitute(self.constraints[c_idx], subs_tuples) + + def extract_sym_names(self, obj): + if (not hasattr(obj, 'children') or len(obj.children()) == 0) and hasattr(obj, 'decl') : + return [str(obj.decl())] + else: + sym_vars = [] + for c in obj.children(): + sym_vars.extend(self.extract_sym_names(c)) + return sym_vars + + def extract_sym_names_from_constraints(self): + sym_names = [] + for k,v in self.storage.items(): + sym_names.extend(self.extract_sym_names(v)) + return filter_for_t_variable_data(sym_names) + + def extract_sym_names_from_storage(self): + sym_names = [] + for v in self.constraints: + sym_names.extend(self.extract_sym_names(v)) + return filter_for_t_variable_data(sym_names) # Todo Check whether here it is the right choice too, to filter ... + """ Either do only deep checing here and use the proper trace or storage_slot reduction in the apply function. Or do both here. 
@@ -29,28 +144,28 @@ class TransactionTrace: if tt is None: return self new_trace = deepcopy(tt) + new_trace.add_transaction_idx(self.lvl) subs_map = list(map(lambda x: (BitVec("storage_" + str(x[0]), 256), x[1]), self.storage.items())) - print("_________") - print(subs_map) - print(self.storage) - print("#########") - print(new_trace.storage) - print("+++++++++") for k,v in new_trace.storage.items(): new_trace.storage[k] = substitute(v, subs_map) - # Todo Add combination of constraints, this might by tricky if we cannot identify which entrancy constraints of + for c_idx in range(len(new_trace.constraints)): + new_trace.constraints[c_idx] = substitute(new_trace.constraints[c_idx], subs_map) + new_trace.lvl += self.lvl + new_trace.sym_names.extend(deepcopy(self.sym_names)) # self can be omitted (e.g. when related storage locations were overwritten) - print(new_trace) - print("=========") new_trace.simplify_storage() - print(new_trace.storage) - return new_trace + new_trace.constraints = simplify_constraints(new_trace.constraints) + # Simplify constraints in there sum to eliminate subconstraints + if are_satisfiable(new_trace.constraints): + return new_trace + else: + return None def apply_traces_parallel(self, traces): combined_traces = [] for trace in traces: combined_traces.append(self.apply_trace(trace)) - return combined_traces + return list(filter(lambda t: not t is None, combined_traces)) def apply_exact_trace_levels(self, traces, depth): # Todo maybe some faster trace build not building one level at a time to e.g. @@ -79,4 +194,11 @@ class TransactionTrace: traces_up_to.append(trace_lvl_np1) return traces_up_to + # Todo Maybe implement a function that checks whether two traces are combinable before creating objekts, adv. in + # case they are not the object creation doe not have to be done. Investigate whether a suicide trace completely + # stopes the contract from being executable. 
In that case a suicided transaction also is not combinable with + # successive transactions. + + # Todo write a function that allows to specify a function/invocable to explore the tracechain space in DFS manner + diff --git a/mythril/solidnotary/z3utility.py b/mythril/solidnotary/z3utility.py new file mode 100644 index 00000000..50b80bac --- /dev/null +++ b/mythril/solidnotary/z3utility.py @@ -0,0 +1,19 @@ +from z3 import Solver, sat, simplify, is_bool, is_true, is_false +from copy import deepcopy + +def are_satisfiable(constraints): + s = Solver() + for c in constraints: + s.add(c) + return s.check() == sat + +def simplify_constraints(constraints): # Todo simplification of the sum of constraints + simp_const = [] + for const in constraints: + simp_const.append(simplify(const)) + simp_const = list(filter(lambda c: not is_bool(c) or not is_true(c), simp_const)) + falses = list(filter(lambda c: is_bool(c) and is_false(c), simp_const)) + if len(falses) > 0: + return [deepcopy(falses[0])] + + return simp_const From b18edc8d5da2e43da48c24ff4350eaa1e9ff6682 Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Mon, 2 Jul 2018 15:30:48 +0200 Subject: [PATCH 07/17] Adding files for source code params to symbolic value mapping --- mythril/solidnotary/abitypemapping.py | 0 mythril/solidnotary/calldata.py | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 mythril/solidnotary/abitypemapping.py create mode 100644 mythril/solidnotary/calldata.py diff --git a/mythril/solidnotary/abitypemapping.py b/mythril/solidnotary/abitypemapping.py new file mode 100644 index 00000000..e69de29b diff --git a/mythril/solidnotary/calldata.py b/mythril/solidnotary/calldata.py new file mode 100644 index 00000000..e69de29b From 43b27c0290bfaf15be6c251654dd912b6c411582 Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Mon, 2 Jul 2018 15:32:06 +0200 Subject: [PATCH 08/17] Modification to mythril to also include abi information in the contract object --- 
mythril/ether/soliditycontract.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mythril/ether/soliditycontract.py b/mythril/ether/soliditycontract.py index 4ef2f4b6..14fa1057 100644 --- a/mythril/ether/soliditycontract.py +++ b/mythril/ether/soliditycontract.py @@ -51,6 +51,7 @@ class SolidityContract(ETHContract): if filename == input_file and name == _name: name = name code = contract['bin-runtime'] + abi = contract['abi'] creation_code = contract['bin'] srcmap = contract['srcmap-runtime'].split(";") has_contract = True @@ -65,6 +66,7 @@ class SolidityContract(ETHContract): if filename == input_file and len(contract['bin-runtime']): name = name code = contract['bin-runtime'] + abi = contract['abi'] creation_code = contract['bin'] srcmap = contract['srcmap-runtime'].split(";") has_contract = True @@ -89,6 +91,7 @@ class SolidityContract(ETHContract): lineno = self.solidity_files[idx].data[0:offset].count('\n') + 1 self.mappings.append(SourceMapping(idx, offset, length, lineno)) + self.abi = abi super().__init__(code, creation_code, name=name) From c8ad82bc87885454ed030d7f6076fef6a95865c4 Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Mon, 2 Jul 2018 15:34:22 +0200 Subject: [PATCH 09/17] Add retrieval of constructor trace, using RETURN instead of STOP as exit point --- mythril/solidnotary/solidnotary.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/mythril/solidnotary/solidnotary.py b/mythril/solidnotary/solidnotary.py index 07798a45..3a3091d2 100644 --- a/mythril/solidnotary/solidnotary.py +++ b/mythril/solidnotary/solidnotary.py @@ -19,7 +19,7 @@ class SolidNotary: pass def get_transaction_traces(statespace): - logging.debug("Executing module: Transaction End") + print("get_transaction_traces") traces = [] @@ -27,11 +27,28 @@ def get_transaction_traces(statespace): node = statespace.nodes[k] for state in node.states: instruction = state.get_current_instruction() + # print("op: " + str(instruction['opcode']) + ((" 
" + instruction['argument']) if instruction['opcode'].startswith("PUSH") else "") + " stack: " + str(state.mstate.stack).replace("\n", "")+ " mem: " + str(state.mstate.memory).replace("\n", "")) if instruction['opcode'] == "STOP": if are_satisfiable(state.mstate.constraints): traces.append(TransactionTrace(state.environment.active_account.storage, state.mstate.constraints)) return traces +def get_construction_traces(statespace): + print("get_constructor_traces") + + traces = [] + + for k in statespace.nodes: + node = statespace.nodes[k] + for state in node.states: + instruction = state.get_current_instruction() + + # print("op: " + str(instruction['opcode']) + ((" " + instruction['argument']) if instruction['opcode'].startswith("PUSH") else "") + " stack: " + str(state.mstate.stack).replace("\n", "")+ " mem: " + str(state.mstate.memory).replace("\n", "")) + if instruction['opcode'] == "RETURN": + if are_satisfiable(state.mstate.constraints): + traces.append(TransactionTrace(state.environment.active_account.storage, state.mstate.constraints)) + return traces + def get_t_indexed_environment(active_account, index): # Initialize the execution environment From 3535248b81440d4e40cc27751d290663e4b6b16d Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Mon, 2 Jul 2018 15:35:56 +0200 Subject: [PATCH 10/17] Add abi to the combined output expected for a contract --- mythril/ether/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mythril/ether/util.py b/mythril/ether/util.py index e01cae28..467c37df 100644 --- a/mythril/ether/util.py +++ b/mythril/ether/util.py @@ -18,7 +18,7 @@ def safe_decode(hex_encoded_string): def get_solc_json(file, solc_binary="solc", solc_args=None): - cmd = [solc_binary, "--combined-json", "bin,bin-runtime,srcmap-runtime", '--allow-paths', "."] + cmd = [solc_binary, "--combined-json", "bin,bin-runtime,srcmap-runtime,abi", '--allow-paths', "."] if solc_args: cmd.extend(solc_args.split(" ")) From 
9f140925c0f21e1540ddad419d7052b0cd3c6316 Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Mon, 2 Jul 2018 15:36:51 +0200 Subject: [PATCH 11/17] Using deepcopy for storage and constraints when combining --- mythril/solidnotary/transactiontrace.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/mythril/solidnotary/transactiontrace.py b/mythril/solidnotary/transactiontrace.py index e8b4c9a3..44bcaca9 100644 --- a/mythril/solidnotary/transactiontrace.py +++ b/mythril/solidnotary/transactiontrace.py @@ -38,6 +38,8 @@ def is_t_variable(var): else: return False +# Todo constructor mit den intentet initvalues versorgen + def filter_for_t_variable_data(sym_vars): return list(filter(lambda x: is_t_variable(x), sym_vars)) @@ -45,11 +47,11 @@ def filter_for_t_variable_data(sym_vars): class TransactionTrace: def __init__(self, storage, constraints, lvl=1): - self.storage = storage # Todo give all non storage symbolic values that can be different every transaction the number one + self.storage = deepcopy(storage) # Todo give all non storage symbolic values that can be different every transaction the number one self.constraints = constraints # Todo eliminate all constraints that are not regarding the beginning of the transaction may not be necessary # eliminate all constraints that only contain names not in the set of names from storage self.constraints = simplify_constraints(self.constraints) # Todo simplification of the sum of constraints - self.tran_constraints = deepcopy(self.constraints) + self.tran_constraints = deepcopy(self.constraints) # Todo minimize them if they do not involve outside symb variables self.lvl = lvl self.sym_names = self.extract_sym_names_from_storage() self.sym_names.extend(self.extract_sym_names_from_constraints()) From beac3f0ccf815bef98e20e5b5f55306104902b3f Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Mon, 2 Jul 2018 15:39:32 +0200 Subject: [PATCH 12/17] Change mythril to allow outside specification of custom global 
state, adding tuple of creation params to the sym wrapper to later reuse parts for custom analysis with for example a provided global state modified from the original --- mythril/analysis/symbolic.py | 9 +++++++-- mythril/mythril.py | 8 ++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/mythril/analysis/symbolic.py b/mythril/analysis/symbolic.py index 3d3e1a6d..fe76f892 100644 --- a/mythril/analysis/symbolic.py +++ b/mythril/analysis/symbolic.py @@ -11,7 +11,7 @@ class SymExecWrapper: Wrapper class for the LASER Symbolic virtual machine. Symbolically executes the code and does a bit of pre-analysis for convenience. ''' - def __init__(self, contract, address, dynloader=None, max_depth=12): + def __init__(self, contract, address, dynloader=None, max_depth=12, gblState=None): account = svm.Account(address, contract.disassembly, contract_name=contract.name) @@ -19,7 +19,12 @@ class SymExecWrapper: self.laser = svm.LaserEVM(self.accounts, dynamic_loader=dynloader, max_depth=max_depth) - self.laser.sym_exec(address) + if not (gblState is None): + # Adding the ability to specify a custom global state, e.g. 
machine configuration from outside + node = self.laser._sym_exec(gblState) + self.laser.nodes[node.uid] = node + else: + self.laser.sym_exec(address) self.nodes = self.laser.nodes self.edges = self.laser.edges diff --git a/mythril/mythril.py b/mythril/mythril.py index bc123e1d..28760582 100644 --- a/mythril/mythril.py +++ b/mythril/mythril.py @@ -13,6 +13,8 @@ import re from ethereum import utils from solc.exceptions import SolcError import solc +from copy import deepcopy +from mythril.disassembler.disassembly import Disassembly from mythril.ether import util from mythril.ether.contractstorage import get_persistent_storage @@ -321,6 +323,12 @@ class Mythril(object): dynloader=DynLoader(self.eth) if self.dynld else None, max_depth=max_depth) + + contr_to_const = deepcopy(contract) + contr_to_const.disassembly = Disassembly(contr_to_const.creation_code) + contr_to_const.code = contr_to_const.creation_code + sym.sym_constr = (contr_to_const, address, DynLoader(self.eth) if self.dynld else None, max_depth) + issues = fire_lasers(sym, modules) if type(contract) == SolidityContract: From 675adbae30e44baa2cb9cb4b51f93fbaac92614c Mon Sep 17 00:00:00 2001 From: Konrad Weiss Date: Mon, 2 Jul 2018 15:40:54 +0200 Subject: [PATCH 13/17] Addign computation of minimal encoding length to use for memory initalization when symbolically executing the constructor. 
--- mythril/analysis/modules/build_traces.py | 56 ++++++++- mythril/solidnotary/calldata.py | 150 +++++++++++++++++++++++ 2 files changed, 200 insertions(+), 6 deletions(-) diff --git a/mythril/analysis/modules/build_traces.py b/mythril/analysis/modules/build_traces.py index 0d6bcd38..76519410 100644 --- a/mythril/analysis/modules/build_traces.py +++ b/mythril/analysis/modules/build_traces.py @@ -1,8 +1,13 @@ from mythril.analysis.report import Issue from mythril.solidnotary.transactiontrace import TransactionTrace -from mythril.solidnotary.solidnotary import get_transaction_traces +from mythril.disassembler.disassembly import Disassembly +from laser.ethereum.svm import GlobalState, Account, Environment, MachineState, CalldataType +from z3 import BitVec +from mythril.analysis.symbolic import SymExecWrapper +from mythril.solidnotary.solidnotary import get_transaction_traces, get_construction_traces from mythril.solidnotary.z3utility import are_satisfiable import logging +from mythril.solidnotary.calldata import get_minimal_constructor_param_encoding_len, abi_json_to_abi ''' @@ -19,23 +24,62 @@ def print_obj(obj): print(obj.children()) print() +def get_constr_glbstate(contract, address): + + mstate = MachineState(gas=10000000) + + minimal_const_byte_len = get_minimal_constructor_param_encoding_len(abi_json_to_abi(contract.abi)) + + # better would be to append symbolic params to the bytecode such that the codecopy instruction that copies the + # params into memory takes care of placing them onto the memory with the respective size. 
+ for i in range(int(minimal_const_byte_len / 32)): + mstate.mem_extend(128 + 32 * i, 32) + mstate.memory.insert(128 + 32 * i, BitVec('calldata_' + contract.name + '_' + str(i * 32), 256)) + + # Todo Replace pure placement of enough symbolic 32 Byte-words with placement of symbolic variables that contain + # the name of the solidity variables + + accounts = {address: Account(address, contract.disassembly, contract_name=contract.name)} + + environment = Environment( + accounts[address], + BitVec("caller", 256), + [], + BitVec("gasprice", 256), + BitVec("callvalue", 256), + BitVec("origin", 256), + calldata_type=CalldataType.SYMBOLIC, + ) + + # Todo find source for account info, maybe the std statespace? + + return GlobalState(accounts, environment, mstate) + def execute(statespace): logging.debug("Executing module: Transaction End") - traces = [] + constructor_trace = {} + + if hasattr(statespace, "sym_constr"): + sym_exe_tuple = statespace.sym_constr + glbstate = get_constr_glbstate(sym_exe_tuple[0], sym_exe_tuple[1]) + sym_exe_tuple = statespace.sym_constr + (glbstate,) + constr_statespace = SymExecWrapper(*sym_exe_tuple) + constructor_trace = get_construction_traces(constr_statespace) # Todo the traces here should not contain references to storages anymore + for t in constructor_trace: + t.pp_trace() traces = get_transaction_traces(statespace) - for trace in traces: + for trace in constructor_trace: comp_trace_lvls = trace.apply_up_to_trace_levels(traces, 3) for trace_lvl in comp_trace_lvls: for t in trace_lvl: - if t.lvl == 4: t.pp_trace() -# for trace in traces: -# trace.pp_trace() + # for trace in traces: + # trace.pp_trace() #print("==== Second level traces ====") #for trace in traces: diff --git a/mythril/solidnotary/calldata.py b/mythril/solidnotary/calldata.py index e69de29b..27891271 100644 --- a/mythril/solidnotary/calldata.py +++ b/mythril/solidnotary/calldata.py @@ -0,0 +1,150 @@ +from re import search +from mythril.exceptions import CompilerError 
+from subprocess import Popen, PIPE +import json +import sys + +class CalldataMap: + + def __init__(self, abi, solc_v): + pass + + def __init__(self, solidity_file, solc_v): + pass + +def get_minimal_param_encoding_len(abi_inputs): + len = 0 + return len + +def get_minimal_byte_enc_len(abi_obj): + if type(abi_obj) == list: + return sum([head_byte_length_min(t) for t in abi_obj]) + sum([tail_byte_length_min(t) for t in abi_obj]) + if type(abi_obj) == str: + stype = abi_obj + else: + stype = abi_obj['type'] + if stype == 'tuple': + return get_minimal_byte_enc_len(abi_obj['components']) + try: + match = search(r'(?P
<pre>.*)\[(?P<post>[0-9]+)\]', stype)
+        pre = match['pre']
+        post = match['post']
+        return int(post) * get_minimal_byte_enc_len(pre)
+    except (KeyError, TypeError) as e:
+        pass
+    if stype.endswith("[]"):
+        return 32
+
+    if stype == "string":
+        return 32
+    elif stype == "bytes":
+        return 32 # 2 was the smallest observed value, remix did not allow specification of zero size bytes
+    elif any(stype.startswith(prefix_type) for prefix_type in ["int", "uint", "address", "bool", "fixed", "ufixed", "bytes"]):
+        return 32
+
+def head_byte_length_min(abi_obj):
+    if is_dynamic(abi_obj):
+        return 32
+    else:
+        return get_minimal_byte_enc_len(abi_obj)
+
+
+def tail_byte_length_min(abi_obj):
+    if is_dynamic(abi_obj):
+        return get_minimal_byte_enc_len(abi_obj)
+    else:
+        return 0
+
+def get_minimal_wsize(abi_obj):
+    stype = abi_obj['type']
+    if type(stype) == list:
+        return sum(list(map(lambda a: get_minimal_wsize(a), stype)))
+    if stype in ["bytes", "string"] or stype.endswith("[]"):
+        return True
+    if stype == 'tuple':
+        return True in [is_dynamic(elem) for elem in abi_obj['components']]
+    try:
+        match = search(r'(?P<pre>.*)(?P<post>\[[0-9]+\])', stype)
+        pre = match['pre']
+        # post = match['post']
+        return is_dynamic(pre)
+    except (KeyError, TypeError):
+        pass
+    return False
+
+
+def get_minimal_constructor_param_encoding_len(abi):
+    for spec in abi:
+        try:
+            if spec['type'] == 'constructor':
+                con_inputs = spec['inputs']
+                return get_minimal_byte_enc_len(con_inputs)
+        except KeyError:
+            print("ABI does not contain inputs for constructor")
+    return -1
+
+def get_minimal_constr_param_byte_length(filename, contract_name=None):
+    abi_decoded = get_solc_abi_json(filename)
+    return get_minimal_constructor_param_encoding_len(abi_decoded)
+
+def is_dynamic(abi_obj):
+    if type(abi_obj) == list:
+        return True in list(map(lambda a: is_dynamic(a), abi_obj))
+    if type(abi_obj) == str:
+        stype = abi_obj
+    else:
+        stype = abi_obj['type']
+    if stype in ["bytes", "string"] or stype.endswith("[]"):
+        return True
+    if stype == 'tuple':
+        return True in [is_dynamic(elem) for elem in abi_obj['components']]
+    try:
+        match = search(r'(?P<pre>.*)(?P<post>\[[0-9]+\])', stype)
+        pre = match['pre']
+        # post = match['post']
+        return is_dynamic(pre)
+    except (KeyError, TypeError) as e:
+        pass
+    return False
+
+def get_solc_abi_json(file, solc_binary="solc", solc_args=None):
+
+    cmd = [solc_binary, "--abi", '--allow-paths', "."]
+
+    if solc_args:
+        cmd.extend(solc_args.split(" "))
+
+    cmd.append(file)
+
+    try:
+        p = Popen(cmd, stdout=PIPE, stderr=PIPE)
+
+        stdout, stderr = p.communicate()
+        ret = p.returncode
+
+        if ret != 0:
+            raise CompilerError("Solc experienced a fatal error (code %d).\n\n%s" % (ret, stderr.decode('UTF-8')))
+    except FileNotFoundError:
+        raise CompilerError("Compiler not found. Make sure that solc is installed and in PATH, or set the SOLC environment variable.")
+
+    out = stdout.decode("UTF-8")
+
+    if not len(out):
+        raise CompilerError("Compilation failed.")
+
+    out = out[out.index("["):]
+
+    print(out)
+
+    return json.loads(out)
+
+def abi_json_to_abi(abi_json):
+    return json.loads(abi_json)
+
+
+if __name__ == "__main__":
+    if len(sys.argv) > 1:
+        print("Size:")
+        print(get_minimal_constr_param_byte_length(sys.argv[1]))
+    else:
+        print("Error: No file specified")

From 06ddeb13dc3579a6fb1083834deb854dbaff8710 Mon Sep 17 00:00:00 2001
From: Konrad Weiss 
Date: Tue, 3 Jul 2018 23:31:32 +0200
Subject: [PATCH 14/17] Modifies the symbolic suffixes enclosing them into
 brackets and simplifying them with the z3 simplification algorithm

---
 mythril/laser/ethereum/svm.py | 35 ++++++++++++++++++++---------------
 1 file changed, 20 insertions(+), 15 deletions(-)

diff --git a/mythril/laser/ethereum/svm.py b/mythril/laser/ethereum/svm.py
index b6d722c9..a0896d32 100644
--- a/mythril/laser/ethereum/svm.py
+++ b/mythril/laser/ethereum/svm.py
@@ -396,7 +396,7 @@ class LaserEVM:
                     result = Concat(BitVecVal(0, 248), Extract(oft + 7, oft, s1))
                 except AttributeError:
                     logging.debug("BYTE: Unsupported symbolic byte offset")
-                    result = BitVec(str(simplify(s1)) + "_" + str(simplify(s0)), 256)
+                    result = BitVec(str(simplify(s1)) + "[" + str(simplify(s0)) + "]", 256)
 
                 state.stack.append(simplify(result))
 
@@ -442,7 +442,7 @@ class LaserEVM:
                 base, exponent = helper.pop_bitvec(state), helper.pop_bitvec(state)
 
                 if (type(base) != BitVecNumRef) or (type(exponent) != BitVecNumRef):
-                    state.stack.append(BitVec(str(base) + "_EXP_" + str(exponent), 256))
+                    state.stack.append(BitVec("(" + str(simplify(base)) + ")^(" + str(simplify(exponent)) + ")", 256))
                 elif (base.as_long() == 2):
                     if exponent.as_long() == 0:
                         state.stack.append(BitVecVal(1, 256))
@@ -535,11 +535,11 @@ class LaserEVM:
 
                 except AttributeError:
                     logging.debug("CALLDATALOAD: Unsupported symbolic index")
-                    state.stack.append(BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(op0), 256))
+                    state.stack.append(BitVec("calldata_" + str(environment.active_account.contract_name) + "[" + str(simplify(op0)) + "]", 256))
                     continue
                 except IndexError:
                     logging.debug("Calldata not set, using symbolic variable instead")
-                    state.stack.append(BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(op0), 256))
+                    state.stack.append(BitVec("calldata_" + str(environment.active_account.contract_name) + "[" + str(simplify(op0)) + "]", 256))
                     continue
 
                 if type(b) == int:
@@ -554,10 +554,10 @@ class LaserEVM:
                         state.stack.append(BitVecVal(int.from_bytes(val, byteorder='big'), 256))
 
                     except:
-                        state.stack.append(BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(op0), 256))
+                        state.stack.append(BitVec("calldata_" + str(environment.active_account.contract_name) + "[" + str(simplify(op0)) + "]", 256))
                 else:
                     # symbolic variable
-                    state.stack.append(BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(op0), 256))
+                    state.stack.append(BitVec("calldata_" + str(environment.active_account.contract_name) + "[" + str(simplify(op0)) + "]", 256))
 
             elif op == 'CALLDATASIZE':
 
@@ -575,20 +575,25 @@ class LaserEVM:
                     logging.debug("Unsupported symbolic memory offset in CALLDATACOPY")
                     continue
 
+                dstart_sym = True
                 try:
                     dstart = helper.get_concrete_int(op1)
+                    dstart_sym = False
                 except:
                     logging.debug("Unsupported symbolic calldata offset in CALLDATACOPY")
-                    state.mem_extend(mstart, 1)
-                    state.memory[mstart] = BitVec("calldata_" + str(environment.active_account.contract_name) + "_cpy", 256)
-                    continue
+                    dstart = simplify(op1)
 
+                size_sym = True
                 try:
                     size = helper.get_concrete_int(op2)
+                    size_sym = False
                 except:
                     logging.debug("Unsupported symbolic size in CALLDATACOPY")
+                    size = simplify(op2)
+
+                if dstart_sym or size_sym:
                     state.mem_extend(mstart, 1)
-                    state.memory[mstart] = BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(dstart), 256)
+                    state.memory[mstart] = BitVec("calldata_" + str(environment.active_account.contract_name) + "[" + str(dstart) + ": + " + str(size) + "]", 256)
                     continue
 
                 if size > 0:
@@ -598,7 +603,7 @@ class LaserEVM:
                     except:
                         logging.debug("Memory allocation error: mstart = " + str(mstart) + ", size = " + str(size))
                         state.mem_extend(mstart, 1)
-                        state.memory[mstart] = BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(dstart), 256)
+                        state.memory[mstart] = BitVec("calldata_" + str(environment.active_account.contract_name) + "[" + str(dstart) + ": + " + str(size) + "]", 256)
                         continue
 
                     try:
@@ -610,7 +615,7 @@ class LaserEVM:
                     except:
                         logging.debug("Exception copying calldata to memory")
 
-                        state.memory[mstart] = BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(dstart), 256)
+                        state.memory[mstart] = BitVec("calldata_" + str(environment.active_account.contract_name) + "[" + str(dstart) + ": + " + str(size) + "]", 256)
 
             elif op == 'STOP':
                 if len(self.call_stack):
@@ -645,7 +650,7 @@ class LaserEVM:
 
                 except:
                     # Can't access symbolic memory offsets
-                    state.stack.append(BitVec("KECCAC_mem_" + str(op0) + ")", 256))
+                    state.stack.append(BitVec("KECCAC_mem[" + str(simplify(op0)) + "]", 256))
                     continue
 
                 try:
@@ -717,14 +722,14 @@ class LaserEVM:
                     offset = helper.get_concrete_int(op0)
                 except AttributeError:
                     logging.debug("Can't MLOAD from symbolic index")
-                    data = BitVec("mem_" + str(op0), 256)
+                    data = BitVec("mem[" + str(simplify(op0)) + "]", 256)
                     state.stack.append(data)
                     continue
 
                 try:
                     data = helper.concrete_int_from_bytes(state.memory, offset)
                 except IndexError:  # Memory slot not allocated
-                    data = BitVec("mem_" + str(offset), 256)
+                    data = BitVec("mem[" + str(offset)+"]", 256)
                 except TypeError:  # Symbolic memory
                     data = state.memory[offset]
 

From b1d112784164e946e64cd61911c7f701620bd1ee Mon Sep 17 00:00:00 2001
From: Konrad Weiss 
Date: Thu, 5 Jul 2018 10:13:45 +0200
Subject: [PATCH 15/17] Porting symbolic name formatting to the new svm and
 instructions file structure

---
 mythril/laser/ethereum/instructions.py | 47 +++++++++++++++-----------
 1 file changed, 27 insertions(+), 20 deletions(-)

diff --git a/mythril/laser/ethereum/instructions.py b/mythril/laser/ethereum/instructions.py
index 4d90da95..89844986 100644
--- a/mythril/laser/ethereum/instructions.py
+++ b/mythril/laser/ethereum/instructions.py
@@ -157,7 +157,7 @@ class Instruction:
             result = Concat(BitVecVal(0, 248), Extract(offset + 7, offset, op1))
         except AttributeError:
             logging.debug("BYTE: Unsupported symbolic byte offset")
-            result = BitVec(str(simplify(op1)) + "_" + str(simplify(op0)), 256)
+            result = BitVec(str(simplify(op1)) + "[" + str(simplify(op0)) + "]", 256)
 
         mstate.stack.append(simplify(result))
         return [global_state]
@@ -219,7 +219,7 @@ class Instruction:
         base, exponent = util.pop_bitvec(state), util.pop_bitvec(state)
 
         if (type(base) != BitVecNumRef) or (type(exponent) != BitVecNumRef):
-            state.stack.append(BitVec(str(base) + "_EXP_" + str(exponent), 256))
+            state.stack.append(BitVec("(" + str(simplify(base)) + ")^(" + str(simplify(exponent)) + ")", 256))
         elif base.as_long() == 2:
             if exponent.as_long() == 0:
                 state.stack.append(BitVecVal(1, 256))
@@ -330,13 +330,13 @@ class Instruction:
             b = environment.calldata[offset]
         except AttributeError:
             logging.debug("CALLDATALOAD: Unsupported symbolic index")
-            state.stack.append(
-                BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(op0), 256))
+            state.stack.append(BitVec(
+                "calldata_" + str(environment.active_account.contract_name) + "[" + str(simplify(op0)) + "]", 256))
             return [global_state]
         except IndexError:
             logging.debug("Calldata not set, using symbolic variable instead")
-            state.stack.append(
-                BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(op0), 256))
+            state.stack.append(BitVec(
+                "calldata_" + str(environment.active_account.contract_name) + "[" + str(simplify(op0)) + "]", 256))
             return [global_state]
 
         if type(b) == int:
@@ -350,12 +350,12 @@ class Instruction:
                 state.stack.append(BitVecVal(int.from_bytes(val, byteorder='big'), 256))
             # FIXME: broad exception catch
             except:
-                state.stack.append(
-                    BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(op0), 256))
+                state.stack.append(BitVec(
+                    "calldata_" + str(environment.active_account.contract_name) + "[" + str(simplify(op0)) + "]", 256))
         else:
             # symbolic variable
-            state.stack.append(
-                BitVec("calldata_" + str(environment.active_account.contract_name) + "_" + str(op0), 256))
+            state.stack.append(BitVec(
+                "calldata_" + str(environment.active_account.contract_name) + "[" + str(simplify(op0)) + "]", 256))
 
         return [global_state]
 
@@ -382,24 +382,29 @@ class Instruction:
             logging.debug("Unsupported symbolic memory offset in CALLDATACOPY")
             return [global_state]
 
+        dstart_sym = True
         try:
             dstart = util.get_concrete_int(op1)
+            dstart_sym = False
             # FIXME: broad exception catch
         except:
             logging.debug("Unsupported symbolic calldata offset in CALLDATACOPY")
-            state.mem_extend(mstart, 1)
-            state.memory[mstart] = BitVec("calldata_" + str(environment.active_account.contract_name) + "_cpy",
-                                          256)
-            return [global_state]
+            dstart = simplify(op1)
 
+        size_sym = True
         try:
             size = util.get_concrete_int(op2)
+            size_sym = False
             # FIXME: broad exception catch
         except:
             logging.debug("Unsupported symbolic size in CALLDATACOPY")
+            size = simplify(op2)
+
+        if dstart_sym or size_sym:
             state.mem_extend(mstart, 1)
             state.memory[mstart] = BitVec(
-                "calldata_" + str(environment.active_account.contract_name) + "_" + str(dstart), 256)
+                "calldata_" + str(environment.active_account.contract_name) + "[" + str(dstart) + ": + " + str(
+                    size) + "]", 256)
             return [global_state]
 
         if size > 0:
@@ -410,7 +415,8 @@ class Instruction:
                 logging.debug("Memory allocation error: mstart = " + str(mstart) + ", size = " + str(size))
                 state.mem_extend(mstart, 1)
                 state.memory[mstart] = BitVec(
-                    "calldata_" + str(environment.active_account.contract_name) + "_" + str(dstart), 256)
+                    "calldata_" + str(environment.active_account.contract_name) + "[" + str(dstart) + ": + " + str(
+                        size) + "]", 256)
                 return [global_state]
 
             try:
@@ -423,7 +429,8 @@ class Instruction:
                 logging.debug("Exception copying calldata to memory")
 
                 state.memory[mstart] = BitVec(
-                    "calldata_" + str(environment.active_account.contract_name) + "_" + str(dstart), 256)
+                    "calldata_" + str(environment.active_account.contract_name) + "[" + str(dstart) + ": + " + str(
+                        size) + "]", 256)
         return [global_state]
 
     # Environment
@@ -474,7 +481,7 @@ class Instruction:
         # FIXME: broad exception catch
         except:
             # Can't access symbolic memory offsets
-            state.stack.append(BitVec("KECCAC_mem_" + str(op0) + ")", 256))
+            state.stack.append(BitVec("KECCAC_mem[" + str(simplify(op0)) + "]", 256))
             return [global_state]
 
         try:
@@ -590,14 +597,14 @@ class Instruction:
             offset = util.get_concrete_int(op0)
         except AttributeError:
             logging.debug("Can't MLOAD from symbolic index")
-            data = BitVec("mem_" + str(op0), 256)
+            data = BitVec("mem[" + str(simplify(op0)) + "]", 256)
             state.stack.append(data)
             return [global_state]
 
         try:
             data = util.concrete_int_from_bytes(state.memory, offset)
         except IndexError:  # Memory slot not allocated
-            data = BitVec("mem_" + str(offset), 256)
+            data = BitVec("mem[" + str(offset) + "]", 256)
         except TypeError:  # Symbolic memory
             data = state.memory[offset]
 

From ac818454ab9bee4b762c035103718e0e95ca39c3 Mon Sep 17 00:00:00 2001
From: Konrad Weiss 
Date: Mon, 9 Jul 2018 08:48:58 +0200
Subject: [PATCH 16/17] Correcting the symetric variable formatting for
 calldatacopy

---
 mythril/laser/ethereum/instructions.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/mythril/laser/ethereum/instructions.py b/mythril/laser/ethereum/instructions.py
index 89844986..1fbce31f 100644
--- a/mythril/laser/ethereum/instructions.py
+++ b/mythril/laser/ethereum/instructions.py
@@ -385,20 +385,20 @@ class Instruction:
         dstart_sym = False
         try:
             dstart = util.get_concrete_int(op1)
-            dstart_sym = True
             # FIXME: broad exception catch
         except:
             logging.debug("Unsupported symbolic calldata offset in CALLDATACOPY")
             dstart = simplify(op1)
+            dstart_sym = True
 
         size_sym = False
         try:
             size = util.get_concrete_int(op2)
-            size_sym = True
             # FIXME: broad exception catch
         except:
             logging.debug("Unsupported symbolic size in CALLDATACOPY")
             size = simplify(op2)
+            size_sym = True
 
         if dstart_sym or size_sym:
             state.mem_extend(mstart, 1)

From f873d041ee459d78d16a560a3e6dceae1728c071 Mon Sep 17 00:00:00 2001
From: Konrad Weiss 
Date: Tue, 10 Jul 2018 12:50:33 +0200
Subject: [PATCH 17/17] Revert "Merge branch 'master' of
 https://github.com/LoCorVin/mythril into feature/reformatsymindices"

This reverts commit a8de6b1a55d1e2cf12ae5710b6096df1db94b8f5, reversing
changes made to ac818454ab9bee4b762c035103718e0e95ca39c3.
---
 annotationWrapper.py                     |  69 --------
 mythril/analysis/modules/build_traces.py |  94 -----------
 mythril/analysis/modules/dummy.py        |  41 -----
 mythril/analysis/symbolic.py             |   9 +-
 mythril/ether/soliditycontract.py        |   3 -
 mythril/ether/util.py                    |   2 +-
 mythril/mythril.py                       |   8 -
 mythril/solidnotary/__init__.py          |   0
 mythril/solidnotary/abitypemapping.py    |   0
 mythril/solidnotary/annotation.py        |  28 ---
 mythril/solidnotary/calldata.py          | 150 -----------------
 mythril/solidnotary/solidnotary.py       |  72 --------
 mythril/solidnotary/transactiontrace.py  | 206 -----------------------
 mythril/solidnotary/z3utility.py         |  19 ---
 14 files changed, 3 insertions(+), 698 deletions(-)
 delete mode 100644 annotationWrapper.py
 delete mode 100644 mythril/analysis/modules/build_traces.py
 delete mode 100644 mythril/analysis/modules/dummy.py
 delete mode 100644 mythril/solidnotary/__init__.py
 delete mode 100644 mythril/solidnotary/abitypemapping.py
 delete mode 100644 mythril/solidnotary/annotation.py
 delete mode 100644 mythril/solidnotary/calldata.py
 delete mode 100644 mythril/solidnotary/solidnotary.py
 delete mode 100644 mythril/solidnotary/transactiontrace.py
 delete mode 100644 mythril/solidnotary/z3utility.py

diff --git a/annotationWrapper.py b/annotationWrapper.py
deleted file mode 100644
index 02337e84..00000000
--- a/annotationWrapper.py
+++ /dev/null
@@ -1,69 +0,0 @@
-""" 
-	Parses the files for annotations to extract the information and do some transformations on the code.
-	On return the Annotation information is used to change mythrils output regarding the changes code pieces
-
-"""
-
-from glob import glob
-import re, sys
-import json
-
-newlines = ["\r\n", "\r", "\n"]
-
-def find_all(a_str, sub):
-    start = 0
-    while True:
-        start = a_str.find(sub, start)
-        if start == -1:
-            return
-        yield start
-        start += len(sub)
-
-def count_elements(source, elements):
-    ret = 0
-    for element in elements:
-        ret += source.count(element)
-    return ret
-
-
-def replace_index(text, toReplace, replacement, index):
-    return text[:index] + replacement + text[(index + len(toReplace)):]
-
-"""
-    Here it might be better to split annotations into the containing constraint an the prefix and sufix
-"""
-def parse_annotation_info(filedata):
-    annotations = []
-    for inv in re.findall(r'//invariant\(([^\)]+)\)(\r\n|\r|\n)', filedata):
-        match_inv = "//invariant(" + inv[0] + ")"
-        for pos in find_all(filedata, match_inv + inv[1]):
-            line = count_elements(filedata[:pos], newlines) + 1
-            col = pos - max(map(lambda x: filedata[:pos].rfind(x), newlines))
-            annotations.append((pos, line, col, '//invariant(', inv[0], ")", inv[1]))
-    return set(annotations)
-
-
-def read_write_file(filename):
-    with open(filename, 'r') as file :
-        filedata = file.read()
-
-    annotations = parse_annotation_info(filedata)
-
-    annotations = sorted(list(annotations), key=lambda x: x[0], reverse=True)
-    for annotation in annotations:
-        filedata = replace_index(filedata, annotation[3] + annotation[4] + annotation[5] + annotation[6],  "assert("
-                                 + annotation[4] + ");" + annotation[6], annotation[0])
-    # Replace the target string
-    # filedata = filedata.replace('@ensure', '@invariant')
-    # filedata = filedata.replace('@invariant', '@ensure')
-
-    with open(filename, 'w') as file:
-       file.write(filedata)
-    return annotations
-
-annot_map = {}
-
-for sol_file in glob("./*.sol"):
-    annot_map[sol_file] = read_write_file(sol_file)
-json.dump(annot_map, sys.stdout)
-print("#end annotations#")
diff --git a/mythril/analysis/modules/build_traces.py b/mythril/analysis/modules/build_traces.py
deleted file mode 100644
index 76519410..00000000
--- a/mythril/analysis/modules/build_traces.py
+++ /dev/null
@@ -1,94 +0,0 @@
-from mythril.analysis.report import Issue
-from mythril.solidnotary.transactiontrace import TransactionTrace
-from mythril.disassembler.disassembly import Disassembly
-from laser.ethereum.svm import GlobalState, Account, Environment, MachineState, CalldataType
-from z3 import BitVec
-from mythril.analysis.symbolic import SymExecWrapper
-from mythril.solidnotary.solidnotary import get_transaction_traces, get_construction_traces
-from mythril.solidnotary.z3utility import  are_satisfiable
-import logging
-from mythril.solidnotary.calldata import get_minimal_constructor_param_encoding_len, abi_json_to_abi
-
-
-'''
-    Build execution traces from the statespace 
-'''
-
-def print_obj(obj):
-    print()
-    print(type(obj))
-    print(obj)
-    print(dir(obj))
-    print(obj.decl())
-    print(obj.params())
-    print(obj.children())
-    print()
-
-def get_constr_glbstate(contract, address):
-
-    mstate = MachineState(gas=10000000)
-
-    minimal_const_byte_len = get_minimal_constructor_param_encoding_len(abi_json_to_abi(contract.abi))
-
-    # better would be to append symbolic params to the bytecode such that the codecopy instruction that copies the
-    # params into memory takes care of placing them onto the memory with the respective size.
-    for i in range(int(minimal_const_byte_len / 32)):
-        mstate.mem_extend(128 + 32 * i, 32)
-        mstate.memory.insert(128 + 32 * i, BitVec('calldata_' + contract.name + '_' + str(i * 32), 256))
-
-    # Todo Replace pure placement of enough symbolic 32 Byte-words with placement of symbolic variables that contain
-    # the name of the solidity variables
-
-    accounts = {address: Account(address, contract.disassembly, contract_name=contract.name)}
-
-    environment = Environment(
-        accounts[address],
-        BitVec("caller", 256),
-        [],
-        BitVec("gasprice", 256),
-        BitVec("callvalue", 256),
-        BitVec("origin", 256),
-        calldata_type=CalldataType.SYMBOLIC,
-    )
-
-    # Todo find source for account info, maybe the std statespace?
-
-    return GlobalState(accounts, environment, mstate)
-
-
-def execute(statespace):
-
-    logging.debug("Executing module: Transaction End")
-
-    constructor_trace = {}
-
-    if hasattr(statespace, "sym_constr"):
-        sym_exe_tuple = statespace.sym_constr
-        glbstate = get_constr_glbstate(sym_exe_tuple[0], sym_exe_tuple[1])
-        sym_exe_tuple = statespace.sym_constr + (glbstate,)
-        constr_statespace = SymExecWrapper(*sym_exe_tuple)
-        constructor_trace = get_construction_traces(constr_statespace) # Todo the traces here should not contain references to storages anymore
-        for t in constructor_trace:
-            t.pp_trace()
-
-    traces = get_transaction_traces(statespace)
-    for trace in constructor_trace:
-        comp_trace_lvls = trace.apply_up_to_trace_levels(traces, 3)
-        for trace_lvl in comp_trace_lvls:
-            for t in trace_lvl:
-                    t.pp_trace()
-
-    # for trace in traces:
-    #    trace.pp_trace()
-
-    #print("==== Second level traces ====")
-    #for trace in traces:
-    #    comp_trace_lvls = trace.apply_up_to_trace_levels(traces, 1)
-        #for trace_lvl in range(len(comp_trace_lvls)):
-            # print("\nTrace level: " + str(trace_lvl))
-            #for comp_trace in comp_trace_lvls[trace_lvl]:
-            #    print(comp_trace.storage)
-                # for k, s in comp_trace.storage.items():
-                #    print_obj(s)
-
-    return []
diff --git a/mythril/analysis/modules/dummy.py b/mythril/analysis/modules/dummy.py
deleted file mode 100644
index 44b0879f..00000000
--- a/mythril/analysis/modules/dummy.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from mythril.analysis.report import Issue
-import logging
-from z3 import *
-from mythril.solidnotary.z3utility import simplify_constraints, are_satisfiable
-
-
-'''
-    To print content of the statespace after it was build.
-'''
-
-def print_obj(obj):
-    print()
-    print(obj)
-    print(type(obj))
-    print(dir(obj))
-    print()
-
-
-def execute(statespace):
-
-    logging.debug("Executing module: Transaction End")
-
-    issues = []
-
-#    for k in statespace.nodes:
-#        node = statespace.nodes[k]
-
-#        for state in node.states:
-
-#            instruction = state.get_current_instruction()
-
-#            if(instruction['opcode'] == "STOP"):
-#                print()
-                #print(state.environment.active_account.storage)
-                # print(state.mstate.constraints)
-                #simpl_const = simplify_constraints(state.mstate.constraints)
-                #print(simpl_const)
-                #print(are_satisfiable(simpl_const))
-                # print("opc: {}, add: {} {}".format(instruction['opcode'], instruction['address'], instruction['argument'] if 'argument' in instruction else ""))
-
-    return issues
diff --git a/mythril/analysis/symbolic.py b/mythril/analysis/symbolic.py
index 975c52bf..5d47c356 100644
--- a/mythril/analysis/symbolic.py
+++ b/mythril/analysis/symbolic.py
@@ -11,7 +11,7 @@ class SymExecWrapper:
     Wrapper class for the LASER Symbolic virtual machine. Symbolically executes the code and does a bit of pre-analysis for convenience.
     '''
 
-    def __init__(self, contract, address, dynloader=None, max_depth=12, gblState=None):
+    def __init__(self, contract, address, dynloader=None, max_depth=12):
 
         account = svm.Account(address, contract.disassembly, contract_name=contract.name)
 
@@ -19,12 +19,7 @@ class SymExecWrapper:
 
         self.laser = svm.LaserEVM(self.accounts, dynamic_loader=dynloader, max_depth=max_depth)
 
-        if not (gblState is None):
-            # Adding the ability to specify a custom global state, e.g. machine configuration from outside
-            node = self.laser._sym_exec(gblState)
-            self.laser.nodes[node.uid] = node
-        else:
-            self.laser.sym_exec(address)
+        self.laser.sym_exec(address)
 
         self.nodes = self.laser.nodes
         self.edges = self.laser.edges
diff --git a/mythril/ether/soliditycontract.py b/mythril/ether/soliditycontract.py
index 95b863b5..919b3d30 100644
--- a/mythril/ether/soliditycontract.py
+++ b/mythril/ether/soliditycontract.py
@@ -51,7 +51,6 @@ class SolidityContract(ETHContract):
                 if filename == input_file and name == _name:
                     name = name
                     code = contract['bin-runtime']
-                    abi = contract['abi']
                     creation_code = contract['bin']
                     srcmap = contract['srcmap-runtime'].split(";")
                     has_contract = True
@@ -66,7 +65,6 @@ class SolidityContract(ETHContract):
                 if filename == input_file and len(contract['bin-runtime']):
                     name = name
                     code = contract['bin-runtime']
-                    abi = contract['abi']
                     creation_code = contract['bin']
                     srcmap = contract['srcmap-runtime'].split(";")
                     has_contract = True
@@ -91,7 +89,6 @@ class SolidityContract(ETHContract):
             lineno = self.solidity_files[idx].data[0:offset].count('\n') + 1
 
             self.mappings.append(SourceMapping(idx, offset, length, lineno))
-            self.abi = abi
 
         super().__init__(code, creation_code, name=name)
 
diff --git a/mythril/ether/util.py b/mythril/ether/util.py
index 467c37df..e01cae28 100644
--- a/mythril/ether/util.py
+++ b/mythril/ether/util.py
@@ -18,7 +18,7 @@ def safe_decode(hex_encoded_string):
 
 def get_solc_json(file, solc_binary="solc", solc_args=None):
 
-    cmd = [solc_binary, "--combined-json", "bin,bin-runtime,srcmap-runtime,abi", '--allow-paths', "."]
+    cmd = [solc_binary, "--combined-json", "bin,bin-runtime,srcmap-runtime", '--allow-paths', "."]
 
     if solc_args:
         cmd.extend(solc_args.split(" "))
diff --git a/mythril/mythril.py b/mythril/mythril.py
index e2bb6c37..b63cc137 100644
--- a/mythril/mythril.py
+++ b/mythril/mythril.py
@@ -16,8 +16,6 @@ from solc.exceptions import SolcError
 import solc
 from configparser import ConfigParser
 import platform
-from copy import deepcopy
-from mythril.disassembler.disassembly import Disassembly
 
 from mythril.ether import util
 from mythril.ether.ethcontract import ETHContract
@@ -344,12 +342,6 @@ class Mythril(object):
                                  dynloader=DynLoader(self.eth) if self.dynld else None,
                                  max_depth=max_depth)
 
-
-            contr_to_const = deepcopy(contract)
-            contr_to_const.disassembly = Disassembly(contr_to_const.creation_code)
-            contr_to_const.code = contr_to_const.creation_code
-            sym.sym_constr = (contr_to_const, address, DynLoader(self.eth) if self.dynld else None, max_depth)
-
             issues = fire_lasers(sym, modules)
 
             if type(contract) == SolidityContract:
diff --git a/mythril/solidnotary/__init__.py b/mythril/solidnotary/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/mythril/solidnotary/abitypemapping.py b/mythril/solidnotary/abitypemapping.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/mythril/solidnotary/annotation.py b/mythril/solidnotary/annotation.py
deleted file mode 100644
index cbf95ce1..00000000
--- a/mythril/solidnotary/annotation.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from re import search
-
-class Annotation:
-
-    def __init__(self, annstring, lineno, fileoffset):
-        self.annstring = annstring
-
-        annotation = search(r'@(?P[^\{\}]*)(\{(?P.*)\})?', annstring)
-        if not annotation:
-            raise SyntaxError("{} is not a correct annotation".format(annstring))
-
-        self.aname = annotation['aname']
-        self.acontent = annotation['acontent']
-        self.lineno = lineno
-        self.length = len(annstring)
-        self.fileoffset = fileoffset
-
-class ContractAnnotation(Annotation):
-    pass
-
-class MemberAnnotation(Annotation):
-    pass
-
-class InlineAnnotation(Annotation):
-    pass
-
-class ContractAnnotation(Annotation):
-    pass
diff --git a/mythril/solidnotary/calldata.py b/mythril/solidnotary/calldata.py
deleted file mode 100644
index 27891271..00000000
--- a/mythril/solidnotary/calldata.py
+++ /dev/null
@@ -1,150 +0,0 @@
-from re import search
-from mythril.exceptions import CompilerError
-from subprocess import Popen, PIPE
-import json
-import sys
-
-class CalldataMap:
-
-    def __init__(self, abi, solc_v):
-        pass
-
-    def __init__(self, solidity_file, solc_v):
-        pass
-
-def get_minimal_param_encoding_len(abi_inputs):
-    len = 0
-    return len
-
-def get_minimal_byte_enc_len(abi_obj):
-    if type(abi_obj) == list:
-        return sum([head_byte_length_min(t) for t in abi_obj]) + sum([tail_byte_length_min(t) for t in abi_obj])
-    if type(abi_obj) == str:
-        stype = abi_obj
-    else:
-        stype = abi_obj['type']
-    if stype == 'tuple':
-        return get_minimal_byte_enc_len(abi_obj['components'])
-    try:
-        match = search(r'(?P
.*)\[(?P[0-9]+)\]', stype)
-        pre = match['pre']
-        post = match['post']
-        return int(post) * get_minimal_byte_enc_len(pre)
-    except (KeyError, TypeError) as e:
-        pass
-    if stype.endswith("[]"):
-        return 32
-
-    if stype == "string":
-        return 32
-    elif stype == "bytes":
-        return 32 # 2 was the smallest observed value, remix did not allow specification of zero size bytes
-    elif [stype.startswith(prefix_type) for prefix_type in ["int", "uint", "address", "bool", "fixed", "ufixed", "bytes"]]:
-        return 32
-
-def head_byte_length_min(abi_obj):
-    if is_dynamic(abi_obj):
-        return 32
-    else:
-        return get_minimal_byte_enc_len(abi_obj)
-
-
-def tail_byte_length_min(abi_obj):
-    if is_dynamic(abi_obj):
-        return get_minimal_byte_enc_len(abi_obj)
-    else:
-        return 0
-
-def get_minimal_wsize(abi_obj):
-    stype = abi_obj['type']
-    if type(stype) == list:
-        return sum(list(map(lambda a: get_minimal_wsize(a), stype)))
-    if stype in ["bytes", "string"] or stype.endswith("[]"):
-        return True
-    if stype == 'tuple':
-        return True in [is_dynamic(elem) for elem in abi_obj['components']]
-    try:
-        match = search(r'(?P
.*)(?P\[[0-9]+\])', stype)
-        pre = match['pre']
-        # post = match['post']
-        return is_dynamic(pre)
-    except (KeyError | TypeError):
-        pass
-    return False
-
-
-def get_minimal_constructor_param_encoding_len(abi):
-    for spec in abi:
-        try:
-            if spec['type'] == 'constructor':
-                con_inputs = spec['inputs']
-                return get_minimal_byte_enc_len(con_inputs)
-        except KeyError:
-            print("ABI does not contain inputs for constructor")
-    return -1
-
-def get_minimal_constr_param_byte_length(filename, contract_name=None):
-    abi_decoded = get_solc_abi_json(filename)
-    return get_minimal_constructor_param_encoding_len(abi_decoded)
-
-def is_dynamic(abi_obj):
-    if type(abi_obj) == list:
-        return True in list(map(lambda a: is_dynamic(a), abi_obj))
-    if type(abi_obj) == str:
-        stype = abi_obj
-    else:
-        stype = abi_obj['type']
-    if stype in ["bytes", "string"] or stype.endswith("[]"):
-        return True
-    if stype == 'tuple':
-        return True in [is_dynamic(elem) for elem in abi_obj['components']]
-    try:
-        match = search(r'(?P
.*)(?P\[[0-9]+\])', stype)
-        pre = match['pre']
-        # post = match['post']
-        return is_dynamic(pre)
-    except (KeyError, TypeError) as e:
-        pass
-    return False
-
-def get_solc_abi_json(file, solc_binary="solc", solc_args=None):
-
-    cmd = [solc_binary, "--abi", '--allow-paths', "."]
-
-    if solc_args:
-        cmd.extend(solc_args.split(" "))
-
-    cmd.append(file)
-
-    try:
-        p = Popen(cmd, stdout=PIPE, stderr=PIPE)
-
-        stdout, stderr = p.communicate()
-        ret = p.returncode
-
-        if ret != 0:
-            raise CompilerError("Solc experienced a fatal error (code %d).\n\n%s" % (ret, stderr.decode('UTF-8')))
-    except FileNotFoundError:
-        raise CompilerError("Compiler not found. Make sure that solc is installed and in PATH, or set the SOLC environment variable.")
-
-    out = stdout.decode("UTF-8")
-
-    if not len(out):
-        raise CompilerError("Compilation failed.")
-
-    out = out[out.index("["):]
-
-    print(out)
-
-    return json.loads(out)
-
-def abi_json_to_abi(abi_json):
-    return json.loads(abi_json)
-
-
-if __name__ == "__main__":
-    if len(sys.argv) > 1:
-        print("Size:")
-        print(get_minimal_constr_param_byte_length(sys.argv[1]))
-    else:
-        print("Error: No file specified")
diff --git a/mythril/solidnotary/solidnotary.py b/mythril/solidnotary/solidnotary.py
deleted file mode 100644
index 3a3091d2..00000000
--- a/mythril/solidnotary/solidnotary.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import logging
-from mythril.solidnotary.transactiontrace import TransactionTrace
-from mythril.solidnotary.z3utility import are_satisfiable
-from laser.ethereum.svm import Environment, GlobalState, CalldataType
-from z3 import BitVec, simplify, is_false, is_bool, is_true, Solver, sat
-from copy import deepcopy
-
-
-class SolidNotary:
-
-    def __init__(self):
-        # Todo Parse Annotations and store them in an additional structure
-        # Todo receive a list of files or a file, these are modified for the analysis
-        pass
-
-    def notarize(self):
-        # Todo Instantiate an instance of Mythril, analyze and print the result
-        # Todo Find how they are storing results
-        pass
-
-def get_transaction_traces(statespace):
-    print("get_transaction_traces")
-
-    traces = []
-
-    for k in statespace.nodes:
-        node = statespace.nodes[k]
-        for state in node.states:
-            instruction = state.get_current_instruction()
-            # print("op: " + str(instruction['opcode']) + ((" " + instruction['argument']) if instruction['opcode'].startswith("PUSH") else "") + " stack: " + str(state.mstate.stack).replace("\n", "")+ " mem: " + str(state.mstate.memory).replace("\n", ""))
-            if instruction['opcode'] == "STOP":
-                if are_satisfiable(state.mstate.constraints):
-                    traces.append(TransactionTrace(state.environment.active_account.storage, state.mstate.constraints))
-    return traces
-
-def get_construction_traces(statespace):
-    print("get_constructor_traces")
-
-    traces = []
-
-    for k in statespace.nodes:
-        node = statespace.nodes[k]
-        for state in node.states:
-            instruction = state.get_current_instruction()
-
-            # print("op: " + str(instruction['opcode']) + ((" " + instruction['argument']) if instruction['opcode'].startswith("PUSH") else "") + " stack: " + str(state.mstate.stack).replace("\n", "")+ " mem: " + str(state.mstate.memory).replace("\n", ""))
-            if instruction['opcode'] == "RETURN":
-                if are_satisfiable(state.mstate.constraints):
-                    traces.append(TransactionTrace(state.environment.active_account.storage, state.mstate.constraints))
-    return traces
-
-def get_t_indexed_environment(active_account, index):
-
-        # Initialize the execution environment
-
-        environment = Environment(
-            active_account,
-            BitVec("caller_"+str(index), 256),
-            [],
-            BitVec("gasprice_"+str(index), 256),
-            BitVec("callvalue_"+str(index), 256),
-            BitVec("origin_"+str(index), 256),
-            calldata_type=CalldataType.SYMBOLIC,
-        )
-
-        return environment
-
-def get_t_indexed_globstate(active_account, index):
-    environment = get_t_indexed_environment(active_account, index)
-    # Todo is this just some set of preset accounts? How should we deal with it
-    return GlobalState(self.accounts, environment)
-
diff --git a/mythril/solidnotary/transactiontrace.py b/mythril/solidnotary/transactiontrace.py
deleted file mode 100644
index 44bcaca9..00000000
--- a/mythril/solidnotary/transactiontrace.py
+++ /dev/null
@@ -1,206 +0,0 @@
-from z3 import *
-from copy import deepcopy
-import re
-from mythril.solidnotary.z3utility import are_satisfiable, simplify_constraints
-
-"""
-    Returns whether or the specified symbolic string stands for a data value that can be different from transaction to 
-    transaction without the need of an intermediate call to the contract (e.g. a transaction params, blocknumber, ...)
-"""
-
-
-def is_t_variable(var):
-    var = str(var)
-    if (var.startswith("caller")
-        or var.startswith("gasprice")
-        or var.startswith("callvalue")
-        or var.startswith("origin")
-        or var.startswith("calldata_")
-        or var.startswith("calldatasize_")
-        or var.startswith("balance_at")
-        or var.startswith("KECCAC_mem_")
-        or var.startswith("keccac_")
-        or var.startswith("gasprice")
-        or var.startswith("extcodesize")
-        or var.startswith("returndatasize")
-        # or var.startswith(var, "blockhash_block_") should not change between transactions
-        or var.startswith("coinbase")
-        or var.startswith("timestamp")
-        or var.startswith("block_number")
-        or var.startswith("block_difficulty")
-        or var.startswith("block_gaslimit")
-        or var.startswith("mem_")
-        or var.startswith("msize")
-        or var.startswith("gas")
-        or var.startswith("retval_")
-        or var.startswith("keccac_")):
-        return True
-    else:
-        return False
-
-# Todo constructor mit den intentet initvalues versorgen
-
-
-def filter_for_t_variable_data(sym_vars):
-    return list(filter(lambda x: is_t_variable(x), sym_vars))
-
-class TransactionTrace:
-
-    def __init__(self, storage, constraints, lvl=1):
-        self.storage = deepcopy(storage) # Todo give all non storage symbolic values that can be different every transaction the number one
-        self.constraints = constraints # Todo eliminate all constraints that are not regarding the beginning of the transaction may not be necessary
-        # eliminate all constraints that only contain names not in the set of names from storage
-        self.constraints = simplify_constraints(self.constraints) # Todo simplification of the sum of constraints
-        self.tran_constraints = deepcopy(self.constraints) # Todo minimize them if they do not involve outside symb variables
-        self.lvl = lvl
-        self.sym_names = self.extract_sym_names_from_storage()
-        self.sym_names.extend(self.extract_sym_names_from_constraints())
-        if lvl == 1:
-            self.set_transaction_idx()
-
-        # Todo the constraints of a trace are probably also important here and have to be somehow aggregated
-        # Todo Identifiy addional trace information such as blocknumber and more
-
-    def __str__(self):
-        return str(self.as_dict())
-
-    def as_dict(self):
-
-        return {'lvl': self.lvl, 'storage': str(self.storage), 'constraints': str(self.constraints)}
-
-    def pp_trace(self):
-        print()
-        print("Trace lvl: {}".format(self.lvl))
-        print("Storage: {}".format({k: str(v).replace("\n", " ") for k, v in self.storage.items()}))
-        print("Constraints: {}".format(list(map(lambda x: str(x).replace("\n", " "), self.constraints))))
-        print()
-
-
-    def add_transaction_idx(self, offset):
-        new_names = []
-        for name in self.sym_names:
-            matched_name = re.search(r't([0-9]+)(_.*)', name)
-            num = int(matched_name.group(1)) + offset
-            new_names.append("t" + str(num) + matched_name.group(2))
-        repl_tup = list(zip(self.sym_names, new_names))
-
-        self.substitute_bv_names(repl_tup)
-
-        self.sym_names = new_names
-
-    def set_transaction_idx(self):
-        repl_tup = []
-        new_sym_names = []
-        for name in self.sym_names:
-            repl_tup.append((name, "t1_" + name))
-            new_sym_names.append("t1_" + name)
-        self.sym_names = new_sym_names
-        self.substitute_bv_names(repl_tup)
-
-    def substitute_bv_names(self, subs_tuple):
-        subs_tuples = list(map(lambda name_t: (BitVec(name_t[0], 256), BitVec(name_t[1], 256)), subs_tuple))
-        for s_num, slot in self.storage.items():
-            self.storage[s_num] = substitute(slot, subs_tuples)
-        for c_idx in range(len(self.constraints)):
-            self.constraints[c_idx] = substitute(self.constraints[c_idx], subs_tuples)
-
-    def extract_sym_names(self, obj):
-        if (not hasattr(obj, 'children') or len(obj.children()) == 0) and hasattr(obj, 'decl') :
-                return [str(obj.decl())]
-        else:
-            sym_vars = []
-            for c in obj.children():
-                sym_vars.extend(self.extract_sym_names(c))
-            return sym_vars
-
-    def extract_sym_names_from_constraints(self):
-        sym_names = []
-        for k,v in self.storage.items():
-            sym_names.extend(self.extract_sym_names(v))
-        return filter_for_t_variable_data(sym_names)
-
-    def extract_sym_names_from_storage(self):
-        sym_names = []
-        for v in self.constraints:
-            sym_names.extend(self.extract_sym_names(v))
-        return filter_for_t_variable_data(sym_names) # Todo Check whether here it is the right choice too, to filter ...
-
-    """
-          Either do only deep checing here and use the proper trace or storage_slot reduction in the apply function. Or do
-          both here.
-      """
-
-    def deep_equals(trace_lvl1, trace_lvl2):
-        return set(trace_lvl1) == set(trace_lvl2) # Todo Impelement an ACTUAL deep comparison
-
-    def simplify_storage(self):
-        for k,v in self.storage.items():
-            # Todo explore the arguments of this storage simplification in z3 to find ways to further simplify and to
-            # sort this expressions for equality comparison
-            self.storage[k] = simplify(v)
-
-    """
-        Applies the new trace tt on a possibly even changed trace self.
-    """
-    def apply_trace(self, tt):
-        if tt is None:
-            return self
-        new_trace = deepcopy(tt)
-        new_trace.add_transaction_idx(self.lvl)
-        subs_map = list(map(lambda x: (BitVec("storage_" + str(x[0]), 256), x[1]), self.storage.items()))
-        for k,v in new_trace.storage.items():
-            new_trace.storage[k] = substitute(v, subs_map)
-        for c_idx in range(len(new_trace.constraints)):
-            new_trace.constraints[c_idx] = substitute(new_trace.constraints[c_idx], subs_map)
-        new_trace.lvl += self.lvl
-        new_trace.sym_names.extend(deepcopy(self.sym_names))
-        # self can be omitted (e.g. when related storage locations were overwritten)
-        new_trace.simplify_storage()
-        new_trace.constraints = simplify_constraints(new_trace.constraints)
-        # Simplify constraints in there sum to eliminate subconstraints
-        if are_satisfiable(new_trace.constraints):
-            return new_trace
-        else:
-            return None
-
-    def apply_traces_parallel(self, traces):
-        combined_traces = []
-        for trace in traces:
-            combined_traces.append(self.apply_trace(trace))
-        return list(filter(lambda t: not t is None, combined_traces))
-
-    def apply_exact_trace_levels(self, traces, depth):
-        # Todo maybe some faster trace build not building one level at a time to e.g.
-        # Todo reach level 17 but build 2, then 4, then 8 and then 16 then 17
-        trace_lvl_n = [self]
-        for i in range(depth):
-            trace_lvl_np1 = []
-            for trace in trace_lvl_n:
-                trace_lvl_np1.extend(trace.apply_traces_parallel(traces))
-            if TransactionTrace.deep_equals(trace_lvl_np1, trace_lvl_n): # Fixpoint detected, function needs to ignore lists, dicts and objects.
-                return trace_lvl_n
-            trace_lvl_n = trace_lvl_np1
-        return trace_lvl_n
-
-    def apply_up_to_trace_levels(self, traces, depth):
-        traces_up_to = [[self]] # elements are trace_levels
-        for i in range(depth):
-            trace_lvl_np1 = []
-            for trace in traces_up_to[-1]:
-                trace_lvl_np1.extend(trace.apply_traces_parallel(traces))
-            for trace_lvl_i in traces_up_to:
-                # the following might be faster to check when using a content representing hash
-                if TransactionTrace.deep_equals(trace_lvl_np1, trace_lvl_i): # cycle in the traces of trace chains detected: levels
-                    # while repeat themselves, function needs to ignore lists, dicts and objects.
-                    return traces_up_to
-            traces_up_to.append(trace_lvl_np1)
-        return traces_up_to
-
-    # Todo Maybe implement a function that checks whether two traces are combinable before creating objekts, adv. in
-    # case they are not the object creation doe not have to be done. Investigate whether a suicide trace completely
-    # stopes the contract from being executable. In that case a suicided transaction also is not combinable with
-    # successive transactions.
-
-    # Todo write a function that allows to specify a function/invocable to explore the tracechain space in DFS manner
-
-
diff --git a/mythril/solidnotary/z3utility.py b/mythril/solidnotary/z3utility.py
deleted file mode 100644
index 50b80bac..00000000
--- a/mythril/solidnotary/z3utility.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from z3 import Solver, sat, simplify, is_bool, is_true, is_false
-from copy import deepcopy
-
-def are_satisfiable(constraints):
-    s = Solver()
-    for c in constraints:
-        s.add(c)
-    return s.check() == sat
-
-def simplify_constraints(constraints): # Todo simplification of the sum of constraints
-    simp_const = []
-    for const in constraints:
-        simp_const.append(simplify(const))
-    simp_const = list(filter(lambda c: not is_bool(c) or not is_true(c), simp_const))
-    falses = list(filter(lambda c: is_bool(c) and is_false(c), simp_const))
-    if len(falses) > 0:
-        return [deepcopy(falses[0])]
-
-    return simp_const