Merge branch 'dev' into fix-check-erc-output

pull/1016/head
Josselin 3 years ago
commit 663107dbd4
Changed files (diff stat in parentheses; BIN = binary file):
  1. .github/workflows/black.yml (2)
  2. .github/workflows/ci.yml (2)
  3. .github/workflows/linter.yml (2)
  4. .github/workflows/pylint.yml (2)
  5. CONTRIBUTING.md (2)
  6. README.md (2)
  7. plugin_example/README.md (2)
  8. pyproject.toml (3)
  9. setup.py (5)
  10. slither/__main__.py (14)
  11. slither/analyses/data_dependency/data_dependency.py (10)
  12. slither/core/compilation_unit.py (2)
  13. slither/core/declarations/contract.py (12)
  14. slither/core/declarations/function.py (14)
  15. slither/core/declarations/structure.py (2)
  16. slither/core/scope/scope.py (6)
  17. slither/core/slither_core.py (2)
  18. slither/core/solidity_types/array_type.py (2)
  19. slither/core/solidity_types/elementary_type.py (2)
  20. slither/detectors/abstract_detector.py (30)
  21. slither/detectors/compiler_bugs/reused_base_constructor.py (2)
  22. slither/detectors/statements/incorrect_strict_equality.py (2)
  23. slither/detectors/statements/write_after_write.py (4)
  24. slither/formatters/attributes/constant_pragma.py (2)
  25. slither/printers/abstract_printer.py (6)
  26. slither/printers/inheritance/inheritance_graph.py (4)
  27. slither/printers/summary/evm.py (6)
  28. slither/printers/summary/human_summary.py (2)
  29. slither/slithir/convert.py (4)
  30. slither/slithir/tmp_operations/argument.py (4)
  31. slither/slithir/utils/ssa.py (12)
  32. slither/solc_parsing/declarations/function.py (4)
  33. slither/solc_parsing/slither_compilation_unit_solc.py (4)
  34. slither/tools/flattening/export/export.py (4)
  35. slither/tools/mutator/mutators/MIA.py (2)
  36. slither/tools/mutator/mutators/MVIE.py (2)
  37. slither/tools/mutator/mutators/MVIV.py (2)
  38. slither/tools/mutator/mutators/abstract_mutator.py (12)
  39. slither/tools/properties/utils.py (2)
  40. slither/tools/similarity/__main__.py (2)
  41. slither/tools/similarity/cache.py (2)
  42. slither/tools/similarity/encode.py (4)
  43. slither/tools/similarity/plot.py (4)
  44. slither/tools/similarity/test.py (2)
  45. slither/tools/similarity/train.py (2)
  46. slither/tools/slither_format/__main__.py (4)
  47. slither/tools/slither_format/slither_format.py (8)
  48. slither/tools/upgradeability/__main__.py (6)
  49. slither/tools/upgradeability/checks/abstract_checks.py (30)
  50. slither/utils/command_line.py (2)
  51. tests/ast-parsing/compile/assembly-0.8.11-compact.zip (BIN)
  52. tests/ast-parsing/compile/assignment-0.8.11-compact.zip (BIN)
  53. tests/ast-parsing/compile/binaryoperation-0.8.11-compact.zip (BIN)
  54. tests/ast-parsing/compile/break-0.8.11-compact.zip (BIN)
  55. tests/ast-parsing/compile/call_to_variable-0.8.11-compact.zip (BIN)
  56. tests/ast-parsing/compile/comment-0.8.11-compact.zip (BIN)
  57. tests/ast-parsing/compile/conditional-0.8.11-compact.zip (BIN)
  58. tests/ast-parsing/compile/continue-0.8.11-compact.zip (BIN)
  59. tests/ast-parsing/compile/contract-0.8.11-compact.zip (BIN)
  60. tests/ast-parsing/compile/custom_error-0.8.11-compact.zip (BIN)
  61. tests/ast-parsing/compile/dowhile-0.8.11-compact.zip (BIN)
  62. tests/ast-parsing/compile/emit-0.8.11-compact.zip (BIN)
  63. tests/ast-parsing/compile/enum-0.8.11-compact.zip (BIN)
  64. tests/ast-parsing/compile/event-0.8.11-compact.zip (BIN)
  65. tests/ast-parsing/compile/for-0.8.11-compact.zip (BIN)
  66. tests/ast-parsing/compile/function-0.8.11-compact.zip (BIN)
  67. tests/ast-parsing/compile/functioncall-0.8.11-compact.zip (BIN)
  68. tests/ast-parsing/compile/if-0.8.11-compact.zip (BIN)
  69. tests/ast-parsing/compile/indexaccess-0.8.11-compact.zip (BIN)
  70. tests/ast-parsing/compile/indexrangeaccess-0.8.11-compact.zip (BIN)
  71. tests/ast-parsing/compile/library_implicit_conversion-0.8.11-compact.zip (BIN)
  72. tests/ast-parsing/compile/literal-0.8.11-compact.zip (BIN)
  73. tests/ast-parsing/compile/memberaccess-0.8.11-compact.zip (BIN)
  74. tests/ast-parsing/compile/minmax-0.8.11-compact.zip (BIN)
  75. tests/ast-parsing/compile/modifier-0.8.11-compact.zip (BIN)
  76. tests/ast-parsing/compile/newexpression-0.8.11-compact.zip (BIN)
  77. tests/ast-parsing/compile/pragma-0.8.11-compact.zip (BIN)
  78. tests/ast-parsing/compile/push-0.8.11-compact.zip (BIN)
  79. tests/ast-parsing/compile/return-0.8.11-compact.zip (BIN)
  80. tests/ast-parsing/compile/scope-0.8.11-compact.zip (BIN)
  81. tests/ast-parsing/compile/struct-0.8.11-compact.zip (BIN)
  82. tests/ast-parsing/compile/throw-0.8.11-compact.zip (BIN)
  83. tests/ast-parsing/compile/top-level-0.8.11-compact.zip (BIN)
  84. tests/ast-parsing/compile/top-level-import-0.8.11-compact.zip (BIN)
  85. tests/ast-parsing/compile/top-level-import-bis-0.8.11-compact.zip (BIN)
  86. tests/ast-parsing/compile/top-level-nested-import-0.8.11-compact.zip (BIN)
  87. tests/ast-parsing/compile/trycatch-0.8.11-compact.zip (BIN)
  88. tests/ast-parsing/compile/tupleexpression-0.8.11-compact.zip (BIN)
  89. tests/ast-parsing/compile/unaryexpression-0.8.11-compact.zip (BIN)
  90. tests/ast-parsing/compile/unchecked-0.8.11-compact.zip (BIN)
  91. tests/ast-parsing/compile/units_and_global_variables-0.8.11-compact.zip (BIN)
  92. tests/ast-parsing/compile/using-for-0.8.11-compact.zip (BIN)
  93. tests/ast-parsing/compile/variable-0.8.11-compact.zip (BIN)
  94. tests/ast-parsing/compile/variabledeclaration-0.8.11-compact.zip (BIN)
  95. tests/ast-parsing/compile/while-0.8.11-compact.zip (BIN)
  96. tests/ast-parsing/compile/yul-0.8.11-compact.zip (BIN)
  97. tests/ast-parsing/expected/assembly-0.8.11-compact.json (5)
  98. tests/ast-parsing/expected/assignment-0.8.11-compact.json (5)
  99. tests/ast-parsing/expected/binaryoperation-0.8.11-compact.json (5)
  100. tests/ast-parsing/expected/break-0.8.11-compact.json (5)
Some files were not shown because too many files have changed in this diff.

.github/workflows/black.yml
@@ -36,7 +36,7 @@ jobs:
        cp pyproject.toml .github/linters
    - name: Black
-     uses: docker://github/super-linter:v3
+     uses: docker://github/super-linter:v4
      if: always()
      env:
        # run linter on everything to catch preexisting problems

@ -55,4 +55,4 @@ jobs:
TEST_TYPE: ${{ matrix.type }} TEST_TYPE: ${{ matrix.type }}
GITHUB_ETHERSCAN: ${{ secrets.GITHUB_ETHERSCAN }} GITHUB_ETHERSCAN: ${{ secrets.GITHUB_ETHERSCAN }}
run: | run: |
bash scripts/ci_test_${TEST_TYPE}.sh bash "scripts/ci_test_${TEST_TYPE}.sh"

.github/workflows/linter.yml
@@ -36,7 +36,7 @@ jobs:
        cp pyproject.toml .github/linters
    - name: Lint everything else
-     uses: docker://github/super-linter:v3
+     uses: docker://github/super-linter:v4
      if: always()
      env:
        # run linter on everything to catch preexisting problems

.github/workflows/pylint.yml
@@ -36,7 +36,7 @@ jobs:
        cp pyproject.toml .github/linters
    - name: Pylint
-     uses: docker://github/super-linter:v3
+     uses: docker://github/super-linter:v4
      if: always()
      env:
        # run linter on everything to catch preexisting problems

CONTRIBUTING.md
@@ -37,7 +37,7 @@ To run them locally in the root dir of the repository:
  - `pylint slither tests --rcfile pyproject.toml`
  - `black . --config pyproject.toml`
- We use pylint `2.8.2` black `20.8b1`.
+ We use pylint `2.12.2` black `21.10b0`.
  ### Detectors tests
  For each new detector, at least one regression tests must be present.
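The hunk above pins the lint toolchain described in CONTRIBUTING.md. A minimal helper that runs the same two commands from the repository root is sketched below; it assumes `pylint` and `black` are already installed and is not part of the repository itself:

```python
import subprocess

# Run the checks CONTRIBUTING.md describes, from the repository root.
for cmd in (
    ["pylint", "slither", "tests", "--rcfile", "pyproject.toml"],
    ["black", ".", "--config", "pyproject.toml"],
):
    print("$", " ".join(cmd))
    subprocess.run(cmd, check=False)  # report failures without aborting the loop
```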

README.md
@@ -177,7 +177,7 @@ git clone https://github.com/crytic/slither.git && cd slither
  python3 setup.py install
  ```
- We recommend using an Python virtual environment, as detailed in the [Developer Installation Instructions](https://github.com/trailofbits/slither/wiki/Developer-installation), if you prefer to install Slither via git.
+ We recommend using a Python virtual environment, as detailed in the [Developer Installation Instructions](https://github.com/trailofbits/slither/wiki/Developer-installation), if you prefer to install Slither via git.
  ### Using Docker
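The README wording fix concerns the recommendation to use a Python virtual environment for a git-based install. For reference only, an environment can also be created programmatically with the standard `venv` module; the directory name below is arbitrary:

```python
import venv

# Create ./slither-env with pip available; activate it before running the install step.
venv.create("slither-env", with_pip=True)
```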

plugin_example/README.md
@@ -1,6 +1,6 @@
  # Slither, Plugin Example
- This repo contains an example of plugin for Slither.
+ This repository contains an example of plugin for Slither.
  See the [detector documentation](https://github.com/trailofbits/slither/wiki/Adding-a-new-detector).

pyproject.toml
@@ -18,5 +18,6 @@ logging-fstring-interpolation,
  logging-not-lazy,
  duplicate-code,
  import-error,
- unsubscriptable-object
+ unsubscriptable-object,
+ consider-using-f-string
  """

@ -1,5 +1,8 @@
from setuptools import setup, find_packages from setuptools import setup, find_packages
with open("README.md", "r", encoding="utf-8") as f:
long_description = f.read()
setup( setup(
name="slither-analyzer", name="slither-analyzer",
description="Slither is a Solidity static analysis framework written in Python 3.", description="Slither is a Solidity static analysis framework written in Python 3.",
@ -16,7 +19,7 @@ setup(
], ],
# dependency_links=["git+https://github.com/crytic/crytic-compile.git@master#egg=crytic-compile"], # dependency_links=["git+https://github.com/crytic/crytic-compile.git@master#egg=crytic-compile"],
license="AGPL-3.0", license="AGPL-3.0",
long_description=open("README.md", "r", encoding="utf-8").read(), long_description=long_description,
entry_points={ entry_points={
"console_scripts": [ "console_scripts": [
"slither = slither.__main__:main", "slither = slither.__main__:main",

slither/__main__.py
@@ -173,13 +173,11 @@ def get_detectors_and_printers():
      detector = None
      if not all(issubclass(detector, AbstractDetector) for detector in plugin_detectors):
          raise Exception(
-             "Error when loading plugin %s, %r is not a detector" % (entry_point, detector)
+             f"Error when loading plugin {entry_point}, {detector} is not a detector"
          )
      printer = None
      if not all(issubclass(printer, AbstractPrinter) for printer in plugin_printers):
-         raise Exception(
-             "Error when loading plugin %s, %r is not a printer" % (entry_point, printer)
-         )
+         raise Exception(f"Error when loading plugin {entry_point}, {printer} is not a printer")
      # We convert those to lists in case someone returns a tuple
      detectors += list(plugin_detectors)
@@ -253,7 +251,7 @@ def choose_printers(args, all_printer_classes):
          if printer in printers:
              printers_to_run.append(printers[printer])
          else:
-             raise Exception("Error: {} is not a printer".format(printer))
+             raise Exception(f"Error: {printer} is not a printer")
      return printers_to_run
@@ -303,7 +301,7 @@ def parse_args(detector_classes, printer_classes):  # pylint: disable=too-many-s
      group_detector.add_argument(
          "--detect",
          help="Comma-separated list of detectors, defaults to all, "
-         "available detectors: {}".format(", ".join(d.ARGUMENT for d in detector_classes)),
+         f"available detectors: {', '.join(d.ARGUMENT for d in detector_classes)}",
          action="store",
          dest="detectors_to_run",
          default=defaults_flag_in_config["detectors_to_run"],
@@ -312,7 +310,7 @@ def parse_args(detector_classes, printer_classes):  # pylint: disable=too-many-s
      group_printer.add_argument(
          "--print",
          help="Comma-separated list fo contract information printers, "
-         "available printers: {}".format(", ".join(d.ARGUMENT for d in printer_classes)),
+         f"available printers: {', '.join(d.ARGUMENT for d in printer_classes)}",
          action="store",
          dest="printers_to_run",
          default=defaults_flag_in_config["printers_to_run"],
@@ -657,7 +655,7 @@ def main_impl(all_detector_classes, all_printer_classes):
      outputting_sarif = args.sarif is not None
      outputting_sarif_stdout = args.sarif == "-"
      outputting_zip = args.zip is not None
-     if args.zip_type not in ZIP_TYPES_ACCEPTED.keys():
+     if args.zip_type not in ZIP_TYPES_ACCEPTED:
          to_log = f'Zip type not accepted, it must be one of {",".join(ZIP_TYPES_ACCEPTED.keys())}'
          logger.error(to_log)
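The last hunk drops a redundant `.keys()` call: a membership test on a dict already checks its keys. A small illustration; the `ZIP_TYPES_ACCEPTED` values below are stand-ins, not the real mapping from slither/__main__.py:

```python
# Stand-in for the real ZIP_TYPES_ACCEPTED mapping in slither/__main__.py.
ZIP_TYPES_ACCEPTED = {"lzma": "xz", "stored": None, "deflated": "zlib"}

zip_type = "lzma"

# `in` on a dict tests the keys directly; calling .keys() first adds nothing.
assert (zip_type in ZIP_TYPES_ACCEPTED) == (zip_type in ZIP_TYPES_ACCEPTED.keys())

if zip_type not in ZIP_TYPES_ACCEPTED:
    print(f"Zip type not accepted, it must be one of {','.join(ZIP_TYPES_ACCEPTED)}")
```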

slither/analyses/data_dependency/data_dependency.py
@@ -284,8 +284,8 @@ def compute_dependency_contract(contract, compilation_unit: "SlitherCompilationU
      if KEY_SSA in contract.context:
          return
-     contract.context[KEY_SSA] = dict()
-     contract.context[KEY_SSA_UNPROTECTED] = dict()
+     contract.context[KEY_SSA] = {}
+     contract.context[KEY_SSA_UNPROTECTED] = {}
      for function in contract.functions + contract.modifiers:
          compute_dependency_function(function)
@@ -365,8 +365,8 @@ def compute_dependency_function(function):
      if KEY_SSA in function.context:
          return
-     function.context[KEY_SSA] = dict()
-     function.context[KEY_SSA_UNPROTECTED] = dict()
+     function.context[KEY_SSA] = {}
+     function.context[KEY_SSA_UNPROTECTED] = {}
      is_protected = function.is_protected()
      for node in function.nodes:
@@ -417,7 +417,7 @@ def convert_variable_to_non_ssa(v):
  def convert_to_non_ssa(data_depencies):
      # Need to create new set() as its changed during iteration
-     ret = dict()
+     ret = {}
      for (k, values) in data_depencies.items():
          var = convert_variable_to_non_ssa(k)
          if not var in ret:
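Many hunks in this commit replace `dict()` and `list()` constructor calls with the literal forms. The two spellings build the same objects; the literal is the idiomatic choice and avoids a global name lookup plus a call. A tiny demonstration:

```python
import timeit

# Literals and constructor calls produce equal, empty containers...
assert {} == dict() and [] == list()

# ...but the literal skips the name lookup and the function call.
print("dict():", timeit.timeit("dict()", number=1_000_000))
print("{}    :", timeit.timeit("{}", number=1_000_000))
```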

@ -61,7 +61,7 @@ class SlitherCompilationUnit(Context):
self.counter_slithir_temporary = 0 self.counter_slithir_temporary = 0
self.counter_slithir_reference = 0 self.counter_slithir_reference = 0
self.scopes: Dict[Filename, FileScope] = dict() self.scopes: Dict[Filename, FileScope] = {}
@property @property
def core(self) -> "SlitherCore": def core(self) -> "SlitherCore":

@ -1093,9 +1093,11 @@ class Contract(SourceMapping): # pylint: disable=too-many-public-methods
if initializable in self.inheritance: if initializable in self.inheritance:
self._is_upgradeable = True self._is_upgradeable = True
else: else:
for c in self.inheritance + [self]: for contract in self.inheritance + [self]:
# This might lead to false positive # This might lead to false positive
lower_name = c.name.lower() # Not sure why pylint is having a trouble here
# pylint: disable=no-member
lower_name = contract.name.lower()
if "upgradeable" in lower_name or "upgradable" in lower_name: if "upgradeable" in lower_name or "upgradable" in lower_name:
self._is_upgradeable = True self._is_upgradeable = True
break break
@ -1257,7 +1259,7 @@ class Contract(SourceMapping): # pylint: disable=too-many-public-methods
""" """
from slither.slithir.variables import StateIRVariable from slither.slithir.variables import StateIRVariable
all_ssa_state_variables_instances = dict() all_ssa_state_variables_instances = {}
for contract in self.inheritance: for contract in self.inheritance:
for v in contract.state_variables_declared: for v in contract.state_variables_declared:
@ -1275,8 +1277,8 @@ class Contract(SourceMapping): # pylint: disable=too-many-public-methods
func.generate_slithir_ssa(all_ssa_state_variables_instances) func.generate_slithir_ssa(all_ssa_state_variables_instances)
def fix_phi(self): def fix_phi(self):
last_state_variables_instances = dict() last_state_variables_instances = {}
initial_state_variables_instances = dict() initial_state_variables_instances = {}
for v in self._initial_state_variables: for v in self._initial_state_variables:
last_state_variables_instances[v.canonical_name] = [] last_state_variables_instances[v.canonical_name] = []
initial_state_variables_instances[v.canonical_name] = v initial_state_variables_instances[v.canonical_name] = v

slither/core/declarations/function.py
@@ -882,7 +882,7 @@ class Function(SourceMapping, metaclass=ABCMeta):  # pylint: disable=too-many-pu
      from slither.slithir.variables import Constant
      if self._return_values is None:
-         return_values = list()
+         return_values = []
          returns = [n for n in self.nodes if n.type == NodeType.RETURN]
          [  # pylint: disable=expression-not-assigned
              return_values.extend(ir.values)
@@ -903,7 +903,7 @@ class Function(SourceMapping, metaclass=ABCMeta):  # pylint: disable=too-many-pu
      from slither.slithir.variables import Constant
      if self._return_values_ssa is None:
-         return_values_ssa = list()
+         return_values_ssa = []
          returns = [n for n in self.nodes if n.type == NodeType.RETURN]
          [  # pylint: disable=expression-not-assigned
              return_values_ssa.extend(ir.values)
@@ -1599,16 +1599,16 @@ class Function(SourceMapping, metaclass=ABCMeta):  # pylint: disable=too-many-pu
      from slither.core.cfg.node import NodeType
      if not self.is_implemented:
-         return dict()
+         return {}
      if self._entry_point is None:
-         return dict()
+         return {}
      # node, values
-     to_explore: List[Tuple["Node", Dict]] = [(self._entry_point, dict())]
+     to_explore: List[Tuple["Node", Dict]] = [(self._entry_point, {})]
      # node -> values
-     explored: Dict = dict()
+     explored: Dict = {}
      # name -> instances
-     ret: Dict = dict()
+     ret: Dict = {}
      while to_explore:
          node, values = to_explore[0]

@ -12,7 +12,7 @@ class Structure(SourceMapping):
super().__init__() super().__init__()
self._name = None self._name = None
self._canonical_name = None self._canonical_name = None
self._elems: Dict[str, "StructureVariable"] = dict() self._elems: Dict[str, "StructureVariable"] = {}
# Name of the elements in the order of declaration # Name of the elements in the order of declaration
self._elems_ordered: List[str] = [] self._elems_ordered: List[str] = []
self.compilation_unit = compilation_unit self.compilation_unit = compilation_unit

slither/core/scope/scope.py
@@ -23,19 +23,19 @@ class FileScope:
      self.filename = filename
      self.accessible_scopes: List[FileScope] = []
-     self.contracts: Dict[str, Contract] = dict()
+     self.contracts: Dict[str, Contract] = {}
      # Custom error are a list instead of a dict
      # Because we parse the function signature later on
      # So we simplify the logic and have the scope fields all populated
      self.custom_errors: Set[CustomErrorTopLevel] = set()
-     self.enums: Dict[str, EnumTopLevel] = dict()
+     self.enums: Dict[str, EnumTopLevel] = {}
      # Functions is a list instead of a dict
      # Because we parse the function signature later on
      # So we simplify the logic and have the scope fields all populated
      self.functions: Set[FunctionTopLevel] = set()
      self.imports: Set[Import] = set()
      self.pragmas: Set[Pragma] = set()
-     self.structures: Dict[str, StructureTopLevel] = dict()
+     self.structures: Dict[str, StructureTopLevel] = {}
  def add_accesible_scopes(self) -> bool:
      """

slither/core/slither_core.py
@@ -254,7 +254,7 @@ class SlitherCore(Context):
      filename = self._previous_results_filename
      try:
          if os.path.isfile(filename):
-             with open(filename) as f:
+             with open(filename, encoding="utf8") as f:
                  self._previous_results = json.load(f)
                  if self._previous_results:
                      for r in self._previous_results:

slither/core/solidity_types/array_type.py
@@ -46,7 +46,7 @@ class ArrayType(Type):
      def __str__(self):
          if self._length:
-             return str(self._type) + "[{}]".format(str(self._length_value))
+             return str(self._type) + f"[{str(self._length_value)}]"
          return str(self._type) + "[]"
      def __eq__(self, other):

slither/core/solidity_types/elementary_type.py
@@ -194,7 +194,7 @@ class ElementaryType(Type):
      @property
      def storage_size(self) -> Tuple[int, bool]:
-         if self._type == "string" or self._type == "bytes":
+         if self._type in ["string", "bytes"]:
              return 32, True
          if self.size is None:
              return 32, True
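The `storage_size` change collapses two equality checks into one membership test; the two spellings are equivalent here. A reduced sketch, using a simplified stand-in rather than Slither's `ElementaryType`:

```python
from typing import Tuple

class DynamicSizeExample:
    """Simplified stand-in used only to illustrate the membership-test rewrite."""

    def __init__(self, type_name: str) -> None:
        self._type = type_name

    @property
    def storage_size(self) -> Tuple[int, bool]:
        # Equivalent to: if self._type == "string" or self._type == "bytes":
        if self._type in ["string", "bytes"]:
            return 32, True
        return 32, False

print(DynamicSizeExample("bytes").storage_size)    # (32, True)
print(DynamicSizeExample("uint256").storage_size)  # (32, False)
```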

slither/detectors/abstract_detector.py
@@ -72,27 +72,27 @@ class AbstractDetector(metaclass=abc.ABCMeta):
      if not self.HELP:
          raise IncorrectDetectorInitialization(
-             "HELP is not initialized {}".format(self.__class__.__name__)
+             f"HELP is not initialized {self.__class__.__name__}"
          )
      if not self.ARGUMENT:
          raise IncorrectDetectorInitialization(
-             "ARGUMENT is not initialized {}".format(self.__class__.__name__)
+             f"ARGUMENT is not initialized {self.__class__.__name__}"
          )
      if not self.WIKI:
          raise IncorrectDetectorInitialization(
-             "WIKI is not initialized {}".format(self.__class__.__name__)
+             f"WIKI is not initialized {self.__class__.__name__}"
          )
      if not self.WIKI_TITLE:
          raise IncorrectDetectorInitialization(
-             "WIKI_TITLE is not initialized {}".format(self.__class__.__name__)
+             f"WIKI_TITLE is not initialized {self.__class__.__name__}"
          )
      if not self.WIKI_DESCRIPTION:
          raise IncorrectDetectorInitialization(
-             "WIKI_DESCRIPTION is not initialized {}".format(self.__class__.__name__)
+             f"WIKI_DESCRIPTION is not initialized {self.__class__.__name__}"
          )
      if not self.WIKI_EXPLOIT_SCENARIO and self.IMPACT not in [
@@ -100,17 +100,17 @@ class AbstractDetector(metaclass=abc.ABCMeta):
          DetectorClassification.OPTIMIZATION,
      ]:
          raise IncorrectDetectorInitialization(
-             "WIKI_EXPLOIT_SCENARIO is not initialized {}".format(self.__class__.__name__)
+             f"WIKI_EXPLOIT_SCENARIO is not initialized {self.__class__.__name__}"
          )
      if not self.WIKI_RECOMMENDATION:
          raise IncorrectDetectorInitialization(
-             "WIKI_RECOMMENDATION is not initialized {}".format(self.__class__.__name__)
+             f"WIKI_RECOMMENDATION is not initialized {self.__class__.__name__}"
          )
      if re.match("^[a-zA-Z0-9_-]*$", self.ARGUMENT) is None:
          raise IncorrectDetectorInitialization(
-             "ARGUMENT has illegal character {}".format(self.__class__.__name__)
+             f"ARGUMENT has illegal character {self.__class__.__name__}"
          )
      if self.IMPACT not in [
@@ -121,7 +121,7 @@ class AbstractDetector(metaclass=abc.ABCMeta):
          DetectorClassification.OPTIMIZATION,
      ]:
          raise IncorrectDetectorInitialization(
-             "IMPACT is not initialized {}".format(self.__class__.__name__)
+             f"IMPACT is not initialized {self.__class__.__name__}"
          )
      if self.CONFIDENCE not in [
@@ -132,7 +132,7 @@ class AbstractDetector(metaclass=abc.ABCMeta):
          DetectorClassification.OPTIMIZATION,
      ]:
          raise IncorrectDetectorInitialization(
-             "CONFIDENCE is not initialized {}".format(self.__class__.__name__)
+             f"CONFIDENCE is not initialized {self.__class__.__name__}"
          )
      def _log(self, info: str) -> None:
@@ -160,7 +160,7 @@ class AbstractDetector(metaclass=abc.ABCMeta):
          self._format(self.compilation_unit, result)
          if not "patches" in result:
              continue
-         result["patches_diff"] = dict()
+         result["patches_diff"] = {}
          for file in result["patches"]:
              original_txt = self.compilation_unit.core.source_code[file].encode("utf8")
              patched_txt = original_txt
@@ -189,9 +189,7 @@ class AbstractDetector(metaclass=abc.ABCMeta):
      if results and self.slither.triage_mode:
          while True:
              indexes = input(
-                 'Results to hide during next runs: "0,1,...,{}" or "All" (enter to not hide results): '.format(
-                     len(results)
-                 )
+                 f'Results to hide during next runs: "0,1,...,{len(results)}" or "All" (enter to not hide results): '
              )
              if indexes == "All":
                  self.slither.save_results_to_hide(results)
@@ -243,7 +241,7 @@ class AbstractDetector(metaclass=abc.ABCMeta):
      info = "\n"
      for idx, result in enumerate(results):
          if self.slither.triage_mode:
-             info += "{}: ".format(idx)
+             info += f"{idx}: "
          info += result["description"]
-     info += "Reference: {}".format(self.WIKI)
+     info += f"Reference: {self.WIKI}"
      self._log(info)
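The bulk of this file's diff converts `str.format()` calls to f-strings; the change is purely syntactic and the resulting messages are identical. A minimal demonstration with a placeholder class name:

```python
class ExampleDetector:
    """Placeholder class, used only to show the message-formatting change."""

# Old style:
old_msg = "ARGUMENT is not initialized {}".format(ExampleDetector.__name__)
# New style, as adopted throughout abstract_detector.py:
new_msg = f"ARGUMENT is not initialized {ExampleDetector.__name__}"

assert old_msg == new_msg == "ARGUMENT is not initialized ExampleDetector"
```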

slither/detectors/compiler_bugs/reused_base_constructor.py
@@ -77,7 +77,7 @@ The constructor of `A` is called multiple times in `D` and `E`:
      :param contract: The contract to detect explicit calls to a base constructor with arguments to.
      :return: Dictionary of function:list(tuple): { constructor : [(invoking_contract, called_by_constructor]}
      """
-     results = dict()
+     results = {}
      # Create a set to track all completed contracts
      processed_contracts = set()

slither/detectors/statements/incorrect_strict_equality.py
@@ -106,7 +106,7 @@ contract Crowdsale{
      # Retrieve all tainted (node, function) pairs
      def tainted_equality_nodes(self, funcs, taints):
-         results = dict()
+         results = {}
          taints += self.sources_taint
          for func in funcs:

slither/detectors/statements/write_after_write.py
@@ -88,7 +88,7 @@ def _detect_write_after_write(
      _handle_ir(ir, written, ret)
      if len(node.sons) > 1:
-         written = dict()
+         written = {}
      for son in node.sons:
          _detect_write_after_write(son, explored, dict(written), ret)
@@ -128,7 +128,7 @@ class WriteAfterWrite(AbstractDetector):
      for function in contract.functions:
          if function.entry_point:
              ret = []
-             _detect_write_after_write(function.entry_point, set(), dict(), ret)
+             _detect_write_after_write(function.entry_point, set(), {}, ret)
              for var, node1, node2 in ret:
                  info = [var, " is written in both\n\t", node1, "\n\t", node2, "\n"]

slither/formatters/attributes/constant_pragma.py
@@ -34,7 +34,7 @@ def custom_format(slither, result):
  def _analyse_versions(used_solc_versions):
-     replace_solc_versions = list()
+     replace_solc_versions = []
      for version in used_solc_versions:
          replace_solc_versions.append(_determine_solc_version_replacement(version))
      if not all(version == replace_solc_versions[0] for version in replace_solc_versions):

slither/printers/abstract_printer.py
@@ -21,17 +21,17 @@ class AbstractPrinter(metaclass=abc.ABCMeta):
      if not self.HELP:
          raise IncorrectPrinterInitialization(
-             "HELP is not initialized {}".format(self.__class__.__name__)
+             f"HELP is not initialized {self.__class__.__name__}"
          )
      if not self.ARGUMENT:
          raise IncorrectPrinterInitialization(
-             "ARGUMENT is not initialized {}".format(self.__class__.__name__)
+             f"ARGUMENT is not initialized {self.__class__.__name__}"
          )
      if not self.WIKI:
          raise IncorrectPrinterInitialization(
-             "WIKI is not initialized {}".format(self.__class__.__name__)
+             f"WIKI is not initialized {self.__class__.__name__}"
          )
      def info(self, info):

slither/printers/inheritance/inheritance_graph.py
@@ -102,10 +102,10 @@ class PrinterInheritanceGraph(AbstractPrinter):
      if len(contract.immediate_inheritance) == 1:
          ret += "%s -> %s;\n" % (contract.name, contract.immediate_inheritance[0])
      else:
-         for i in range(0, len(contract.immediate_inheritance)):
+         for i, immediate_inheritance in enumerate(contract.immediate_inheritance):
              ret += '%s -> %s [ label="%s" ];\n' % (
                  contract.name,
-                 contract.immediate_inheritance[i],
+                 immediate_inheritance,
                  i + 1,
              )
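The printer now iterates with `enumerate()` instead of indexing via `range(len(...))`; the generated DOT output is unchanged. A reduced sketch with placeholder names:

```python
contract_name = "Child"                  # placeholder contract name
immediate_inheritance = ["A", "B", "C"]  # placeholder parent contracts

ret = ""
# enumerate() yields (index, element) pairs, avoiding the manual index lookup.
for i, parent in enumerate(immediate_inheritance):
    ret += '%s -> %s [ label="%s" ];\n' % (contract_name, parent, i + 1)

print(ret)
```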

slither/printers/summary/evm.py
@@ -57,6 +57,7 @@ def _extract_evm_info(slither):
      return evm_info
+ # pylint: disable=too-many-locals
  class PrinterEVM(AbstractPrinter):
      ARGUMENT = "evm"
      HELP = "Print the evm instructions of nodes in functions"
@@ -84,9 +85,8 @@ class PrinterEVM(AbstractPrinter):
      contract_file = self.slither.source_code[
          contract.source_mapping["filename_absolute"]
      ].encode("utf-8")
-     contract_file_lines = open(
-         contract.source_mapping["filename_absolute"], "r"
-     ).readlines()
+     with open(contract.source_mapping["filename_absolute"], "r", encoding="utf8") as f:
+         contract_file_lines = f.readlines()
      contract_pcs = {}
      contract_cfg = {}

slither/printers/summary/human_summary.py
@@ -308,7 +308,7 @@ class PrinterHumanSummary(AbstractPrinter):
      "number_lines_assembly": 0,
      "standard_libraries": [],
      "ercs": [],
-     "number_findings": dict(),
+     "number_findings": {},
      "detectors": [],
  }

slither/slithir/convert.py
@@ -490,7 +490,7 @@ def propagate_types(ir, node: "Node"):  # pylint: disable=too-many-locals
      # propagate the type
      node_function = node.function
      using_for = (
-         node_function.contract.using_for if isinstance(node_function, FunctionContract) else dict()
+         node_function.contract.using_for if isinstance(node_function, FunctionContract) else {}
      )
      if isinstance(ir, OperationWithLValue):
          # Force assignment in case of missing previous correct type
@@ -853,7 +853,7 @@ def extract_tmp_call(ins: TmpCall, contract: Optional[Contract]):  # pylint: dis
      # }
      node_func = ins.node.function
      using_for = (
-         node_func.contract.using_for if isinstance(node_func, FunctionContract) else dict()
+         node_func.contract.using_for if isinstance(node_func, FunctionContract) else {}
      )
      targeted_libraries = (

slither/slithir/tmp_operations/argument.py
@@ -42,5 +42,5 @@ class Argument(Operation):
      def __str__(self):
          call_id = "none"
          if self.call_id:
-             call_id = "(id ({}))".format(self.call_id)
-         return "ARG_{} {} {}".format(self._type.name, str(self._argument), call_id)
+             call_id = f"(id ({self.call_id}))"
+         return f"ARG_{self._type.name} {str(self._argument)} {call_id}"

slither/slithir/utils/ssa.py
@@ -109,7 +109,7 @@ def add_ssa_ir(function, all_state_variables_instances):
      if not function.is_implemented:
          return
-     init_definition = dict()
+     init_definition = {}
      for v in function.parameters:
          if v.name:
              init_definition[v.name] = (v, function.entry_point)
@@ -127,7 +127,7 @@ def add_ssa_ir(function, all_state_variables_instances):
      # rvalues are fixed in solc_parsing.declaration.function
      function.entry_point.add_ssa_ir(Phi(StateIRVariable(variable_instance), set()))
-     add_phi_origins(function.entry_point, init_definition, dict())
+     add_phi_origins(function.entry_point, init_definition, {})
      for node in function.nodes:
          for (variable, nodes) in node.phi_origins_local_variables.values():
@@ -143,7 +143,7 @@ def add_ssa_ir(function, all_state_variables_instances):
          # continue
          node.add_ssa_ir(Phi(StateIRVariable(variable), nodes))
-     init_local_variables_instances = dict()
+     init_local_variables_instances = {}
      for v in function.parameters:
          if v.name:
              new_var = LocalIRVariable(v)
@@ -232,9 +232,9 @@ def generate_ssa_irs(
      # these variables are lived only during the liveness of the block
      # They dont need phi function
-     temporary_variables_instances = dict()
-     reference_variables_instances = dict()
-     tuple_variables_instances = dict()
+     temporary_variables_instances = {}
+     reference_variables_instances = {}
+     tuple_variables_instances = {}
      for ir in node.irs:
          new_ir = copy_ir(

slither/solc_parsing/declarations/function.py
@@ -83,8 +83,8 @@ class FunctionSolc(CallerContextExpression):
      self._analyze_type()
-     self._node_to_nodesolc: Dict[Node, NodeSolc] = dict()
-     self._node_to_yulobject: Dict[Node, YulBlock] = dict()
+     self._node_to_nodesolc: Dict[Node, NodeSolc] = {}
+     self._node_to_yulobject: Dict[Node, YulBlock] = {}
      self._local_variables_parser: List[
          Union[LocalVariableSolc, LocalVariableInitFromTupleSolc]

slither/solc_parsing/slither_compilation_unit_solc.py
@@ -37,7 +37,7 @@ class SlitherCompilationUnitSolc:
      self._parsed = False
      self._analyzed = False
-     self._underlying_contract_to_parser: Dict[Contract, ContractSolc] = dict()
+     self._underlying_contract_to_parser: Dict[Contract, ContractSolc] = {}
      self._structures_top_level_parser: List[StructureTopLevelSolc] = []
      self._custom_error_parser: List[CustomErrorSolc] = []
      self._variables_top_level_parser: List[TopLevelVariableSolc] = []
@@ -636,7 +636,7 @@ Please rename it, this name is reserved for Slither's internals"""
      for func in self._compilation_unit.functions_top_level:
          func.generate_slithir_and_analyze()
-         func.generate_slithir_ssa(dict())
+         func.generate_slithir_ssa({})
      self._compilation_unit.propagate_function_calls()
      for contract in self._compilation_unit.contracts:
          contract.fix_phi()

slither/tools/flattening/export/export.py
@@ -37,7 +37,7 @@ def save_to_disk(files: List[Export]):
      Save projects to a zip
      """
      for file in files:
-         with open(file.filename, "w") as f:
+         with open(file.filename, "w", encoding="utf8") as f:
              logger.info(f"Export {file.filename}")
              f.write(file.content)
@@ -53,5 +53,5 @@ def export_as_json(files: List[Export], filename: str):
          print(json.dumps(files_as_dict))
      else:
          logger.info(f"Export {filename}")
-         with open(filename, "w") as f:
+         with open(filename, "w", encoding="utf8") as f:
              json.dump(files_as_dict, f)
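Both write paths now pass `encoding="utf8"` explicitly; without it, `open()` falls back to the platform's preferred encoding, which can mangle non-ASCII source on some systems. A minimal sketch with hypothetical content and output path:

```python
import json

# Hypothetical flattened source containing a non-ASCII character.
files_as_dict = {"Example.sol": 'contract Example { string s = "é"; }'}

# Explicit encoding keeps the output UTF-8 regardless of the platform default.
with open("export.json", "w", encoding="utf8") as f:
    json.dump(files_as_dict, f)
```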

slither/tools/mutator/mutators/MIA.py
@@ -11,7 +11,7 @@ class MIA(AbstractMutator):  # pylint: disable=too-few-public-methods
      def _mutate(self):
-         result = dict()
+         result = {}
          for contract in self.slither.contracts:

slither/tools/mutator/mutators/MVIE.py
@@ -11,7 +11,7 @@ class MVIE(AbstractMutator):  # pylint: disable=too-few-public-methods
      def _mutate(self):
-         result = dict()
+         result = {}
          for contract in self.slither.contracts:

slither/tools/mutator/mutators/MVIV.py
@@ -11,7 +11,7 @@ class MVIV(AbstractMutator):  # pylint: disable=too-few-public-methods
      def _mutate(self):
-         result = dict()
+         result = {}
          for contract in self.slither.contracts:

slither/tools/mutator/mutators/abstract_mutator.py
@@ -41,33 +41,33 @@ class AbstractMutator(metaclass=abc.ABCMeta):  # pylint: disable=too-few-public-
      if not self.NAME:
          raise IncorrectMutatorInitialization(
-             "NAME is not initialized {}".format(self.__class__.__name__)
+             f"NAME is not initialized {self.__class__.__name__}"
          )
      if not self.HELP:
          raise IncorrectMutatorInitialization(
-             "HELP is not initialized {}".format(self.__class__.__name__)
+             f"HELP is not initialized {self.__class__.__name__}"
          )
      if self.FAULTCLASS == FaulClass.Undefined:
          raise IncorrectMutatorInitialization(
-             "FAULTCLASS is not initialized {}".format(self.__class__.__name__)
+             f"FAULTCLASS is not initialized {self.__class__.__name__}"
          )
      if self.FAULTNATURE == FaultNature.Undefined:
          raise IncorrectMutatorInitialization(
-             "FAULTNATURE is not initialized {}".format(self.__class__.__name__)
+             f"FAULTNATURE is not initialized {self.__class__.__name__}"
          )
      if rate < 0 or rate > 100:
          raise IncorrectMutatorInitialization(
-             "rate must be between 0 and 100 {}".format(self.__class__.__name__)
+             f"rate must be between 0 and 100 {self.__class__.__name__}"
          )
      @abc.abstractmethod
      def _mutate(self) -> Dict:
          """TODO Documentation"""
-         return dict()
+         return {}
      def mutate(self) -> None:
          all_patches = self._mutate()

slither/tools/properties/utils.py
@@ -32,5 +32,5 @@ def write_file(
      logger.info(yellow(f"Overwrite {file_to_write}"))
  else:
      logger.info(green(f"Write {file_to_write}"))
- with open(file_to_write, "w") as f:
+ with open(file_to_write, "w", encoding="utf8") as f:
      f.write(content)

slither/tools/similarity/__main__.py
@@ -95,7 +95,7 @@ def main():
      elif mode == "plot":
          plot(args)
      else:
-         to_log = "Invalid mode!. It should be one of these: %s" % ", ".join(modes)
+         to_log = f"Invalid mode!. It should be one of these: {', '.join(modes)}"
          logger.error(to_log)
          sys.exit(-1)

slither/tools/similarity/cache.py
@@ -9,7 +9,7 @@ except ImportError:
  def load_cache(infile, nsamples=None):
-     cache = dict()
+     cache = {}
      with np.load(infile, allow_pickle=True) as data:
          array = data["arr_0"][0]
          for i, (x, y) in enumerate(array):

slither/tools/similarity/encode.py
@@ -75,7 +75,7 @@ def parse_target(target):
  def load_and_encode(infile, vmodel, ext=None, nsamples=None, **kwargs):
-     r = dict()
+     r = {}
      if infile.endswith(".npz"):
          r = load_cache(infile, nsamples=nsamples)
      else:
@@ -222,7 +222,7 @@ def encode_ir(ir):  # pylint: disable=too-many-branches
  def encode_contract(cfilename, **kwargs):
-     r = dict()
+     r = {}
      # Init slither
      try:

slither/tools/similarity/plot.py
@@ -50,8 +50,8 @@ def plot(args):  # pylint: disable=too-many-locals
      logger.info("Loading data..")
      cache = load_and_encode(infile, **vars(args))
-     data = list()
-     fs = list()
+     data = []
+     fs = []
      logger.info("Procesing data..")
      for (f, c, n), y in cache.items():

slither/tools/similarity/test.py
@@ -34,7 +34,7 @@ def test(args):
      cache = load_and_encode(infile, model, **vars(args))
      # save_cache("cache.npz", cache)
-     r = dict()
+     r = {}
      for x, y in cache.items():
          r[x] = similarity(fvector, y)

slither/tools/similarity/train.py
@@ -24,7 +24,7 @@ def train(args):  # pylint: disable=too-many-locals
      contracts = load_contracts(dirname, **vars(args))
      logger.info("Saving extracted data into %s", last_data_train_filename)
      cache = []
-     with open(last_data_train_filename, "w") as f:
+     with open(last_data_train_filename, "w", encoding="utf8") as f:
          for filename in contracts:
              # cache[filename] = dict()
              for (filename_inner, contract, function), ir in encode_contract(

slither/tools/slither_format/__main__.py
@@ -66,7 +66,7 @@ def parse_args():
      group_detector.add_argument(
          "--detect",
          help="Comma-separated list of detectors, defaults to all, "
-         "available detectors: {}".format(", ".join(d for d in available_detectors)),
+         f"available detectors: {', '.join(d for d in available_detectors)}",
          action="store",
          dest="detectors_to_run",
          default="all",
@@ -75,7 +75,7 @@ def parse_args():
      group_detector.add_argument(
          "--exclude",
          help="Comma-separated list of detectors to exclude,"
-         "available detectors: {}".format(", ".join(d for d in available_detectors)),
+         "available detectors: {', '.join(d for d in available_detectors)}",
          action="store",
          dest="detectors_to_exclude",
          default="all",

slither/tools/slither_format/slither_format.py
@@ -72,7 +72,7 @@ def slither_format(slither, **kwargs):  # pylint: disable=too-many-locals
      filename = f"fix_{counter}.patch"
      path = Path(export_result, filename)
      logger.info(f"\t- {filename}")
-     with open(path, "w") as f:
+     with open(path, "w", encoding="utf8") as f:
          f.write(diff)
      counter += 1
@@ -90,10 +90,10 @@ def choose_detectors(detectors_to_run, detectors_to_exclude):
      cls_detectors_to_run = []
      exclude = detectors_to_exclude.split(",")
      if detectors_to_run == "all":
-         for d in all_detectors:
-             if d in exclude:
+         for key, detector in all_detectors.items():
+             if key in exclude:
                  continue
-             cls_detectors_to_run.append(all_detectors[d])
+             cls_detectors_to_run.append(detector)
      else:
          exclude = detectors_to_exclude.split(",")
          for d in detectors_to_run.split(","):

slither/tools/upgradeability/__main__.py
@@ -208,7 +208,7 @@ def main():
      proxy_contracts = proxy.get_contract_from_name(args.proxy_name)
      if len(proxy_contracts) != 1:
-         info = "Proxy {} not found in {}".format(args.proxy_name, proxy.filename)
+         info = f"Proxy {args.proxy_name} not found in {proxy.filename}"
          logger.error(red(info))
          if args.json:
              output_to_json(args.json, str(info), json_results)
@@ -230,8 +230,8 @@ def main():
      v2_contracts = variable2.get_contract_from_name(args.new_contract_name)
      if len(v2_contracts) != 1:
-         info = "New logic contract {} not found in {}".format(
-             args.new_contract_name, variable2.filename
+         info = (
+             f"New logic contract {args.new_contract_name} not found in {variable2.filename}"
          )
          logger.error(red(info))
          if args.json:

slither/tools/upgradeability/checks/abstract_checks.py
@@ -64,48 +64,40 @@ class AbstractCheck(metaclass=abc.ABCMeta):
      self.contract_v2 = contract_v2
      if not self.ARGUMENT:
-         raise IncorrectCheckInitialization(
-             "NAME is not initialized {}".format(self.__class__.__name__)
-         )
+         raise IncorrectCheckInitialization(f"NAME is not initialized {self.__class__.__name__}")
      if not self.HELP:
-         raise IncorrectCheckInitialization(
-             "HELP is not initialized {}".format(self.__class__.__name__)
-         )
+         raise IncorrectCheckInitialization(f"HELP is not initialized {self.__class__.__name__}")
      if not self.WIKI:
-         raise IncorrectCheckInitialization(
-             "WIKI is not initialized {}".format(self.__class__.__name__)
-         )
+         raise IncorrectCheckInitialization(f"WIKI is not initialized {self.__class__.__name__}")
      if not self.WIKI_TITLE:
          raise IncorrectCheckInitialization(
-             "WIKI_TITLE is not initialized {}".format(self.__class__.__name__)
+             f"WIKI_TITLE is not initialized {self.__class__.__name__}"
          )
      if not self.WIKI_DESCRIPTION:
          raise IncorrectCheckInitialization(
-             "WIKI_DESCRIPTION is not initialized {}".format(self.__class__.__name__)
+             f"WIKI_DESCRIPTION is not initialized {self.__class__.__name__}"
          )
      if not self.WIKI_EXPLOIT_SCENARIO and self.IMPACT not in [
          CheckClassification.INFORMATIONAL
      ]:
          raise IncorrectCheckInitialization(
-             "WIKI_EXPLOIT_SCENARIO is not initialized {}".format(self.__class__.__name__)
+             f"WIKI_EXPLOIT_SCENARIO is not initialized {self.__class__.__name__}"
          )
      if not self.WIKI_RECOMMENDATION:
          raise IncorrectCheckInitialization(
-             "WIKI_RECOMMENDATION is not initialized {}".format(self.__class__.__name__)
+             f"WIKI_RECOMMENDATION is not initialized {self.__class__.__name__}"
          )
      if self.REQUIRE_PROXY and self.REQUIRE_CONTRACT_V2:
          # This is not a fundatemenal issues
          # But it requires to change __main__ to avoid running two times the detectors
-         txt = "REQUIRE_PROXY and REQUIRE_CONTRACT_V2 needs change in __main___ {}".format(
-             self.__class__.__name__
-         )
+         txt = f"REQUIRE_PROXY and REQUIRE_CONTRACT_V2 needs change in __main___ {self.__class__.__name__}"
          raise IncorrectCheckInitialization(txt)
      if self.IMPACT not in [
@@ -115,17 +107,17 @@ class AbstractCheck(metaclass=abc.ABCMeta):
          CheckClassification.INFORMATIONAL,
      ]:
          raise IncorrectCheckInitialization(
-             "IMPACT is not initialized {}".format(self.__class__.__name__)
+             f"IMPACT is not initialized {self.__class__.__name__}"
          )
      if self.REQUIRE_CONTRACT_V2 and contract_v2 is None:
          raise IncorrectCheckInitialization(
-             "ContractV2 is not initialized {}".format(self.__class__.__name__)
+             f"ContractV2 is not initialized {self.__class__.__name__}"
          )
      if self.REQUIRE_PROXY and proxy is None:
          raise IncorrectCheckInitialization(
-             "Proxy is not initialized {}".format(self.__class__.__name__)
+             f"Proxy is not initialized {self.__class__.__name__}"
          )
      @abc.abstractmethod

slither/utils/command_line.py
@@ -54,7 +54,7 @@ defaults_flag_in_config = {
  def read_config_file(args):
      if os.path.isfile(args.config_file):
          try:
-             with open(args.config_file) as f:
+             with open(args.config_file, encoding="utf8") as f:
                  config = json.load(f)
                  for key, elem in config.items():
                      if key not in defaults_flag_in_config:

tests/ast-parsing/expected/assembly-0.8.11-compact.json (new file)
@@ -0,0 +1,5 @@
{
"C": {
"f()": "digraph{\n0[label=\"Node Type: ENTRY_POINT 0\n\"];\n0->1;\n1[label=\"Node Type: INLINE ASM 1\n\"];\n1->2;\n2[label=\"Node Type: NEW VARIABLE 2\n\"];\n2->3;\n3[label=\"Node Type: EXPRESSION 3\n\"];\n3->4;\n4[label=\"Node Type: INLINE ASM 4\n\"];\n4->5;\n5[label=\"Node Type: NEW VARIABLE 5\n\"];\n5->6;\n6[label=\"Node Type: EXPRESSION 6\n\"];\n}\n"
}
}

tests/ast-parsing/expected/assignment-0.8.11-compact.json (new file)
@@ -0,0 +1,5 @@
{
"C": {
"f()": "digraph{\n0[label=\"Node Type: ENTRY_POINT 0\n\"];\n0->1;\n1[label=\"Node Type: NEW VARIABLE 1\n\"];\n1->2;\n2[label=\"Node Type: EXPRESSION 2\n\"];\n2->3;\n3[label=\"Node Type: EXPRESSION 3\n\"];\n3->4;\n4[label=\"Node Type: EXPRESSION 4\n\"];\n4->5;\n5[label=\"Node Type: EXPRESSION 5\n\"];\n5->6;\n6[label=\"Node Type: EXPRESSION 6\n\"];\n6->7;\n7[label=\"Node Type: EXPRESSION 7\n\"];\n7->8;\n8[label=\"Node Type: EXPRESSION 8\n\"];\n8->9;\n9[label=\"Node Type: EXPRESSION 9\n\"];\n9->10;\n10[label=\"Node Type: EXPRESSION 10\n\"];\n10->11;\n11[label=\"Node Type: EXPRESSION 11\n\"];\n11->12;\n12[label=\"Node Type: EXPRESSION 12\n\"];\n}\n"
}
}

tests/ast-parsing/expected/binaryoperation-0.8.11-compact.json (new file)
@@ -0,0 +1,5 @@
{
"C": {
"f()": "digraph{\n0[label=\"Node Type: ENTRY_POINT 0\n\"];\n0->1;\n1[label=\"Node Type: EXPRESSION 1\n\"];\n1->2;\n2[label=\"Node Type: EXPRESSION 2\n\"];\n2->3;\n3[label=\"Node Type: EXPRESSION 3\n\"];\n3->4;\n4[label=\"Node Type: EXPRESSION 4\n\"];\n4->5;\n5[label=\"Node Type: EXPRESSION 5\n\"];\n5->6;\n6[label=\"Node Type: EXPRESSION 6\n\"];\n6->7;\n7[label=\"Node Type: EXPRESSION 7\n\"];\n7->8;\n8[label=\"Node Type: EXPRESSION 8\n\"];\n8->9;\n9[label=\"Node Type: EXPRESSION 9\n\"];\n9->10;\n10[label=\"Node Type: EXPRESSION 10\n\"];\n10->11;\n11[label=\"Node Type: EXPRESSION 11\n\"];\n11->12;\n12[label=\"Node Type: EXPRESSION 12\n\"];\n12->13;\n13[label=\"Node Type: EXPRESSION 13\n\"];\n13->14;\n14[label=\"Node Type: EXPRESSION 14\n\"];\n14->15;\n15[label=\"Node Type: EXPRESSION 15\n\"];\n15->16;\n16[label=\"Node Type: EXPRESSION 16\n\"];\n16->17;\n17[label=\"Node Type: EXPRESSION 17\n\"];\n17->18;\n18[label=\"Node Type: EXPRESSION 18\n\"];\n18->19;\n19[label=\"Node Type: EXPRESSION 19\n\"];\n}\n"
}
}

tests/ast-parsing/expected/break-0.8.11-compact.json (new file)
@@ -0,0 +1,5 @@
{
"C": {
"f()": "digraph{\n0[label=\"Node Type: ENTRY_POINT 0\n\"];\n0->1;\n1[label=\"Node Type: NEW VARIABLE 1\n\"];\n1->4;\n2[label=\"Node Type: BEGIN_LOOP 2\n\"];\n2->5;\n3[label=\"Node Type: END_LOOP 3\n\"];\n3->13;\n4[label=\"Node Type: NEW VARIABLE 4\n\"];\n4->2;\n5[label=\"Node Type: IF_LOOP 5\n\"];\n5->6[label=\"True\"];\n5->3[label=\"False\"];\n6[label=\"Node Type: IF 6\n\"];\n6->7[label=\"True\"];\n6->8[label=\"False\"];\n7[label=\"Node Type: BREAK 7\n\"];\n7->3;\n8[label=\"Node Type: END_IF 8\n\"];\n8->9;\n9[label=\"Node Type: EXPRESSION 9\n\"];\n9->10;\n10[label=\"Node Type: EXPRESSION 10\n\"];\n10->5;\n11[label=\"Node Type: BEGIN_LOOP 11\n\"];\n11->14;\n12[label=\"Node Type: END_LOOP 12\n\"];\n13[label=\"Node Type: NEW VARIABLE 13\n\"];\n13->11;\n14[label=\"Node Type: IF_LOOP 14\n\"];\n14->17[label=\"True\"];\n14->12[label=\"False\"];\n15[label=\"Node Type: BEGIN_LOOP 15\n\"];\n15->18;\n16[label=\"Node Type: END_LOOP 16\n\"];\n16->24;\n17[label=\"Node Type: NEW VARIABLE 17\n\"];\n17->15;\n18[label=\"Node Type: IF_LOOP 18\n\"];\n18->19[label=\"True\"];\n18->16[label=\"False\"];\n19[label=\"Node Type: IF 19\n\"];\n19->20[label=\"True\"];\n19->21[label=\"False\"];\n20[label=\"Node Type: BREAK 20\n\"];\n20->16;\n21[label=\"Node Type: END_IF 21\n\"];\n21->22;\n22[label=\"Node Type: EXPRESSION 22\n\"];\n22->23;\n23[label=\"Node Type: EXPRESSION 23\n\"];\n23->18;\n24[label=\"Node Type: EXPRESSION 24\n\"];\n24->14;\n}\n"
}
}

