Clean up Source API

Add --change-line-prefix flag (replace #1042)
pull/877/head
Josselin 3 years ago
parent 8da99e66c3
commit d18a3da3c2
  1. slither/__main__.py (8)
  2. slither/core/cfg/node.py (2)
  3. slither/core/children/child_contract.py (4)
  4. slither/core/slither_core.py (5)
  5. slither/core/source_mapping/source_mapping.py (60)
  6. slither/printers/summary/declaration.py (30)
  7. slither/slither.py (5)
  8. slither/solc_parsing/declarations/function.py (2)
  9. slither/tools/flattening/flattening.py (2)
  10. slither/utils/output.py (24)

@@ -465,6 +465,14 @@ def parse_args(detector_classes, printer_classes):  # pylint: disable=too-many-s
         default="slither.config.json",
     )

+    group_misc.add_argument(
+        "--change-line-prefix",
+        help="Change the line prefix (default #) for the displayed source codes (i.e. file.sol#1).",
+        action="store",
+        dest="change_line_prefix",
+        default="#",
+    )
+
     group_misc.add_argument(
         "--solc-ast",
         help="Provide the contract as a json AST",

@@ -913,7 +913,7 @@ class Node(SourceMapping, ChildFunction):  # pylint: disable=too-many-public-met
             except AttributeError as error:
                 # pylint: disable=raise-missing-from
                 raise SlitherException(
-                    f"Function not found on IR: {ir}.\nNode: {self} ({self.source_mapping_str})\nFunction: {self.function}\nPlease try compiling with a recent Solidity version. {error}"
+                    f"Function not found on IR: {ir}.\nNode: {self} ({self.source_mapping})\nFunction: {self.function}\nPlease try compiling with a recent Solidity version. {error}"
                 )
         elif isinstance(ir, LibraryCall):
             assert isinstance(ir.destination, Contract)

@@ -1,10 +1,12 @@
 from typing import TYPE_CHECKING

+from slither.core.source_mapping.source_mapping import SourceMapping
+
 if TYPE_CHECKING:
     from slither.core.declarations import Contract


-class ChildContract:
+class ChildContract(SourceMapping):
     def __init__(self):
         super().__init__()
         self._contract = None

@@ -79,6 +79,11 @@ class SlitherCore(Context):
         self._offset_to_implementations: Optional[Dict[Filename, Dict[int, Set[Source]]]] = None
         self._offset_to_definitions: Optional[Dict[Filename, Dict[int, Set[Source]]]] = None

+        # Line prefix is used during the source mapping generation
+        # By default we generate file.sol#1
+        # But we allow to alter this (ex: file.sol:1) for vscode integration
+        self.line_prefix: str = "#"
+
     @property
     def compilation_units(self) -> List[SlitherCompilationUnit]:
         return list(self._compilation_units)
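The attribute is only stored here; it is read when line numbers are rendered (see the _get_lines_str change in source_mapping.py below). A minimal illustration of what it controls, with placeholder names:

    # Hedged sketch: 'core' stands for any SlitherCore instance; the helper
    # name and its arguments are illustrative, not part of the API.
    def render_location(core, filename: str, line: int) -> str:
        return f"{filename}{core.line_prefix}{line}"

    # render_location(core, "file.sol", 1) -> "file.sol#1" with the default prefix,
    # or "file.sol:1" after setting core.line_prefix = ":" (vscode-style file:line).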

@@ -1,6 +1,6 @@
 import re
 from abc import ABCMeta
-from typing import Dict, Union, List, Tuple, TYPE_CHECKING
+from typing import Dict, Union, List, Tuple, TYPE_CHECKING, Optional

 from crytic_compile.utils.naming import Filename

@@ -27,6 +27,7 @@ class Source:
         self.starting_column: int = 0
         self.ending_column: int = 0
         self.end: int = 0
+        self.compilation_unit: Optional["SlitherCompilationUnit"] = None

     def to_json(self) -> Dict:
         return {
@@ -45,26 +46,33 @@
             "ending_column": self.ending_column,
         }

-    def _get_lines_str(self, line_descr=""):
-        lines = self.lines
-        if not lines:
-            lines = ""
-        elif len(lines) == 1:
-            lines = "#{}{}".format(line_descr, lines[0])
-        else:
-            lines = f"#{line_descr}{lines[0]}-{line_descr}{lines[-1]}"
-        return lines
-
-    def source_mapping_to_markdown(self, markdown_root: str) -> str:
+    def to_markdown(self, markdown_root: str) -> str:
         lines = self._get_lines_str(line_descr="L")
         filename_relative: str = self.filename.relative if self.filename.relative else ""
         return f"{markdown_root}{filename_relative}{lines}"

-    def detailled_str(self) -> str:
+    def to_detailled_str(self) -> str:
         lines = self._get_lines_str()
         filename_short: str = self.filename.short if self.filename.short else ""
         return f"{filename_short}{lines} ({self.starting_column} - {self.ending_column})"

+    def _get_lines_str(self, line_descr=""):
+        # If the compilation unit was not initialized, it means that the set_offset was never called
+        # on the corresponding object, which should not happen
+        assert self.compilation_unit is not None
+
+        line_prefix = self.compilation_unit.core.line_prefix
+
+        lines = self.lines
+        if not lines:
+            lines = ""
+        elif len(lines) == 1:
+            lines = f"{line_prefix}{line_descr}{lines[0]}"
+        else:
+            lines = f"{line_prefix}{line_descr}{lines[0]}-{line_descr}{lines[-1]}"
+        return lines
+
     def __str__(self) -> str:
         lines = self._get_lines_str()
         filename_short: str = self.filename.short if self.filename.short else ""
@@ -172,31 +180,7 @@ class SourceMapping(Context, metaclass=ABCMeta):
             self.source_mapping.end = offset.end
         else:
             self.source_mapping = _convert_source_mapping(offset, compilation_unit)
+        self.source_mapping.compilation_unit = compilation_unit

-    def _get_lines_str(self, line_descr=""):
-        lines = self.source_mapping.lines
-        if not lines:
-            lines = ""
-        elif len(lines) == 1:
-            lines = "#{}{}".format(line_descr, lines[0])
-        else:
-            lines = f"#{line_descr}{lines[0]}-{line_descr}{lines[-1]}"
-        return lines
-
-    def source_mapping_to_markdown(self, markdown_root: str) -> str:
-        lines = self._get_lines_str(line_descr="L")
-        filename_relative: str = (
-            self.source_mapping.filename.relative if self.source_mapping.filename.relative else ""
-        )
-        return f"{markdown_root}{filename_relative}{lines}"
-
-    @property
-    def source_mapping_str(self) -> str:
-        lines = self._get_lines_str()
-        filename_short: str = (
-            self.source_mapping.filename.short if self.source_mapping.filename.short else ""
-        )
-        return f"{filename_short}{lines}"
-
     def add_reference_from_raw_source(
         self, offset: str, compilation_unit: "SlitherCompilationUnit"
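For downstream code (detectors, printers, tools), the net effect is an API move from the SourceMapping mixin onto the Source object itself. A hedged migration sketch; node stands for any object deriving from SourceMapping, and the markdown root URL is a placeholder:

    # Before this commit:
    #   location = node.source_mapping_str
    #   link = node.source_mapping_to_markdown("https://github.com/org/repo/blob/main/")
    # After this commit, the same information comes from node.source_mapping (a Source):
    location = str(node.source_mapping)              # e.g. "file.sol#12" (or "file.sol:12")
    link = node.source_mapping.to_markdown("https://github.com/org/repo/blob/main/")
    detail = node.source_mapping.to_detailled_str()  # includes column information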

@@ -20,39 +20,37 @@ class Declaration(AbstractPrinter):
            txt += "\n# Contracts\n"
            for contract in compilation_unit.contracts:
                txt += f"# {contract.name}\n"
-                txt += f"\t- Declaration: {get_definition(contract, compilation_unit.core.crytic_compile).detailled_str()}\n"
-                txt += f"\t- Implementation: {get_implementation(contract).detailled_str()}\n"
-                txt += f"\t- References: {[x.detailled_str() for x in get_references(contract)]}\n"
+                txt += f"\t- Declaration: {get_definition(contract, compilation_unit.core.crytic_compile).to_detailled_str()}\n"
+                txt += f"\t- Implementation: {get_implementation(contract).to_detailled_str()}\n"
+                txt += (
+                    f"\t- References: {[x.to_detailled_str() for x in get_references(contract)]}\n"
+                )

                txt += "\n\t## Function\n"
                for func in contract.functions:
                    txt += f"\t\t- {func.canonical_name}\n"
-                    txt += f"\t\t\t- Declaration: {get_definition(func, compilation_unit.core.crytic_compile).detailled_str()}\n"
-                    txt += f"\t\t\t- Implementation: {get_implementation(func).detailled_str()}\n"
-                    txt += (
-                        f"\t\t\t- References: {[x.detailled_str() for x in get_references(func)]}\n"
-                    )
+                    txt += f"\t\t\t- Declaration: {get_definition(func, compilation_unit.core.crytic_compile).to_detailled_str()}\n"
+                    txt += (
+                        f"\t\t\t- Implementation: {get_implementation(func).to_detailled_str()}\n"
+                    )
+                    txt += f"\t\t\t- References: {[x.to_detailled_str() for x in get_references(func)]}\n"

                txt += "\n\t## State variables\n"
                for var in contract.state_variables:
                    txt += f"\t\t- {var.name}\n"
-                    txt += f"\t\t\t- Declaration: {get_definition(var, compilation_unit.core.crytic_compile).detailled_str()}\n"
-                    txt += f"\t\t\t- Implementation: {get_implementation(var).detailled_str()}\n"
-                    txt += (
-                        f"\t\t\t- References: {[x.detailled_str() for x in get_references(var)]}\n"
-                    )
+                    txt += f"\t\t\t- Declaration: {get_definition(var, compilation_unit.core.crytic_compile).to_detailled_str()}\n"
+                    txt += f"\t\t\t- Implementation: {get_implementation(var).to_detailled_str()}\n"
+                    txt += f"\t\t\t- References: {[x.to_detailled_str() for x in get_references(var)]}\n"

                txt += "\n\t## Structures\n"
                for st in contract.structures:
                    txt += f"\t\t- {st.name}\n"
-                    txt += f"\t\t\t- Declaration: {get_definition(st, compilation_unit.core.crytic_compile).detailled_str()}\n"
-                    txt += f"\t\t\t- Implementation: {get_implementation(st).detailled_str()}\n"
-                    txt += (
-                        f"\t\t\t- References: {[x.detailled_str() for x in get_references(st)]}\n"
-                    )
+                    txt += f"\t\t\t- Declaration: {get_definition(st, compilation_unit.core.crytic_compile).to_detailled_str()}\n"
+                    txt += f"\t\t\t- Implementation: {get_implementation(st).to_detailled_str()}\n"
+                    txt += f"\t\t\t- References: {[x.to_detailled_str() for x in get_references(st)]}\n"

        self.info(txt)

        res = self.generate_output(txt)

@@ -71,6 +71,9 @@ class Slither(SlitherCore):  # pylint: disable=too-many-instance-attributes
            embark_ignore_compile (bool): do not run embark build (default False)
            embark_overwrite_config (bool): overwrite original config file (default false)
+            change_line_prefix (str): Change the line prefix (default #)
+                for the displayed source codes (i.e. file.sol#1).
+
        """
        super().__init__()

@@ -78,6 +81,8 @@
        self._skip_assembly: bool = kwargs.get("skip_assembly", False)
        self._show_ignored_findings: bool = kwargs.get("show_ignored_findings", False)

+        self.line_prefix = kwargs.get("change_line_prefix", "#")
+
        self._parsers: List[SlitherCompilationUnitSolc] = []
        try:
            if isinstance(target, CryticCompile):
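From the Python API the same option is exposed as a keyword argument. A hedged usage sketch; the target path is illustrative:

    from slither import Slither

    sl = Slither("contract.sol", change_line_prefix=":")  # "contract.sol" is a placeholder

    for compilation_unit in sl.compilation_units:
        for contract in compilation_unit.contracts:
            # Locations now render as file.sol:12 instead of the default file.sol#12
            print(contract.name, contract.source_mapping)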

@@ -1348,7 +1348,7 @@ class FunctionSolc(CallerContextExpression):
                condition = st.condition
                if not condition:
                    raise ParsingError(
-                        f"Incorrect ternary conversion {node.expression} {node.source_mapping_str}"
+                        f"Incorrect ternary conversion {node.expression} {node.source_mapping}"
                    )
                true_expr = st.true_expression
                false_expr = st.false_expression

@@ -159,7 +159,7 @@ class Flattening:
                    ):
                        to_patch.append(Patch(node.source_mapping.start, "line_removal"))
                        logger.info(
-                            f"Code commented: {node.expression} ({node.source_mapping_str})"
+                            f"Code commented: {node.expression} ({node.source_mapping})"
                        )

        to_patch.sort(key=lambda x: x.index, reverse=True)

@@ -228,14 +228,14 @@ def _convert_to_description(d):
    if isinstance(d, Node):
        if d.expression:
-            return f"{d.expression} ({d.source_mapping_str})"
-        return f"{str(d)} ({d.source_mapping_str})"
+            return f"{d.expression} ({d.source_mapping})"
+        return f"{str(d)} ({d.source_mapping})"

    if hasattr(d, "canonical_name"):
-        return f"{d.canonical_name} ({d.source_mapping_str})"
+        return f"{d.canonical_name} ({d.source_mapping})"

    if hasattr(d, "name"):
-        return f"{d.name} ({d.source_mapping_str})"
+        return f"{d.name} ({d.source_mapping})"

    raise SlitherError(f"{type(d)} cannot be converted (no name, or canonical_name")

@@ -249,14 +249,14 @@ def _convert_to_markdown(d, markdown_root):
    if isinstance(d, Node):
        if d.expression:
-            return f"[{d.expression}]({d.source_mapping_to_markdown(markdown_root)})"
-        return f"[{str(d)}]({d.source_mapping_to_markdown(markdown_root)})"
+            return f"[{d.expression}]({d.source_mapping.to_markdown(markdown_root)})"
+        return f"[{str(d)}]({d.source_mapping.to_markdown(markdown_root)})"

    if hasattr(d, "canonical_name"):
-        return f"[{d.canonical_name}]({d.source_mapping_to_markdown(markdown_root)})"
+        return f"[{d.canonical_name}]({d.source_mapping.to_markdown(markdown_root)})"

    if hasattr(d, "name"):
-        return f"[{d.name}]({d.source_mapping_to_markdown(markdown_root)})"
+        return f"[{d.name}]({d.source_mapping.to_markdown(markdown_root)})"

    raise SlitherError(f"{type(d)} cannot be converted (no name, or canonical_name")

@@ -275,11 +275,11 @@ def _convert_to_id(d):
    if isinstance(d, Node):
        if d.expression:
-            return f"{d.expression} ({d.source_mapping_str})"
-        return f"{str(d)} ({d.source_mapping_str})"
+            return f"{d.expression} ({d.source_mapping})"
+        return f"{str(d)} ({d.source_mapping})"

    if isinstance(d, Pragma):
-        return f"{d} ({d.source_mapping_str})"
+        return f"{d} ({d.source_mapping})"

    if hasattr(d, "canonical_name"):
        return f"{d.canonical_name}"

@@ -380,7 +380,7 @@ class Output:
    def add(self, add: SupportedOutput, additional_fields: Optional[Dict] = None):
        if not self._data["first_markdown_element"]:
-            self._data["first_markdown_element"] = add.source_mapping_to_markdown(
+            self._data["first_markdown_element"] = add.source_mapping.to_markdown(
                self._markdown_root
            )
        if isinstance(add, Variable):
