Merge pull request #453 from crytic/dev-zip

Add Zip export
Feist Josselin committed 5 years ago via GitHub
commit 57be59ac87
3 changed files:
1. slither/__main__.py (66 changed lines)
2. slither/utils/command_line.py (28 changed lines)
3. slither/utils/output.py (44 changed lines)

@@ -19,7 +19,7 @@ from slither.detectors.abstract_detector import (AbstractDetector,
from slither.printers import all_printers
from slither.printers.abstract_printer import AbstractPrinter
from slither.slither import Slither
-from slither.utils.output import output_to_json
+from slither.utils.output import output_to_json, output_to_zip, ZIP_TYPES_ACCEPTED
from slither.utils.output_capture import StandardOutputCapture
from slither.utils.colors import red, blue, set_colorization_enabled
from slither.utils.command_line import (output_detectors, output_results_to_markdown,
@@ -32,6 +32,7 @@ from slither.exceptions import SlitherException
logging.basicConfig()
logger = logging.getLogger("Slither")
###################################################################################
###################################################################################
# region Process functions
@@ -63,7 +64,8 @@ def process_all(target, args, detector_classes, printer_classes):
results_printers = []
analyzed_contracts_count = 0
for compilation in compilations:
(slither, current_results_detectors, current_results_printers, current_analyzed_count) = process_single(
compilation, args, detector_classes, printer_classes)
results_detectors.extend(current_results_detectors)
results_printers.extend(current_results_printers)
slither_instances.append(slither)
@@ -108,8 +110,6 @@ def process_from_asts(filenames, args, detector_classes, printer_classes):
return process_single(all_contracts, args, detector_classes, printer_classes)
# endregion
###################################################################################
###################################################################################
@@ -159,6 +159,7 @@ def get_detectors_and_printers():
return detectors, printers
def choose_detectors(args, all_detector_classes):
# If detectors are specified, run only these ones
@@ -224,6 +225,7 @@ def choose_printers(args, all_printer_classes):
raise Exception('Error: {} is not a printer'.format(p))
return printers_to_run
# endregion
###################################################################################
###################################################################################
@@ -238,8 +240,9 @@ def parse_filter_paths(args):
def parse_args(detector_classes, printer_classes):
parser = argparse.ArgumentParser(
description='Slither. For usage information, see https://github.com/crytic/slither/wiki/Usage',
usage="slither.py contract.sol [flag]")
parser.add_argument('filename',
help='contract.sol')
@@ -258,7 +261,7 @@ def parse_args(detector_classes, printer_classes):
group_detector.add_argument('--detect',
help='Comma-separated list of detectors, defaults to all, '
'available detectors: {}'.format(
', '.join(d.ARGUMENT for d in detector_classes)),
action='store',
dest='detectors_to_run',
default=defaults_flag_in_config['detectors_to_run'])
@@ -266,7 +269,7 @@ def parse_args(detector_classes, printer_classes):
group_printer.add_argument('--print',
help='Comma-separated list of contract information printers, '
'available printers: {}'.format(
', '.join(d.ARGUMENT for d in printer_classes)),
action='store',
dest='printers_to_run',
default=defaults_flag_in_config['printers_to_run'])
@@ -325,12 +328,22 @@ def parse_args(detector_classes, printer_classes):
default=defaults_flag_in_config['json'])
group_misc.add_argument('--json-types',
help=f'Comma-separated list of result types to output to JSON, defaults to ' + \
f'{",".join(output_type for output_type in DEFAULT_JSON_OUTPUT_TYPES)}. ' + \
f'Available types: {",".join(output_type for output_type in JSON_OUTPUT_TYPES)}',
action='store',
default=defaults_flag_in_config['json-types'])
group_misc.add_argument('--zip',
help='Export the results as a zipped JSON file',
action='store',
default=defaults_flag_in_config['zip'])
group_misc.add_argument('--zip-type',
help=f'Zip compression type. One of {",".join(ZIP_TYPES_ACCEPTED)}. Default lzma',
action='store',
default=defaults_flag_in_config['zip_type'])
group_misc.add_argument('--markdown-root',
help='URL for markdown generation',
action='store',
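For illustration only (the file names are placeholders, not taken from this diff): with the two new options, a run such as
slither contract.sol --zip results.zip --zip-type deflated
would write the JSON results into results.zip; leaving out --zip-type falls back to lzma, per the help text above.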
@@ -429,12 +442,14 @@ def parse_args(detector_classes, printer_classes):
return args
class ListDetectors(argparse.Action):
def __call__(self, parser, *args, **kwargs):
detectors, _ = get_detectors_and_printers()
output_detectors(detectors)
parser.exit()
class ListDetectorsJson(argparse.Action):
def __call__(self, parser, *args, **kwargs):
detectors, _ = get_detectors_and_printers()
@@ -442,18 +457,21 @@ class ListDetectorsJson(argparse.Action):
print(json.dumps(detector_types_json))
parser.exit()
class ListPrinters(argparse.Action):
def __call__(self, parser, *args, **kwargs):
_, printers = get_detectors_and_printers()
output_printers(printers)
parser.exit()
class OutputMarkdown(argparse.Action):
def __call__(self, parser, args, values, option_string=None):
detectors, printers = get_detectors_and_printers()
output_to_markdown(detectors, printers, values)
parser.exit()
class OutputWiki(argparse.Action):
def __call__(self, parser, args, values, option_string=None):
detectors, _ = get_detectors_and_printers()
@@ -471,7 +489,7 @@ class OutputWiki(argparse.Action):
class FormatterCryticCompile(logging.Formatter):
def format(self, record):
# for i, msg in enumerate(record.msg):
if record.msg.startswith('Compilation warnings/errors on '):
txt = record.args[1]
txt = txt.split('\n')
@@ -480,6 +498,7 @@ class FormatterCryticCompile(logging.Formatter):
record.args = (record.args[0], txt)
return super().format(record)
# endregion
###################################################################################
###################################################################################
@@ -511,6 +530,9 @@ def main_impl(all_detector_classes, all_printer_classes):
output_error = None
outputting_json = args.json is not None
outputting_json_stdout = args.json == '-'
outputting_zip = args.zip is not None
if args.zip_type not in ZIP_TYPES_ACCEPTED:
logger.error(f'Zip type not accepted, it must be one of {",".join(ZIP_TYPES_ACCEPTED)}')
# If we are outputting JSON, capture all standard output. If we are outputting to stdout, we block typical stdout
# output.
@@ -533,7 +555,7 @@ def main_impl(all_detector_classes, all_printer_classes):
('TypeParsing', default_log),
('SSA_Conversion', default_log),
('Printers', default_log),
# ('CryticCompile', default_log)
]:
l = logging.getLogger(l_name)
l.setLevel(l_level)
@@ -563,11 +585,15 @@ def main_impl(all_detector_classes, all_printer_classes):
slither_instances = []
if args.splitted:
(slither_instance, results_detectors, results_printers, number_contracts) = process_from_asts(filenames,
args,
detector_classes,
printer_classes)
slither_instances.append(slither_instance)
else:
for filename in filenames:
(slither_instance, results_detectors_tmp, results_printers_tmp,
number_contracts_tmp) = process_single(filename, args, detector_classes, printer_classes)
number_contracts += number_contracts_tmp
results_detectors += results_detectors_tmp
results_printers += results_printers_tmp
@@ -575,10 +601,12 @@ def main_impl(all_detector_classes, all_printer_classes):
# Rely on CryticCompile to discern the underlying type of compilations.
else:
(slither_instances, results_detectors, results_printers, number_contracts) = process_all(filename, args,
detector_classes,
printer_classes)
# Determine if we are outputting JSON
-if outputting_json:
+if outputting_json or outputting_zip:
# Add our compilation information to JSON
if 'compilations' in args.json_types:
compilation_results = []
@@ -642,6 +670,9 @@ def main_impl(all_detector_classes, all_printer_classes):
StandardOutputCapture.disable()
output_to_json(None if outputting_json_stdout else args.json, output_error, json_results)
if outputting_zip:
output_to_zip(args.zip, output_error, json_results, args.zip_type)
# Exit with the appropriate status code
if output_error:
sys.exit(-1)
@@ -652,7 +683,4 @@ def main_impl(all_detector_classes, all_printer_classes):
if __name__ == '__main__':
main()
# endregion

@@ -13,7 +13,6 @@ logger = logging.getLogger("Slither")
DEFAULT_JSON_OUTPUT_TYPES = ["detectors", "printers"]
JSON_OUTPUT_TYPES = ["compilations", "console", "detectors", "printers", "list-detectors", "list-printers"]
# Those are the flags shared by the command line and the config file
defaults_flag_in_config = {
'detectors_to_run': 'all',
@@ -33,8 +32,11 @@ defaults_flag_in_config = {
# debug command
'legacy_ast': False,
'ignore_return_value': False,
'zip': None,
'zip_type': 'lzma',
**DEFAULTS_FLAG_IN_CONFIG_CRYTIC_COMPILE
}
def read_config_file(args):
if os.path.isfile(args.config_file):
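Since 'zip' and 'zip_type' are now part of defaults_flag_in_config, they can also be supplied through the configuration file that read_config_file loads. A minimal sketch, assuming the file is plain JSON and named slither.config.json (both details are assumptions, not shown in this diff):

import json

# hypothetical configuration: export results to results.zip using deflate compression
config = {"zip": "results.zip", "zip_type": "deflated"}
with open("slither.config.json", "w") as fd:
    json.dump(config, fd)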
@@ -52,7 +54,6 @@ def read_config_file(args):
def output_to_markdown(detector_classes, printer_classes, filter_wiki):
def extract_help(cls):
if cls.WIKI == '':
return cls.HELP
@@ -97,6 +98,7 @@ def output_to_markdown(detector_classes, printer_classes, filter_wiki):
print('{} | `{}` | {}'.format(idx, argument, help_info))
idx = idx + 1
def get_level(l):
tab = l.count('\t') + 1
if l.replace('\t', '').startswith(' -'):
@@ -105,6 +107,7 @@ def get_level(l):
tab = tab + 1
return tab
def convert_result_to_markdown(txt):
# -1 to remove the last \n
lines = txt[0:-1].split('\n')
@@ -114,14 +117,15 @@ def convert_result_to_markdown(txt):
next_level = get_level(l)
prefix = '<li>'
if next_level < level:
prefix = '</ul>' * (level - next_level) + prefix
if next_level > level:
prefix = '<ul>' * (next_level - level) + prefix
level = next_level
ret.append(prefix + l)
return ''.join(ret)
def output_results_to_markdown(all_results):
checks = defaultdict(list)
for results in all_results:
@@ -140,12 +144,13 @@ def output_results_to_markdown(all_results):
result_markdown = convert_result_to_markdown(result)
print(f'| <ul><li>[ ] TP</li><li>[ ] FP</li><li>[ ] Unknown</li></ul> | {result_markdown}')
def output_wiki(detector_classes, filter_wiki):
detectors_list = []
# Sort by impact, confidence, and name
detectors_list = sorted(detector_classes,
key=lambda element: (element.IMPACT, element.CONFIDENCE, element.ARGUMENT))
for detector in detectors_list:
argument = detector.ARGUMENT
@@ -176,7 +181,6 @@ def output_wiki(detector_classes, filter_wiki):
print(recommendation)
def output_detectors(detector_classes):
detectors_list = []
for detector in detector_classes:
@@ -237,12 +241,13 @@ def output_detectors_json(detector_classes):
'impact': classification_txt[impact],
'confidence': confidence,
'wiki_url': wiki_url,
'description': description,
'exploit_scenario': exploit,
'recommendation': recommendation})
idx = idx + 1
return table
def output_printers(printer_classes):
printers_list = []
for printer in printer_classes:
@@ -281,4 +286,3 @@ def output_printers_json(printer_classes):
'title': help_info})
idx = idx + 1
return table

@@ -2,7 +2,10 @@ import hashlib
import os
import json
import logging
import zipfile
from collections import OrderedDict
from typing import Optional, Dict
from zipfile import ZipFile
from slither.core.cfg.node import Node
from slither.core.declarations import Contract, Function, Enum, Event, Structure, Pragma
@@ -52,6 +55,44 @@ def output_to_json(filename, error, results):
json.dump(json_result, f, indent=2)
# https://docs.python.org/3/library/zipfile.html#zipfile-objects
ZIP_TYPES_ACCEPTED = ['lzma', 'stored', 'deflated', 'bzip2']
def output_to_zip(filename: str, error: Optional[str], results: Dict, zip_type: str = "lzma"):
"""
Output the results to a zip
The file in the zip is named slither_results.json
Note: the json file will not have indentation, as a result the resulting json file will be smaller
:param zip_type:
:param filename:
:param error:
:param results:
:return:
"""
json_result = {
"success": error is None,
"error": error,
"results": results
}
if os.path.isfile(filename):
logger.info(yellow(f'{filename} exists already, the overwrite is prevented'))
else:
if zip_type == "lzma":
with ZipFile(filename, "w", compression=zipfile.ZIP_LZMA) as file_desc:
file_desc.writestr("slither_results.json", json.dumps(json_result).encode('utf8'))
elif zip_type == 'stored':
with ZipFile(filename, "w", compression=zipfile.ZIP_STORED) as file_desc:
file_desc.writestr("slither_results.json", json.dumps(json_result).encode('utf8'))
elif zip_type == 'deflated':
with ZipFile(filename, "w", compression=zipfile.ZIP_DEFLATED) as file_desc:
file_desc.writestr("slither_results.json", json.dumps(json_result).encode('utf8'))
else:
assert zip_type == 'bzip2'
with ZipFile(filename, "w", compression=zipfile.ZIP_BZIP2) as file_desc:
file_desc.writestr("slither_results.json", json.dumps(json_result).encode('utf8'))
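For illustration (the output path below is a placeholder, not taken from this diff), the archive that output_to_zip writes can be read back with nothing but the standard library:

from zipfile import ZipFile
import json

# open the archive produced by --zip and load the single JSON entry it contains
with ZipFile("results.zip") as zip_file:
    results = json.loads(zip_file.read("slither_results.json"))
print(results["success"])

The four branches above differ only in the compression constant passed to ZipFile; a dict such as {'lzma': zipfile.ZIP_LZMA, 'stored': zipfile.ZIP_STORED, 'deflated': zipfile.ZIP_DEFLATED, 'bzip2': zipfile.ZIP_BZIP2} could express the same mapping, though the explicit chain keeps the accepted values obvious at a glance.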
# endregion
###################################################################################
###################################################################################
@@ -102,6 +143,7 @@ def _convert_to_markdown(d, markdown_root):
raise SlitherError(f'{type(d)} cannot be converted (no name, or canonical_name')
def _convert_to_id(d):
'''
Id keeps the source mapping of the node, otherwise we risk to consider two different node as the same
@@ -131,6 +173,7 @@ def _convert_to_id(d):
raise SlitherError(f'{type(d)} cannot be converted (no name, or canonical_name')
# endregion
###################################################################################
###################################################################################
@@ -202,7 +245,6 @@ class Output:
if additional_fields:
self._data['additional_fields'] = additional_fields
def add(self, add, additional_fields=None):
if isinstance(add, Variable):
self.add_variable(add, additional_fields=additional_fields)
