6 changes: 3 additions & 3 deletions config.yml
@@ -1,11 +1,11 @@
 compilers:
-  - name: venom
+  - name: adder
     queue:
       host: localhost
       port: 5672
     exec_params:
-      venom: True
-  - name: default
+      venom: False
+  - name: nagini
     queue:
       host: localhost
       port: 5673
35 changes: 18 additions & 17 deletions converters/typed_converters.py
@@ -134,6 +134,7 @@ def visit(self):
         """
         Runs the conversion of the message and stores the result in the result variable
         """
+        random.seed(0)
         for i, var in enumerate(self.contract.decls):
             if i >= MAX_STORAGE_VARIABLES:
                 break
@@ -531,36 +532,36 @@ def _visit_assignment(self, assignment):
     def _visit_statement(self, statement):
         # if not in `for` theres always assignment; probs need another default value
         if self._for_block_count > 0:
-            if statement.HasField("cont_stmt"):
+            if _has_field(statement, "cont_stmt"):
                 return self._visit_continue_statement()
-            if statement.HasField("break_stmt"):
+            if _has_field(statement, "break_stmt"):
                 return self._visit_break_statement()
-        if statement.HasField("decl"):
+        if _has_field(statement, "decl"):
             return self.visit_var_decl(statement.decl)
-        if statement.HasField("for_stmt"):
+        if _has_field(statement, "for_stmt"):
             return self._visit_for_stmt(statement.for_stmt)
-        if statement.HasField("if_stmt"):
+        if _has_field(statement, "if_stmt"):
             return self._visit_if_stmt(statement.if_stmt)
-        if statement.HasField("assert_stmt"):
+        if _has_field(statement, "assert_stmt"):
             return self._visit_assert_stmt(statement.assert_stmt)
-        if statement.HasField("func_call"):
+        if _has_field(statement, "func_call"):
             if len(self._func_tracker) > 0:
                 func_num = statement.func_call.func_num % len(self._func_tracker)
                 if func_num in self._function_call_map[self._current_func.id]:
                     return self._visit_func_call(statement.func_call)
-        if statement.HasField("append_stmt"):
+        if _has_field(statement, "append_stmt"):
             append_st = self._visit_append_stmt(statement.append_stmt)
             if append_st is not None:
                 return append_st
-        if statement.HasField("pop_stmt"):
+        if _has_field(statement, "pop_stmt"):
             pop_st = self._visit_pop_stmt(statement.pop_stmt)
             if pop_st is not None:
                 return pop_st
-        if statement.HasField("send_stmt"):
+        if _has_field(statement, "send_stmt"):
             return self._visit_send_stmt(statement.send_stmt)
-        if statement.HasField("raw_call"):
+        if _has_field(statement, "raw_call"):
             return self._visit_raw_call(statement.raw_call)
-        if statement.HasField("raw_log"):
+        if _has_field(statement, "raw_log"):
             return self._visit_raw_log(statement.raw_log)
         return self._visit_assignment(statement.assignment)
 
@@ -687,17 +688,17 @@ def visit_address_expression(self, expr):
         # result = self._visit_convert(expr.convert)
         # return result
         current_type = self.type_stack[-1]
-        if expr.HasField("cmp") and not self._is_constant:
+        if _has_field(expr, "cmp") and not self._is_constant:
             name = "create_minimal_proxy_to"
             return self.visit_create_min_proxy_or_copy_of(expr.cmp, name)
-        if expr.HasField("cfb") and not self._is_constant:
+        if _has_field(expr, "cfb") and not self._is_constant:
             return self.visit_create_from_blueprint(expr.cfb)
-        if expr.HasField("cco") and not self._is_constant:
+        if _has_field(expr, "cco") and not self._is_constant:
             name = "create_copy_of"
             return self.visit_create_min_proxy_or_copy_of(expr.cco, name)
-        if expr.HasField("ecRec"):
+        if _has_field(expr, "ecRec"):
             return self.visit_ecrecover(expr.ecRec)
-        if expr.HasField("varRef"):
+        if _has_field(expr, "varRef"):
             # TODO: it has to be decided how exactly to track a current block level or if it has to be passed
             result = self._visit_var_ref(expr.varRef, self._block_level_count)
             if result is not None:
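
Note: the hunks above route every statement.HasField(...) / expr.HasField(...) check through a module-level _has_field helper whose definition is not part of this diff. A minimal sketch of what such a wrapper might look like, assuming it only guards protobuf's HasField() against fields without presence (the real helper may differ):

# Hypothetical sketch only; the actual _has_field lives outside these hunks.
def _has_field(msg, field_name: str) -> bool:
    """Safe wrapper around protobuf's HasField()."""
    try:
        return msg.HasField(field_name)
    except ValueError:
        # Fields without presence (repeated fields, plain proto3 scalars)
        # make HasField() raise; fall back to "set to a non-default value".
        return any(fd.name == field_name for fd, _ in msg.ListFields())
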
2 changes: 1 addition & 1 deletion converters/typed_converters_4.py
@@ -51,7 +51,7 @@ def visit_init(self, init):
 
     # https://github.com/vyperlang/vyper/pull/3769
     def _visit_reentrancy(self, ret):
-        return "@nonreentrant"
+        return "@nonreentrant\n"
 
     # https://github.com/vyperlang/vyper/pull/2937
     def _visit_int_expression(self, expr):
4 changes: 4 additions & 0 deletions proto_loader.py
@@ -2,6 +2,9 @@
 import vyper
 
 conf = Config()
+
+from vyperProtoNewRawCallBug_pb2 import *
+"""
 if vyper.__version__ == '0.3.10':
     from vyperProtoNew_pb2 import *
 # 0.3.10 and 0.4.0 w decimals are the same for now
@@ -10,3 +13,4 @@
     from vyperProtoNew_pb2 import *
 elif vyper.__version__ == '0.4.0':
     from vyperProtoNewNoDecimal_pb2 import *
+"""
21 changes: 13 additions & 8 deletions run.py
@@ -16,7 +16,7 @@
 with atheris.instrument_imports():
     import sys
     import vyper
-    #from converters.typed_converters import TypedConverter
+    from converters.typed_converters import TypedConverter
     from converters.typed_converters_4 import NaginiConverter
 
 __version__ = "0.1.3" # same version as images' one
@@ -45,7 +45,8 @@
 def TestOneProtoInput(msg):
     data = {
         "json_msg": MessageToJson(msg),
-        "generation_result": None,
+        "generation_result_nagini": None,
+        "generation_result_adder": None,
         "compilation_result": None,
         "error_type": None,
         "error_message": None,
@@ -57,8 +58,10 @@ def TestOneProtoInput(msg):
     c_log = db_client["compilation_log"]
     f_log = db_client['failure_log']
     try:
-        proto = NaginiConverter(msg)
-        proto.visit()
+        proto_converter_nagini = NaginiConverter(msg)
+        proto_converter_nagini.visit()
+        proto_converter_adder = TypedConverter(msg)
+        proto_converter_adder.visit()
     except Exception as e:
         converter_error = {
             "error_type": type(e).__name__,
@@ -69,9 +72,10 @@
 
         logger.critical("Converter has crashed: %s", converter_error)
         raise e # Do we actually want to fail here?
-    data["generation_result"] = proto.result
+    data["generation_result_nagini"] = proto_converter_nagini.result
+    data["generation_result_adder"] = proto_converter_adder.result
     try:
-        c_result = vyper.compile_code(proto.result)
+        c_result = vyper.compile_code(proto_converter_nagini.result)
         data["compilation_result"] = c_result
     except Exception as e:
         data["error_type"] = type(e).__name__
@@ -80,7 +84,7 @@ def TestOneProtoInput(msg):
     logger.debug("Compilation result: %s", data)
 
     input_values = dict()
-    for name, types in proto.function_inputs.items():
+    for name, types in proto_converter_nagini.function_inputs.items():
         for i in conf.input_strategies:
             input_generator.change_strategy(InputStrategy(i))
 
@@ -96,7 +100,8 @@
 
     message = {
         "_id": str(ins_res.inserted_id),
-        "generation_result": proto.result,
+        "generation_result_nagini": proto_converter_nagini.result,
+        "generation_result_adder": proto_converter_adder.result,
         "function_input_values": input_values,
         "json_msg": MessageToJson(msg),
         "generator_version": __version__,
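
Note: with both converters wired into run.py, each fuzzed proto message now produces two generated contracts that travel through the pipeline together; tests/integration_runner/runner.py below selects its source via _contract_desc[f"generation_result_{compiler_key}"]. Roughly, the queued document looks like this (field names taken from the diff, values purely illustrative):

message = {
    "_id": "663d2f0c9b1e4a7d8c0a1b2c",                    # id of the MongoDB compilation_log entry
    "generation_result_nagini": "# source emitted by NaginiConverter",
    "generation_result_adder": "# source emitted by TypedConverter",
    "function_input_values": {"func_0": [[1, 2], [3, 4]]},  # illustrative shape only
    "json_msg": "{ ... }",                                  # MessageToJson(msg)
    "generator_version": "0.1.3",
}
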
5 changes: 3 additions & 2 deletions tests/integration_runner/runner.py
@@ -19,7 +19,8 @@
 conf = Config("./config.yml")
 compiler_params = conf.get_compiler_params_by_name(compiler_name)
 
-compiler_key = f"{vyper.__version__.replace('.', '_')}_{compiler_name}"
+#compiler_key = f"{vyper.__version__.replace('.', '_')}_{compiler_name}"
+compiler_key = f"{compiler_name}"
 
 logger_level = getattr(logging, conf.verbosity)
 logger = logging.getLogger(f"runner_{compiler_key}")
@@ -74,7 +75,7 @@ def handle_compilation(_contract_desc):
     for iv in init_values:
         logger.debug("Constructor values: %s", iv)
         try:
-            contract = boa.loads(_contract_desc["generation_result"],
+            contract = boa.loads(_contract_desc[f"generation_result_{compiler_key}"],
                                  *iv, compiler_args=comp_settings)
         except Exception as e:
             logger.debug("Deployment failed: %s", str(e))
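
Note: since compiler_key now collapses to just the compiler name from config.yml, the lookup chain is, for example (hypothetical invocation, how compiler_name is supplied is not shown in this diff):

compiler_name = "nagini"                                       # assumed to come from the runner's CLI/env
compiler_key = f"{compiler_name}"                              # -> "nagini"
source = _contract_desc[f"generation_result_{compiler_key}"]   # -> _contract_desc["generation_result_nagini"]
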
63 changes: 52 additions & 11 deletions verifiers/simple_verifier.py
@@ -35,9 +35,23 @@ def runtime_error_handler(_res0, _res1):
     pass
 
 
+def compilation_error_handler(_res0, _res1):
+    if _res0 != _res1:
+        raise VerifierException(f"Compilation error discrepancy: {_res0} | {_res1}")
+
 RUNTIME_ERROR = "runtime_error"
 
 
+def verify_and_catch(verifier, params):
+    try:
+        verifier(*params)
+        err = None
+    except VerifierException as e:
+        logger.error(str(e))
+        err = str(e)
+    return err
+
+
 def verify_two_results(_res0, _res1):
     if RUNTIME_ERROR in _res0 or RUNTIME_ERROR in _res1:
         runtime_error_handler(_res0, _res1)
@@ -50,12 +64,7 @@ def verify_two_results(_res0, _res1):
     }
     d = {}
     for name, (verifier, params) in verifiers.items():
-        try:
-            verifier(*params)
-            d[name] = None
-        except VerifierException as e:
-            logger.error(str(e))
-            d[name] = str(e)
+        d[name] = verify_and_catch(verifier, params)
     return d
 
 
@@ -80,7 +89,7 @@ def verify_results(_conf: Config, data):
 
 
 def target_fields(_conf: Config) -> list:
-    return [f"result_0_4_0_{c['name']}" for c in _conf.compilers]
+    return [f"result_{c['name']}" for c in _conf.compilers]
 
 
 def ready_to_handle(_conf: Config, _res) -> bool:
@@ -111,13 +120,39 @@ def reshape_data(_conf, _res):
 def is_valid(_conf, _res):
     fields = target_fields(_conf)
     for f in fields:
+        # contract is empty, regardless of inputs
         if len(_res[f][0]) == 0:
             return False
-        if "deploy_error" in _res[f][0]:
-            # TODO: deploy errors are supposed to be compared and handled as well
-            return False
     return True
 
+def check_deploy_errors(_conf, _res):
+    deploy_errors = []
+    fields = target_fields(_conf)
+
+    has_error = False
+    # reshaping: init->[compilers]
+    for f in fields:
+        for j, depl in enumerate(_res[f]):
+            if j > len(deploy_errors) - 1:
+                deploy_errors.append([])
+            deploy_errors[j].append(_res[f][j].get("deploy_error", None))
+            if deploy_errors[j][-1] is not None:
+                has_error = True
+
+    deploy_results = []
+    for i, errors in enumerate(deploy_errors):
+        for j, error in enumerate(errors):
+            if j == len(errors) - 1:
+                break
+            verify_result = verify_and_catch(compilation_error_handler,
+                                             (errors[j], errors[j+1]))
+            deploy_results.append({
+                "compilers": (fields[j], fields[j + 1]),
+                "deployment": i,
+                "results": verify_result
+            })
+    return has_error, deploy_results
+
 
 if __name__ == '__main__':
     conf = Config()
@@ -137,14 +172,20 @@ def is_valid(_conf, _res):
 
     verification_results = []
     for res in unhandled_results:
-        logger.debug(f"Handling result: {res['generation_id']}")
+        logger.info(f"Handling result: {res['generation_id']}")
+        logger.debug(res)
         if not ready_to_handle(conf, res):
             logger.debug("%s is not ready yet", res["generation_id"])
            continue
 
         if not is_valid(conf, res):
             continue
 
+        has_errors, results = check_deploy_errors(conf, res)
+        if has_errors:
+            verification_results.append({"generation_id": res["generation_id"], "results": results})
+            continue
+
         reshaped_res = reshape_data(conf, res)
         _r = verify_results(conf, reshaped_res)
         verification_results.append({"generation_id": res["generation_id"], "results": _r})
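
Note: check_deploy_errors reshapes per-compiler deployment results into per-deployment error lists and then compares adjacent compilers pairwise via compilation_error_handler. A small self-contained illustration of the reshaping step, with made-up field names and error strings:

# Illustrative only: two compiler result fields, two deployments each.
res = {
    "result_adder":  [{"deploy_error": "OverflowException"}, {}],
    "result_nagini": [{"deploy_error": "OverflowException"}, {"deploy_error": "Revert"}],
}
fields = ["result_adder", "result_nagini"]

deploy_errors = []
for f in fields:
    for j, depl in enumerate(res[f]):
        if j > len(deploy_errors) - 1:
            deploy_errors.append([])
        deploy_errors[j].append(depl.get("deploy_error", None))

print(deploy_errors)
# [['OverflowException', 'OverflowException'], [None, 'Revert']]
# Deployment 0 agrees across compilers; deployment 1 would be reported
# as a "Compilation error discrepancy" by compilation_error_handler.
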