Commit

Merge branch 'master' into 31-constantinople-bug
ESultanik committed Nov 1, 2018
2 parents ac7e20c + 045b383 commit 94f2ee6
Showing 7 changed files with 119 additions and 142 deletions.
19 changes: 11 additions & 8 deletions etheno/differentials.py
@@ -1,13 +1,16 @@
from enum import Enum
import os

from .client import JSONRPCError, SelfPostingClient
from .etheno import EthenoPlugin

class DifferentialTest(object):
def __init__(self, test_name, success, message = ''):
def __init__(self, tester, test_name, success, message = ''):
self.tester = tester
self.test_name = test_name
self.message = message
self.success = success
self.tester.logger.make_constant_logged_file(self.message, prefix=['FAILED', 'PASSED'][self.success.value], suffix='.log', dir=os.path.join(self.tester.logger.directory, self.test_name))
def __str__(self):
return "[%s] %s\t%s" % (self.test_name, self.success, self.message)
__repr__ = __str__
@@ -39,19 +42,19 @@ def after_post(self, data, client_results):
if clients_with_errors:
clients = [self.etheno.master_client] + self.etheno.clients
if clients_without_errors:
test = DifferentialTest('JSON_RPC_ERRORS', TestResult.FAILED, "%s executed JSON RPC call %s with no errors, but %s executed the same transaction with errors:\n%s" % (
test = DifferentialTest(self, 'JSON_RPC_ERRORS', TestResult.FAILED, "%s executed JSON RPC call %s with no errors, but %s executed the same transaction with errors:\n%s" % (
', '.join(str(clients[client]) for client in clients_without_errors),
data,
', '.join(str(clients[client]) for client in clients_with_errors),
'\n'.join(str(client_results[client]) for client in clients_with_errors)
))
else:
test = DifferentialTest('JSON_RPC_ERRORS', TestResult.PASSED, "All clients executed JSON RPC call %s with errors" % data)
test = DifferentialTest(self, 'JSON_RPC_ERRORS', TestResult.PASSED, "All clients executed JSON RPC call %s with errors" % data)
self.add_test_result(test)
self.logger.error(test.message)
return
else:
self.add_test_result(DifferentialTest('JSON_RPC_ERRORS', TestResult.PASSED, "All clients executed transaction %s without error" % data))
self.add_test_result(DifferentialTest(self, 'JSON_RPC_ERRORS', TestResult.PASSED, "All clients executed transaction %s without error" % data))

master_result = client_results[0]
if method == 'eth_sendTransaction' or method == 'eth_sendRawTransaction':
@@ -73,11 +76,11 @@ def after_post(self, data, client_results):
except Exception:
pass
if not created:
test = DifferentialTest('CONTRACT_CREATION', TestResult.FAILED, "the master client created a contract for transaction %s, but %s did not" % (data['params'][0], client))
test = DifferentialTest(self, 'CONTRACT_CREATION', TestResult.FAILED, "the master client created a contract for transaction %s, but %s did not" % (data['params'][0], client))
self.add_test_result(test)
self.logger.error(test.message)
else:
self.add_test_result(DifferentialTest('CONTRACT_CREATION', TestResult.PASSED, "client %s transaction %s" % (client, data['params'][0])))
self.add_test_result(DifferentialTest(self, 'CONTRACT_CREATION', TestResult.PASSED, "client %s transaction %s" % (client, data['params'][0])))
if 'gasUsed' in master_result['result'] and master_result['result']['gasUsed']:
# make sure each client used the same amount of gas
master_gas = int(master_result['result']['gasUsed'], 16)
@@ -88,11 +91,11 @@ def after_post(self, data, client_results):
except Exception:
pass
if gas_used != master_gas:
test = DifferentialTest('GAS_USAGE', TestResult.FAILED, "transaction %s used 0x%x gas in the master client but only 0x%x gas in %s!" % (data['params'][0], master_gas, gas_used, client))
test = DifferentialTest(self, 'GAS_USAGE', TestResult.FAILED, "transaction %s used 0x%x gas in the master client but only 0x%x gas in %s!" % (data['params'][0], master_gas, gas_used, client))
self.add_test_result(test)
self.logger.error(test.message)
else:
self.add_test_result(DifferentialTest('GAS_USAGE', TestResult.PASSED, "client %s transaction %s used 0x%x gas" % (client, data['params'][0], gas_used)))
self.add_test_result(DifferentialTest(self, 'GAS_USAGE', TestResult.PASSED, "client %s transaction %s used 0x%x gas" % (client, data['params'][0], gas_used)))

def finalize(self):
unprocessed = self._unprocessed_transactions
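
The practical effect of this change is that every DifferentialTest now carries a reference to the tester plugin, so each result is also written out as its own log file, prefixed FAILED or PASSED and grouped in a directory named after the test. Below is a minimal standalone sketch of that layout using only the standard library; the real plugin goes through EthenoLogger.make_constant_logged_file, whose exact signature is assumed from this diff, and the sketch assumes TestResult.FAILED is 0 and TestResult.PASSED is 1, as the list indexing above suggests.

import os
from enum import Enum

class TestResult(Enum):
    FAILED = 0
    PASSED = 1

def write_test_log(log_root, test_name, result, message):
    # One file per result, e.g. GAS_USAGE/PASSED-0.log, mirroring the
    # prefix/suffix/dir arguments passed to make_constant_logged_file above.
    test_dir = os.path.join(log_root, test_name)
    os.makedirs(test_dir, exist_ok=True)
    prefix = ['FAILED', 'PASSED'][result.value]
    path = os.path.join(test_dir, '%s-%d.log' % (prefix, len(os.listdir(test_dir))))
    with open(path, 'w') as f:
        f.write(message)
    return path

# Illustrative usage with a made-up message:
write_test_log('/tmp/etheno-logs', 'GAS_USAGE', TestResult.PASSED,
               'client transaction used 0x5208 gas')
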
30 changes: 18 additions & 12 deletions etheno/echidna.py
@@ -1,3 +1,4 @@
import os
import subprocess
import tempfile

@@ -100,18 +101,23 @@ def run(self):
self._shutdown()
return
self.logger.info("Deployed Echidna test contract to %s" % self.contract_address)
with ConstantTemporaryFile(ECHIDNA_CONFIG, prefix='echidna', suffix='.yaml') as config:
with ConstantTemporaryFile(self.contract_source, prefix='echidna', suffix='.sol') as sol:
echidna = subprocess.Popen(['/usr/bin/env', 'echidna-test', sol, '--config', config], stderr=subprocess.DEVNULL, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True)
while self.limit is None or self._transaction < self.limit:
line = echidna.stdout.readline()
if line != b'':
txn = decode_binary_json(line)
if txn is None:
continue
self.emit_transaction(txn)
else:
break
config = self.logger.make_constant_logged_file(ECHIDNA_CONFIG, prefix='echidna', suffix='.yaml')
sol = self.logger.make_constant_logged_file(self.contract_source, prefix='echidna', suffix='.sol')
echidna_args = ['/usr/bin/env', 'echidna-test', self.logger.to_log_path(sol), '--config', self.logger.to_log_path(config)]
run_script = self.logger.make_constant_logged_file(' '.join(echidna_args), prefix='run_echidna', suffix='.sh')
# make the script executable:
os.chmod(run_script, 0o755)

echidna = subprocess.Popen(echidna_args, stderr=subprocess.DEVNULL, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True, cwd=self.log_directory)
while self.limit is None or self._transaction < self.limit:
line = echidna.stdout.readline()
if line != b'':
txn = decode_binary_json(line)
if txn is None:
continue
self.emit_transaction(txn)
else:
break
self._shutdown()

def _shutdown(self):
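
The rewritten run() keeps the Echidna config and Solidity test contract in the log directory instead of throwaway temporary files, and it also records the exact echidna-test invocation as an executable shell script so the run can be replayed later. Here is a sketch of that record-and-replay pattern using only the standard library; the directory and file names are illustrative, not Etheno's.

import os
import subprocess

log_dir = '/tmp/echidna-example'   # illustrative log directory
os.makedirs(log_dir, exist_ok=True)

# Paths are given relative to the log directory so the recorded command still
# works if the whole directory is archived or moved.
echidna_args = ['/usr/bin/env', 'echidna-test', 'echidna.sol', '--config', 'echidna.yaml']

# Record the invocation as an executable run script next to its inputs.
run_script = os.path.join(log_dir, 'run_echidna.sh')
with open(run_script, 'w') as f:
    f.write(' '.join(echidna_args) + '\n')
os.chmod(run_script, 0o755)

# Launch the tool from inside the log directory and stream its stdout line by
# line, as the plugin's main loop does (each line would then be decoded into a
# transaction and replayed through Etheno).
echidna = subprocess.Popen(echidna_args, stderr=subprocess.DEVNULL,
                           stdout=subprocess.PIPE, bufsize=1,
                           universal_newlines=True, cwd=log_dir)
for line in echidna.stdout:
    print(line, end='')
echidna.wait()
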
8 changes: 8 additions & 0 deletions etheno/etheno.py
@@ -189,6 +189,14 @@ def etheno(self, instance):
self._etheno = instance
self.logger = logger.EthenoLogger(self.__class__.__name__, parent=self._etheno.logger)

@property
def log_directory(self):
'''Returns a log directory that this client can use to save additional files, or None if one is not available'''
if self.logger is None:
return None
else:
return self.logger.directory

def added(self):
'''
A callback when this plugin is added to an Etheno instance
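
The new log_directory property gives any plugin or client a stable place to drop extra artifacts next to its regular logs, or None when no logger has been attached. A hedged sketch of how a plugin might use it follows; the plugin and the file it writes are illustrative and not part of this commit, and it assumes EthenoPlugin is importable as etheno.etheno.EthenoPlugin (matching the relative import in differentials.py above) and that finalize() is invoked on shutdown as it is for the differential tester.

import os
from etheno.etheno import EthenoPlugin

class ArtifactDumpPlugin(EthenoPlugin):
    # Illustrative plugin: saves one extra file when Etheno finalizes.
    def finalize(self):
        if self.log_directory is None:
            return  # no logger attached, so there is nowhere to save to
        with open(os.path.join(self.log_directory, 'summary.txt'), 'w') as f:
            f.write('run finished\n')
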
30 changes: 13 additions & 17 deletions etheno/geth.py
Expand Up @@ -6,7 +6,7 @@
from . import logger
from .client import JSONRPCError
from .jsonrpcclient import JSONRPCClient
from .utils import ConstantTemporaryFile, format_hex_address
from .utils import format_hex_address

def ltrim_ansi(text):
if text.startswith(logger.ANSI_RESET):
@@ -47,28 +47,24 @@ def log(logger, message):
def etheno_set(self):
super().etheno_set()
try:
args = ['/usr/bin/env', 'geth', 'init', self.genesis_file, '--datadir', self.datadir]
args = ['/usr/bin/env', 'geth', 'init', self.logger.to_log_path(self.genesis_file), '--datadir', self.logger.to_log_path(self.datadir)]
self.add_to_run_script(args)
subprocess.check_call(args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
subprocess.check_call(args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, cwd=self.log_directory)
except Exception as e:
self.cleanup()
raise e

def import_account(self, private_key):
content = format_hex_address(private_key).encode('utf-8') + bytes([ord('\n')])
with ConstantTemporaryFile(content, prefix='private', suffix='.key') as keyfile:
if self.log_directory:
import_dir = os.path.join(self.log_directory, 'private_keys')
with self.make_tempfile(prefix='private', suffix='.key', dir=import_dir, delete_on_exit=False) as f:
f.write(content)
keyfile = f.name
while True:
args = ['/usr/bin/env', 'geth', 'account', 'import', '--datadir', self.datadir, '--password', self.passwords, keyfile]
self.add_to_run_script(args)
geth = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if geth.wait() == 0:
return
# This sometimes happens with geth, I have no idea why, so just try again
import_dir = os.path.join(self.log_directory, 'private_keys')
keyfile = self.logger.make_constant_logged_file(content, prefix='private', suffix='.key', dir=import_dir)
while True:
args = ['/usr/bin/env', 'geth', 'account', 'import', '--datadir', self.logger.to_log_path(self.datadir), '--password', self.logger.to_log_path(self.passwords), self.logger.to_log_path(keyfile)]
self.add_to_run_script(args)
geth = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=self.log_directory)
if geth.wait() == 0:
return
# This sometimes happens with geth, I have no idea why, so just try again

def post(self, data):
# geth takes a while to unlock all of the accounts, so check to see if that caused an error and just wait a bit
@@ -93,7 +89,7 @@ def get_start_command(self, unlock_accounts=True):
verbosity = 3
else:
verbosity = 4
base_args = ['/usr/bin/env', 'geth', '--nodiscover', '--rpc', '--rpcport', "%d" % self.port, '--networkid', "%d" % self.genesis['config']['chainId'], '--datadir', self.datadir, '--mine', '--etherbase', format_hex_address(self.miner_account.address), f"--verbosity={verbosity}", '--minerthreads=1']
base_args = ['/usr/bin/env', 'geth', '--nodiscover', '--rpc', '--rpcport', "%d" % self.port, '--networkid', "%d" % self.genesis['config']['chainId'], '--datadir', self.logger.to_log_path(self.datadir), '--mine', '--etherbase', format_hex_address(self.miner_account.address), f"--verbosity={verbosity}", '--minerthreads=1']
if unlock_accounts:
addresses = filter(lambda a : a != format_hex_address(self.miner_account.address), map(format_hex_address, self.genesis['alloc']))
unlock_args = ['--unlock', ','.join(addresses), '--password', self.passwords]
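
Throughout geth.py, paths handed to geth now go through logger.to_log_path and the subprocesses run with cwd=self.log_directory, so the commands recorded in the run script refer to files relative to the log directory and remain replayable after that directory is copied elsewhere. The following sketch illustrates the relative-path idea; to_log_path's exact behavior is an assumption inferred from this diff and is approximated here with os.path.relpath.

import os

def to_log_path(path, log_directory):
    # Assumed approximation of logger.to_log_path: express `path` relative to
    # the log directory so it resolves when commands are run from inside it.
    return os.path.relpath(path, log_directory)

log_dir = '/tmp/geth-example'
genesis_file = os.path.join(log_dir, 'genesis.json')
datadir = os.path.join(log_dir, 'chain_data')

args = ['/usr/bin/env', 'geth', 'init',
        to_log_path(genesis_file, log_dir),           # -> 'genesis.json'
        '--datadir', to_log_path(datadir, log_dir)]   # -> 'chain_data'

# Running with cwd=log_dir makes the relative paths resolve, and the same
# command line can be appended verbatim to a run script inside that directory.
print(' '.join(args))
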
106 changes: 15 additions & 91 deletions etheno/jsonrpcclient.py
@@ -11,12 +11,6 @@
from .logger import PtyLogger
from .utils import ConstantTemporaryFile, format_hex_address, is_port_free

class Tempfile(object):
path=''
requested_name=''
delete_on_exit=True
rewrite_paths=False

class JSONRPCClient(RpcProxyClient):
def __init__(self, name, genesis, port=8546):
super().__init__("http://localhost:%d/" % port)
@@ -26,29 +20,13 @@ def __init__(self, name, genesis, port=8546):
self.genesis = copy.deepcopy(genesis)
self.miner_account = make_accounts(1)[0]
self.genesis['alloc'][format_hex_address(self.miner_account.address)] = {'balance' : '0', 'privateKey' : format_hex_address(self.miner_account.private_key)}
self._tempfiles = []
self._accounts = []
self._created_address_index = -1
genesis_output = self.make_tempfile(prefix='genesis', suffix='.json')
self.genesis_file = genesis_output.name
password_output = self.make_tempfile(prefix=name, suffix='.passwd')
self.passwords = password_output.name
self._runscript = []

try:
self.write_genesis(genesis_output)
finally:
genesis_output.close()

try:
self.write_passwords(password_output)
finally:
password_output.close()

# This is set in self.etheno_set():
# These are set in self.etheno_set():
self.genesis_file = None
self.passwords = None
self.datadir = None
self._datadir_tmp = None

# This is set when self.start() is called:
self.instance = None

@@ -61,41 +39,20 @@ def write_passwords(self, outfile):

def etheno_set(self):
super().etheno_set()
if self.log_directory:
self.datadir = os.path.join(self.log_directory, 'chain_data')
os.makedirs(self.datadir)
else:
self._datadir_tmp = tempfile.TemporaryDirectory()
self.datadir = self._datadir_tmp.name
self.datadir = os.path.join(self.log_directory, 'chain_data')
os.makedirs(self.datadir)
with self.logger.make_logged_file(prefix='genesis', suffix='.json') as genesis_output:
self.genesis_file = genesis_output.name
self.write_genesis(genesis_output)
genesis_output.close()
with self.logger.make_logged_file(prefix=self._basename, suffix='.passwd') as password_output:
self.passwords = password_output.name
self.write_passwords(password_output)

def add_to_run_script(self, command):
if isinstance(command, Sequence):
command = ' '.join(command)
self._runscript.append(command)

def make_tempfile(self, save_to_log=True, **kwargs):
if 'dir' in kwargs:
# make sure the dir exists:
os.makedirs(kwargs['dir'], exist_ok=True)
tmpfile = Tempfile()
tmpfile.delete_on_exit = kwargs.get('delete_on_exit', True)
if 'delete_on_exit' in kwargs:
del kwargs['delete_on_exit']
tmpfile.rewrite_paths = kwargs.get('rewrite_paths', False)
if 'rewrite_paths' in kwargs:
del kwargs['rewrite_paths']
kwargs['delete'] = False
stream = tempfile.NamedTemporaryFile(**kwargs)
tmpfile.path = stream.name
if save_to_log:
if 'prefix' in kwargs:
tmpfile.requested_name = kwargs['prefix']
if 'suffix' in kwargs:
tmpfile.requested_name += kwargs['suffix']
if not tmpfile.requested_name:
tmpfile.requested_name = tmpfile.name
self._tempfiles.append(tmpfile)
return stream

def import_account(self, private_key):
raise NotImplementedError()
@@ -119,45 +76,21 @@ def create_account(self, balance=0, address=None):

def get_start_command(self, unlock_accounts=True):
raise NotImplementedError()

def _rewrite_paths(self, text, copy_files=False):
path_mapping = {}
for tmpfile in self._tempfiles:
if tmpfile.requested_name and not tmpfile.path.startswith(self.log_directory):
newpath = os.path.join(self.log_directory, tmpfile.requested_name)
path_mapping[tmpfile.path] = newpath
if copy_files:
shutil.copyfile(tmpfile.path, newpath)
if tmpfile.rewrite_paths:
with open(newpath, 'r') as f:
content = f.read()
with open(newpath, 'w') as f:
f.write(self._rewrite_paths(content, copy_files=False))
for oldpath, newpath in path_mapping.items():
if oldpath.startswith(self.log_directory):
# This file is already in the log directory, so skip it
continue
text = text.replace(oldpath, os.path.basename(newpath))
text = text.replace(self.log_directory, '.')
return text

def save_logs(self):
if not self.log_directory:
raise ValueError("A log directory has not been set for %s" % str(self))
def save_run_script(self):
run_script = os.path.join(self.log_directory, "run_%s.sh" % self._basename.lower())
with open(run_script, 'w') as f:
script = '\n'.join(self._runscript)
script = self._rewrite_paths(script, copy_files=True)
f.write(script)
# make the script executable:
os.chmod(run_script, 0o755)

def start(self, unlock_accounts=True):
start_args = self.get_start_command(unlock_accounts)
self.instance = PtyLogger(self.logger, start_args)
self.instance = PtyLogger(self.logger, start_args, cwd=self.log_directory)
if self.log_directory:
self.add_to_run_script(start_args)
self.save_logs()
self.save_run_script()
self.initialized()
self.instance.start()
self.wait_until_running()
Expand All @@ -177,14 +110,5 @@ def stop(self):
instance.wait()
instance.close()

def cleanup(self):
if self._datadir_tmp and os.path.exists(self.datadir):
self._datadir_tmp.cleanup()
for tmpfile in self._tempfiles:
if tmpfile.delete_on_exit and os.path.exists(tmpfile.path):
os.remove(tmpfile.path)
self._tempfiles = []

def shutdown(self):
self.stop()
self.cleanup()
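
The constructor above still funds a dedicated miner account by adding it to the genesis alloc, but the genesis and password files are now written in etheno_set() through the logger rather than as anonymous temporary files, so the old cleanup of those temporaries is no longer needed. A small worked example of the alloc entry that write_genesis ends up serializing follows; the address, private key, chain ID, and output path are placeholders, not values from this commit.

import copy
import json

# Placeholders standing in for make_accounts(1)[0] and the user-supplied genesis:
miner_address = '0x' + '11' * 20
miner_private_key = '0x' + '22' * 32
genesis_template = {'config': {'chainId': 1337}, 'alloc': {}}

genesis = copy.deepcopy(genesis_template)
# Mirror of the alloc entry built in __init__: zero balance, with the private
# key recorded alongside it so Etheno can later import the miner account into
# clients such as geth.
genesis['alloc'][miner_address] = {
    'balance': '0',
    'privateKey': miner_private_key,
}

# write_genesis() then serializes the dict into the logged genesis file:
with open('/tmp/genesis.json', 'w') as f:
    json.dump(genesis, f, indent=2)
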