Merge branch 'ph4r05:master' into master
petrs committed Sep 13, 2023
2 parents 67b8f30 + fe7b221 commit 9b9d391
Showing 6 changed files with 352 additions and 47 deletions.
11 changes: 6 additions & 5 deletions booltest/booltest_json.py
@@ -145,20 +145,21 @@ def eacirc_generator(self, tmpdir, generator_path, config_js):
         new_generator_path = os.path.join(tmpdir, 'generator')
         shutil.copy(generator_path, new_generator_path)

+        cfg_file = os.path.join(tmpdir, 'generator.json')
         config_str = json.dumps(config_js, indent=2)
-        with open(os.path.join(tmpdir, 'generator.json'), 'w') as fh:
+        with open(cfg_file, 'w') as fh:
             fh.write(config_str)

         # Generate some data here

-        p = subprocess.Popen(new_generator_path, shell=True, cwd=tmpdir)
+        tmp_out = os.path.join(tmpdir, 'stdout')
+        cmd = "%s -c=%s > %s" % (new_generator_path, cfg_file, tmp_out)
+        p = subprocess.Popen(cmd, shell=True, cwd=tmpdir)
         p.communicate()
         if p.returncode != 0:
             logger.error('Could not generate data, code: %s' % p.returncode)
             return None

         # Generated file:
-        data_files = [f for f in os.listdir(tmpdir) if os.path.isfile(os.path.join(tmpdir, f))
+        data_files = [f for f in (os.listdir(tmpdir) + ['stdout']) if os.path.isfile(os.path.join(tmpdir, f))
                       and f.endswith('bin')]
         if len(data_files) != 1:
             logger.error('Error in generating data to process. Files found: %s' % data_files)
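The change above switches from running the generator bare to passing the config path explicitly and capturing its stdout to a file. For reference, the same redirection can be done without shell=True by handing Popen a file handle; a minimal sketch, not part of the commit, assuming the generator accepts -c=<path> as in the command string above:

import os
import subprocess

def run_generator(new_generator_path, cfg_file, tmpdir):
    # Same effect as "generator -c=cfg > stdout" but without invoking a shell.
    tmp_out = os.path.join(tmpdir, 'stdout')
    with open(tmp_out, 'wb') as out_fh:
        p = subprocess.Popen([new_generator_path, '-c=%s' % cfg_file],
                             stdout=out_fh, cwd=tmpdir)
        p.communicate()
    return tmp_out if p.returncode == 0 else None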
21 changes: 12 additions & 9 deletions booltest/booltest_main.py
@@ -1076,12 +1076,16 @@ def adjust_tvsize(self, tvsize, size, coffset=0):
         if tvsize < 0:
             raise ValueError('Negative TV size: %s' % tvsize)

-        coef = 8 if not self.do_halving else 4
-        if (tvsize * coef) % self.blocklen != 0:
-            rem = (tvsize * coef) % self.blocklen
+        # Multiplier applied when the block length is not byte-aligned, to make tvsize byte-aligned.
+        align_blocklen = self.blocklen if not self.do_halving else 2 * self.blocklen
+        align_mutl = common.comp_byte_align_multiplier(align_blocklen, 8 if not self.do_halving else 16)
+        aligned_blocklen = align_blocklen * align_mutl
+
+        if (tvsize * 8) % aligned_blocklen != 0:
             logger.warning('Input data size not aligned to the block size. '
-                           'Input bytes: %d, block bits: %d, rem: %d' % (tvsize, self.blocklen, rem))
-            tvsize -= rem // coef
+                           'Input bytes: %d, block bits: %d, align_mutl: %d, align_block: %s, halving: %s'
+                           % (tvsize, self.blocklen, align_mutl, aligned_blocklen, self.do_halving))
+            tvsize = (((tvsize * 8) // aligned_blocklen) * aligned_blocklen) // 8
             logger.info('Updating TV to %d' % tvsize)
         return int(tvsize)

@@ -1225,7 +1229,6 @@ def work(self):
             jscres['offset'] = coffset

             tvsize = self.adjust_tvsize(tvsize, size, coffset)
-            logger.warning("FUCK")
             self.hwanalysis = self.setup_hwanalysis(self.deg, self.top_comb, self.top_k, self.all_deg, zscore_thresh)
             if self.hwanalysis.ref_db_path:
                 logger.info('Using reference data file %s' % self.hwanalysis.ref_db_path)
@@ -1324,8 +1327,8 @@ def analyze_iobj(self, iobj, coffset=0, tvsize=None, jscres=None):
                 break

             if (len(data) * 8 % self.hwanalysis.blocklen) != 0:
-                logger.info('Not aligned block read, terminating. Data left: %s bits, block size: %s bits'
-                            % (len(data) * 8, self.hwanalysis.blocklen))
+                logger.info('Not aligned block read, terminating. Data left: %s bits, block size: %s bits, tv %s'
+                            % (len(data) * 8, self.hwanalysis.blocklen, tvsize))
                 break

             with self.timer_data_bins:
@@ -1488,7 +1491,7 @@ def argparser(self):
                             help='Probability the given combination is going to be chosen. '
                                  'Enables stochastic test, useful for large degrees.')

-        parser.add_argument('--default-params', dest='default_params', action='store_const', const=True, default=False,
+        parser.add_argument('--default-params', dest='default_params', type=int, default=1,
                             help='Default parameter settings for testing, used in the paper')

        parser.add_argument('files', nargs=argparse.ZERO_OR_MORE, default=[],
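Note the behavioral change in --default-params: previously a store_const switch defaulting to off, it is now an integer defaulting to 1, so the paper's parameter set is active unless explicitly disabled. A quick illustrative check of the new parsing:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--default-params', dest='default_params', type=int, default=1,
                    help='Default parameter settings for testing, used in the paper')

assert parser.parse_args([]).default_params == 1                          # now on by default
assert parser.parse_args(['--default-params', '0']).default_params == 0  # explicit opt-out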
15 changes: 15 additions & 0 deletions booltest/common.py
@@ -638,6 +638,21 @@ def generate_seed(iteration=0):
     return binascii.hexlify(seed)


+def gcd(x, y):
+    while y:
+        x, y = y, x % y
+    return x
+
+
+def comp_byte_align_multiplier(bit_size, bit_align=8):
+    """
+    Computes the multiplier X such that bit_size * X % bit_align == 0.
+    X = bit_align / gcd(bit_size, bit_align); X = bit_align always works,
+    the gcd just makes X as small as possible (X is in [1, bit_align]).
+    """
+    return bit_align / gcd(bit_size, bit_align)
+
+
 # Re-exports, compatibility


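Since X = bit_align / gcd(bit_size, bit_align), the product bit_size * X is exactly lcm(bit_size, bit_align): the shortest run of blocks that ends on an alignment boundary. (Under Python 3 the / makes the helper return a float; adjust_tvsize above tolerates this via its final int() cast.) A quick property check as a standalone snippet, using the stdlib gcd rather than the helper:

from math import gcd

def lcm(a, b):
    return a * b // gcd(a, b)

for bit_size in (1, 6, 8, 12, 128):
    x = 8 // gcd(bit_size, 8)  # integer form of comp_byte_align_multiplier(bit_size, 8)
    assert bit_size * x == lcm(bit_size, 8)
    assert (bit_size * x) % 8 == 0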
19 changes: 9 additions & 10 deletions booltest/job_server.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-

-#import zmq
 import argparse
 import coloredlogs
 import logging
@@ -58,22 +57,22 @@ def __init__(self):
         self.is_running = True
         self.job_ctr = 0
         self.job_src_files = []
-        self.job_entries = {}  # type: Dict[str,JobEntry]
+        self.job_entries = {}  # type: Dict[str, JobEntry]
         self.job_queue = []  # type: List[str]
         self.preloaded_jobs = []  # type: List[JobEntry]
         self.failed_jobs = []

         # Mapping worker_id -> job_id
-        self.worker_map = {}  # type: Dict[str,str]
-        self.workers = {}  # type: Dict[str,WorkerEntry]
+        self.worker_map = {}  # type: Dict[str, Optional[str]]
+        self.workers = {}  # type: Dict[str, WorkerEntry]
         self.db_lock = asyncio.Lock()
         self.db_lock_t = threading.Lock()
         self.input_lock_t = threading.Lock()
         self.thread_watchdog = None
         self.thread_loader = None
         self.key = None

-    def job_get(self, worker_id=None):
+    def job_get(self, worker_id=None) -> Optional[JobEntry]:
         self.check_job_queue()  # TODO: has to be done in async way...
         if len(self.job_queue) == 0:
             return None
@@ -103,7 +102,7 @@ def on_job_fail(self, jb: JobEntry, timeout=False):
     def on_job_success(self, jb: JobEntry):
         jb.unit = None

-    def on_job_alloc(self, jb, worker_id):
+    def on_job_alloc(self, jb: Optional[JobEntry], worker_id) -> Optional[JobEntry]:
         if not jb:
             return
         # If worker had another job, finish it now. It failed probably
@@ -116,7 +115,7 @@ def on_job_alloc(self, jb, worker_id):
         self.worker_map[worker_id] = jb.uuid
         return jb

-    def on_job_finished(self, uid, worker_id, jmsg):
+    def on_job_finished(self, uid, worker_id, jmsg: Optional[Dict]):
         self.worker_map[worker_id] = None
         jb = self.job_entries[uid]
         if jmsg and 'ret_code' in jmsg:
@@ -140,7 +139,7 @@ def on_worker_ping(self, worker_id):
             self.workers[worker_id] = WorkerEntry(worker_id)
         self.workers[worker_id].last_ping = time.time()

-    def check_auth(self, msg):
+    def check_auth(self, msg: Dict):
         if not self.key:
             return True
@@ -239,7 +238,7 @@ def run_loader(self):
             except Exception as e:
                 logger.warning("Exception in loader: %s" % (e,), exc_info=e)

-    def buid_resp_job(self, jb):
+    def buid_resp_job(self, jb: Optional[JobEntry]) -> Dict:
         if jb is None:
             return {'res': None}
         return {'res': jb.unit}
@@ -258,7 +257,7 @@ async def on_ws_msg(self, websocket, path):
             resp_js = json.dumps(resp)
             await websocket.send(resp_js)

-    async def on_msg(self, message):
+    async def on_msg(self, message) -> Dict[str, Any]:
         try:
             jmsg = json.loads(message)
             if 'action' not in jmsg:
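Taken together, the new annotations document the allocation protocol: worker_map holds Optional[str] because a worker may have no current job; on_job_alloc records the job uuid under the worker and implicitly fails any job the worker was still mapped to; on_job_finished clears the mapping. A condensed sketch of that lifecycle with a stand-in JobEntry (fields uuid/unit assumed for illustration; not the real class):

from typing import Dict, Optional

class JobEntry:
    """Illustrative stand-in for the server's JobEntry."""
    def __init__(self, uuid: str, unit: dict):
        self.uuid = uuid
        self.unit = unit
        self.failed = False

class JobMap:
    def __init__(self):
        self.job_entries = {}  # type: Dict[str, JobEntry]
        self.worker_map = {}   # type: Dict[str, Optional[str]]

    def on_job_alloc(self, jb: Optional[JobEntry], worker_id: str) -> Optional[JobEntry]:
        if not jb:
            return None
        # A worker still mapped to an older job means that job never finished.
        prev = self.worker_map.get(worker_id)
        if prev and prev in self.job_entries:
            self.job_entries[prev].failed = True
        self.worker_map[worker_id] = jb.uuid
        return jb

    def on_job_finished(self, uid: str, worker_id: str, jmsg: Optional[dict]):
        self.worker_map[worker_id] = None
        self.job_entries[uid].unit = None  # real server also checks jmsg['ret_code']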
