nuTerm for VCDs
Based upon https://dl.acm.org/do/10.1145/3554332/ and adjusted to use
VCD files as the source of traces rather than data sampled from running
Java programs.
tautschnig committed Dec 6, 2023
1 parent 409db4b commit 750de4e
Showing 22 changed files with 3,572 additions and 0 deletions.
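The commit message above describes replacing nuTerm's Java trace sampler with traces extracted from VCD files. As a rough illustration of the data shape the learning code (learning.py further down) consumes, consecutive VCD samples of each trace become (state, successor-state) pairs. This is only a hedged sketch; the nested-dict layout and the signal names are illustrative assumptions, not code from this commit.

# Sketch: turn per-trace signal samples into (before, after) state tensors,
# the shape of training data used by simple_learning in learning.py below.
import torch

traces = {
    "trace.vcd.1": {"counter": [5, 4, 3, 2], "done": [0, 0, 0, 1]},
    "trace.vcd.2": {"counter": [3, 2, 1, 0], "done": [0, 0, 0, 1]},
}

def state_pairs(traces):
    before, after = [], []
    for samples in traces.values():
        names = sorted(samples)
        steps = min(len(samples[n]) for n in names)
        for t in range(steps - 1):
            before.append([float(samples[n][t]) for n in names])
            after.append([float(samples[n][t + 1]) for n in names])
    return torch.tensor(before), torch.tensor(after)

input_before, input_after = state_pairs(traces)
print(input_before.shape, input_after.shape)  # torch.Size([6, 2]) twice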
38 changes: 38 additions & 0 deletions src/ebmc/ranking-function-synthesis/nuterm-wrapper.sh
@@ -0,0 +1,38 @@
#!/bin/sh

BASE=$(dirname "$0")

if ! which ebmc 2>/dev/null 1>&2 ; then
  echo "ebmc binary not found in PATH"
  exit 1
fi

VERILOG_FILES=$@

cleanup()
{
  rm -rf "$vcd_dir"
}
vcd_dir=$(mktemp -d vcd.XXXXX)
trap cleanup EXIT

ebmc $VERILOG_FILES --random-traces --random-seed 0 \
  --trace-steps 100 --number-of-traces 20 \
  --vcd $vcd_dir/trace.vcd

# pyvcd does not support indexed names arising from generate statements; a Python equivalent of this rewrite is sketched after this script
perl -p -i -e 's/\[(\d+)\]/__$1__/g' $vcd_dir/trace.vcd*

## If we were to use --neural-engine:
## python3 $BASE/nuterm/nuterm.py \
## --strategy anticorr_sumofrelu2 \
## $BASE/../../bla.vcd.* | \
## tail -n 1 | cut -f2 -d: | \
## sed 's/main\.//g' | sed 's/^/Candidate: /'
## echo

python3 $BASE/nuterm/nuterm.py \
  --strategy anticorr_sumoflinears_or_concat \
  --vcd-prefix $vcd_dir/trace.vcd \
  --seed 0 \
  $VERILOG_FILES
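The perl one-liner in the script rewrites indexed identifiers such as gen[3] to gen__3__ so that the generated VCD files can be parsed. Where perl is unavailable, the same rewrite can be done in Python; this is a hedged sketch, not part of the commit, and the script name is hypothetical.

# rename_indexed.py (hypothetical): replace [N] with __N__ in the VCD files,
# mirroring the perl substitution s/\[(\d+)\]/__$1__/g used by the wrapper.
import glob
import re
import sys

def rewrite(path):
    with open(path) as f:
        text = f.read()
    with open(path, "w") as f:
        f.write(re.sub(r"\[(\d+)\]", r"__\1__", text))

if __name__ == "__main__":
    # usage: python3 rename_indexed.py "$vcd_dir"/trace.vcd
    for vcd in glob.glob(sys.argv[1] + "*"):
        rewrite(vcd)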
59 changes: 59 additions & 0 deletions src/ebmc/ranking-function-synthesis/nuterm/LICENSE.txt
@@ -0,0 +1,59 @@
nuTerm
BSD 2-Clause License

Copyright (c) 2021, Julian Parsert, Mirco Giacobbe, Daniel Kröning
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.

2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.



----------------------------------------------------------------------
nuTerm uses a json library for c++ found in ./deps/traceAgent/json
with the following license:

MIT License

Copyright (c) 2013-2022 Niels Lohmann

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


----------------------------------------------------------------------
The license file for the SV-COMP problems can be found in
benchmarking/termination-suite/sv-comp/termination-crafted-lit-C/LICENSE.txt
13 changes: 13 additions & 0 deletions src/ebmc/ranking-function-synthesis/nuterm/README.md
@@ -0,0 +1,13 @@
# nuTerm

### Requirements
Python packages (a quick import check is sketched after this list):
- torch
- sympy
- matplotlib
- datetime
- pandas
- numpy
- tqdm
- termcolor
- psutil
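A quick way to confirm that the packages above are importable before running nuterm.py (a minimal sketch; note that datetime ships with the Python standard library):

import importlib.util

packages = ["torch", "sympy", "matplotlib", "datetime", "pandas",
            "numpy", "tqdm", "termcolor", "psutil"]
missing = [p for p in packages if importlib.util.find_spec(p) is None]
print("missing packages:", ", ".join(missing) if missing else "none")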
129 changes: 129 additions & 0 deletions src/ebmc/ranking-function-synthesis/nuterm/javachecker.py
@@ -0,0 +1,129 @@


# READ THIS BEFORE REMOVING !!!!!!!!
# PLEASE LEAVE THIS THE WAY IT IS. If you remove this, my workflow and everything gets destroyed. It is maybe not the
# nicest solution, but it works for the time being. If you remove this, one has to work from the terminal and export
# all paths, which I do not think is a nice solution either, as I am not working from the terminal but the IDE.
# PLEASE leave this until we have a nice and robust solution.
import pathlib
import os

from loopheads import get_loop_heads

file_path = str(pathlib.Path(__file__).parent.absolute())
# os.environ['CLASSPATH'] = ':'.join([e.path for e in os.scandir(file_path + '/../libs/')])
from jnius import autoclass, cast

import numpy as np


def check(jar_name, class_name, method_name, offset, ranking_args, ranking_fun):
    #for x in os.environ['CLASSPATH']:
    #    print(x)
    #exit(0)
    RankChecker = autoclass('javachecker.RankChecker')

    URL = autoclass('java.net.URL')
    URLClassLoader = autoclass('java.net.URLClassLoader')
    List = autoclass('java.util.List')

    urls = [URL('jar:file:' + jar_name + '!/')]
    cl = cast("java.lang.ClassLoader", URLClassLoader.newInstance(urls))
    #cl = URLClassLoader.newInstance(urls)

    args = List.of(*ranking_args)
    fun = List.of(*ranking_fun)

    res = RankChecker._check(cl, class_name, method_name, offset, args, fun)

    return bool(res[0]), bool(res[1])


def last_line_offset(jar_name, class_name, method_name, line):

    URL = autoclass('java.net.URL')
    URLClassLoader = autoclass('java.net.URLClassLoader')

    CFGAnalyzer = autoclass('javachecker.CFGAnalyzer')

    urls = [URL('jar:file:' + jar_name + '!/')]
    cl = URLClassLoader.newInstance(urls)

    line_to_offset = CFGAnalyzer.lineToLabelOffset(cl, class_name, method_name)
    return line_to_offset.get(line).last()

def check_sum_of_relu(jar_name, class_name, method_name, ranking_heads,
                      ranking_args, ranking_out, ranking_W, ranking_b):

    #for x in os.environ['LD_LIBRARY_PATH'].split(":"):
    #    print(x)
    RankChecker = autoclass('javachecker.RankChecker')

    URL = autoclass('java.net.URL')
    URLClassLoader = autoclass('java.net.URLClassLoader')
    List = autoclass('java.util.List')
    HashMap = autoclass('java.util.HashMap')

    urls = [URL('jar:file:' + jar_name + '!/')]
    cl = cast("java.lang.ClassLoader", URLClassLoader.newInstance(urls))
    #cl = URLClassLoader.newInstance(urls)

    #print(ranking_W)
    #print(ranking_b)
    #print(ranking_out)

    assert len(ranking_W) == len(ranking_heads)
    assert len(ranking_b) == len(ranking_heads)
    assert len(ranking_out) == len(ranking_heads)
    fun = []
    for W,b in zip(ranking_W, ranking_b):

        assert np.shape(W)[0] == np.shape(b)[0]
        SOR = []
        for i in range(0, np.shape(W)[0]):
            SOR.append([int(x) for x in W[i,:]] + [int(b[i])])
        fun.append(SOR)

    args = List.of(*ranking_args)
    heads = List.of(*ranking_heads)
    out = List.of(*[List.of(*[int(x) for x in W[0,:]]) for W in ranking_out])
    hidden = List.of(*[List.of(*[List.of(*row) for row in relus]) for relus in fun])
    cex = HashMap()

    res = RankChecker._checkLexiReluOrCex2(cl, class_name, method_name, heads, args, out, hidden, cex)

    cexDict = {}
    i = cex.entrySet().iterator()
    while i.hasNext():
        e = i.next()
        (k,v) = e.getKey(),e.getValue()
        cexDict[k] = v

    return bool(res[0]), bool(res[2]), cexDict

if __name__ == '__main__':

    jarfile = "../benchmarking/termination-suite/termination-crafted-lit/terminating/NoriSharmaFSE2013Fig7/NoriSharmaFSE2013Fig7.jar"
    classname = "NoriSharmaFSE2013Fig7"
    methodname = "loop"
    input_vars = ['i', 'j', 'M', 'N', 'a', 'b', 'c']

    loop_heads = get_loop_heads(jarfile, classname, methodname)

    W_to_check = [np.array([[-1., 0., -2., 2., 1., 0., -0.],
                            [-0., -2., 2., -2., 3., 0., -0.],
                            [ 0., 0., 0., 0., 0., 0., 0.],
                            [-2., 1., 3., -0., -2., 0., -0.],
                            [-1., -1., -0., 2., -2., 0., 0.]])]
    b_to_check = [np.array([1., 1., 2., 3., 3.])]
    out_to_check = [np.array([[1., 1., 0., 1., 1.]])]



    decrease, invar, cex = check_sum_of_relu(jarfile, classname, methodname,
                                             loop_heads, input_vars,
                                             out_to_check, W_to_check, b_to_check)

    print("Decreasing: {}".format(decrease))
    print("Invar: {}".format(invar))
    print("Cex: {}".format(cex))
33 changes: 33 additions & 0 deletions src/ebmc/ranking-function-synthesis/nuterm/learning.py
@@ -0,0 +1,33 @@
from torch import optim


def simple_learning(trace, loop_head, Model, loss_func, iterations=10000):
"""
Function for simplest learning algorithm.
"""
input_before, input_after = trace.get_pairing_of_all_traces_as_tensors(loop_head)
print("Training data: {} pairs".format(len(input_after)))

input_dim = len(trace.get_pos_identifiers())
input_vars = trace.get_pos_identifiers(frame=5)

# creating model with info from trace
model = Model(input_dim, input_vars=input_vars)

# Learning
optimiser = optim.AdamW(model.parameters(), lr=.01)
for t in range(iterations):
optimiser.zero_grad()

output_before = model(input_before)
output_after = model(input_after)

loss = loss_func(output_before, output_after)

#print(t, "loss:", loss.item())
if loss == 0.:
break
loss.backward()
optimiser.step()

return model, loss.item()
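The loss_func argument is supplied by the caller. A typical choice for ranking-function learning penalizes every (before, after) pair where the candidate fails to decrease by at least 1; the sketch below is one such loss under that assumption, not necessarily the exact loss used elsewhere in nuterm.

import torch
import torch.nn.functional as F

def decrease_loss(output_before, output_after):
    # relu(f(after) - f(before) + 1): zero iff the model drops by >= 1 on every pair
    return F.relu(output_after - output_before + 1.0).mean()

# A candidate that always decreases by 2 incurs zero loss:
before = torch.tensor([[4.0], [3.0]])
after = torch.tensor([[2.0], [1.0]])
print(decrease_loss(before, after))  # tensor(0.)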
39 changes: 39 additions & 0 deletions src/ebmc/ranking-function-synthesis/nuterm/loopheads.py
@@ -0,0 +1,39 @@
import os
import pathlib

def get_loop_heads(jar, klass, function):
    from jnius import autoclass

    # String = autoclass("java.lang.String")
    # calc_lh = autoclass('main.CalculateLoopHeads')

    # x = calc_lh.calculateLoopHeadsHelper(String(jar), String(klass), String(function))

    URL = autoclass('java.net.URL')
    URLClassLoader = autoclass('java.net.URLClassLoader')

    #print()
    #for x in os.environ['CLASSPATH'].split(":"):
    #    print(x)
    #print()
    CFGAnalyzer = autoclass('javachecker.CFGAnalyzer')

    urls = [URL('jar:file:' + jar + '!/')]
    cl = URLClassLoader.newInstance(urls)
    heads = CFGAnalyzer.loopHeaders(cl, klass, function)
    print("Done")

    return heads.toArray()


if __name__ == '__main__':
    from jnius import autoclass

    os.environ['CLASSPATH'] = str(pathlib.Path(__file__).parent.absolute()) + "/../libs/CalculateLoopHeads.jar"

    String = autoclass("java.lang.String")
    calc_lh = autoclass('main.CalculateLoopHeads')

    exit(0)
    x = get_loop_heads("./examples/GCD/GCD.jar", "classes.GCD", "gcd")
    print(x)
@@ -0,0 +1,77 @@
import torch
import torch.nn as nn
import torch.nn.functional as F
import sympy as sp


class SumOfReluWithBias(nn.Module):
    def __init__(self, n_in, n_out=1, n_summands=2, input_vars=None):
        super(SumOfReluWithBias, self).__init__()

        self.n_summands = n_summands
        self.n_in = n_in
        self.n_out = n_out

        if input_vars is not None:
            self.input_vars = input_vars

        self.fc1 = nn.Linear(n_in, n_summands * n_out, bias=True)
        self.out = []
        for i in range(0, n_out):
            row = torch.ones(n_summands)
            for j in range(0, i):
                row = torch.cat((torch.zeros(n_summands), row), 0)
            for j in range(i + 1, n_out):
                row = torch.cat((row, torch.zeros(n_summands)), 0)
            self.out.append(row)

        self.out = torch.stack(self.out, dim=0)

    def forward(self, state):
        layer = self.fc1(state)
        layer = F.relu(layer)
        layer = F.linear(layer, self.out)
        return layer

    def get_weights(self):
        return [self.fc1.weight.data.numpy()]

    def get_bias(self):
        return [self.fc1.bias.data.numpy()]

    def get_round_weights(self):
        return self.fc1.weight.data.numpy().round()

    def get_round_bias(self):
        return self.fc1.bias.data.numpy().round()

    def sympy_expr(self):
        relu = sp.Function('relu')

        expr = sp.Matrix(sp.symbols(self.input_vars, real=True))

        print("##########")
        #print( self.fc1.bias.data.numpy())

        expr1 = self.fc1.weight.data.numpy() * expr + self.fc1.bias.data.numpy()[:, None]
        expr1 = expr1.applyfunc(relu)
        return expr1

    def sympy_rounded_expr(self):
        relu = sp.Function('relu')

        expr = sp.Matrix(sp.symbols(self.input_vars, real=True))
        expr1 = self.fc1.weight.data.numpy().round() * expr + self.fc1.bias.data.numpy().round()[:, None]
        expr1 = expr1.applyfunc(relu)
        return expr1

    def print_sympy(self):
        expr1 = self.sympy_expr()

        for e in expr1:
            print(e)

        print("After rounding:")
        expr = self.sympy_rounded_expr()
        for e in expr:
            print(e)
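A brief usage sketch of SumOfReluWithBias (assuming the class above is in scope; the sizes and variable names are made up): two trace variables, one output component and two ReLU summands per output.

import torch

model = SumOfReluWithBias(2, n_out=1, n_summands=2, input_vars=['x', 'y'])
states = torch.tensor([[1.0, 2.0], [3.0, 0.0]])   # a batch of two states
print(model(states).shape)                        # torch.Size([2, 1])
print(model.get_round_weights())                  # rounded hidden-layer weights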
