diff --git a/soot-infoflow-cmd/src/soot/jimple/infoflow/cmd/MainClass.java b/soot-infoflow-cmd/src/soot/jimple/infoflow/cmd/MainClass.java index eb170c977..ff7a84437 100644 --- a/soot-infoflow-cmd/src/soot/jimple/infoflow/cmd/MainClass.java +++ b/soot-infoflow-cmd/src/soot/jimple/infoflow/cmd/MainClass.java @@ -108,6 +108,7 @@ public class MainClass { private static final String OPTION_MAX_CALLBACKS_COMPONENT = "mc"; private static final String OPTION_MAX_CALLBACKS_DEPTH = "md"; private static final String OPTION_PATH_SPECIFIC_RESULTS = "ps"; + private static final String OPTION_MAX_THREAD_NUMBER = "mt"; // Inter-component communication private static final String OPTION_ICC_MODEL = "im"; @@ -126,6 +127,7 @@ public class MainClass { private static final String OPTION_IMPLICIT_FLOW_MODE = "i"; private static final String OPTION_STATIC_FLOW_TRACKING_MODE = "sf"; private static final String OPTION_DATA_FLOW_DIRECTION = "dir"; + private static final String OPTION_GC_SLEEP_TIME = "st"; // Evaluation-specific options private static final String OPTION_ANALYZE_FRAMEWORKS = "ff"; @@ -193,7 +195,8 @@ private void initializeCommandLineOptions() { "Compute the taint propagation paths and not just source-to-sink connections. 
This is a shorthand notation for -pr fast."); options.addOption(OPTION_LOG_SOURCES_SINKS, "logsourcesandsinks", false, "Write the discovered sources and sinks to the log output"); - options.addOption("mt", "maxthreadnum", true, "Limit the maximum number of threads to the given value"); + options.addOption(OPTION_MAX_THREAD_NUMBER, "maxthreadnum", true, + "Limit the maximum number of threads to the given value"); options.addOption(OPTION_ONE_COMPONENT, "onecomponentatatime", false, "Analyze one Android component at a time"); options.addOption(OPTION_ONE_SOURCE, "onesourceatatime", false, "Analyze one source at a time"); @@ -241,6 +244,8 @@ private void initializeCommandLineOptions() { "Use the specified mode when tracking static data flows (CONTEXTFLOWSENSITIVE, CONTEXTFLOWINSENSITIVE, NONE)"); options.addOption(OPTION_DATA_FLOW_DIRECTION, "direction", true, "Specifies the direction of the infoflow analysis (FORWARDS, BACKWARDS)"); + options.addOption(OPTION_GC_SLEEP_TIME, "gcsleeptime", true, + "Specifies the sleep time for path edge collectors in seconds"); // Evaluation-specific options options.addOption(OPTION_ANALYZE_FRAMEWORKS, "analyzeframeworks", false, @@ -588,6 +593,8 @@ else if (solver.equalsIgnoreCase("FLOWINSENSITIVE")) return DataFlowSolver.FlowInsensitive; else if (solver.equalsIgnoreCase("GC")) return DataFlowSolver.GarbageCollecting; + else if (solver.equalsIgnoreCase("FPC")) + return DataFlowSolver.FineGrainedGC; else { System.err.println(String.format("Invalid data flow solver: %s", solver)); throw new AbortAnalysisException(); @@ -790,6 +797,12 @@ private void parseCommandLineOptions(CommandLine cmd, InfoflowAndroidConfigurati if (maxDepth != null) config.getCallbackConfig().setMaxAnalysisCallbackDepth(maxDepth); } + { + Integer maxthreadnum = getIntOption(cmd, OPTION_MAX_THREAD_NUMBER); + if (maxthreadnum != null) { + config.setMaxThreadNum(maxthreadnum); + } + } // Inter-component communication if (cmd.hasOption(OPTION_ICC_NO_PURIFY)) @@ -887,6 
+900,13 @@ private void parseCommandLineOptions(CommandLine cmd, InfoflowAndroidConfigurati config.getCallbackConfig().setCallbacksFile(callgraphFile); } } + + { + Integer sleepTime = getIntOption(cmd, OPTION_GC_SLEEP_TIME); + if (sleepTime != null) { + config.getSolverConfiguration().setSleepTime(sleepTime); + } + } } private Integer getIntOption(CommandLine cmd, String option) { diff --git a/soot-infoflow/src/soot/jimple/infoflow/AbstractInfoflow.java b/soot-infoflow/src/soot/jimple/infoflow/AbstractInfoflow.java index 627af607e..929b0dd83 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/AbstractInfoflow.java +++ b/soot-infoflow/src/soot/jimple/infoflow/AbstractInfoflow.java @@ -18,6 +18,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import heros.solver.Pair; import soot.FastHierarchy; import soot.G; import soot.MethodOrMethodContext; @@ -704,7 +705,13 @@ public Thread newThread(Runnable r) { manager = initializeInfoflowManager(sourcesSinks, iCfg, globalTaintManager); // Create the solver peer group - solverPeerGroup = new GCSolverPeerGroup(); + switch (manager.getConfig().getSolverConfiguration().getDataFlowSolver()) { + case FineGrainedGC: + solverPeerGroup = new GCSolverPeerGroup>(); + break; + default: + solverPeerGroup = new GCSolverPeerGroup(); + } // Initialize the alias analysis Abstraction zeroValue = Abstraction.getZeroAbstraction(manager.getConfig().getFlowSensitiveAliasing()); @@ -1238,10 +1245,18 @@ protected IInfoflowSolver createDataFlowSolver(InterruptableExecutor executor, A return new soot.jimple.infoflow.solver.fastSolver.flowInsensitive.InfoflowSolver(problem, executor); case GarbageCollecting: logger.info("Using garbage-collecting solver"); - IInfoflowSolver solver = new soot.jimple.infoflow.solver.gcSolver.InfoflowSolver(problem, executor); + IInfoflowSolver solver = new soot.jimple.infoflow.solver.gcSolver.InfoflowSolver(problem, executor, + solverConfig.getSleepTime()); solverPeerGroup.addSolver(solver); 
solver.setPeerGroup(solverPeerGroup); return solver; + case FineGrainedGC: + logger.info("Using fine-grained garbage-collecting solver"); + IInfoflowSolver fgSolver = new soot.jimple.infoflow.solver.gcSolver.fpc.InfoflowSolver(problem, executor, + solverConfig.getSleepTime()); + solverPeerGroup.addSolver(fgSolver); + fgSolver.setPeerGroup(solverPeerGroup); + return fgSolver; default: throw new RuntimeException("Unsupported data flow solver"); } diff --git a/soot-infoflow/src/soot/jimple/infoflow/InfoflowConfiguration.java b/soot-infoflow/src/soot/jimple/infoflow/InfoflowConfiguration.java index 1673b422e..aa8c573c9 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/InfoflowConfiguration.java +++ b/soot-infoflow/src/soot/jimple/infoflow/InfoflowConfiguration.java @@ -133,7 +133,12 @@ public static enum DataFlowSolver { /** * Use the garbage-collecting solver */ - GarbageCollecting + GarbageCollecting, + + /** + * Use the fine-grained GC solver + * */ + FineGrainedGC, } public static enum DataFlowDirection { @@ -971,6 +976,7 @@ public static class SolverConfiguration { private int maxJoinPointAbstractions = 10; private int maxCalleesPerCallSite = 75; private int maxAbstractionPathLength = 100; + private int sleepTime = 1; /** * Copies the settings of the given configuration into this configuration object @@ -1084,6 +1090,24 @@ public void setMaxAbstractionPathLength(int maxAbstractionPathLength) { this.maxAbstractionPathLength = maxAbstractionPathLength; } + /** + * Sets the sleep time of garbage colletors + * + * @param sleeptime The interval in second for the path edge collection + */ + public void setSleepTime(int sleeptime) { + this.sleepTime = sleeptime; + } + + /** + * Gets the sleep time of garbage colletors + * + * @return The interval in second for the path edge collection + */ + public int getSleepTime() { + return this.sleepTime; + } + @Override public int hashCode() { final int prime = 31; diff --git 
a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractGarbageCollector.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractGarbageCollector.java index 6f86b8037..c4f285178 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractGarbageCollector.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractGarbageCollector.java @@ -11,15 +11,15 @@ * @author Steven Arzt * */ -public abstract class AbstractGarbageCollector implements IGarbageCollector { +public abstract class AbstractGarbageCollector implements IGarbageCollector { protected final BiDiInterproceduralCFG icfg; - protected final IGCReferenceProvider referenceProvider; - protected final ConcurrentHashMultiMap> jumpFunctions; + protected final IGCReferenceProvider referenceProvider; + protected final ConcurrentHashMultiMap> jumpFunctions; public AbstractGarbageCollector(BiDiInterproceduralCFG icfg, - ConcurrentHashMultiMap> jumpFunctions, - IGCReferenceProvider referenceProvider) { + ConcurrentHashMultiMap> jumpFunctions, + IGCReferenceProvider referenceProvider) { this.icfg = icfg; this.referenceProvider = referenceProvider; this.jumpFunctions = jumpFunctions; @@ -27,7 +27,7 @@ public AbstractGarbageCollector(BiDiInterproceduralCFG icfg, } public AbstractGarbageCollector(BiDiInterproceduralCFG icfg, - ConcurrentHashMultiMap> jumpFunctions) { + ConcurrentHashMultiMap> jumpFunctions) { this.icfg = icfg; this.referenceProvider = createReferenceProvider(); this.jumpFunctions = jumpFunctions; @@ -46,8 +46,10 @@ protected void initialize() { * * @return The new reference provider */ - protected IGCReferenceProvider createReferenceProvider() { - return new OnDemandReferenceProvider<>(icfg); + protected abstract IGCReferenceProvider createReferenceProvider(); + + protected long getRemainingPathEdgeCount() { + return jumpFunctions.values().size(); } } diff --git 
a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractReferenceCountingGarbageCollector.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractReferenceCountingGarbageCollector.java index f81de7b4c..70acdc3f7 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractReferenceCountingGarbageCollector.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractReferenceCountingGarbageCollector.java @@ -18,17 +18,17 @@ * @author Steven Arzt * */ -public abstract class AbstractReferenceCountingGarbageCollector extends AbstractGarbageCollector - implements IGarbageCollectorPeer { - - private ConcurrentCountingMap jumpFnCounter = new ConcurrentCountingMap<>(); - private final Set gcScheduleSet = new ConcurrentHashSet<>(); - private final AtomicInteger gcedMethods = new AtomicInteger(); - private final AtomicInteger gcedEdges = new AtomicInteger(); - private final ExtendedAtomicInteger edgeCounterForThreshold = new ExtendedAtomicInteger(); - private GarbageCollectionTrigger trigger = GarbageCollectionTrigger.Immediate; - private GarbageCollectorPeerGroup peerGroup = null; - private boolean checkChangeCounter = false; +public abstract class AbstractReferenceCountingGarbageCollector extends AbstractGarbageCollector + implements IGarbageCollectorPeer { + + protected ConcurrentCountingMap jumpFnCounter = new ConcurrentCountingMap<>(); + protected final Set gcScheduleSet = new ConcurrentHashSet<>(); + protected final AtomicInteger gcedAbstractions = new AtomicInteger(); + protected final AtomicInteger gcedEdges = new AtomicInteger(); + protected final ExtendedAtomicInteger edgeCounterForThreshold = new ExtendedAtomicInteger(); + protected GarbageCollectionTrigger trigger = GarbageCollectionTrigger.Immediate; + protected GarbageCollectorPeerGroup peerGroup = null; + protected boolean checkChangeCounter = false; protected boolean validateEdges = false; protected Set> oldEdges = new HashSet<>(); @@ -44,21 +44,23 @@ public 
abstract class AbstractReferenceCountingGarbageCollector extends Ab protected int edgeThreshold = 0; public AbstractReferenceCountingGarbageCollector(BiDiInterproceduralCFG icfg, - ConcurrentHashMultiMap> jumpFunctions, - IGCReferenceProvider referenceProvider) { + ConcurrentHashMultiMap> jumpFunctions, + IGCReferenceProvider referenceProvider) { super(icfg, jumpFunctions, referenceProvider); } public AbstractReferenceCountingGarbageCollector(BiDiInterproceduralCFG icfg, - ConcurrentHashMultiMap> jumpFunctions) { + ConcurrentHashMultiMap> jumpFunctions) { super(icfg, jumpFunctions); } + protected abstract A genAbstraction(PathEdge edge); + @Override public void notifyEdgeSchedule(PathEdge edge) { - SootMethod sm = icfg.getMethodOf(edge.getTarget()); - jumpFnCounter.increment(sm); - gcScheduleSet.add(sm); + A abstraction = genAbstraction(edge); + jumpFnCounter.increment(abstraction); + gcScheduleSet.add(abstraction); if (trigger == GarbageCollectionTrigger.EdgeThreshold) edgeCounterForThreshold.incrementAndGet(); @@ -70,42 +72,8 @@ public void notifyEdgeSchedule(PathEdge edge) { @Override public void notifyTaskProcessed(PathEdge edge) { - jumpFnCounter.decrement(icfg.getMethodOf(edge.getTarget())); - } - - /** - * Checks whether the given method has any open dependencies that prevent its - * jump functions from being garbage collected - * - * @param method The method to check - * @param referenceCounter The counter that keeps track of active references to - * taint abstractions - * @return True it the method has active dependencies and thus cannot be - * garbage-collected, false otherwise - */ - private boolean hasActiveDependencies(SootMethod method, ConcurrentCountingMap referenceCounter) { - int changeCounter = -1; - do { - // Update the change counter for the next round - changeCounter = referenceCounter.getChangeCounter(); - - // Check the method itself - if (referenceCounter.get(method) > 0) - return true; - - // Check the transitive callees - Set references = 
referenceProvider.getMethodReferences(method, null); - for (SootMethod ref : references) { - if (referenceCounter.get(ref) > 0) - return true; - } - } while (checkChangeCounter && changeCounter != referenceCounter.getChangeCounter()); - return false; - } - - @Override - public boolean hasActiveDependencies(SootMethod method) { - return hasActiveDependencies(method, jumpFnCounter); + A abstraction = genAbstraction(edge); + jumpFnCounter.decrement(abstraction); } /** @@ -120,18 +88,17 @@ protected void gcImmediate() { // Perform the garbage collection if required if (gc) { - int tempMethods = 0; onBeforeRemoveEdges(); - for (SootMethod sm : gcScheduleSet) { + for (A abst : gcScheduleSet) { // Is it safe to remove this method? if (peerGroup != null) { - if (peerGroup.hasActiveDependencies(sm)) + if (peerGroup.hasActiveDependencies(abst)) continue; - } else if (hasActiveDependencies(sm)) + } else if (hasActiveDependencies(abst)) continue; // Get stats for the stuff we are about to remove - Set> oldFunctions = jumpFunctions.get(sm); + Set> oldFunctions = jumpFunctions.get(abst); if (oldFunctions != null) { int gcedSize = oldFunctions.size(); gcedEdges.addAndGet(gcedSize); @@ -142,15 +109,14 @@ protected void gcImmediate() { // First unregister the method, then delete the edges. In case some other thread // concurrently schedules a new edge, the method gets back into the GC work list // this way. 
- gcScheduleSet.remove(sm); - if (jumpFunctions.remove(sm)) { - gcedMethods.incrementAndGet(); - tempMethods++; + gcScheduleSet.remove(abst); + if (jumpFunctions.remove(abst)) { + gcedAbstractions.incrementAndGet(); if (validateEdges) oldEdges.addAll(oldFunctions); } } - onAfterRemoveEdges(tempMethods); + onAfterRemoveEdges(); } } } @@ -168,12 +134,12 @@ protected void onBeforeRemoveEdges() { * * @param gcedMethods The number of methods for which edges have been removed */ - protected void onAfterRemoveEdges(int gcedMethods) { + protected void onAfterRemoveEdges() { } @Override - public int getGcedMethods() { - return gcedMethods.get(); + public int getGcedAbstractions() { + return gcedAbstractions.get(); } @Override @@ -219,7 +185,7 @@ public void setTrigger(GarbageCollectionTrigger trigger) { * * @param peerGroup The peer group */ - public void setPeerGroup(GarbageCollectorPeerGroup peerGroup) { + public void setPeerGroup(GarbageCollectorPeerGroup peerGroup) { this.peerGroup = peerGroup; peerGroup.addGarbageCollector(this); } diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractReferenceProvider.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractReferenceProvider.java index b5718b360..1422d2432 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractReferenceProvider.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AbstractReferenceProvider.java @@ -14,7 +14,7 @@ * @author Steven Arzt * */ -public abstract class AbstractReferenceProvider implements IGCReferenceProvider { +public abstract class AbstractReferenceProvider implements IGCReferenceProvider { protected final BiDiInterproceduralCFG icfg; diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AggressiveGarbageCollector.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AggressiveGarbageCollector.java index 20a9afed0..344a54e69 100644 --- 
a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AggressiveGarbageCollector.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AggressiveGarbageCollector.java @@ -17,7 +17,7 @@ * @author Steven Arzt * */ -public class AggressiveGarbageCollector extends AbstractGarbageCollector { +public class AggressiveGarbageCollector extends AbstractGarbageCollector { private final AtomicInteger gcedMethods = new AtomicInteger(); @@ -55,7 +55,7 @@ public void gc() { } @Override - public int getGcedMethods() { + public int getGcedAbstractions() { return gcedMethods.get(); } @@ -65,6 +65,11 @@ public int getGcedEdges() { return 0; } + @Override + protected IGCReferenceProvider createReferenceProvider() { + return new OnDemandReferenceProvider<>(icfg); + } + /** * Sets the number of methods for which edges must have been added before * garbage collection is started diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AheadOfTimeReferenceProvider.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AheadOfTimeReferenceProvider.java index 0f2fb1a52..83ed56c51 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AheadOfTimeReferenceProvider.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/AheadOfTimeReferenceProvider.java @@ -5,7 +5,6 @@ import soot.Scene; import soot.SootClass; import soot.SootMethod; -import soot.jimple.infoflow.solver.fastSolver.FastSolverLinkedNode; import soot.jimple.toolkits.ide.icfg.BiDiInterproceduralCFG; import soot.util.HashMultiMap; import soot.util.MultiMap; @@ -18,7 +17,7 @@ * * @author Steven Arzt */ -public class AheadOfTimeReferenceProvider extends AbstractReferenceProvider { +public class AheadOfTimeReferenceProvider extends AbstractReferenceProvider { private final MultiMap methodToCallees = new HashMultiMap<>(); @@ -33,7 +32,7 @@ public AheadOfTimeReferenceProvider(BiDiInterproceduralCFG icfg) } @Override - public Set getMethodReferences(SootMethod method, 
FastSolverLinkedNode context) { + public Set getAbstractionReferences(SootMethod method) { return methodToCallees.get(method); } diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/DefaultGarbageCollector.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/DefaultGarbageCollector.java index 0248144af..aa37880af 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/DefaultGarbageCollector.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/DefaultGarbageCollector.java @@ -11,7 +11,7 @@ * @author Steven Arzt * */ -public class DefaultGarbageCollector extends AbstractReferenceCountingGarbageCollector { +public class DefaultGarbageCollector extends MethodLevelReferenceCountingGarbageCollector { public DefaultGarbageCollector(BiDiInterproceduralCFG icfg, ConcurrentHashMultiMap> jumpFunctions) { @@ -20,7 +20,7 @@ public DefaultGarbageCollector(BiDiInterproceduralCFG icfg, public DefaultGarbageCollector(BiDiInterproceduralCFG icfg, ConcurrentHashMultiMap> jumpFunctions, - IGCReferenceProvider referenceProvider) { + IGCReferenceProvider referenceProvider) { super(icfg, jumpFunctions, referenceProvider); } diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/GCSolverPeerGroup.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/GCSolverPeerGroup.java index b22c98ca4..f8f6d4624 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/GCSolverPeerGroup.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/GCSolverPeerGroup.java @@ -8,9 +8,9 @@ * @author Steven Arzt * */ -public class GCSolverPeerGroup extends SolverPeerGroup { +public class GCSolverPeerGroup extends SolverPeerGroup { - private GarbageCollectorPeerGroup gcPeerGroup = null; + private GarbageCollectorPeerGroup gcPeerGroup = null; public GCSolverPeerGroup() { } @@ -20,9 +20,9 @@ public GCSolverPeerGroup() { * * @return The garbage collector peer group */ - public GarbageCollectorPeerGroup getGCPeerGroup() { + 
public GarbageCollectorPeerGroup getGCPeerGroup() { if (gcPeerGroup == null) - gcPeerGroup = new GarbageCollectorPeerGroup(); + gcPeerGroup = new GarbageCollectorPeerGroup<>(); return gcPeerGroup; } diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/GarbageCollectorPeerGroup.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/GarbageCollectorPeerGroup.java index 45ce234d7..3be104ab7 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/GarbageCollectorPeerGroup.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/GarbageCollectorPeerGroup.java @@ -1,9 +1,7 @@ package soot.jimple.infoflow.solver.gcSolver; +import java.util.ArrayList; import java.util.Collection; -import java.util.HashSet; - -import soot.SootMethod; /** * Set of multiple garbage collectors that share a set of active dependencies @@ -11,33 +9,40 @@ * @author Steven Arzt * */ -public class GarbageCollectorPeerGroup implements IGarbageCollectorPeer { +public class GarbageCollectorPeerGroup implements IGarbageCollectorPeer { - private final Collection peers; + private final Collection> peers; public GarbageCollectorPeerGroup() { - this.peers = new HashSet<>(); + this.peers = new ArrayList<>(); } - public GarbageCollectorPeerGroup(Collection peers) { + public GarbageCollectorPeerGroup(Collection> peers) { this.peers = peers; } @Override - public boolean hasActiveDependencies(SootMethod method) { - for (IGarbageCollectorPeer peer : peers) { - if (peer.hasActiveDependencies(method)) + public boolean hasActiveDependencies(A abstraction) { + for (IGarbageCollectorPeer peer : peers) { + if (peer.hasActiveDependencies(abstraction)) return true; } return false; } + @Override + public void notifySolverTerminated() { + for(IGarbageCollectorPeer peer : peers) { + peer.notifySolverTerminated(); + } + } + /** * Adds a garbage collector to this peer group * * @param peer The garbage collector to add */ - public void addGarbageCollector(IGarbageCollectorPeer peer) { 
+ public void addGarbageCollector(IGarbageCollectorPeer peer) { this.peers.add(peer); } diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IFDSSolver.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IFDSSolver.java index c62232099..fc7f35b8a 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IFDSSolver.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IFDSSolver.java @@ -134,12 +134,14 @@ public class IFDSSolver, I extends BiDiI protected SolverPeerGroup solverPeerGroup; + protected int sleepTime = 1; + /** * Creates a solver for the given problem, which caches flow functions and edge * functions. The solver must then be started by calling {@link #solve()}. */ - public IFDSSolver(IFDSTabulationProblem tabulationProblem) { - this(tabulationProblem, DEFAULT_CACHE_BUILDER); + public IFDSSolver(IFDSTabulationProblem tabulationProblem, int sleepTime) { + this(tabulationProblem, DEFAULT_CACHE_BUILDER, sleepTime); } /** @@ -153,7 +155,7 @@ public IFDSSolver(IFDSTabulationProblem tabulationProblem) * for flow functions. 
*/ public IFDSSolver(IFDSTabulationProblem tabulationProblem, - @SuppressWarnings("rawtypes") CacheBuilder flowFunctionCacheBuilder) { + @SuppressWarnings("rawtypes") CacheBuilder flowFunctionCacheBuilder, int sleepTime) { if (logger.isDebugEnabled()) flowFunctionCacheBuilder = flowFunctionCacheBuilder.recordStats(); this.zeroValue = tabulationProblem.zeroValue(); @@ -167,6 +169,7 @@ public IFDSSolver(IFDSTabulationProblem tabulationProblem, } else { ffCache = null; } + this.sleepTime = sleepTime; this.flowFunctions = flowFunctions; this.initialSeeds = tabulationProblem.initialSeeds(); this.followReturnsPastSeeds = tabulationProblem.followReturnsPastSeeds(); @@ -185,7 +188,9 @@ protected IGarbageCollector createGarbageCollector() { // DefaultGarbageCollector gc = new DefaultGarbageCollector<>(icfg, jumpFunctions); ThreadedGarbageCollector gc = new ThreadedGarbageCollector<>(icfg, jumpFunctions); - GCSolverPeerGroup gcSolverGroup = (GCSolverPeerGroup) solverPeerGroup; + gc.setSleepTimeSeconds(sleepTime); + @SuppressWarnings("unchecked") + GCSolverPeerGroup gcSolverGroup = (GCSolverPeerGroup) solverPeerGroup; gc.setPeerGroup(gcSolverGroup.getGCPeerGroup()); return garbageCollector = gc; } @@ -215,8 +220,21 @@ public void solve() { for (IMemoryBoundedSolverStatusNotification listener : notificationListeners) listener.notifySolverTerminated(this); - logger.info(String.format("GC removed abstractions for %d methods", garbageCollector.getGcedMethods())); - this.garbageCollector.notifySolverTerminated(); + logger.info(String.format("GC removed abstractions for %d methods", garbageCollector.getGcedAbstractions())); + logger.info(String.format("GC removed abstractions for %d edges", garbageCollector.getGcedEdges())); + if (garbageCollector instanceof ThreadedGarbageCollector) { + ThreadedGarbageCollector threadedgc =(ThreadedGarbageCollector) garbageCollector; + int fwEndSumCnt = 0; + for(Map, EndSummary> map: this.endSummary.values()) { + fwEndSumCnt += map.size(); + } + 
int bwEndSumCnt = 0; + logger.info(String.format("forward end Summary size: %d", fwEndSumCnt)); + logger.info(String.format("Recorded Maximum Path edges count is %d", threadedgc.getMaxPathEdgeCount())); + } + @SuppressWarnings("unchecked") + GCSolverPeerGroup gcSolverGroup = (GCSolverPeerGroup) solverPeerGroup; + gcSolverGroup.getGCPeerGroup().notifySolverTerminated(); } /** @@ -634,10 +652,8 @@ protected void propagate(D sourceVal, N target, D targetVal, if (maxAbstractionPathLength >= 0 && targetVal.getPathLength() > maxAbstractionPathLength) return; - D activeVal = targetVal.getActiveCopy(); - final PathEdge activeEdge = new PathEdge(sourceVal, target, activeVal); final PathEdge edge = new PathEdge<>(sourceVal, target, targetVal); - final D existingVal = addFunction(activeEdge); + final D existingVal = addFunction(edge); if (existingVal != null) { if (existingVal != targetVal) { // Check whether we need to retain this abstraction diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGCReferenceProvider.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGCReferenceProvider.java index c46a3d50a..0626d5daf 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGCReferenceProvider.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGCReferenceProvider.java @@ -2,27 +2,23 @@ import java.util.Set; -import soot.SootMethod; -import soot.jimple.infoflow.solver.fastSolver.FastSolverLinkedNode; - /** * Interface for all implementations that can provide reference counting. These - * classes answer the following question: Given a method X, in which methods can + * classes answer the following question: Given an abstraction X, in which abstractions can * the solver transitively spawn new analysis tasks starting from X? 
* * @author Steven Arzt * */ -public interface IGCReferenceProvider { +public interface IGCReferenceProvider { /** - * Given a method and a context, gets the set of methods that in which the + * Given an abstraction, gets the set of abstractions that in which the * solver can transitively spawn new analysis tasks * - * @param method - * @param context + * @param abstraction * @return */ - public Set getMethodReferences(SootMethod method, FastSolverLinkedNode context); + public Set getAbstractionReferences(A abstraction); } diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGarbageCollector.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGarbageCollector.java index 1e1dc7497..17c3b5961 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGarbageCollector.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGarbageCollector.java @@ -37,7 +37,7 @@ public interface IGarbageCollector { * @return The number of methods for which taint abstractions were removed * during garbage collection */ - public int getGcedMethods(); + public int getGcedAbstractions(); /** * Gets the number of taint abstractions that were removed during garbage diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGarbageCollectorPeer.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGarbageCollectorPeer.java index be0f0ce38..a74b12631 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGarbageCollectorPeer.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/IGarbageCollectorPeer.java @@ -1,24 +1,23 @@ package soot.jimple.infoflow.solver.gcSolver; -import soot.SootMethod; - /** * A garbage collector that can operate as part of a peer group * * @author Steven Arzt * */ -public interface IGarbageCollectorPeer { +public interface IGarbageCollectorPeer { /** - * Checks whether the given method has any open dependencies in any of the + * Checks whether the given abstraction has any open 
dependencies in any of the * solvers that are members of this peer group that prevent its jump functions * from being garbage collected * - * @param method The method to check - * @return True it the method has active dependencies and thus cannot be + * @param abstraction The abstraction to check + * @return True if the abstraction has active dependencies and thus cannot be * garbage-collected, false otherwise */ - public boolean hasActiveDependencies(SootMethod method); + public boolean hasActiveDependencies(A abstraction); + public void notifySolverTerminated(); } diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/InfoflowSolver.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/InfoflowSolver.java index cabdd3a03..52dedbc29 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/InfoflowSolver.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/InfoflowSolver.java @@ -42,8 +42,8 @@ public class InfoflowSolver extends IFDSSolver + extends AbstractReferenceCountingGarbageCollector { + public MethodLevelReferenceCountingGarbageCollector(BiDiInterproceduralCFG icfg, + ConcurrentHashMultiMap> jumpFunctions, + IGCReferenceProvider referenceProvider) { + super(icfg, jumpFunctions, referenceProvider); + } + + public MethodLevelReferenceCountingGarbageCollector(BiDiInterproceduralCFG icfg, + ConcurrentHashMultiMap> jumpFunctions) { + super(icfg, jumpFunctions); + } + + /** + * Checks whether the given method has any open dependencies that prevent its + * jump functions from being garbage collected + * + * @param method The method to check + * @param referenceCounter The counter that keeps track of active references to + * taint abstractions + * @return True it the method has active dependencies and thus cannot be + * garbage-collected, false otherwise + */ + private boolean hasActiveDependencies(SootMethod method, ConcurrentCountingMap referenceCounter) { + int changeCounter = -1; + do { + // Update the change counter 
for the next round + changeCounter = referenceCounter.getChangeCounter(); + + // Check the method itself + if (referenceCounter.get(method) > 0) + return true; + + // Check the transitive callees + Set references = referenceProvider.getAbstractionReferences(method); + for (SootMethod ref : references) { + if (referenceCounter.get(ref) > 0) + return true; + } + } while (checkChangeCounter && changeCounter != referenceCounter.getChangeCounter()); + return false; + } + + @Override + public boolean hasActiveDependencies(SootMethod method) { + return hasActiveDependencies(method, jumpFnCounter); + } + + @Override + protected SootMethod genAbstraction(PathEdge edge) { + return icfg.getMethodOf(edge.getTarget()); + } + + @Override + public void gc() { + + } + + @Override + public void notifySolverTerminated() { + + } + + @Override + protected IGCReferenceProvider createReferenceProvider() { + return new OnDemandReferenceProvider<>(icfg); + } +} diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/NullGarbageCollector.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/NullGarbageCollector.java index 232089160..34aaf1c8c 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/NullGarbageCollector.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/NullGarbageCollector.java @@ -1,6 +1,7 @@ package soot.jimple.infoflow.solver.gcSolver; import heros.solver.PathEdge; +import soot.SootMethod; /** * Mock implementation for a garbage collector that does nothing @@ -26,7 +27,7 @@ public void gc() { } @Override - public int getGcedMethods() { + public int getGcedAbstractions() { return 0; } @@ -39,4 +40,8 @@ public int getGcedEdges() { public void notifySolverTerminated() { } + public void setPeerGroup(GarbageCollectorPeerGroup peerGroup) { + // do nothing. 
+ } + } diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/OnDemandReferenceProvider.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/OnDemandReferenceProvider.java index 6fe109254..0df86f65f 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/OnDemandReferenceProvider.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/OnDemandReferenceProvider.java @@ -8,7 +8,6 @@ import heros.SynchronizedBy; import heros.solver.IDESolver; import soot.SootMethod; -import soot.jimple.infoflow.solver.fastSolver.FastSolverLinkedNode; import soot.jimple.toolkits.ide.icfg.BiDiInterproceduralCFG; /** @@ -20,7 +19,7 @@ * @param * @param */ -public class OnDemandReferenceProvider extends AbstractReferenceProvider { +public class OnDemandReferenceProvider extends AbstractReferenceProvider { @SynchronizedBy("by use of synchronized LoadingCache class") protected final LoadingCache> methodToReferences = IDESolver.DEFAULT_CACHE_BUILDER @@ -38,7 +37,7 @@ public OnDemandReferenceProvider(BiDiInterproceduralCFG icfg) { } @Override - public Set getMethodReferences(SootMethod method, FastSolverLinkedNode context) { + public Set getAbstractionReferences(SootMethod method) { return methodToReferences.getUnchecked(method); } diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/ThreadedGarbageCollector.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/ThreadedGarbageCollector.java index 1bbb388e5..0cb149e68 100644 --- a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/ThreadedGarbageCollector.java +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/ThreadedGarbageCollector.java @@ -1,5 +1,8 @@ package soot.jimple.infoflow.solver.gcSolver; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import heros.solver.PathEdge; import soot.SootMethod; import soot.jimple.toolkits.ide.icfg.BiDiInterproceduralCFG; @@ -13,7 +16,9 @@ * @param * @param */ -public class ThreadedGarbageCollector extends 
AbstractReferenceCountingGarbageCollector { +public class ThreadedGarbageCollector extends MethodLevelReferenceCountingGarbageCollector { + + protected static final Logger logger = LoggerFactory.getLogger(ThreadedGarbageCollector.class); private class GCThread extends Thread { @@ -49,12 +54,15 @@ public void finish() { } - private int sleepTimeSeconds = 10; + private int sleepTimeSeconds = 1; + private int maxPathEdgeCount = 0; + private int maxMemoryConsumption = 0; + private GCThread gcThread; public ThreadedGarbageCollector(BiDiInterproceduralCFG icfg, ConcurrentHashMultiMap> jumpFunctions, - IGCReferenceProvider referenceProvider) { + IGCReferenceProvider referenceProvider) { super(icfg, jumpFunctions, referenceProvider); } @@ -79,6 +87,13 @@ public void gc() { @Override public void notifySolverTerminated() { + gcImmediate(); + + logger.info(String.format("GC removes %d abstractions", getGcedAbstractions())); + logger.info(String.format("GC removes %d path edges", getGcedEdges())); + logger.info(String.format("Remaining Path edges count is %d", getRemainingPathEdgeCount())); + logger.info(String.format("Recorded Maximum Path edges count is %d", getMaxPathEdgeCount())); + logger.info(String.format("Recorded Maximum memory consumption is %d", getMaxMemoryConsumption())); gcThread.finish(); } @@ -91,4 +106,27 @@ public void setSleepTimeSeconds(int sleepTimeSeconds) { this.sleepTimeSeconds = sleepTimeSeconds; } + private int getUsedMemory() { + Runtime runtime = Runtime.getRuntime(); + return (int) Math.round((runtime.totalMemory() - runtime.freeMemory()) / 1E6); + } + + public long getMaxPathEdgeCount() { + return this.maxPathEdgeCount; + } + + public int getMaxMemoryConsumption() { + return this.maxMemoryConsumption; + } + + @Override + protected void onAfterRemoveEdges() { + int pec = 0; + for(Integer i : jumpFnCounter.values()) { + pec += i; + } + this.maxPathEdgeCount = Math.max(this.maxPathEdgeCount, pec); + this.maxMemoryConsumption = 
Math.max(this.maxMemoryConsumption, getUsedMemory()); + } + } diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/AbstrationDependencyGraph.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/AbstrationDependencyGraph.java new file mode 100644 index 000000000..a6f2a0277 --- /dev/null +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/AbstrationDependencyGraph.java @@ -0,0 +1,100 @@ +package soot.jimple.infoflow.solver.gcSolver.fpc; + +import heros.solver.Pair; +import soot.SootMethod; +import soot.jimple.infoflow.collect.ConcurrentHashSet; + +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.locks.ReentrantLock; + +public class AbstrationDependencyGraph implements IGraph> { + private final ReentrantLock lock = new ReentrantLock(); + private final Set> nodes = new ConcurrentHashSet<>(); + private final Map, Set>> succMap = new ConcurrentHashMap<>(); + private final Map, Set>> predMap = new ConcurrentHashMap<>(); + + @Override + public Set> getNodes() { + return nodes; + } + + @Override + public Set> succsOf(Pair node) { + return succMap.getOrDefault(node, Collections.emptySet()); + } + + @Override + public Set> predsOf(Pair node) { + return predMap.getOrDefault(node, Collections.emptySet()); + } + + @Override + public void addNode(Pair node) { + nodes.add(node); + } + + @Override + public void addEdge(Pair n1, Pair n2) { + addNode(n1); + addNode(n2); + succMap.computeIfAbsent(n1, k -> new ConcurrentHashSet<>()).add(n2); + predMap.computeIfAbsent(n2, k -> new ConcurrentHashSet<>()).add(n1); + } + + @Override + public boolean contains(Pair node) { + return nodes.contains(node); + } + + @Override + public void removeEdge(Pair n1, Pair n2) { + succsOf(n1).remove(n2); + predsOf(n2).remove(n1); + } + + @Override + public void remove(Pair node) { + nodes.remove(node); + for (Pair pred : predsOf(node)) { + removeEdge(pred, node); + } + for (Pair succ : succsOf(node)) { + 
removeEdge(node, succ); + } + } + + public void lock() { + lock.lock(); + } + + public void unlock() { + if (lock.isHeldByCurrentThread()) { + lock.unlock(); + } + } + + public int nodeSize() { + return this.nodes.size(); + } + + public int edgeSize() { + int ret = 0; + for (Set> vs : succMap.values()) { + ret += vs.size(); + } + return ret; + } + + public Set> reachableClosure(Pair source) { + final Set> visited = new ConcurrentHashSet<>(); + final Deque> stack = new ArrayDeque<>(); + stack.push(source); + while (!stack.isEmpty()) { + final Pair node = stack.pop(); + visited.add(node); + succsOf(node).stream().filter(n -> !visited.contains(n)).forEach(stack::push); + } + return visited; + } +} diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/AggressiveGarbageCollector.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/AggressiveGarbageCollector.java new file mode 100644 index 000000000..290503446 --- /dev/null +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/AggressiveGarbageCollector.java @@ -0,0 +1,41 @@ +package soot.jimple.infoflow.solver.gcSolver.fpc; + +import heros.solver.Pair; +import heros.solver.PathEdge; +import soot.SootMethod; +import soot.jimple.infoflow.solver.gcSolver.IGCReferenceProvider; +import soot.jimple.toolkits.ide.icfg.BiDiInterproceduralCFG; +import soot.util.ConcurrentHashMultiMap; + +public class AggressiveGarbageCollector extends FineGrainedReferenceCountingGarbageCollector { + public AggressiveGarbageCollector(BiDiInterproceduralCFG icfg, + ConcurrentHashMultiMap, PathEdge> jumpFunctions, + IGCReferenceProvider> referenceProvider) { + super(icfg, jumpFunctions, referenceProvider); + } + + public AggressiveGarbageCollector(BiDiInterproceduralCFG icfg, + ConcurrentHashMultiMap, PathEdge> jumpFunctions) { + super(icfg, jumpFunctions); + } + + @Override + protected IGCReferenceProvider> createReferenceProvider() { + return null; + } + + @Override + public boolean 
hasActiveDependencies(Pair abstraction) { + int changeCounter = -1; + do { + // Update the change counter for the next round + changeCounter = jumpFnCounter.getChangeCounter(); + + // Check the method itself + if (jumpFnCounter.get(abstraction) > 0) + return true; + + } while (checkChangeCounter && changeCounter != jumpFnCounter.getChangeCounter()); + return false; + } +} diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/FineGrainedReferenceCountingGarbageCollector.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/FineGrainedReferenceCountingGarbageCollector.java new file mode 100644 index 000000000..d4e541382 --- /dev/null +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/FineGrainedReferenceCountingGarbageCollector.java @@ -0,0 +1,131 @@ +package soot.jimple.infoflow.solver.gcSolver.fpc; + +import heros.solver.Pair; +import heros.solver.PathEdge; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import soot.SootMethod; +import soot.jimple.infoflow.solver.gcSolver.AbstractReferenceCountingGarbageCollector; +import soot.jimple.infoflow.solver.gcSolver.IGCReferenceProvider; +import soot.jimple.toolkits.ide.icfg.BiDiInterproceduralCFG; +import soot.util.ConcurrentHashMultiMap; + +public abstract class FineGrainedReferenceCountingGarbageCollector + extends AbstractReferenceCountingGarbageCollector> { + protected static final Logger logger = LoggerFactory.getLogger(FineGrainedReferenceCountingGarbageCollector.class); + + public FineGrainedReferenceCountingGarbageCollector(BiDiInterproceduralCFG icfg, + ConcurrentHashMultiMap, PathEdge> jumpFunctions, + IGCReferenceProvider> referenceProvider) { + super(icfg, jumpFunctions, referenceProvider); + } + + public FineGrainedReferenceCountingGarbageCollector(BiDiInterproceduralCFG icfg, + ConcurrentHashMultiMap, PathEdge> jumpFunctions) { + super(icfg, jumpFunctions); + } + + private class GCThread extends Thread { + + private boolean finished = false; + + public 
GCThread() { + setName("Fine-grained aggressive IFDS Garbage Collector"); + } + + @Override + public void run() { + while (!finished) { + gcImmediate(); + + if (sleepTimeSeconds > 0) { + try { + Thread.sleep(sleepTimeSeconds * 1000); + } catch (InterruptedException e) { + break; + } + } + } + } + + /** + * Notifies the thread to finish its current garbage collection and then + * terminate + */ + public void finish() { + finished = true; + interrupt(); + } + + } + + protected int sleepTimeSeconds = 1; + protected int maxPathEdgeCount = 0; + protected int maxMemoryConsumption = 0; + + protected GCThread gcThread; + + @Override + protected void initialize() { + super.initialize(); + + // Start the garbage collection thread + gcThread = new GCThread(); + gcThread.start(); + } + + @Override + public void gc() { + // nothing to do here + } + + @Override + public void notifySolverTerminated() { + gcImmediate(); + + logger.info(String.format("GC removes %d abstractions", getGcedAbstractions())); + logger.info(String.format("GC removes %d path edges", getGcedEdges())); + logger.info(String.format("Remaining Path edges count is %d", getRemainingPathEdgeCount())); + logger.info(String.format("Recorded Maximum Path edges count is %d", getMaxPathEdgeCount())); + logger.info(String.format("Recorded Maximum memory consumption is %d", getMaxMemoryConsumption())); + gcThread.finish(); + } + + /** + * Sets the time to wait between garbage collection cycles in seconds + * + * @param sleepTimeSeconds The time to wait between GC cycles in seconds + */ + public void setSleepTimeSeconds(int sleepTimeSeconds) { + this.sleepTimeSeconds = sleepTimeSeconds; + } + + private int getUsedMemory() { + Runtime runtime = Runtime.getRuntime(); + return (int) Math.round((runtime.totalMemory() - runtime.freeMemory()) / 1E6); + } + + public long getMaxPathEdgeCount() { + return this.maxPathEdgeCount; + } + + public int getMaxMemoryConsumption() { + return this.maxMemoryConsumption; + } + + @Override + 
protected void onAfterRemoveEdges() { + int pec = 0; + for (Integer i : jumpFnCounter.values()) { + pec += i; + } + this.maxPathEdgeCount = Math.max(this.maxPathEdgeCount, pec); + this.maxMemoryConsumption = Math.max(this.maxMemoryConsumption, getUsedMemory()); + } + + @Override + protected Pair genAbstraction(PathEdge edge) { + SootMethod method = icfg.getMethodOf(edge.getTarget()); + return new Pair<>(method, edge.factAtSource()); + } +} diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/IFDSSolver.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/IFDSSolver.java new file mode 100644 index 000000000..36156863e --- /dev/null +++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/IFDSSolver.java @@ -0,0 +1,977 @@ +/******************************************************************************* + * Copyright (c) 2012 Eric Bodden. + * Copyright (c) 2013 Tata Consultancy Services & Ecole Polytechnique de Montreal + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the GNU Lesser Public License v2.1 + * which accompanies this distribution, and is available at + * http://www.gnu.org/licenses/old-licenses/gpl-2.0.html + * + * Contributors: + * Eric Bodden - initial API and implementation + * Marc-Andre Laverdiere-Papineau - Fixed race condition + * Steven Arzt - Created FastSolver implementation + ******************************************************************************/ +package soot.jimple.infoflow.solver.gcSolver.fpc; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.LongAdder; +import java.util.function.Consumer; + +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.cache.CacheBuilder; + +import heros.DontSynchronize; +import heros.FlowFunction; +import heros.FlowFunctionCache; +import heros.FlowFunctions; +import heros.IFDSTabulationProblem; +import heros.SynchronizedBy; +import heros.ZeroedFlowFunctions; +import heros.solver.Pair; +import heros.solver.PathEdge; +import soot.SootMethod; +import soot.Unit; +import soot.jimple.infoflow.collect.MyConcurrentHashMap; +import soot.jimple.infoflow.memory.IMemoryBoundedSolver; +import soot.jimple.infoflow.memory.ISolverTerminationReason; +import soot.jimple.infoflow.solver.AbstractIFDSSolver; +import soot.jimple.infoflow.solver.EndSummary; +import soot.jimple.infoflow.solver.SolverPeerGroup; +import soot.jimple.infoflow.solver.executors.InterruptableExecutor; +import soot.jimple.infoflow.solver.executors.SetPoolExecutor; +import soot.jimple.infoflow.solver.fastSolver.FastSolverLinkedNode; +import soot.jimple.infoflow.solver.gcSolver.GCSolverPeerGroup; +import soot.jimple.infoflow.solver.gcSolver.IGarbageCollector; +import soot.jimple.infoflow.solver.memory.IMemoryManager; +import soot.jimple.toolkits.ide.icfg.BiDiInterproceduralCFG; +import soot.util.ConcurrentHashMultiMap; + +/** + * A solver for an {@link IFDSTabulationProblem}. This solver is not based on + * the IDESolver implementation in Heros for performance reasons. + * + * @param The type of nodes in the interprocedural control-flow graph. + * Typically {@link Unit}. + * @param The type of data-flow facts to be computed by the tabulation + * problem. + * @param The type of inter-procedural control-flow graph being used. 
+ * @see IFDSTabulationProblem + */ +public class IFDSSolver, I extends BiDiInterproceduralCFG> + extends AbstractIFDSSolver implements IMemoryBoundedSolver { + + public static CacheBuilder DEFAULT_CACHE_BUILDER = CacheBuilder.newBuilder() + .concurrencyLevel(Runtime.getRuntime().availableProcessors()).initialCapacity(10000).softValues(); + + protected static final Logger logger = LoggerFactory.getLogger(IFDSSolver.class); + + // enable with -Dorg.slf4j.simpleLogger.defaultLogLevel=trace + public static final boolean DEBUG = logger.isDebugEnabled(); + + protected InterruptableExecutor executor; + + @DontSynchronize("only used by single thread") + protected int numThreads; + + @SynchronizedBy("thread safe data structure, consistent locking when used") + protected ConcurrentHashMultiMap, PathEdge> jumpFunctions = new ConcurrentHashMultiMap<>(); + + @SynchronizedBy("thread safe data structure") + protected volatile IGarbageCollector garbageCollector; + + @SynchronizedBy("thread safe data structure, only modified internally") + protected final I icfg; + + // stores summaries that were queried before they were computed + // see CC 2010 paper by Naeem, Lhotak and Rodriguez + @SynchronizedBy("consistent lock on 'incoming'") + protected final MyConcurrentHashMap, Map, EndSummary>> endSummary = new MyConcurrentHashMap<>(); + + // edges going along calls + // see CC 2010 paper by Naeem, Lhotak and Rodriguez + @SynchronizedBy("consistent lock on field") + protected final ConcurrentHashMultiMap, IncomingRecord> incoming = new ConcurrentHashMultiMap<>(); + + @DontSynchronize("stateless") + protected final FlowFunctions flowFunctions; + + @DontSynchronize("only used by single thread") + protected final Map> initialSeeds; + + @SynchronizedBy("thread safe data structure") + public LongAdder propagationCount = new LongAdder(); + + @DontSynchronize("stateless") + protected final D zeroValue; + + @DontSynchronize("readOnly") + protected final FlowFunctionCache ffCache; + + 
@DontSynchronize("readOnly") + protected final boolean followReturnsPastSeeds; + + @DontSynchronize("readOnly") + private int maxJoinPointAbstractions = -1; + + @DontSynchronize("readOnly") + protected IMemoryManager memoryManager = null; + + protected boolean solverId; + + private Set notificationListeners = new HashSet<>(); + private ISolverTerminationReason killFlag = null; + + private int maxCalleesPerCallSite = 75; + private int maxAbstractionPathLength = 100; + + protected SolverPeerGroup solverPeerGroup; + + protected AbstrationDependencyGraph abstDependencyGraph; + protected int sleepTime = 1; + + /** + * Creates a solver for the given problem, which caches flow functions and edge + * functions. The solver must then be started by calling {@link #solve()}. + */ + public IFDSSolver(IFDSTabulationProblem tabulationProblem, int sleepTime) { + this(tabulationProblem, DEFAULT_CACHE_BUILDER); + this.sleepTime = sleepTime; + } + + /** + * Creates a solver for the given problem, constructing caches with the given + * {@link CacheBuilder}. The solver must then be started by calling + * {@link #solve()}. + * + * @param tabulationProblem The tabulation problem to solve + * @param flowFunctionCacheBuilder A valid {@link CacheBuilder} or + * null if no caching is to be used + * for flow functions. + */ + public IFDSSolver(IFDSTabulationProblem tabulationProblem, + @SuppressWarnings("rawtypes") CacheBuilder flowFunctionCacheBuilder) { + if (logger.isDebugEnabled()) + flowFunctionCacheBuilder = flowFunctionCacheBuilder.recordStats(); + this.zeroValue = tabulationProblem.zeroValue(); + this.icfg = tabulationProblem.interproceduralCFG(); + FlowFunctions flowFunctions = tabulationProblem.autoAddZero() + ? 
new ZeroedFlowFunctions(tabulationProblem.flowFunctions(), zeroValue) + : tabulationProblem.flowFunctions(); + if (flowFunctionCacheBuilder != null) { + ffCache = new FlowFunctionCache(flowFunctions, flowFunctionCacheBuilder); + flowFunctions = ffCache; + } else { + ffCache = null; + } + this.flowFunctions = flowFunctions; + this.initialSeeds = tabulationProblem.initialSeeds(); + this.followReturnsPastSeeds = tabulationProblem.followReturnsPastSeeds(); + this.numThreads = Math.max(1, tabulationProblem.numThreads()); + this.executor = getExecutor(); + } + + /** + * Factory method for creating an instance of the garbage collector + * + * @return The new garbage collector + */ + protected IGarbageCollector createGarbageCollector() { + if (garbageCollector != null) + return garbageCollector; + // NullGarbageCollector gc = new NullGarbageCollector<>(); + // AggressiveGarbageCollector gc = new AggressiveGarbageCollector<>(icfg, + // jumpFunctions); + abstDependencyGraph = new AbstrationDependencyGraph<>(); + NormalGarbageCollector gc = new NormalGarbageCollector<>(icfg, jumpFunctions, endSummary, + abstDependencyGraph); + gc.setSleepTimeSeconds(sleepTime); + logger.info("sleep time is {}", sleepTime); + @SuppressWarnings("unchecked") + GCSolverPeerGroup> gcSolverGroup = (GCSolverPeerGroup>) solverPeerGroup; + gc.setPeerGroup(gcSolverGroup.getGCPeerGroup()); + return garbageCollector = gc; + } + + public void setSolverId(boolean solverId) { + this.solverId = solverId; + } + + /** + * Runs the solver on the configured problem. This can take some time. 
+ */ + public void solve() { + reset(); + + // Make sure that we have an instance of the garbage collector + if (this.garbageCollector == null) + this.garbageCollector = createGarbageCollector(); + + // Notify the listeners that the solver has been started + for (IMemoryBoundedSolverStatusNotification listener : notificationListeners) + listener.notifySolverStarted(this); + + submitInitialSeeds(); + awaitCompletionComputeValuesAndShutdown(); + + // Notify the listeners that the solver has been terminated + for (IMemoryBoundedSolverStatusNotification listener : notificationListeners) + listener.notifySolverTerminated(this); + + @SuppressWarnings("unchecked") + GCSolverPeerGroup> gcSolverGroup = (GCSolverPeerGroup>) solverPeerGroup; + gcSolverGroup.getGCPeerGroup().notifySolverTerminated(); + } + + /** + * Schedules the processing of initial seeds, initiating the analysis. Clients + * should only call this methods if performing synchronization on their own. + * Normally, {@link #solve()} should be called instead. + */ + protected void submitInitialSeeds() { + for (Entry> seed : initialSeeds.entrySet()) { + N startPoint = seed.getKey(); + for (D val : seed.getValue()) + propagate(zeroValue, startPoint, val, null, false, null); + addFunction(new PathEdge(zeroValue, startPoint, zeroValue)); + } + } + + /** + * Awaits the completion of the exploded super graph. When complete, computes + * result values, shuts down the executor and returns. 
+ */ + protected void awaitCompletionComputeValuesAndShutdown() { + { + // run executor and await termination of tasks + runExecutorAndAwaitCompletion(); + } + if (logger.isDebugEnabled()) + printStats(); + + // ask executor to shut down; + // this will cause new submissions to the executor to be rejected, + // but at this point all tasks should have completed anyway + executor.shutdown(); + + // Wait for the executor to be really gone + while (!executor.isTerminated()) { + try { + Thread.sleep(100); + } catch (InterruptedException e) { + // silently ignore the exception, it's not an issue if the + // thread gets aborted + } + } + } + + /** + * Runs execution, re-throwing exceptions that might be thrown during its + * execution. + */ + private void runExecutorAndAwaitCompletion() { + try { + executor.awaitCompletion(); + } catch (InterruptedException e) { + e.printStackTrace(); + } + Throwable exception = executor.getException(); + if (exception != null) { + throw new RuntimeException("There were exceptions during IFDS analysis. Exiting.", exception); + } + } + + /** + * Dispatch the processing of a given edge. It may be executed in a different + * thread. + * + * @param newSelfLoop indicate that this path edge is a self-loop edge like -->. + * @param edge the edge to process + * @param orgSrc used for building abstraction dependency graph. + */ + protected void scheduleEdgeProcessing(boolean newSelfLoop, PathEdge edge, Pair orgSrc) { + // If the executor has been killed, there is little point + // in submitting new tasks + if (killFlag != null || executor.isTerminating() || executor.isTerminated()) + return; + + // this condition is used to avoid the second limitation of CleanDroid. 
+ if (newSelfLoop) { + SootMethod sm = icfg.getMethodOf(edge.getTarget()); + Pair abst = new Pair<>(sm, edge.factAtSource()); + Map, EndSummary> map = new MyConcurrentHashMap<>(); + Map, EndSummary> sumMap = endSummary.putIfAbsentElseGet(abst, map); + if (map != sumMap) { // already exists. + return; + } + if (garbageCollector instanceof NormalGarbageCollector && orgSrc != null) { + try { + abstDependencyGraph.lock(); + abstDependencyGraph.addEdge(orgSrc, abst); + } finally { + abstDependencyGraph.unlock(); + } + } + } + garbageCollector.notifyEdgeSchedule(edge); + executor.execute(new PathEdgeProcessingTask(edge, solverId)); + propagationCount.increment(); + garbageCollector.gc(); + } + + /** + * Lines 13-20 of the algorithm; processing a call site in the caller's context. + * + * For each possible callee, registers incoming call edges. Also propagates + * call-to-return flows and summarized callee flows within the caller. + * + * @param edge an edge whose target node resembles a method call + */ + private void processCall(PathEdge edge) { + final D d1 = edge.factAtSource(); + final N n = edge.getTarget(); // a call node; line 14... 
+ final D d2 = edge.factAtTarget(); + assert d2 != null; + Collection returnSiteNs = icfg.getReturnSitesOfCallAt(n); + + // for each possible callee + Collection callees = icfg.getCalleesOfCallAt(n); + if (maxCalleesPerCallSite < 0 || callees.size() <= maxCalleesPerCallSite) { + callees.stream().filter(m -> m.isConcrete()).forEach(new Consumer() { + + @Override + public void accept(SootMethod sCalledProcN) { + // Early termination check + if (killFlag != null) + return; + + // compute the call-flow function + FlowFunction function = flowFunctions.getCallFlowFunction(n, sCalledProcN); + Set res = computeCallFlowFunction(function, d1, d2); + + if (res != null && !res.isEmpty()) { + Collection startPointsOf = icfg.getStartPointsOf(sCalledProcN); + // for each result node of the call-flow function + for (D d3 : res) { + if (memoryManager != null) + d3 = memoryManager.handleGeneratedMemoryObject(d2, d3); + if (d3 == null) + continue; + + // register the fact that has an incoming edge from + // + // line 15.1 of Naeem/Lhotak/Rodriguez + if (!addIncoming(sCalledProcN, d3, n, d1, d2)) + continue; + + // If we already have a summary, we take that summary instead of propagating + // through the callee again + if (applyEndSummaryOnCall(d1, n, d2, returnSiteNs, sCalledProcN, d3)) + continue; + + // for each callee's start point(s) + for (N sP : startPointsOf) { + // create initial self-loop + propagate(d3, sP, d3, n, false, new Pair<>(icfg.getMethodOf(n), d1)); // line 15 + } + } + } + } + + }); + } + + // line 17-19 of Naeem/Lhotak/Rodriguez + // process intra-procedural flows along call-to-return flow functions + for (N returnSiteN : returnSiteNs) { + FlowFunction callToReturnFlowFunction = flowFunctions.getCallToReturnFlowFunction(n, returnSiteN); + Set res = computeCallToReturnFlowFunction(callToReturnFlowFunction, d1, d2); + if (res != null && !res.isEmpty()) { + for (D d3 : res) { + if (memoryManager != null) + d3 = memoryManager.handleGeneratedMemoryObject(d2, d3); + if 
(d3 != null) + propagate(d1, returnSiteN, d3, n, false, null); + } + } + } + } + + protected boolean applyEndSummaryOnCall(final D d1, final N n, final D d2, Collection returnSiteNs, + SootMethod sCalledProcN, D d3) { + // line 15.2 + Set> endSumm = endSummary(sCalledProcN, d3); + + // still line 15.2 of Naeem/Lhotak/Rodriguez + // for each already-queried exit value reachable + // from , create new caller-side jump functions to + // the return sites because we have observed a potentially + // new incoming edge into + if (endSumm != null && !endSumm.isEmpty()) { + for (EndSummary entry : endSumm) { + N eP = entry.eP; + D d4 = entry.d4; + + // We must acknowledge the incoming abstraction from the other path + entry.calleeD1.addNeighbor(d3); + // for each return site + for (N retSiteN : returnSiteNs) { + // compute return-flow function + FlowFunction retFunction = flowFunctions.getReturnFlowFunction(n, sCalledProcN, eP, retSiteN); + Set retFlowRes = computeReturnFlowFunction(retFunction, d3, d4, n, Collections.singleton(d1)); + if (retFlowRes != null && !retFlowRes.isEmpty()) { + // for each target value of the function + for (D d5 : retFlowRes) { + if (memoryManager != null) + d5 = memoryManager.handleGeneratedMemoryObject(d4, d5); + + // If we have not changed anything in + // the callee, we do not need the facts from + // there. Even if we change something: + // If we don't need the concrete path, + // we can skip the callee in the predecessor + // chain + D d5p = shortenPredecessors(d5, d2, d3, eP, n); + propagate(d1, retSiteN, d5p, n, false, null); + } + } + } + } + return true; + } + return false; + } + + /** + * Computes the call flow function for the given call-site abstraction + * + * @param callFlowFunction The call flow function to compute + * @param d1 The abstraction at the current method's start node. 
+ * @param d2 The abstraction at the call site + * @return The set of caller-side abstractions at the callee's start node + */ + protected Set computeCallFlowFunction(FlowFunction callFlowFunction, D d1, D d2) { + return callFlowFunction.computeTargets(d2); + } + + /** + * Computes the call-to-return flow function for the given call-site abstraction + * + * @param callToReturnFlowFunction The call-to-return flow function to compute + * @param d1 The abstraction at the current method's start + * node. + * @param d2 The abstraction at the call site + * @return The set of caller-side abstractions at the return site + */ + protected Set computeCallToReturnFlowFunction(FlowFunction callToReturnFlowFunction, D d1, D d2) { + return callToReturnFlowFunction.computeTargets(d2); + } + + /** + * Lines 21-32 of the algorithm. + * + * Stores callee-side summaries. Also, at the side of the caller, propagates + * intra-procedural flows to return sites using those newly computed summaries. + * + * @param edge an edge whose target node resembles a method exits + */ + protected void processExit(PathEdge edge) { + final N n = edge.getTarget(); // an exit node; line 21... 
+ SootMethod methodThatNeedsSummary = icfg.getMethodOf(n); + + final D d1 = edge.factAtSource(); + final D d2 = edge.factAtTarget(); + + // for each of the method's start points, determine incoming calls + + // line 21.1 of Naeem/Lhotak/Rodriguez + // register end-summary + if (!addEndSummary(methodThatNeedsSummary, d1, n, d2)) + return; + Set> inc = incoming(d1, methodThatNeedsSummary); + + // for each incoming call edge already processed + // (see processCall(..)) + if (inc != null && !inc.isEmpty()) { + for (IncomingRecord entry : inc) { + // Early termination check + if (killFlag != null) + return; + + // line 22 + N c = entry.n; + Set callerSideDs = Collections.singleton(entry.d1); + // for each return site + for (N retSiteC : icfg.getReturnSitesOfCallAt(c)) { + // compute return-flow function + FlowFunction retFunction = flowFunctions.getReturnFlowFunction(c, methodThatNeedsSummary, n, + retSiteC); + Set targets = computeReturnFlowFunction(retFunction, d1, d2, c, callerSideDs); + // for each incoming-call value + if (targets != null && !targets.isEmpty()) { + final D d4 = entry.d1; + final D predVal = entry.d2; + + for (D d5 : targets) { + if (memoryManager != null) + d5 = memoryManager.handleGeneratedMemoryObject(d2, d5); + if (d5 == null) + continue; + + // If we have not changed anything in the callee, we do not need the facts from + // there. 
Even if we change something: If we don't need the concrete path, we + // can skip the callee in the predecessor chain + D d5p = shortenPredecessors(d5, predVal, d1, n, c); + propagate(d4, retSiteC, d5p, c, false, null); + + // Make sure all of the incoming edges are registered with the edge from the new + // summary + d1.addNeighbor(entry.d3); + } + } + } + } + } + + // handling for unbalanced problems where we return out of a method with + // a fact for which we have no incoming flow + // note: we propagate that way only values that originate from ZERO, as + // conditionally generated values should only be propagated into callers that + // have an incoming edge for this condition + if (followReturnsPastSeeds && d1 == zeroValue && (inc == null || inc.isEmpty())) { + Collection callers = icfg.getCallersOf(methodThatNeedsSummary); + for (N c : callers) { + for (N retSiteC : icfg.getReturnSitesOfCallAt(c)) { + FlowFunction retFunction = flowFunctions.getReturnFlowFunction(c, methodThatNeedsSummary, n, + retSiteC); + Set targets = computeReturnFlowFunction(retFunction, d1, d2, c, + Collections.singleton(zeroValue)); + if (targets != null && !targets.isEmpty()) { + for (D d5 : targets) { + if (memoryManager != null) + d5 = memoryManager.handleGeneratedMemoryObject(d2, d5); + if (d5 != null) + propagate(zeroValue, retSiteC, d5, c, true, null); + } + } + } + } + // in cases where there are no callers, the return statement would + // normally not be processed at all; this might be undesirable if the flow + // function has a side effect such as registering a taint; instead we thus call + // the return flow function will a null caller + if (callers.isEmpty()) { + FlowFunction retFunction = flowFunctions.getReturnFlowFunction(null, methodThatNeedsSummary, n, + null); + retFunction.computeTargets(d2); + } + } + } + + /** + * Computes the return flow function for the given set of caller-side + * abstractions. 
+	 *
+	 * @param retFunction  The return flow function to compute
+	 * @param d1           The abstraction at the beginning of the callee
+	 * @param d2           The abstraction at the exit node in the callee
+	 * @param callSite     The call site
+	 * @param callerSideDs The abstractions at the call site
+	 * @return The set of caller-side abstractions at the return site
+	 */
+	protected Set<D> computeReturnFlowFunction(FlowFunction<D> retFunction, D d1, D d2, N callSite,
+			Collection<D> callerSideDs) {
+		return retFunction.computeTargets(d2);
+	}
+
+	/**
+	 * Lines 33-37 of the algorithm. Simply propagate normal, intra-procedural
+	 * flows.
+	 *
+	 * @param edge The path edge whose target is a normal (non-call, non-exit)
+	 *             statement
+	 */
+	private void processNormalFlow(PathEdge<N, D> edge) {
+		final D d1 = edge.factAtSource();
+		final N n = edge.getTarget();
+		final D d2 = edge.factAtTarget();
+
+		for (N m : icfg.getSuccsOf(n)) {
+			// Early termination check
+			if (killFlag != null)
+				return;
+
+			// Compute the flow function
+			FlowFunction<D> flowFunction = flowFunctions.getNormalFlowFunction(n, m);
+			Set<D> res = computeNormalFlowFunction(flowFunction, d1, d2);
+			if (res != null && !res.isEmpty()) {
+				for (D d3 : res) {
+					if (memoryManager != null && d2 != d3)
+						d3 = memoryManager.handleGeneratedMemoryObject(d2, d3);
+					if (d3 != null)
+						propagate(d1, m, d3, null, false, null);
+				}
+			}
+		}
+	}
+
+	/**
+	 * Computes the normal flow function for the given set of start and end
+	 * abstractions.
+	 *
+	 * @param flowFunction The normal flow function to compute
+	 * @param d1           The abstraction at the method's start node
+	 * @param d2           The abstraction at the current node
+	 * @return The set of abstractions at the successor node
+	 */
+	protected Set<D> computeNormalFlowFunction(FlowFunction<D> flowFunction, D d1, D d2) {
+		return flowFunction.computeTargets(d2);
+	}
+
+	/**
+	 * Propagates the flow further down the exploded super graph.
+	 *
+	 * @param sourceVal          the source value of the propagated summary edge
+	 * @param target             the target statement
+	 * @param targetVal          the target value at the target statement
+	 * @param relatedCallSite    for call and return flows the related call
+	 *                           statement, null otherwise (this value
+	 *                           is not used within this implementation but may be
+	 *                           useful for subclasses of
+	 *                           {@link soot.jimple.infoflow.solver.gcSolver.IFDSSolver})
+	 * @param isUnbalancedReturn true if this edge is propagating an
+	 *                           unbalanced return (this value is not used within
+	 *                           this implementation but may be useful for
+	 *                           subclasses of
+	 *                           {@link soot.jimple.infoflow.solver.gcSolver.IFDSSolver})
+	 * @param orgSrc             extended for building abstraction dependency graph.
+	 */
+	protected void propagate(D sourceVal, N target, D targetVal,
+			/* deliberately exposed to clients */ N relatedCallSite,
+			/* deliberately exposed to clients */ boolean isUnbalancedReturn,
+			Pair<SootMethod, D> orgSrc) {
+		// Let the memory manager run
+		if (memoryManager != null) {
+			sourceVal = memoryManager.handleMemoryObject(sourceVal);
+			targetVal = memoryManager.handleMemoryObject(targetVal);
+			if (targetVal == null)
+				return;
+		}
+
+		// Check the path length
+		if (maxAbstractionPathLength >= 0 && targetVal.getPathLength() > maxAbstractionPathLength)
+			return;
+
+		final PathEdge<N, D> edge = new PathEdge<>(sourceVal, target, targetVal);
+		final D existingVal = addFunction(edge);
+		if (existingVal != null) {
+			if (existingVal != targetVal) {
+				// Check whether we need to retain this abstraction
+				boolean isEssential;
+				if (memoryManager == null)
+					isEssential = relatedCallSite != null && icfg.isCallStmt(relatedCallSite);
+				else
+					isEssential = memoryManager.isEssentialJoinPoint(targetVal, relatedCallSite);
+
+				if (maxJoinPointAbstractions < 0 || existingVal.getNeighborCount() < maxJoinPointAbstractions
+						|| isEssential) {
+					existingVal.addNeighbor(targetVal);
+				}
+			}
+		} else {
+			boolean isSelfLoopEdge = sourceVal == targetVal &&
icfg.isStartPoint(target);
+			scheduleEdgeProcessing(isSelfLoopEdge, edge, orgSrc);
+		}
+	}
+
+	/**
+	 * Records a jump function. The source statement is implicit.
+	 *
+	 * @see PathEdge
+	 */
+	public D addFunction(PathEdge<N, D> edge) {
+		SootMethod method = icfg.getMethodOf(edge.getTarget());
+		PathEdge<N, D> oldEdge = jumpFunctions.putIfAbsent(new Pair<>(method, edge.factAtSource()), edge);
+		return oldEdge == null ? null : oldEdge.factAtTarget();
+	}
+
+	protected Set<EndSummary<N, D>> endSummary(SootMethod m, D d3) {
+		Map<EndSummary<N, D>, EndSummary<N, D>> map = endSummary.get(new Pair<>(m, d3));
+		return map == null ? null : map.keySet();
+	}
+
+	private boolean addEndSummary(SootMethod m, D d1, N eP, D d2) {
+		if (d1 == zeroValue)
+			return true;
+
+		Map<EndSummary<N, D>, EndSummary<N, D>> summaries = endSummary.putIfAbsentElseGet(new Pair<>(m, d1),
+				() -> new ConcurrentHashMap<>());
+		EndSummary<N, D> newSummary = new EndSummary<>(eP, d2, d1);
+		EndSummary<N, D> existingSummary = summaries.putIfAbsent(newSummary, newSummary);
+		if (existingSummary != null) {
+			existingSummary.calleeD1.addNeighbor(d2);
+			return false;
+		}
+		return true;
+	}
+
+	protected static class IncomingRecord<N, D extends FastSolverLinkedNode<D, N>> {
+
+		public final N n;
+		public final D d1;
+		public final D d2;
+		public final D d3;
+
+		public IncomingRecord(N n, D d1, D d2, D d3) {
+			this.n = n;
+			this.d1 = d1;
+			this.d2 = d2;
+			this.d3 = d3;
+		}
+
+		@Override
+		public int hashCode() {
+			final int prime = 31;
+			int result = 1;
+			result = prime * result + ((d1 == null) ? 0 : d1.hashCode());
+			result = prime * result + ((d2 == null) ? 0 : d2.hashCode());
+			result = prime * result + ((d3 == null) ? 0 : d3.hashCode());
+			result = prime * result + ((n == null) ? 0 : n.hashCode());
+			return result;
+		}
+
+		@Override
+		public boolean equals(Object obj) {
+			if (this == obj)
+				return true;
+			if (obj == null)
+				return false;
+			if (getClass() != obj.getClass())
+				return false;
+			IncomingRecord other = (IncomingRecord) obj;
+			if (d1 == null) {
+				if (other.d1 != null)
+					return false;
+			} else if (!d1.equals(other.d1))
+				return false;
+			if (d2 == null) {
+				if (other.d2 != null)
+					return false;
+			} else if (!d2.equals(other.d2))
+				return false;
+			if (d3 == null) {
+				if (other.d3 != null)
+					return false;
+			} else if (!d3.equals(other.d3))
+				return false;
+			if (n == null) {
+				if (other.n != null)
+					return false;
+			} else if (!n.equals(other.n))
+				return false;
+			return true;
+		}
+
+	}
+
+	protected Set<IncomingRecord<N, D>> incoming(D d1, SootMethod m) {
+		Set<IncomingRecord<N, D>> inc = incoming.get(new Pair<>(m, d1));
+		return inc;
+	}
+
+	protected boolean addIncoming(SootMethod m, D d3, N n, D d1, D d2) {
+		IncomingRecord<N, D> newRecord = new IncomingRecord<>(n, d1, d2, d3);
+		IncomingRecord<N, D> rec = incoming.putIfAbsent(new Pair<>(m, d3), newRecord);
+		return rec == null;
+	}
+
+	/**
+	 * Factory method for this solver's thread-pool executor.
+	 */
+	protected InterruptableExecutor getExecutor() {
+		SetPoolExecutor executor = new SetPoolExecutor(1, this.numThreads, 30, TimeUnit.SECONDS,
+				new LinkedBlockingQueue<Runnable>());
+		executor.setThreadFactory(new ThreadFactory() {
+
+			@Override
+			public Thread newThread(Runnable r) {
+				Thread thrIFDS = new Thread(r);
+				thrIFDS.setDaemon(true);
+				thrIFDS.setName("IFDS Solver");
+				return thrIFDS;
+			}
+		});
+		return executor;
+	}
+
+	/**
+	 * Returns a String used to identify the output of this solver in debug mode.
+	 * Subclasses can overwrite this string to distinguish the output from different
+	 * solvers.
+	 */
+	protected String getDebugName() {
+		return "FAST IFDS SOLVER";
+	}
+
+	public void printStats() {
+		if (logger.isDebugEnabled()) {
+			if (ffCache != null)
+				ffCache.printStats();
+		} else {
+			logger.info("No statistics were collected, as DEBUG is disabled.");
+		}
+	}
+
+	private class PathEdgeProcessingTask implements Runnable {
+
+		private final PathEdge<N, D> edge;
+		private final boolean solverId;
+
+		public PathEdgeProcessingTask(PathEdge<N, D> edge, boolean solverId) {
+			this.edge = edge;
+			this.solverId = solverId;
+		}
+
+		public void run() {
+			if (icfg.isCallStmt(edge.getTarget())) {
+				processCall(edge);
+			} else {
+				// note that some statements, such as "throw" may be
+				// both an exit statement and a "normal" statement
+				if (icfg.isExitStmt(edge.getTarget()))
+					processExit(edge);
+				if (!icfg.getSuccsOf(edge.getTarget()).isEmpty())
+					processNormalFlow(edge);
+			}
+			garbageCollector.notifyTaskProcessed(edge);
+		}
+
+		@Override
+		public int hashCode() {
+			final int prime = 31;
+			int result = 1;
+			result = prime * result + ((edge == null) ? 0 : edge.hashCode());
+			result = prime * result + (solverId ? 1231 : 1237);
+			return result;
+		}
+
+		@Override
+		public boolean equals(Object obj) {
+			if (this == obj)
+				return true;
+			if (obj == null)
+				return false;
+			if (getClass() != obj.getClass())
+				return false;
+			PathEdgeProcessingTask other = (PathEdgeProcessingTask) obj;
+			if (edge == null) {
+				if (other.edge != null)
+					return false;
+			} else if (!edge.equals(other.edge))
+				return false;
+			if (solverId != other.solverId)
+				return false;
+			return true;
+		}
+
+	}
+
+	/**
+	 * Sets the maximum number of abstractions that shall be recorded per join
+	 * point. In other words, enabling this option disables the recording of
+	 * neighbors beyond the given count.
+	 *
+	 * @param maxJoinPointAbstractions The maximum number of abstractions per join
+	 *                                 point, or -1 to record an arbitrary number of
+	 *                                 join point abstractions
+	 */
+	public void setMaxJoinPointAbstractions(int maxJoinPointAbstractions) {
+		this.maxJoinPointAbstractions = maxJoinPointAbstractions;
+	}
+
+	/**
+	 * Sets the memory manager that shall be used to manage the abstractions
+	 *
+	 * @param memoryManager The memory manager that shall be used to manage the
+	 *                      abstractions
+	 */
+	public void setMemoryManager(IMemoryManager<D, N> memoryManager) {
+		this.memoryManager = memoryManager;
+	}
+
+	/**
+	 * Gets the memory manager used by this solver to reduce memory consumption
+	 *
+	 * @return The memory manager registered with this solver
+	 */
+	public IMemoryManager<D, N> getMemoryManager() {
+		return this.memoryManager;
+	}
+
+	@Override
+	public void forceTerminate(ISolverTerminationReason reason) {
+		this.killFlag = reason;
+		this.executor.interrupt();
+		this.executor.shutdown();
+	}
+
+	@Override
+	public boolean isTerminated() {
+		return killFlag != null || this.executor.isFinished();
+	}
+
+	@Override
+	public boolean isKilled() {
+		return killFlag != null;
+	}
+
+	@Override
+	public void reset() {
+		this.killFlag = null;
+	}
+
+	@Override
+	public void addStatusListener(IMemoryBoundedSolverStatusNotification listener) {
+		this.notificationListeners.add(listener);
+	}
+
+	@Override
+	public ISolverTerminationReason getTerminationReason() {
+		return killFlag;
+	}
+
+	public void setMaxCalleesPerCallSite(int maxCalleesPerCallSite) {
+		this.maxCalleesPerCallSite = maxCalleesPerCallSite;
+	}
+
+	public void setMaxAbstractionPathLength(int maxAbstractionPathLength) {
+		this.maxAbstractionPathLength = maxAbstractionPathLength;
+	}
+
+	/**
+	 * Sets the peer group in which this solver operates.
Peer groups allow for
+	 * synchronization between solvers
+	 *
+	 * @param solverPeerGroup The solver peer group
+	 */
+	public void setPeerGroup(SolverPeerGroup solverPeerGroup) {
+		this.solverPeerGroup = solverPeerGroup;
+	}
+
+	/**
+	 * Notifies the solver that no further edges will be scheduled
+	 */
+	public void terminate() {
+	}
+
+}
diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/IGraph.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/IGraph.java
new file mode 100644
index 000000000..67bdec479
--- /dev/null
+++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/IGraph.java
@@ -0,0 +1,25 @@
+package soot.jimple.infoflow.solver.gcSolver.fpc;
+
+import java.util.Set;
+
+public interface IGraph<N> {
+
+	public Set<N> getNodes();
+
+	public Set<N> succsOf(N n);
+
+	public Set<N> predsOf(N n);
+
+	public void addNode(N n);
+
+	public void addEdge(N n1, N n2);
+
+	public boolean contains(N n);
+
+	/*
+	 * removing the node itself and all edges it associated with.
+	 */
+	public void remove(N n);
+
+	public void removeEdge(N n1, N n2);
+}
diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/InfoflowSolver.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/InfoflowSolver.java
new file mode 100644
index 000000000..c3fdf0950
--- /dev/null
+++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/InfoflowSolver.java
@@ -0,0 +1,164 @@
+/*******************************************************************************
+ * Copyright (c) 2012 Secure Software Engineering Group at EC SPRIDE.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the GNU Lesser Public License v2.1
+ * which accompanies this distribution, and is available at
+ * http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
+ *
+ * Contributors: Christian Fritz, Steven Arzt, Siegfried Rasthofer, Eric
+ * Bodden, and others.
+ ******************************************************************************/
+package soot.jimple.infoflow.solver.gcSolver.fpc;
+
+import java.util.Collection;
+import java.util.Set;
+
+import heros.FlowFunction;
+import heros.solver.PathEdge;
+import soot.SootMethod;
+import soot.Unit;
+import soot.jimple.infoflow.data.Abstraction;
+import soot.jimple.infoflow.problems.AbstractInfoflowProblem;
+import soot.jimple.infoflow.solver.EndSummary;
+import soot.jimple.infoflow.solver.IFollowReturnsPastSeedsHandler;
+import soot.jimple.infoflow.solver.IInfoflowSolver;
+import soot.jimple.infoflow.solver.executors.InterruptableExecutor;
+import soot.jimple.infoflow.solver.functions.SolverCallFlowFunction;
+import soot.jimple.infoflow.solver.functions.SolverCallToReturnFlowFunction;
+import soot.jimple.infoflow.solver.functions.SolverNormalFlowFunction;
+import soot.jimple.infoflow.solver.functions.SolverReturnFlowFunction;
+import soot.jimple.toolkits.ide.icfg.BiDiInterproceduralCFG;
+import soot.util.ConcurrentHashMultiMap;
+
+/**
+ * We are subclassing the JimpleIFDSSolver because we need the same executor for
+ * both the forward and the backward analysis Also we need to be able to insert
+ * edges containing new taint information
+ *
+ */
+public class InfoflowSolver extends IFDSSolver<Unit, Abstraction, BiDiInterproceduralCFG<Unit, SootMethod>>
+		implements IInfoflowSolver {
+
+	private IFollowReturnsPastSeedsHandler followReturnsPastSeedsHandler = null;
+	private final AbstractInfoflowProblem problem;
+
+	public InfoflowSolver(AbstractInfoflowProblem problem, InterruptableExecutor executor, int sleepTime) {
+		super(problem, sleepTime);
+		this.problem = problem;
+		this.executor = executor;
+		problem.setSolver(this);
+	}
+
+	@Override
+	protected InterruptableExecutor getExecutor() {
+		return executor;
+	}
+
+	@Override
+	public boolean processEdge(PathEdge<Unit, Abstraction> edge) {
+		// We might not have a garbage collector yet
+		if (this.garbageCollector == null) {
+			synchronized (this) {
+				if (this.garbageCollector == null)
+					this.garbageCollector = createGarbageCollector();
+			}
+		}
+
+		propagate(edge.factAtSource(), edge.getTarget(), edge.factAtTarget(), null, false, null);
+		return true;
+	}
+
+	@Override
+	public void injectContext(IInfoflowSolver otherSolver, SootMethod callee, Abstraction d3, Unit callSite,
+			Abstraction d2, Abstraction d1) {
+		if (!addIncoming(callee, d3, callSite, d1, d2))
+			return;
+
+		Collection<Unit> returnSiteNs = icfg.getReturnSitesOfCallAt(callSite);
+		applyEndSummaryOnCall(d1, callSite, d2, returnSiteNs, callee, d3);
+	}
+
+	@Override
+	protected Set<Abstraction> computeReturnFlowFunction(FlowFunction<Abstraction> retFunction, Abstraction d1,
+			Abstraction d2, Unit callSite, Collection<Abstraction> callerSideDs) {
+		if (retFunction instanceof SolverReturnFlowFunction) {
+			// Get the d1s at the start points of the caller
+			return ((SolverReturnFlowFunction) retFunction).computeTargets(d2, d1, callerSideDs);
+		} else
+			return retFunction.computeTargets(d2);
+	}
+
+	@Override
+	protected Set<Abstraction> computeNormalFlowFunction(FlowFunction<Abstraction> flowFunction, Abstraction d1,
+			Abstraction d2) {
+		if (flowFunction instanceof SolverNormalFlowFunction)
+			return ((SolverNormalFlowFunction) flowFunction).computeTargets(d1, d2);
+		else
+			return flowFunction.computeTargets(d2);
+	}
+
+	@Override
+	protected Set<Abstraction> computeCallToReturnFlowFunction(FlowFunction<Abstraction> flowFunction, Abstraction d1,
+			Abstraction d2) {
+		if (flowFunction instanceof SolverCallToReturnFlowFunction)
+			return ((SolverCallToReturnFlowFunction) flowFunction).computeTargets(d1, d2);
+		else
+			return flowFunction.computeTargets(d2);
+	}
+
+	@Override
+	protected Set<Abstraction> computeCallFlowFunction(FlowFunction<Abstraction> flowFunction, Abstraction d1,
+			Abstraction d2) {
+		if (flowFunction instanceof SolverCallFlowFunction)
+			return ((SolverCallFlowFunction) flowFunction).computeTargets(d1, d2);
+		else
+			return flowFunction.computeTargets(d2);
+	}
+
+	@Override
+	public void cleanup() {
+		this.jumpFunctions = new ConcurrentHashMultiMap<>();
+		this.incoming.clear();
this.endSummary.clear();
+		if (this.ffCache != null)
+			this.ffCache.invalidate();
+	}
+
+	@Override
+	public Set<EndSummary<Unit, Abstraction>> endSummary(SootMethod m, Abstraction d3) {
+		return super.endSummary(m, d3);
+	}
+
+	@Override
+	protected void processExit(PathEdge<Unit, Abstraction> edge) {
+		super.processExit(edge);
+
+		if (followReturnsPastSeeds && followReturnsPastSeedsHandler != null) {
+			final Abstraction d1 = edge.factAtSource();
+			final Unit u = edge.getTarget();
+			final Abstraction d2 = edge.factAtTarget();
+
+			final SootMethod methodThatNeedsSummary = icfg.getMethodOf(u);
+			final Set<IncomingRecord<Unit, Abstraction>> inc = incoming(d1, methodThatNeedsSummary);
+
+			if (inc == null || inc.isEmpty())
+				followReturnsPastSeedsHandler.handleFollowReturnsPastSeeds(d1, u, d2);
+		}
+	}
+
+	@Override
+	public void setFollowReturnsPastSeedsHandler(IFollowReturnsPastSeedsHandler handler) {
+		this.followReturnsPastSeedsHandler = handler;
+	}
+
+	@Override
+	public long getPropagationCount() {
+		return propagationCount.sum();
+	}
+
+	@Override
+	public AbstractInfoflowProblem getTabulationProblem() {
+		return problem;
+	}
+
+}
diff --git a/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/NormalGarbageCollector.java b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/NormalGarbageCollector.java
new file mode 100644
index 000000000..7480a15ee
--- /dev/null
+++ b/soot-infoflow/src/soot/jimple/infoflow/solver/gcSolver/fpc/NormalGarbageCollector.java
@@ -0,0 +1,90 @@
+package soot.jimple.infoflow.solver.gcSolver.fpc;
+
+import heros.solver.Pair;
+import heros.solver.PathEdge;
+import soot.SootMethod;
+import soot.jimple.infoflow.collect.MyConcurrentHashMap;
+import soot.jimple.infoflow.solver.EndSummary;
+import soot.jimple.infoflow.solver.cfg.BackwardsInfoflowCFG;
+import soot.jimple.infoflow.solver.fastSolver.FastSolverLinkedNode;
+import soot.jimple.infoflow.solver.gcSolver.IGCReferenceProvider;
+import soot.jimple.toolkits.ide.icfg.BiDiInterproceduralCFG;
+import soot.util.ConcurrentHashMultiMap;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class NormalGarbageCollector<N, D extends FastSolverLinkedNode<D, N>>
+		extends FineGrainedReferenceCountingGarbageCollector<N, D> {
+
+	protected static final Logger logger = LoggerFactory.getLogger(NormalGarbageCollector.class);
+	protected final AbstrationDependencyGraph<N, D> abstDependencyGraph;
+	protected final MyConcurrentHashMap<Pair<SootMethod, D>, Map<EndSummary<N, D>, EndSummary<N, D>>> endSummary;
+
+	public NormalGarbageCollector(BiDiInterproceduralCFG<N, SootMethod> icfg,
+			ConcurrentHashMultiMap<Pair<SootMethod, D>, PathEdge<N, D>> jumpFunctions,
+			MyConcurrentHashMap<Pair<SootMethod, D>, Map<EndSummary<N, D>, EndSummary<N, D>>> endSummary,
+			AbstrationDependencyGraph<N, D> adg) {
+		super(icfg, jumpFunctions, null);
+		this.abstDependencyGraph = adg;
+		this.endSummary = endSummary;
+	}
+
+	@Override
+	public boolean hasActiveDependencies(Pair<SootMethod, D> abstraction) {
+		int changeCounter = -1;
+		try {
+			abstDependencyGraph.lock();
+			do {
+				// Update the change counter for the next round
+				changeCounter = jumpFnCounter.getChangeCounter();
+
+				// Check the method itself
+				if (jumpFnCounter.get(abstraction) > 0)
+					return true;
+
+				// Check the transitive callees
+				Set<Pair<SootMethod, D>> references = abstDependencyGraph.reachableClosure(abstraction);
+				for (Pair<SootMethod, D> ref : references) {
+					if (jumpFnCounter.get(ref) > 0)
+						return true;
+				}
+			} while (checkChangeCounter && changeCounter != jumpFnCounter.getChangeCounter());
+			// we actually can remove these nodes.
+			// Set<Pair<SootMethod, D>> references =
+			// abstDependencyGraph.reachableClosure(abstraction);
+			// for (Pair<SootMethod, D> ref : references) {
+			// abstDependencyGraph.remove(ref);
+			// }
+		} finally {
+			abstDependencyGraph.unlock();
+		}
+		return false;
+	}
+
+	@Override
+	protected IGCReferenceProvider<Pair<SootMethod, D>> createReferenceProvider() {
+		return null;
+	}
+
+	@Override
+	public void notifySolverTerminated() {
+		super.notifySolverTerminated();
+		String s = "forward";
+		if (icfg instanceof BackwardsInfoflowCFG) {
+			s = "backward";
+		}
+		logger.info(icfg.getClass().toString());
+		logger.info(String.format("#nodes of %s Abstraction Dependency Graph: %d", s, abstDependencyGraph.nodeSize()));
+		logger.info(String.format("#edges of %s Abstraction Dependency Graph: %d", s, abstDependencyGraph.edgeSize()));
+		logger.info(String.format("#dummy end summary edges of %s: %d", s, this.endSummary.keySet().size()));
+		long v = 0;
+		for (Map<EndSummary<N, D>, EndSummary<N, D>> map : this.endSummary.values()) {
+			v += map.size();
+		}
+		logger.info(String.format("#end summary edges of %s: %d", s, v));
+	}
+}