}
if (log.isDebugEnabled()) {
    log.debug(g.toDot(nodes));
}
log.info("Transforming into grammar...");
FlowGraph2Grammar f2g = new FlowGraph2Grammar(g);
Grammar r = f2g.convert();
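// Collect the grammar nonterminals corresponding to the hotspot nodes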
Set<Nonterminal> hs_nt = new HashSet<Nonterminal>();
for (Node hn : nodes) {
    hs_nt.add(f2g.getNonterminal(hn));
}
if (log.isDebugEnabled()) {
    log.debug(r.toString() + "Hotspots: " + hs_nt);
}
// Approximate the grammar: cut operation cycles, then perform a regular approximation
log.info("Cutting operation cycles...");
r.approximateOperationCycles();
log.info("Performing regular approximation...");
r.approximateNonLinear(hs_nt);
if (log.isDebugEnabled()) {
    log.debug(r.toString() + "Hotspots: " + hs_nt);
}
log.info("Converting to MLFA...");
Grammar2MLFA gm = new Grammar2MLFA(r);
MLFA mlfa = gm.convert();
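// Propagate taint through the grammar and mark the corresponding MLFA state pairs as tainted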
propagateTaint(r);
for (Node n : nodes) {
    Nonterminal nt = f2g.getNonterminal(n);
    MLFAStatePair sp = gm.getMLFAStatePair(nt);
    if (nt.isTaint()) {
        sp.setTaint(true);
    }
}
if (log.isDebugEnabled()) {
    log.debug(mlfa.toString());
}
// Map each hotspot ValueBox to its MLFA state pair, following the translation maps to its flow-graph node
map = new HashMap<ValueBox, MLFAStatePair>();
for (ValueBox box : hotspots) {
    Node n = m3.get(m2.get(m1.get(box)));
    if (n != null) {
        Nonterminal nt = f2g.getNonterminal(n);
        MLFAStatePair sp = gm.getMLFAStatePair(nt);
        map.put(box, sp);
    }
}
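// Build the corresponding map for the toString hotspots, keyed by class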
tostring_map = new HashMap<SootClass, MLFAStatePair>();
Map<SootClass, StringStatement> tostring_hotspot_map = jt.getToStringHotspotMap();
for (Map.Entry<SootClass, StringStatement> tse : tostring_hotspot_map.entrySet()) {
    SootClass tsc = tse.getKey();
    StringStatement ss = tse.getValue();
    Node n = m3.get(m2.get(ss));
    if (n != null) {
        Nonterminal nt = f2g.getNonterminal(n);
        MLFAStatePair sp = gm.getMLFAStatePair(nt);
        tostring_map.put(tsc, sp);
    }
}
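// Keep the source file map provided by jt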
sourcefile_map = jt.getSourceFileMap();