/** Singleton constructor; instances are managed by Soot's {@code G} global registry. */
public SparkTransformer( Singletons.Global g ) {}
/** Returns the singleton instance held by Soot's global registry {@code G}. */
public static SparkTransformer v() { return G.v().soot_jimple_spark_SparkTransformer(); }
/**
 * Runs the Spark points-to analysis: builds the pointer assignment graph (PAG),
 * optionally simplifies it, propagates points-to sets with the configured
 * propagator, and installs the result as the Scene's points-to analysis.
 * Optionally dumps the graph/solution and builds the call graph.
 *
 * @param phaseName the name of this transform phase (unused here; part of the
 *                  framework's transformer contract)
 * @param options   raw phase options map, parsed into {@link SparkOptions}
 */
protected void internalTransform( String phaseName, Map options )
{
    SparkOptions opts = new SparkOptions( options );
    final String output_dir = SourceLocator.v().getOutputDir();

    // --- Build pointer assignment graph ---
    ContextInsensitiveBuilder b = new ContextInsensitiveBuilder();
    if( opts.pre_jimplify() ) b.preJimplify();
    if( opts.force_gc() ) doGC();
    Date startBuild = new Date();
    final PAG pag = b.setup( opts );
    b.build();
    Date endBuild = new Date();
    reportTime( "Pointer Assignment Graph", startBuild, endBuild );
    if( opts.force_gc() ) doGC();

    // --- Build type masks (used to filter points-to sets by declared type) ---
    Date startTM = new Date();
    pag.getTypeManager().makeTypeMask();
    Date endTM = new Date();
    reportTime( "Type masks", startTM, endTM );
    if( opts.force_gc() ) doGC();

    if( opts.verbose() ) {
        G.v().out.println( "VarNodes: "+pag.getVarNodeNumberer().size() );
        G.v().out.println( "FieldRefNodes: "+pag.getFieldRefNodeNumberer().size() );
        G.v().out.println( "AllocNodes: "+pag.getAllocNodeNumberer().size() );
    }

    // --- Simplify PAG ---
    Date startSimplify = new Date();
    // We only simplify if on_fly_cg is false. But, if vta is true, it
    // overrides on_fly_cg, so we can still simplify. Something to handle
    // these option interdependencies more cleanly would be nice...
    if( ( opts.simplify_sccs() && !opts.on_fly_cg() ) || opts.vta() ) {
        new SCCCollapser( pag, opts.ignore_types_for_sccs() ).collapse();
    }
    if( opts.simplify_offline() && !opts.on_fly_cg() ) {
        new EBBCollapser( pag ).collapse();
    }
    // Always clean up merged nodes. (The original guard here was
    // "true || simplify_sccs() || vta() || simplify_offline()", i.e.
    // unconditionally taken — the disjuncts were dead code.)
    pag.cleanUpMerges();
    Date endSimplify = new Date();
    reportTime( "Pointer Graph simplified", startSimplify, endSimplify );
    if( opts.force_gc() ) doGC();

    // --- Dump PAG if requested ---
    // The dumper is also needed later for dump_solution, so create it if
    // either option is set.
    PAGDumper dumper = null;
    if( opts.dump_pag() || opts.dump_solution() ) {
        dumper = new PAGDumper( pag, output_dir );
    }
    if( opts.dump_pag() ) dumper.dump();

    // --- Propagate points-to sets ---
    Date startProp = new Date();
    Propagator propagator = null;
    switch( opts.propagator() ) {
        case SparkOptions.propagator_iter:
            propagator = new PropIter( pag );
            break;
        case SparkOptions.propagator_worklist:
            propagator = new PropWorklist( pag );
            break;
        case SparkOptions.propagator_cycle:
            propagator = new PropCycle( pag );
            break;
        case SparkOptions.propagator_merge:
            propagator = new PropMerge( pag );
            break;
        case SparkOptions.propagator_alias:
            propagator = new PropAlias( pag );
            break;
        case SparkOptions.propagator_none:
            // propagator stays null; propagation is skipped below.
            break;
        default:
            throw new RuntimeException();
    }
    if( propagator != null ) propagator.propagate();
    Date endProp = new Date();
    reportTime( "Propagation", startProp, endProp );
    reportTime( "Solution found", startSimplify, endProp );
    if( opts.force_gc() ) doGC();

    // --- Build call graph, unless it was already built on the fly ---
    // (vta overrides on_fly_cg, so rebuild in that case too.)
    if( !opts.on_fly_cg() || opts.vta() ) {
        CallGraphBuilder cgb = new CallGraphBuilder( pag );
        cgb.build();
    }
    if( opts.verbose() ) {
        G.v().out.println( "[Spark] Number of reachable methods: "
            +Scene.v().getReachableMethods().size() );
    }

    // --- Optional reporting / dumping of the computed solution ---
    if( opts.set_mass() ) findSetMass( pag );
    if( opts.dump_answer() ) new ReachingTypeDumper( pag, output_dir ).dump();
    if( opts.dump_solution() ) dumper.dumpPointsToSets();
    if( opts.dump_html() ) new PAG2HTML( pag, output_dir ).dump();

    // Install the result as the Scene's points-to analysis.
    Scene.v().setPointsToAnalysis( pag );
    if( opts.add_tags() ) {
        addTags( pag );
    }

    if( opts.cs_demand() ) {
        // Replace by demand-driven refinement-based context-sensitive analysis.
        Date startOnDemand = new Date();
        PointsToAnalysis onDemandAnalysis = DemandCSPointsTo.makeWithBudget(opts.traversal(), opts.passes(), opts.lazy_pts());
        Date endOnDemand = new Date();
        reportTime( "Initialized on-demand refinement-based context-sensitive analysis", startOnDemand, endOnDemand );
        Scene.v().setPointsToAnalysis(onDemandAnalysis);
    }
}