/* Soot - a J*va Optimization Framework
 * Copyright (C) 2002 Ondrej Lhotak
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

package soot.jimple.spark;

import soot.*;
import soot.jimple.spark.builder.*;
import soot.jimple.spark.ondemand.DemandCSPointsTo;
import soot.jimple.spark.pag.*;
import soot.jimple.spark.solver.*;
import soot.jimple.spark.sets.*;
import soot.jimple.toolkits.callgraph.*;
import soot.jimple.*;
import java.util.*;
import soot.options.SparkOptions;
import soot.tagkit.*;

/** Main entry point for Spark, Soot's flow-insensitive, subset-based
 * points-to analysis. Builds a pointer assignment graph (PAG),
 * optionally simplifies it, propagates points-to sets over it, and
 * publishes the result on the Scene.
 * @author Ondrej Lhotak
 */
public class SparkTransformer extends SceneTransformer {
    public SparkTransformer(Singletons.Global g) {
    }

    public static SparkTransformer v() {
        return G.v().soot_jimple_spark_SparkTransformer();
    }
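
    // A minimal sketch (not part of this class) of how Spark is typically
    // enabled from a Soot driver via the standard phase-options API; the
    // option strings follow soot.options.SparkOptions:
    //
    //   soot.options.Options.v().setPhaseOption("cg.spark", "enabled:true");
    //   soot.options.Options.v().setPhaseOption("cg.spark", "propagator:worklist");
    //   soot.PackManager.v().runPacks();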

    protected void internalTransform(String phaseName, Map options) {
        SparkOptions opts = new SparkOptions(options);
        final String output_dir = SourceLocator.v().getOutputDir();

        // Build pointer assignment graph
        ContextInsensitiveBuilder b = new ContextInsensitiveBuilder();
        if (opts.pre_jimplify())
            b.preJimplify();
        if (opts.force_gc())
            doGC();
        Date startBuild = new Date();
        final PAG pag = b.setup(opts);
        b.build();
        Date endBuild = new Date();
        reportTime("Pointer Assignment Graph", startBuild, endBuild);
        if (opts.force_gc())
            doGC();

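        // Type masks give the propagator a precomputed per-type bit mask of
        // allocation nodes, so points-to sets can be filtered by declared
        // type cheaply during propagation.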
        // Build type masks
        Date startTM = new Date();
        pag.getTypeManager().makeTypeMask();
        Date endTM = new Date();
        reportTime("Type masks", startTM, endTM);
        if (opts.force_gc())
            doGC();

        if (opts.verbose()) {
            G.v().out.println("VarNodes: "
                    + pag.getVarNodeNumberer().size());
            G.v().out.println("FieldRefNodes: "
                    + pag.getFieldRefNodeNumberer().size());
            G.v().out.println("AllocNodes: "
                    + pag.getAllocNodeNumberer().size());
        }

        // Simplify pag
        Date startSimplify = new Date();

        // We only simplify if on_fly_cg is false; however, vta overrides
        // on_fly_cg, so simplification is still safe in that case. A cleaner
        // way to handle these option interdependencies would be nice...
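        // SCCCollapser merges strongly connected components of the PAG into
        // single nodes; EBBCollapser performs a further offline collapsing
        // pass over the graph.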
        if ((opts.simplify_sccs() && !opts.on_fly_cg()) || opts.vta()) {
            new SCCCollapser(pag, opts.ignore_types_for_sccs()).collapse();
        }
        if (opts.simplify_offline() && !opts.on_fly_cg()) {
            new EBBCollapser(pag).collapse();
        }
        // Merged nodes are cleaned up regardless of which simplifications ran.
        pag.cleanUpMerges();
        Date endSimplify = new Date();
        reportTime("Pointer Graph simplified", startSimplify, endSimplify);
        if (opts.force_gc())
            doGC();

        // Dump pag
        PAGDumper dumper = null;
        if (opts.dump_pag() || opts.dump_solution()) {
            dumper = new PAGDumper(pag, output_dir);
        }
        if (opts.dump_pag())
            dumper.dump();

        // Propagate
        Date startProp = new Date();
        final Propagator[] propagator = new Propagator[1];
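        // Select the algorithm that propagates points-to sets over the PAG;
        // "worklist" is Spark's usual default, "iter" is naive fixed-point
        // iteration, and "none" skips propagation entirely.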
        switch (opts.propagator()) {
        case SparkOptions.propagator_iter:
            propagator[0] = new PropIter(pag);
            break;
        case SparkOptions.propagator_worklist:
            propagator[0] = new PropWorklist(pag);
            break;
        case SparkOptions.propagator_cycle:
            propagator[0] = new PropCycle(pag);
            break;
        case SparkOptions.propagator_merge:
            propagator[0] = new PropMerge(pag);
            break;
        case SparkOptions.propagator_alias:
            propagator[0] = new PropAlias(pag);
            break;
        case SparkOptions.propagator_none:
            break;
        default:
            throw new RuntimeException();
        }
        if (propagator[0] != null)
            propagator[0].propagate();
        Date endProp = new Date();
        reportTime("Propagation", startProp, endProp);
        reportTime("Solution found", startSimplify, endProp);
        /*
        // Measurement code inserted by Adam:
        // count how many bit vectors are shared for the Heintze set.
        long numBitVectors = 0;
        for (int i = 0; i < AllSharedHybridNodes.v().lookupMap.map.length; ++i) {
            if (AllSharedHybridNodes.v().lookupMap.map[i] != null) {
                ListIterator li = AllSharedHybridNodes.v().lookupMap.map[i].listIterator();
                while (li.hasNext()) {
                    li.next();
                    ++numBitVectors;
                }
            }
        }
        System.out.println("Number of shared bit vectors for Heintze: " + numBitVectors);
        System.out.println("Number of bit vectors for Hybrid: " + HybridPointsToSet.numBitVectors);
        */
        if (opts.force_gc())
            doGC();

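        // If the call graph was not built on the fly during propagation
        // (or vta overrode on-the-fly construction), build it now from the
        // computed points-to sets.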
        if (!opts.on_fly_cg() || opts.vta()) {
            CallGraphBuilder cgb = new CallGraphBuilder(pag);
            cgb.build();
        }

        if (opts.verbose()) {
            G.v().out.println("[Spark] Number of reachable methods: "
                    + Scene.v().getReachableMethods().size());
        }

        if (opts.set_mass())
            findSetMass(pag);

        /*
        if (propagator[0] instanceof PropMerge) {
            new MergeChecker(pag).check();
        } else if (propagator[0] != null) {
            new Checker(pag).check();
        }
        */

        if (opts.dump_answer())
            new ReachingTypeDumper(pag, output_dir).dump();
        if (opts.dump_solution())
            dumper.dumpPointsToSets();
        if (opts.dump_html())
            new PAG2HTML(pag, output_dir).dump();
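        // Publish the result so clients can query it through
        // Scene.v().getPointsToAnalysis().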
        Scene.v().setPointsToAnalysis(pag);
        if (opts.add_tags()) {
            addTags(pag);
        }

        if (opts.cs_demand()) {
            // Replace the solution with a demand-driven, refinement-based
            // context-sensitive analysis.
            Date startOnDemand = new Date();
            PointsToAnalysis onDemandAnalysis = DemandCSPointsTo.makeDefault();
            Date endOnDemand = new Date();
            reportTime(
                    "Initialized on-demand refinement-based context-sensitive analysis",
                    startOnDemand, endOnDemand);
            Scene.v().setPointsToAnalysis(onDemandAnalysis);
        }
    }

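    /**
     * Attaches tags to each definition statement naming the Spark nodes in
     * the points-to set of the value it defines, as well as the nodes
     * flowing into that value along simple, allocation, and load edges.
     */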
    protected void addTags(PAG pag) {
        final Tag unknown = new StringTag("Untagged Spark node");
        final Map<Node, Tag> nodeToTag = pag.getNodeTags();
        for (Iterator cIt = Scene.v().getClasses().iterator(); cIt.hasNext();) {
            final SootClass c = (SootClass) cIt.next();
            for (Iterator mIt = c.methodIterator(); mIt.hasNext();) {
                SootMethod m = (SootMethod) mIt.next();
                if (!m.isConcrete())
                    continue;
                if (!m.hasActiveBody())
                    continue;
                for (Iterator sIt = m.getActiveBody().getUnits().iterator(); sIt.hasNext();) {
                    final Stmt s = (Stmt) sIt.next();
                    if (s instanceof DefinitionStmt) {
                        Value lhs = ((DefinitionStmt) s).getLeftOp();
                        VarNode v = null;
                        if (lhs instanceof Local) {
                            v = pag.findLocalVarNode(lhs);
                        } else if (lhs instanceof FieldRef) {
                            v = pag.findGlobalVarNode(((FieldRef) lhs).getField());
                        }
                        if (v != null) {
                            PointsToSetInternal p2set = v.getP2Set();
                            p2set.forall(new P2SetVisitor() {
                                public final void visit(Node n) {
                                    addTag(s, n, nodeToTag, unknown);
                                }
                            });
                            Node[] simpleSources = pag.simpleInvLookup(v);
                            for (Node element : simpleSources) {
                                addTag(s, element, nodeToTag, unknown);
                            }
                            simpleSources = pag.allocInvLookup(v);
                            for (Node element : simpleSources) {
                                addTag(s, element, nodeToTag, unknown);
                            }
                            simpleSources = pag.loadInvLookup(v);
                            for (Node element : simpleSources) {
                                addTag(s, element, nodeToTag, unknown);
                            }
                        }
                    }
                }
            }
        }
    }

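    /**
     * Prints the elapsed wall-clock time between start and end, formatted
     * as seconds with one decimal place.
     */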
    protected static void reportTime(String desc, Date start, Date end) {
        long time = end.getTime() - start.getTime();
        G.v().out.println("[Spark] " + desc + " in " + time / 1000
                + "." + (time / 100) % 10 + " seconds.");
    }

    protected static void doGC() {
        // Run the collector 5 times because a single System.gc() call does
        // not always collect everything.
        for (int i = 0; i < 5; i++) {
            System.gc();
        }
    }

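    /** Attaches the tag recorded for node n to host h, or a placeholder tag if none exists. */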
    protected void addTag(Host h, Node n, Map<Node, Tag> nodeToTag, Tag unknown) {
        if (nodeToTag.containsKey(n))
            h.addTag(nodeToTag.get(n));
        else
            h.addTag(unknown);
    }

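    /**
     * Computes and prints statistics about points-to set sizes: total set
     * mass, variable mass, counts of scalars and alloc-dot-field nodes, and
     * the distribution of set sizes at dereference sites.
     */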
    protected void findSetMass(PAG pag) {
        int mass = 0;
        int varMass = 0;
        int adfs = 0;
        int scalars = 0;
        /*
        // Debugging code: dump the reachable methods.
        for (Iterator it = Scene.v().getReachableMethods().listener(); it.hasNext();) {
            SootMethod m = (SootMethod) it.next();
            G.v().out.println(m.getBytecodeSignature());
        }
        */

        for (Iterator vIt = pag.getVarNodeNumberer().iterator(); vIt.hasNext();) {
            final VarNode v = (VarNode) vIt.next();
            scalars++;
            PointsToSetInternal set = v.getP2Set();
            if (set != null)
                mass += set.size();
            if (set != null)
                varMass += set.size();
            if (set != null && set.size() > 0) {
                //G.v().out.println( "V "+v.getVariable()+" "+set.size() );
                // G.v().out.println( ""+v.getVariable()+" "+v.getMethod()+" "+set.size() );
            }
        }
        for (Iterator<Object> anIt = pag.allocSourcesIterator(); anIt.hasNext();) {
            final AllocNode an = (AllocNode) anIt.next();
            for (Iterator adfIt = an.getFields().iterator(); adfIt.hasNext();) {
                final AllocDotField adf = (AllocDotField) adfIt.next();
                PointsToSetInternal set = adf.getP2Set();
                if (set != null)
                    mass += set.size();
                if (set != null && set.size() > 0) {
                    adfs++;
                    // G.v().out.println( ""+adf.getBase().getNewExpr()+"."+adf.getField()+" "+set.size() );
                }
            }
        }
        G.v().out.println("Set mass: " + mass);
        G.v().out.println("Variable mass: " + varMass);
        G.v().out.println("Scalars: " + scalars);
        G.v().out.println("adfs: " + adfs);
        // Compute points-to set sizes of dereference sites BEFORE
        // trimming sets by declared type
        int[] deRefCounts = new int[30001];
        for (VarNode v : pag.getDereferences()) {
            PointsToSetInternal set = v.getP2Set();
            int size = 0;
            if (set != null)
                size = set.size();
            deRefCounts[size]++;
        }
        int total = 0;
        for (int element : deRefCounts)
            total += element;
        G.v().out.println("Dereference counts BEFORE trimming (total = "
                + total + "):");
        for (int i = 0; i < deRefCounts.length; i++) {
            if (deRefCounts[i] > 0) {
                G.v().out.println("" + i + " " + deRefCounts[i] + " "
                        + (deRefCounts[i] * 100.0 / total) + "%");
            }
        }
        // Compute points-to set sizes of dereference sites AFTER
        // trimming sets by declared type
        /*
        if( pag.getTypeManager().getFastHierarchy() == null ) {
            pag.getTypeManager().clearTypeMask();
            pag.getTypeManager().setFastHierarchy( Scene.v().getOrMakeFastHierarchy() );
            pag.getTypeManager().makeTypeMask( pag );
            deRefCounts = new int[30001];
            for( Iterator vIt = pag.getDereferences().iterator(); vIt.hasNext(); ) {
                final VarNode v = (VarNode) vIt.next();
                PointsToSetInternal set =
                        pag.getSetFactory().newSet( v.getType(), pag );
                int size = 0;
                if( set != null ) {
                    v.getP2Set().setType( null );
                    v.getP2Set().getNewSet().setType( null );
                    v.getP2Set().getOldSet().setType( null );
                    set.addAll( v.getP2Set(), null );
                    size = set.size();
                }
                deRefCounts[size]++;
            }
            total = 0;
            for( int i=0; i < deRefCounts.length; i++ ) total += deRefCounts[i];
            G.v().out.println( "Dereference counts AFTER trimming (total = "+total+"):" );
            for( int i=0; i < deRefCounts.length; i++ ) {
                if( deRefCounts[i] > 0 ) {
                    G.v().out.println( ""+i+" "+deRefCounts[i]+" "+(deRefCounts[i]*100.0/total)+"%" );
                }
            }
        }
        */
        /*
        deRefCounts = new int[30001];
        for( Iterator siteIt = ig.getAllSites().iterator(); siteIt.hasNext(); ) {
            final Stmt site = (Stmt) siteIt.next();
            SootMethod method = ig.getDeclaringMethod( site );
            Value ie = site.getInvokeExpr();
            if( !(ie instanceof VirtualInvokeExpr)
                    && !(ie instanceof InterfaceInvokeExpr) ) continue;
            InstanceInvokeExpr expr = (InstanceInvokeExpr) site.getInvokeExpr();
            Local receiver = (Local) expr.getBase();
            Collection types = pag.reachingObjects( method, site, receiver )
                    .possibleTypes();
            Type receiverType = receiver.getType();
            if( receiverType instanceof ArrayType ) {
                receiverType = RefType.v( "java.lang.Object" );
            }
            Collection targets =
                    Scene.v().getOrMakeFastHierarchy().resolveConcreteDispatchWithoutFailing(
                            types, expr.getMethod(), (RefType) receiverType );
            deRefCounts[targets.size()]++;
        }
        total = 0;
        for( int i=0; i < deRefCounts.length; i++ ) total += deRefCounts[i];
        G.v().out.println( "Virtual invoke target counts (total = "+total+"):" );
        for( int i=0; i < deRefCounts.length; i++ ) {
            if( deRefCounts[i] > 0 ) {
                G.v().out.println( ""+i+" "+deRefCounts[i]+" "+(deRefCounts[i]*100.0/total)+"%" );
            }
        }
        */
    }
}
|