Source Code Cross Referenced for ADag.java in  » Workflow-Engines » pegasus-2.1.0 » org » griphyn » cPlanner » classes » Java Source Code / Java DocumentationJava Source Code and Java Documentation

Java Source Code / Java Documentation
1. 6.0 JDK Core
2. 6.0 JDK Modules
3. 6.0 JDK Modules com.sun
4. 6.0 JDK Modules com.sun.java
5. 6.0 JDK Modules sun
6. 6.0 JDK Platform
7. Ajax
8. Apache Harmony Java SE
9. Aspect oriented
10. Authentication Authorization
11. Blogger System
12. Build
13. Byte Code
14. Cache
15. Chart
16. Chat
17. Code Analyzer
18. Collaboration
19. Content Management System
20. Database Client
21. Database DBMS
22. Database JDBC Connection Pool
23. Database ORM
24. Development
25. EJB Server geronimo
26. EJB Server GlassFish
27. EJB Server JBoss 4.2.1
28. EJB Server resin 3.1.5
29. ERP CRM Financial
30. ESB
31. Forum
32. GIS
33. Graphic Library
34. Groupware
35. HTML Parser
36. IDE
37. IDE Eclipse
38. IDE Netbeans
39. Installer
40. Internationalization Localization
41. Inversion of Control
42. Issue Tracking
43. J2EE
44. JBoss
45. JMS
46. JMX
47. Library
48. Mail Clients
49. Net
50. Parser
51. PDF
52. Portal
53. Profiler
54. Project Management
55. Report
56. RSS RDF
57. Rule Engine
58. Science
59. Scripting
60. Search Engine
61. Security
62. Servlet Container
63. Source Control
64. Swing Library
65. Template Engine
66. Test Coverage
67. Testing
68. UML
69. Web Crawler
70. Web Framework
71. Web Mail
72. Web Server
73. Web Services
74. Web Services apache cxf 2.0.1
75. Web Services AXIS2
76. Wiki Engine
77. Workflow Engines
78. XML
79. XML UI
Java
Java Tutorial
Java Open Source
Jar File Download
Java Articles
Java Products
Java by API
Photoshop Tutorials
Maya Tutorials
Flash Tutorials
3ds-Max Tutorials
Illustrator Tutorials
GIMP Tutorials
C# / C Sharp
C# / CSharp Tutorial
C# / CSharp Open Source
ASP.Net
ASP.NET Tutorial
JavaScript DHTML
JavaScript Tutorial
JavaScript Reference
HTML / CSS
HTML CSS Reference
C / ANSI-C
C Tutorial
C++
C++ Tutorial
Ruby
PHP
Python
Python Tutorial
Python Open Source
SQL Server / T-SQL
SQL Server / T-SQL Tutorial
Oracle PL / SQL
Oracle PL/SQL Tutorial
PostgreSQL
SQL / MySQL
MySQL Tutorial
VB.Net
VB.Net Tutorial
Flash / Flex / ActionScript
VBA / Excel / Access / Word
XML
XML Tutorial
Microsoft Office PowerPoint 2007 Tutorial
Microsoft Office Excel 2007 Tutorial
Microsoft Office Word 2007 Tutorial
Java Source Code / Java Documentation » Workflow Engines » pegasus 2.1.0 » org.griphyn.cPlanner.classes 
Source Cross Referenced  Class Diagram Java Document (Java Doc) 


001:        /*
002:         * This file or a portion of this file is licensed under the terms of
003:         * the Globus Toolkit Public License, found in file GTPL, or at
004:         * http://www.globus.org/toolkit/download/license.html. This notice must
005:         * appear in redistributions of this file, with or without modification.
006:         *
007:         * Redistributions of this Software, with or without modification, must
008:         * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
009:         * some other similar material which is provided with the Software (if
010:         * any).
011:         *
012:         * Copyright 1999-2004 University of Chicago and The University of
013:         * Southern California. All rights reserved.
014:         */
015:
016:        package org.griphyn.cPlanner.classes;
017:
018:        import java.util.Enumeration;
019:        import java.util.Iterator;
020:        import java.util.Set;
021:        import java.util.TreeSet;
022:        import java.util.Vector;
023:
024:        import java.io.Writer;
025:        import java.io.StringWriter;
026:        import java.io.IOException;
027:
028:        /**
029:         *  This class object contains the info about a Dag.
030:         *  DagInfo object contains the information to create the .dax file.
031:         *  vJobSubInfos is a Vector containing SubInfo objects of jobs making
032:         *  the Dag.
033:         *  Each subinfo object contains information needed to generate a submit
034:         *  file for that job.
035:         *
036:         * @author Karan Vahi
037:         * @author Gaurang Mehta
038:         * @version $Revision: 351 $
039:         *
040:         * @see DagInfo
041:         * @see SubInfo
042:         */
043:
044:        public class ADag extends Data {
045:
046:            /**
047:             * The DagInfo object which contains the information got from parsing the
048:             * dax file.
049:             */
050:            public DagInfo dagInfo;
051:
052:            /**
053:             * Vector of <code>SubInfo</code> objects. Each SubInfo object contains
054:             * information corresponding to the submit file for one job.
055:             */
056:            public Vector vJobSubInfos;
057:
058:            /**
059:             * The root of the submit directory hierarchy for the DAG. This is the
060:             * directory where generally the DAG related files like the log files,
061:             * .dag and dagman output files reside.
062:             */
063:            private String mSubmitDirectory;
064:
065:            /**
066:             * The optional request ID associated with the DAX.
067:             */
068:            private String mRequestID;
069:
070:            /**
071:             * Initialises the class member variables.
072:             */
073:            public ADag() {
074:                dagInfo = new DagInfo();
075:                vJobSubInfos = new Vector();
076:                mSubmitDirectory = ".";
077:            }
078:
079:            /**
080:             * Overloaded constructor.
081:             *
082:             * @param dg     the <code>DagInfo</code>
083:             * @param vSubs  the jobs in the workflow.
084:             */
085:            public ADag(DagInfo dg, Vector vSubs) {
086:                this .dagInfo = (DagInfo) dg.clone();
087:                this .vJobSubInfos = (Vector) vSubs.clone();
088:                mSubmitDirectory = ".";
089:            }
090:
091:            /**
092:             * Returns a new copy of the Object.
093:             *
094:             * @return the clone of the object.
095:             */
096:            public Object clone() {
097:                ADag newAdag = new ADag();
098:                newAdag.dagInfo = (DagInfo) this .dagInfo.clone();
099:                newAdag.vJobSubInfos = (Vector) this .vJobSubInfos.clone();
100:                newAdag.setBaseSubmitDirectory(this .mSubmitDirectory);
101:                newAdag.setRequestID(this .mRequestID);
102:                return newAdag;
103:            }
104:
105:            /**
106:             * Returns the String description of the dag associated with this object.
107:             *
108:             * @return textual description.
109:             */
110:            public String toString() {
111:                String st = "\n Submit Directory "
112:                        + this .mSubmitDirectory
113:                        + "\n"
114:                        + this .dagInfo.toString()
115:                        + vectorToString("\n Jobs making the DAG ",
116:                                this .vJobSubInfos);
117:                return st;
118:            }
119:
120:            /**
121:             * This adds a new job to the ADAG object. It ends up adding both the job name
122:             * and the job description to the internal structure.
123:             *
124:             * @param job  the new job that is to be added to the ADag.
125:             */
126:            public void add(SubInfo job) {
127:                //add to the dagInfo
128:                dagInfo.addNewJob(job);
129:                vJobSubInfos.addElement(job);
130:            }
131:
132:            /**
133:             * Removes all the jobs from the workflow, and all the edges between
134:             * the workflows. The only thing that remains is the meta data about the
135:             * workflow.
136:             *
137:             *
138:             */
139:            public void clearJobs() {
140:                vJobSubInfos.clear();
141:                dagInfo.dagJobs.clear();
142:                dagInfo.relations.clear();
143:                dagInfo.lfnMap.clear();
144:                //reset the workflow metrics also
145:                this .getWorkflowMetrics().reset();
146:            }
147:
148:            /**
149:             * Returns whether the workflow is empty or not.
150:             * @return boolean
151:             */
152:            public boolean isEmpty() {
153:                return vJobSubInfos.isEmpty();
154:            }
155:
156:            /**
157:             * Removes a particular job from the workflow. It however does not
158:             * delete the relations the edges that refer to the job.
159:             *
160:             * @param job  the <code>SubInfo</code> object containing the job description.
161:             *
162:             * @return boolean indicating whether the removal was successful or not.
163:             */
164:            public boolean remove(SubInfo job) {
165:                boolean a = dagInfo.remove(job);
166:                boolean b = vJobSubInfos.remove(job);
167:                return a && b;
168:            }
169:
170:            /**
171:             * Returns the number of jobs in the dag on the basis of number of elements
172:             * in the <code>dagJobs</code> Vector.
173:             *
174:             * @return the number of jobs.
175:             */
176:            public int getNoOfJobs() {
177:                return this .dagInfo.getNoOfJobs();
178:            }
179:
180:            /**
181:             * Sets the request id.
182:             *
183:             * @param id    the request id.
184:             */
185:            public void setRequestID(String id) {
186:                mRequestID = id;
187:            }
188:
189:            /**
190:             * Returns the request id.
191:             *
192:             * @return    the request id.
193:             */
194:            public String getRequestID() {
195:                return mRequestID;
196:            }
197:
198:            /**
199:             * Adds a new PCRelation pair to the Vector of <code>PCRelation</code>
200:             * pairs. For the new relation the isDeleted parameter is set to false.
201:             *
202:             * @param parent    The parent in the relation pair
203:             * @param child     The child in the relation pair
204:             *
205:             * @see org.griphyn.cPlanner.classes.PCRelation
206:             */
207:            public void addNewRelation(String parent, String child) {
208:                PCRelation newRelation = new PCRelation(parent, child);
209:                this .dagInfo.relations.addElement(newRelation);
210:            }
211:
212:            /**
213:             * Adds a new PCRelation pair to the Vector of <code>PCRelation</code>
214:             * pairs.
215:             *
216:             * @param parent    The parent in the relation pair
217:             * @param child     The child in the relation pair
218:             * @param isDeleted Whether the relation has been deleted due to the reduction
219:             *                  algorithm or not.
220:             *
221:             * @see org.griphyn.cPlanner.classes.PCRelation
222:             */
223:            public void addNewRelation(String parent, String child,
224:                    boolean isDeleted) {
225:                PCRelation newRelation = new PCRelation(parent, child,
226:                        isDeleted);
227:                this .dagInfo.relations.addElement(newRelation);
228:            }
229:
230:            /**
231:             * Sets the submit directory for the workflow.
232:             *
233:             * @param dir   the submit directory.
234:             */
235:            public void setBaseSubmitDirectory(String dir) {
236:                this .mSubmitDirectory = dir;
237:            }
238:
239:            /**
240:             * Returns the label of the workflow, that was specified in the DAX.
241:             *
242:             * @return the label of the workflow.
243:             */
244:            public String getLabel() {
245:                return this .dagInfo.getLabel();
246:            }
247:
248:            /**
249:             * Returns the last modified time for the file containing the workflow
250:             * description.
251:             *
252:             * @return the MTime
253:             */
254:            public String getMTime() {
255:                return this .dagInfo.getMTime();
256:            }
257:
258:            /**
259:             * Returns the root of submit directory hierarchy for the workflow.
260:             *
261:             * @return the directory.
262:             */
263:            public String getBaseSubmitDirectory() {
264:                return this .mSubmitDirectory;
265:            }
266:
267:            /**
268:             * Gets all the parents of a particular node
269:             *
270:             * @param node the name of the job whose parents are to be found.
271:             *
272:             * @return    Vector corresponding to the parents of the node
273:             */
274:            public Vector getParents(String node) {
275:                return this .dagInfo.getParents(node);
276:            }
277:
278:            /**
279:             * Get all the children of a particular node.
280:             *
281:             * @param node  the name of the node whose children we want to find.
282:             *
283:             * @return  Vector containing the
284:             *          children of the node
285:             *
286:             */
287:            public Vector getChildren(String node) {
288:                return this .dagInfo.getChildren(node);
289:            }
290:
291:            /**
292:             * Returns all the leaf nodes of the dag. The way the structure of Dag is
293:             * specified, in terms of the parent child relationship pairs, the
294:             * determination of the leaf nodes can be computationally intensive. The
295:             * complexity is of order n^2
296:             *
297:             * @return Vector of <code>String</code> corresponding to the job names of
298:             *         the leaf nodes.
299:             *
300:             * @see org.griphyn.cPlanner.classes.PCRelation
301:             * @see org.griphyn.cPlanner.classes.DagInfo#relations
302:             */
303:            public Vector getLeafNodes() {
304:                return this .dagInfo.getLeafNodes();
305:            }
306:
307:            /**
308:             * It returns the a unique list of the execution sites that the Planner
309:             * has mapped the dax to so far in it's stage of planning . This is a
310:             * subset of the pools specified by the user at runtime.
311:             *
312:             * @return  a TreeSet containing a list of siteID's of the sites where the
313:             *          dag has to be run.
314:             */
315:            public Set getExecutionSites() {
316:                Set set = new TreeSet();
317:                SubInfo sub = null;
318:
319:                for (Iterator it = this .vJobSubInfos.iterator(); it.hasNext();) {
320:                    sub = (SubInfo) it.next();
321:                    set.add(sub.executionPool);
322:                }
323:
324:                //remove the stork pool
325:                set.remove("stork");
326:
327:                return set;
328:            }
329:
330:            /**
331:             * It determines the root Nodes for the ADag looking at the relation pairs
332:             * of the adag. The way the structure of Dag is specified in terms
333:             * of the parent child relationship pairs, the determination of the leaf
334:             * nodes can be computationally intensive. The complexity if of order n^2.
335:             *
336:             *
337:             * @return the root jobs of the Adag
338:             *
339:             * @see org.griphyn.cPlanner.classes.PCRelation
340:             * @see org.griphyn.cPlanner.classes.DagInfo#relations
341:             */
342:            public Vector getRootNodes() {
343:                return this .dagInfo.getRootNodes();
344:            }
345:
346:            /**
347:             * Returns an iterator for traversing through the jobs in the workflow.
348:             *
349:             * @return Iterator
350:             */
351:            public Iterator jobIterator() {
352:                return this .vJobSubInfos.iterator();
353:            }
354:
355:            /**
356:             * This returns a SubInfo object corresponding to the job by looking through
357:             * all the subInfos.
358:             *
359:             *
360:             *@param job   jobName of the job for which we need the subInfo object.
361:             *
362:             *@return      the <code>SubInfo</code> objects corresponding to the job
363:             */
364:            public SubInfo getSubInfo(String job) {
365:
366:                SubInfo sub = null;
367:
368:                //System.out.println("Job being considered is " + job);
369:                for (Enumeration e = this .vJobSubInfos.elements(); e
370:                        .hasMoreElements();) {
371:                    sub = (SubInfo) e.nextElement();
372:                    if (job.equalsIgnoreCase(sub.jobName)) {
373:                        return sub;
374:                    }
375:
376:                }
377:
378:                throw new RuntimeException(
379:                        "Can't find the sub info object for job " + job);
380:
381:            }
382:
383:            /**
384:             * Returns the metrics about the workflow.
385:             *
386:             * @return the WorkflowMetrics
387:             */
388:            public WorkflowMetrics getWorkflowMetrics() {
389:                return this .dagInfo.getWorkflowMetrics();
390:            }
391:
392:            /**
393:             * Returns the DOT description of the object. This is used for visualizing
394:             * the workflow.
395:             *
396:             * @return String containing the Partition object in XML.
397:             *
398:             * @exception IOException if something fishy happens to the stream.
399:             */
400:            public String toDOT() throws IOException {
401:                Writer writer = new StringWriter(32);
402:                toDOT(writer, "");
403:                return writer.toString();
404:            }
405:
406:            /**
407:             * Returns the DOT description of the object. This is used for visualizing
408:             * the workflow.
409:             *
410:             * @param stream is a stream opened and ready for writing. This can also
411:             *               be a StringWriter for efficient output.
412:             * @param indent  is a <code>String</code> of spaces used for pretty
413:             *                printing. The initial amount of spaces should be an empty
414:             *                string. The parameter is used internally for the recursive
415:             *                traversal.
416:             *
417:             *
418:             * @exception IOException if something fishy happens to the stream.
419:             */
420:            public void toDOT(Writer stream, String indent) throws IOException {
421:                String newLine = System.getProperty("line.separator", "\r\n");
422:
423:                String newIndent = (indent == null) ? "\t" : indent + "\t";
424:
425:                //write out the dot header
426:                writeDOTHeader(stream, null);
427:
428:                //traverse through the jobs
429:                for (Iterator it = jobIterator(); it.hasNext();) {
430:                    ((SubInfo) it.next()).toDOT(stream, newIndent);
431:                }
432:
433:                stream.write(newLine);
434:
435:                //traverse through the edges
436:                for (Iterator it = dagInfo.relations.iterator(); it.hasNext();) {
437:                    ((PCRelation) it.next()).toDOT(stream, newIndent);
438:                }
439:
440:                //write out the tail
441:                stream.write("}");
442:                stream.write(newLine);
443:            }
444:
445:            /**
446:             * Writes out the static DOT Header.
447:             *
448:             * @param stream is a stream opened and ready for writing. This can also
449:             *               be a StringWriter for efficient output.
450:             * @param indent  is a <code>String</code> of spaces used for pretty
451:             *                printing. The initial amount of spaces should be an empty
452:             *                string. The parameter is used internally for the recursive
453:             *                traversal.
454:             *
455:             *
456:             * @exception IOException if something fishy happens to the stream.
457:             */
458:            public void writeDOTHeader(Writer stream, String indent)
459:                    throws IOException {
460:                String newLine = System.getProperty("line.separator", "\r\n");
461:
462:                String newIndent = (indent == null) ? null : indent + "\t";
463:
464:                //write out the header and static stuff for now
465:                if (indent != null && indent.length() > 0) {
466:                    stream.write(indent);
467:                }
468:                ;
469:                stream.write("digraph E {");
470:                stream.write(newLine);
471:
472:                //write out the size of the image
473:                if (newIndent != null && newIndent.length() > 0) {
474:                    stream.write(newIndent);
475:                }
476:                stream.write("size=\"8.0,10.0\"");
477:                stream.write(newLine);
478:
479:                //write out the ratio
480:                if (newIndent != null && newIndent.length() > 0) {
481:                    stream.write(newIndent);
482:                }
483:                ;
484:                stream.write("ratio=fill");
485:                stream.write(newLine);
486:
487:                //write out what the shape of the nodes need to be like
488:                if (newIndent != null && newIndent.length() > 0) {
489:                    stream.write(newIndent);
490:                }
491:                ;
492:                stream.write("node [shape=ellipse]");
493:                stream.write(newLine);
494:
495:                //write out how edges are to be rendered.
496:                if (newIndent != null && newIndent.length() > 0) {
497:                    stream.write(newIndent);
498:                }
499:                ;
500:                stream.write("edge [arrowhead=normal, arrowsize=1.0]");
501:                stream.write(newLine);
502:
503:            }
504:
505:        }
www.java2java.com | Contact Us
Copyright 2009 - 12 Demo Source and Support. All rights reserved.
All other trademarks are property of their respective owners.