/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
package org.griphyn.cPlanner.partitioner;

import org.griphyn.vdl.classes.LFN;

import org.griphyn.cPlanner.common.LogManager;

import org.griphyn.vdl.dax.ADAG;
import org.griphyn.vdl.dax.Filename;
import org.griphyn.vdl.dax.Job;

import org.griphyn.vdl.euryale.Callback;

import java.io.IOException;

import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * This class writes out a partitioned dax that corresponds to one partition
 * as defined by the Partitioner. Each time it is asked to write out the dax
 * for a partition, it parses the original dax again and picks up the
 * information about the jobs making up that partition.
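 * <p>
 * A minimal usage sketch (hypothetical driver code, not part of this class;
 * the Partition instance is assumed to come from the partitioner, and the
 * index of 1 is only illustrative):
 * <pre>
 *   MultipleLook writer = new MultipleLook("/path/to/original.dax", "/tmp/partitions");
 *   Partition partition = ...; //obtained from the partitioner
 *   if (!writer.writePartitionDax(partition, 1)) {
 *       //the error has already been logged; abort or retry as appropriate
 *   }
 * </pre>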
 *
 * @author Karan Vahi
 * @version $Revision: 50 $
 */
public class MultipleLook extends DAXWriter {

    /**
     * The set of job ids in the partition.
     */
    private Set mNodeSet;

    /**
     * A map containing the relations between the jobs making up the partition.
     */
    private Map mRelationsMap;

    /**
     * The ADAG object containing the partitioned dax.
     */
    private ADAG mPartADAG;

    /**
     * The number of jobs that are in the partition.
     */
    private int mNumOfJobs;

    /**
     * The number of jobs about which the callback interface has knowledge.
     */
    private int mCurrentNum;

    /**
     * The index of the partition that is being written out.
     */
    private int mIndex;

    /**
     * The overloaded constructor.
     *
     * @param daxFile   the path to the dax file that is being partitioned.
     * @param directory the directory in which the partitioned daxes are to be
     *                  generated.
     */
    public MultipleLook(String daxFile, String directory) {
        super(daxFile, directory);
        mIndex = -1;
    }

    /**
     * Writes out a dax consisting of the jobs specified in the partition.
     *
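     * <p>
     * A hypothetical sketch of invoking this method once per partition (the
     * collection of Partition objects and the writer are assumed to exist
     * already; neither is created by this method):
     * <pre>
     *   int index = 1;
     *   for (Iterator it = partitions.iterator(); it.hasNext(); index++) {
     *       Partition p = (Partition) it.next();
     *       if (!writer.writePartitionDax(p, index)) {
     *           break; //error already logged by the writer
     *       }
     *   }
     * </pre>
     *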
     * @param partition the partition object containing the ids of the jobs
     *                  making up the partition and the relations between them.
     * @param index     the index of the partition that is being written out.
     *
     * @return true if the dax was successfully generated and written out,
     *         false in case of an error.
     */
    public boolean writePartitionDax(Partition partition, int index) {

        //reset the internal state left over from any previous partition
        mPartADAG = null;
        mNodeSet = null;
        mRelationsMap = null;
        mIndex = index;

        //get from the partition object the set of jobs
        //and the relations between them
        mNodeSet = partition.getNodeIDs();
        mRelationsMap = partition.getRelations();
        mNumOfJobs = mNodeSet.size();

        //the number of jobs about which the callback has information so far
        mCurrentNum = 0;

        mPartADAG = new ADAG(0, index, mPartitionName);

        //parse the original dax; the callback handler populates mPartADAG
        //with the jobs that belong to this partition
        Callback callback = new MyCallBackHandler();
        org.griphyn.vdl.euryale.DAXParser d =
            new org.griphyn.vdl.euryale.DAXParser(null);
        d.setCallback(callback);
        d.parse(mDaxFile);

        //do the actual writing to the file
        this.initializeWriteHandle(mIndex);
        try {
            mPartADAG.toXML(mWriteHandle, "");
        } catch (IOException e) {
            mLogger.log("Error while writing out a partition dax: "
                        + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL);
            return false;
        }
        this.close();

        return true;
    }

    /**
     * The internal callback handler for the DAXParser in Euryale. It stores
     * only those jobs in the dax that belong to the partition, populating
     * them into the internal ADAG object that is then used to write out the
     * dax file corresponding to the partition.
     */
    private class MyCallBackHandler implements Callback {

        /**
         * The empty constructor.
         */
        public MyCallBackHandler() {

        }

        /**
         * Callback when the opening tag was parsed. The attributes map maps
         * each attribute key to its raw value.
         *
         * @param attributes is a map of attribute key to attribute value.
         */
        public void cb_document(Map attributes) {
            //do nothing at the moment
        }

        /**
         * Callback for a filename that is encountered in the dax. No action
         * is taken, as the files are picked up from the uses list of each
         * job in cb_job.
         */
        public void cb_filename(Filename filename) {

        }


        /**
         * Callback for a job that is encountered in the dax. The job is
         * added to the partitioned ADAG only if it belongs to the partition
         * being written out, together with the files that it uses.
         *
         * @param job the job that was parsed from the dax.
         */
        public void cb_job(Job job) {
            List fileList = null;
            Iterator it;

            if (mNodeSet.contains(job.getID())) {
                mCurrentNum++;
                mPartADAG.addJob(job);
                fileList = job.getUsesList();

                //iterate through the uses list of the job and
                //populate the filenames in the ADAG object
                it = fileList.iterator();
                while (it.hasNext()) {
                    Filename file = (Filename) it.next();
                    mPartADAG.addFilename(file.getFilename(),
                                          file.getLink() == LFN.INPUT,
                                          file.getTemporary(),
                                          file.getDontRegister(),
                                          file.getDontTransfer());
                }
            }

        }

        /**
         * Callback for the parent-child relations encountered in the dax.
         * No action is taken, as the relations for the partition are picked
         * up from the Partition object itself in cb_done.
         *
         * @param child   the id of the child job.
         * @param parents the list of ids of the parent jobs.
         */
        public void cb_parents(String child, List parents) {

        }

        /**
         * Callback when the parsing of the dax is complete. It adds the
         * relations between the jobs in the partition to the ADAG object.
         */
        public void cb_done() {
            List parentIDs;

            if (mCurrentNum != mNumOfJobs) {
                //throw an error and exit
                throw new RuntimeException(
                    "Could not find information about all the jobs"
                    + " in the dax for partition " + mNodeSet);
            }

            //add the relations between the jobs in the partition to the ADAG
            Iterator it = mRelationsMap.keySet().iterator();
            while (it.hasNext()) {
                String childID = (String) it.next();
                parentIDs = (List) mRelationsMap.get(childID);

                //get all the parents of the child and populate them in the
                //ADAG object
                Iterator it1 = parentIDs.iterator();
                while (it1.hasNext()) {
                    mPartADAG.addChild(childID, (String) it1.next());
                }

            }

        }

    }
}