0001: /*
0002: * This file or a portion of this file is licensed under the terms of
0003: * the Globus Toolkit Public License, found in file GTPL, or at
0004: * http://www.globus.org/toolkit/download/license.html. This notice must
0005: * appear in redistributions of this file, with or without modification.
0006: *
0007: * Redistributions of this Software, with or without modification, must
0008: * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
0009: * some other similar material which is provided with the Software (if
0010: * any).
0011: *
0012: * Copyright 1999-2004 University of Chicago and The University of
0013: * Southern California. All rights reserved.
0014: */
0015: package org.griphyn.vdl.euryale;
0016:
0017: import org.griphyn.common.util.Separator;
0018: import org.griphyn.common.util.Currently;
0019: import org.griphyn.vdl.dax.*;
0020: import org.griphyn.vdl.util.*;
0021: import gnu.getopt.*;
0022: import java.io.*;
0023: import java.util.*;
0024: import java.util.regex.*;
0025:
0026: /**
0027: * This class is used to convert in streaming mode information from an
0028: * abstract DAG in XML (DAX) into a DAGMan .dag file and a couple of
0029: * related files, i.e. Condor submit files and planner control files.
0030: * The parser converts the DAX document specified in the commandline.
0031: *
0032: * @author Kavitha Ranganathan
0033: * @author Jens-S. Vöckler
0034: * @author Yong Zhao
0035: * @version $Revision: 289 $
0036: *
0037: * @see DAXParser
0038: * @see org.griphyn.vdl.dax.ADAG
0039: */
0040: public class DAX2DAG implements Callback {
/**
 * Stores the current version number for whatever purposes.
 */
public static final String c_version = "$Revision: 289 $";

/**
 * Stores the digested version number from the class constant.
 */
private String m_version;

/**
 * Stores the completed DAX label, suffixed with a hyphen and the
 * DAX index, used as the basename for generated files.
 */
private String m_label;

/**
 * Remembers the filename of the .dag file.
 */
private File m_dagname;

/**
 * Stores an instance to the .dag file to write in steps.
 */
private PrintWriter m_dagfile;

/**
 * The location of the program to run as the DAGMan prescript.
 */
private File m_prescript;

/**
 * The location of the program to run as the DAGMan postscript.
 */
private File m_postscript;

/**
 * The number of retries per job node.
 */
private int m_retries = 5;

/**
 * Stores an instance to a logger.
 */
private Logging m_log;

/**
 * Start time to use in time stamping.
 */
private Date m_timestamp;

/**
 * Printable version of the {@link #m_timestamp} above.
 */
private String m_cooked_stamp;

/**
 * Maintains the directory where to put the output files.
 * Will be dynamically created, if it does not exist.
 */
private FlatFileFactory m_factory;

/**
 * Maintains the base directory until the file factory
 * can be instantiated.
 */
private File m_basedir;

/**
 * Maintains the dynamically generated name of the common
 * Condor logfile in a temporary directory. Singleton pattern.
 */
private String m_logfile;

/**
 * Maintains a minimum level for the hashed file factory
 * to be used during instantiation.
 */
private int m_minlevel;

/**
 * Records the location of the workflow configuration file.
 */
private File m_wfrc;

/**
 * Maintains the submit file template's filename.
 */
private String m_sftFilename;

/**
 * Maintains the in-memory copy of the submit file template. This
 * is expected to be no larger than 2kB, thus an in-memory copy
 * should work a lot faster than continually re-reading the file.
 */
private ArrayList m_sft;

/**
 * Maintains the kickstart V2 config file template's filename.
 */
private String m_cftFilename;

/**
 * Maintains the in-memory copy of an optional config file template.
 * This is expected to be no larger than 2kB, thus an in-memory copy
 * should work a lot faster than continually re-reading the file.
 */
private ArrayList m_cft;

/**
 * Verbosity level of messages that go onto the "app" logging queue.
 */
private int m_verbosity;

/**
 * Maintains the properties to properly address workflow concerns.
 */
private Properties m_props;

/**
 * Maintains a set of all jobs seen here.
 */
private Set m_job;

/**
 * Maintains the relation of jobs to one another: for each job id,
 * the set of its parents resp. children in the workflow graph.
 */
private Map m_parent;
private Map m_child;
0169:
0170: /**
0171: * Set some defaults, should values be missing in the dataset.
0172: * This method will only copy the properties starting with the
0173: * "wf." prefix, and look for VDS logging related properties.
0174: *
0175: * @param from is the initial set of properties to use for copying.
0176: * @return a set of properties derived from system properties.
0177: * @see java.lang.System#getProperties()
0178: */
0179: private Properties defaultProperties(Properties from) {
0180: // initial
0181: Properties result = new Properties();
0182: Pattern pattern = Pattern.compile("\\$\\{[-a-zA-Z0-9._]+\\}");
0183:
0184: // copy wf keys as specified in the system properties to defaults
0185: for (Enumeration e = from.propertyNames(); e.hasMoreElements();) {
0186: String key = (String) e.nextElement();
0187: String value = from.getProperty(key);
0188:
0189: if (key.startsWith("wf.") || key.startsWith("work.")) {
0190: // unparse value ${prop.key} inside braces
0191: Matcher matcher = pattern.matcher(value);
0192: StringBuffer sb = new StringBuffer();
0193: boolean found = false;
0194: while (matcher.find()) {
0195: // extract name of properties from braces
0196: String newKey = value.substring(
0197: matcher.start() + 2, matcher.end() - 1);
0198:
0199: // try to find a matching value in result properties
0200: String newVal = result.getProperty(newKey, from
0201: .getProperty(newKey, System
0202: .getProperty(newKey)));
0203:
0204: // replace braced string with the actual value or empty string
0205: matcher.appendReplacement(sb, newVal == null ? ""
0206: : newVal);
0207:
0208: // for later
0209: found = true;
0210: }
0211:
0212: matcher.appendTail(sb);
0213: result.setProperty(key, sb.toString());
0214: }
0215:
0216: if (key.startsWith("vds.")) {
0217: if (key.equals("vds.verbose"))
0218: m_log.setVerbose(Integer.parseInt(value));
0219: else if (key.startsWith("vds.log.")) {
0220: m_log.register(key.substring(8), value);
0221: }
0222: }
0223: }
0224:
0225: // final
0226: return result;
0227: }
0228:
0229: static private String catfile(String d1, String d2, String fn) {
0230: File f1 = new File(d1, d2);
0231: File f2 = new File(f1, fn);
0232: return f2.getPath();
0233: }
0234:
0235: /**
0236: * Constructs a new instance of the converter and reads properties from
0237: * the default position.
0238: */
/**
 * Constructs a new instance of the converter and reads properties from
 * the default position.
 */
public DAX2DAG() {
    // start logging first -- defaultProperties() below logs via m_log
    m_log = Logging.instance();
    m_verbosity = 0;

    m_timestamp = new Date();
    m_cooked_stamp = null;
    m_label = null;
    m_dagname = null;
    m_dagfile = null;
    m_logfile = null;
    // strip the leading "$Revision: " (10 chars) and trailing "$"
    // from the CVS keyword constant
    m_version = c_version.substring(10, c_version.length() - 1)
            .trim();
    m_props = defaultProperties(System.getProperties());
    m_wfrc = new File(System.getProperty("user.home", "."), ".wfrc");
    m_cft = null;
    m_sft = null;

    // NOTE(review): m_props only carries "wf."/"work." keys, so the
    // first lookup always falls through to the system property --
    // confirm this double lookup is intentional
    String vds_home = m_props.getProperty("vds.home", System
            .getProperty("vds.home"));
    m_sftFilename = catfile(vds_home, "share", "grid3.sft");

    // default pre- and postscript locations under $VDS_HOME/libexec
    File libexec = new File(vds_home, "libexec");
    m_prescript = new File(libexec, "prescript.pl");
    m_postscript = new File(libexec, "postscript.pl");

    // attach the "app" logging queue to stdout, unless configured;
    // otherwise sanitize an out-of-range verbosity level
    if (m_log.isUnset("app")) {
        m_verbosity = 0;
        m_log.register("app", System.out, m_verbosity);
    } else {
        m_verbosity = m_log.getLevel("app");
        if (m_verbosity == Integer.MAX_VALUE || m_verbosity < 0) {
            m_verbosity = 0;
            m_log.setLevel("app", m_verbosity);
        }
    }

    // job registry and parent/child dependency relations
    m_job = new HashSet();
    m_parent = new HashMap();
    m_child = new HashMap();

    // create files in current directory, unless anything else is known.
    m_basedir = new File(".");
    try {
        m_factory = new FlatFileFactory(m_basedir); // minimum default
    } catch (IOException io) {
        m_log.log("default", 0,
                "WARNING: Unable to generate files in the CWD");
    }
    m_minlevel = -1;
}
0291:
0292: /**
0293: * Increases the verbosity of the app logging queue.
0294: * @return the current level.
0295: */
0296: public int increaseVerbosity() {
0297: this .m_log.setLevel("app", ++this .m_verbosity);
0298: return this .m_verbosity;
0299: }
0300:
0301: /**
0302: * Remembers which workflow property file should be chosen. It will
0303: * not be read now. Only its location will be remembered.
0304: * @param wfrc is the location of a property file.
0305: */
0306: public void setWorkflowPropertyFile(File wfrc) {
0307: m_wfrc = wfrc;
0308: }
0309:
0310: public void finalizeProperties() {
0311: boolean success = false;
0312: Properties temp = new Properties();
0313:
0314: try {
0315: if (m_wfrc.exists() && m_wfrc.canRead()) {
0316: FileInputStream fis = new FileInputStream(m_wfrc);
0317: temp.load(fis);
0318: fis.close();
0319: success = true;
0320: } else {
0321: m_log.log("app", 0,
0322: "WARNING: No wfrc property file found!");
0323: }
0324: } catch (IOException io) {
0325: m_log.log("default", 0,
0326: "WARNING: Error while reading properties " + m_wfrc
0327: + ": " + io.getMessage());
0328: }
0329:
0330: // replace, if we were able to read, and if there is anything
0331: // available in the new property set.
0332: Properties p = defaultProperties(temp);
0333: p.putAll(m_props);
0334: m_props = p;
0335:
0336: // init property-dependent member variables
0337: String r = m_props.getProperty("wf.job.retries");
0338: if (r != null)
0339: m_retries = Integer.parseInt(r);
0340:
0341: // some more sanity checking
0342: if ((r = m_props.getProperty("wf.script.pre")) == null) {
0343: if (m_prescript == null)
0344: throw new RuntimeException(
0345: "ERROR: Unable to determine a pre-script location");
0346: } else {
0347: m_prescript = new File(r);
0348: }
0349:
0350: if ((r = m_props.getProperty("wf.script.post")) == null) {
0351: if (m_postscript == null)
0352: throw new RuntimeException(
0353: "ERROR: Unable to determine a post-script location");
0354: } else {
0355: m_postscript = new File(r);
0356: }
0357: }
0358:
0359: /**
0360: * Allows to set a property from the code.
0361: * @param key is the property key
0362: * @param value is the new value to store
0363: * @return the previous value, or null
0364: */
0365: public String setProperty(String key, String value) {
0366: return (String) this .m_props.setProperty(key, value);
0367: }
0368:
0369: /**
0370: * Sets the DAGMan PRE script location.
0371: * @param fn is the location of the PRE script.
0372: */
0373: public void setPrescript(String fn) {
0374: m_prescript = new File(fn);
0375: setProperty("wf.script.pre", fn);
0376: }
0377:
0378: /**
0379: * Sets the DAGMan POST script location.
0380: * @param fn is the location of the POST script.
0381: */
0382: public void setPostscript(String fn) {
0383: m_postscript = new File(fn);
0384: setProperty("wf.script.post", fn);
0385: }
0386:
0387: /**
0388: * Sets the output directory. This directory will be dynamically
0389: * created once the document header is found.
0390: * @param dir is the new directory to use
0391: */
0392: public void setDirectory(String dir) {
0393: m_basedir = new File(dir);
0394: }
0395:
0396: /**
0397: * Sets the minimum level in the hashed file factory. This is
0398: * to remember until the factory actually gets instantiated.
0399: * @param level is the minimum level requested
0400: */
0401: public void setMinimumLevel(int level) {
0402: m_minlevel = level;
0403: }
0404:
0405: /**
0406: * Sets the timestamp that is being emitted in all files.
0407: *
0408: * @param then is the new date to use for the timestamping.
0409: * @return the previously valid timestamp.
0410: */
0411: public Date setTimestamp(Date then) {
0412: Date old = m_timestamp;
0413: m_timestamp = then;
0414: m_cooked_stamp = Currently.iso8601(true, false, false,
0415: m_timestamp);
0416: return old;
0417: }
0418:
0419: /**
0420: * Reads the submit file template into memory for submit file
0421: * generation.
0422: *
0423: * @param sft is a file that contains the submit file template
0424: * @return false if unable to read the submit file template
0425: */
0426: public boolean setSubmitFileTemplate(File sft) {
0427: boolean result = false;
0428:
0429: try {
0430: String line;
0431: ArrayList temp = new ArrayList();
0432:
0433: BufferedReader br = new BufferedReader(new FileReader(sft));
0434: while ((line = br.readLine()) != null)
0435: temp.add(line);
0436: br.close();
0437:
0438: // switch now on success
0439: result = true;
0440: m_sftFilename = sft.getCanonicalPath();
0441: m_sft = temp;
0442: } catch (IOException io) {
0443: System.err
0444: .println("ERROR: Unable to read submit file template "
0445: + sft + ": " + io.getMessage());
0446: System.exit(3);
0447: }
0448:
0449: return result;
0450: }
0451:
0452: /**
0453: * Reads the configuration file template into memory for kickstart V2
0454: * file generation. This function is only activated, if kickstart v2
0455: * configuration is being requested.
0456: *
0457: * @param cft is the file that contains the config file template
0458: * @return false if unable to read the config file template
0459: */
0460: public boolean setConfigFileTemplate(File cft) {
0461: boolean result = false;
0462:
0463: try {
0464: String line;
0465: ArrayList temp = new ArrayList();
0466:
0467: BufferedReader br = new BufferedReader(new FileReader(cft));
0468: while ((line = br.readLine()) != null)
0469: temp.add(line);
0470: br.close();
0471:
0472: // switch now on success
0473: result = true;
0474: m_cftFilename = cft.getCanonicalPath();
0475: m_cft = temp;
0476: } catch (IOException io) {
0477: System.err
0478: .println("ERROR: Unable to read config file template "
0479: + cft + ": " + io.getMessage());
0480: System.exit(4);
0481: }
0482:
0483: return result;
0484: }
0485:
0486: /**
0487: * Callback when the opening tag was parsed. The attribute maps each
0488: * attribute to its raw value. The callback initializes the DAG
0489: * writer.
0490: *
0491: * @param attributes is a map of attribute key to attribute value
0492: */
/**
 * Callback when the opening tag was parsed. The attribute maps each
 * attribute to its raw value. The callback initializes the DAG
 * writer: it derives the workflow label, the common Condor log file,
 * the hashed output directory factory, and opens the .dag file.
 * Fatal setup errors exit the JVM with code 1.
 *
 * @param attributes is a map of attribute key to attribute value
 */
public void cb_document(java.util.Map attributes) {
    m_log.log("dax2dag", 1, "got attributes "
            + attributes.toString());

    // extract the label of the dax; fall back to "test" if absent
    if ((this.m_label = (String) attributes.get("name")) == null)
        this.m_label = "test";
    // create a temporary filename for the common log file;
    // a file in the temp dir is presumably local, avoiding NFS
    // locking issues with the shared Condor log
    try {
        this.m_logfile = File.createTempFile(m_label + "-", ".log",
                null).getAbsolutePath();
    } catch (IOException e) {
        // use local, relative entry
        this.m_logfile = m_label + ".log";
    }

    // extract the index/count of the dax, usually 0
    String index = (String) attributes.get("index");
    if (index == null)
        index = "0";

    // create the complete label to name the .dag file
    this.m_label += "-" + index;

    // create hashed, and levelled directories, sized by the
    // expected job count (overridable via the minimum level)
    String s = (String) attributes.get("jobCount");
    try {
        HashedFileFactory temp = null;
        int jobCount = (s == null ? 0 : Integer.parseInt(s));
        if (m_minlevel > 0 && m_minlevel > jobCount)
            jobCount = m_minlevel;
        if (jobCount > 0)
            temp = new HashedFileFactory(m_basedir, jobCount);
        else
            temp = new HashedFileFactory(m_basedir);

        m_factory = temp;
        m_log.log("default", 0, "using " + temp.getLevels()
                + " directory levels");
    } catch (NumberFormatException nfe) {
        if (s == null)
            System.err
                    .println("ERROR: Unspecified number for jobCount");
        else
            System.err.println("ERROR: Illegal number \"" + s
                    + "\" for jobCount");
        System.exit(1);
    } catch (IOException e) {
        System.err.println("ERROR: Base directory creation");
        System.err.println(e.getMessage());
        System.exit(1);
    }

    // create dag filename through the factory (flat, in base dir)
    try {
        m_dagname = m_factory.createFlatFile(this.m_label + ".dag");
    } catch (IOException io) {
        System.err
                .println("Unable to create a flat filename for the DAG: "
                        + io.getMessage());
        System.exit(1);
    }

    // open dag writer and emit the header comment block
    m_log.log("dax2dag", 2, "open dag writer " + m_dagname);
    try {
        if (m_dagname.exists())
            m_log.log("default", 0, "WARNING: Overwriting file "
                    + m_dagname);
        m_dagfile = new PrintWriter(new FileWriter(m_dagname));

        m_dagfile.println("# dax2dag " + m_version);
        m_dagfile.print("## ");
        if (m_dagname.getParent() != null)
            m_dagfile.println("cd " + m_dagname.getParent());
        m_dagfile.println("## vds-submit-dag "
                + m_dagname.getName());

        m_dagfile.println("# "
                + Currently
                        .iso8601(false, true, false, m_timestamp));
        m_dagfile.println("#");
    } catch (IOException io) {
        System.err.println("Unable to open DAG " + m_dagname + ": "
                + io.getMessage());
        System.exit(1);
    }
}
0581:
0582: /**
0583: * Callback when the section 1 filenames are being parsed. This is
0584: * unused by design, as the reduction of a DAG according to the
0585: * existence of files happens dynamically.
0586: *
0587: * @param filename is a DAX-style filename elements.
0588: */
/**
 * Callback when the section 1 filenames are being parsed. This is
 * unused by design, as the reduction of a DAG according to the
 * existence of files happens dynamically.
 *
 * @param filename is a DAX-style filename element.
 */
public void cb_filename(Filename filename) {
    // intentionally empty -- see javadoc above
    // m_log.log( "dax2dag", 1, "filename callback " + filename.getFilename() );
}
0592:
0593: /**
0594: * Converts the dontRegister and dontTransfer flags into a numeric
0595: * value of reverse meaning.
0596: *
0597: * <table>
0598: * <tr><th>dR</th><th>dT</th><th>result</th></tr>
0599: * <tr><td>false</td><td>0</td><td>0</td></tr>
0600: * <tr><td>false</td><td>1</td><td>1</td></tr>
0601: * <tr><td>false</td><td>2</td><td>2</td></tr>
0602: * <tr><td>true</td><td>0</td><td>4</td></tr>
0603: * <tr><td>true</td><td>1</td><td>5</td></tr>
0604: * <tr><td>true</td><td>2</td><td>6</td></tr>
0605: * </table>
0606: *
0607: * @param dontRegister true for unregistered files.
0608: * @param dontTransfer for the chosen transfer mode.
0609: * @return the numerical representation.
0610: */
0611: private int assembleRT(boolean dontRegister, int dontTransfer) {
0612: int result = dontTransfer; // range 0..2
0613: if (!dontRegister)
0614: result |= 0x04;
0615: return result;
0616: }
0617:
0618: /**
0619: * Replaces a true logical filename with a construct that is late
0620: * bound to the true file. Thus, the output is a !!var!! like:<p>
0621: * <pre>!!LFN:filename!!</pre>
0622: *
0623: * @param f is the logical filename DAX construct.
0624: * @return a String for a late binding replacement
0625: */
0626: private String convertFilename(Filename f) {
0627: StringBuffer result = new StringBuffer(32);
0628:
0629: result.append("!!LFN:");
0630: result.append(f.getFilename());
0631: result.append("!!");
0632:
0633: return result.toString();
0634: }
0635:
0636: /**
0637: * Converts a dax leaf element into something to output.
0638: *
0639: * @param l is a leaf element
0640: * @return the printable version of the leaf, or an empty string.
0641: */
0642: private String convertLeaf(Leaf l) {
0643: if (l instanceof PseudoText) {
0644: return ((PseudoText) l).getContent();
0645: } else if (l instanceof Filename) {
0646: return convertFilename((Filename) l);
0647: } else {
0648: // FIXME: complain
0649: return new String();
0650: }
0651: }
0652:
0653: /**
0654: * Converts a given @@key@@ variable into its replacement value.
0655: * Only a fixed set of variables are hard-coded in this method.
0656: *
0657: * <table>
0658: * <tr><th>key</th><th>meaning</th></tr>
0659: * <tr><td>ARGS</td><td>is from job/argument, may be empty</td></tr>
0660: * <tr><td>CONFIG</td><td>is the k2 config filename</td></tr>
0661: * <tr><td>LOGFILE</td><td>is the log file all submit files share.
0662: * Note: For reasons for NFS locking, this file should reside on a
0663: * local filesystem.</td></tr>
0664: * <tr><td>DAGFILE</td><td>is the filename of the DAGMan .dag file</td></tr>
0665: * <tr><td>DAXLABEL</td><td>is the adag@label value</td></tr>
0666: * <tr><td>DAXMTIME</td><td>is the some time assoc. with the .dax file</td></tr>
0667: * <tr><td>DV</td><td>is the combined job@dv-{namespace|name|version}</td></tr>
0668: * <tr><td>GENERATOR</td><td>Name of the generator</td></tr>
0669: * <tr><td>JOBID</td><td>is the job@id value for this job</td></tr>
0670: * <tr><td>LEVEL</td><td>is the job@level value for this job</td></tr>
0671: * <tr><td>MAXPEND</td><td>is the maximum time a job is willing to pend
0672: * remotely (spend in idle on the local Condor) before it is being
0673: * replanned. Defaults to 2 hours.</td>
0674: * <tr><td>STDIN</td><td>is the optional LFN from the job/stdin filename</td></tr>
0675: * <tr><td>STDOUT</td><td>is the optional LFN from the job/stdout filename</td></tr>
0676: * <tr><td>STDERR</td><td>is the optional LFN from the job/stderr filename</td></tr>
0677: * <tr><td>SUBMIT</td><td>is the submit filename</td></tr>
0678: * <tr><td>SUBBASE</td><td>is the submit filename minus the .sub suffix</td></tr>
0679: * <tr><td>TEMPLATE</td><td>is the submit filename template name</td></tr>
0680: * <tr><td>TR</td><td>is the combined job@{namespace|name|version}</td></tr>
0681: * <tr><td>VERSION</td><td>for starters 1.0 will do</td></tr>
0682: * </table>
0683: *
0684: * @param key is the key with the at characters removed.
0685: * @param job is the job from which to glean additional information.
0686: * @param submitFilename is the filename of the submit file
0687: * @return the replacement, which may be an empty string.
0688: */
/**
 * Converts a given @@key@@ variable into its replacement value.
 * Only a fixed set of variables are hard-coded in this method.
 *
 * <table>
 * <tr><th>key</th><th>meaning</th></tr>
 * <tr><td>ARGS</td><td>is from job/argument, may be empty</td></tr>
 * <tr><td>CONFIG</td><td>is the k2 config filename</td></tr>
 * <tr><td>LOGFILE</td><td>is the log file all submit files share.
 * Note: For reasons of NFS locking, this file should reside on a
 * local filesystem.</td></tr>
 * <tr><td>DAGFILE</td><td>is the filename of the DAGMan .dag file</td></tr>
 * <tr><td>DAXLABEL</td><td>is the adag@label value</td></tr>
 * <tr><td>DAXMTIME</td><td>is some time associated with the .dax file</td></tr>
 * <tr><td>DV</td><td>is the combined job@dv-{namespace|name|version}</td></tr>
 * <tr><td>GENERATOR</td><td>Name of the generator</td></tr>
 * <tr><td>JOBID</td><td>is the job@id value for this job</td></tr>
 * <tr><td>LEVEL</td><td>is the job@level value for this job</td></tr>
 * <tr><td>MAXPEND</td><td>is the maximum time a job is willing to pend
 * remotely (spend in idle on the local Condor) before it is being
 * replanned. Defaults to 2 hours.</td></tr>
 * <tr><td>STDIN</td><td>is the optional LFN from the job/stdin filename</td></tr>
 * <tr><td>STDOUT</td><td>is the optional LFN from the job/stdout filename</td></tr>
 * <tr><td>STDERR</td><td>is the optional LFN from the job/stderr filename</td></tr>
 * <tr><td>SUBMIT</td><td>is the submit filename</td></tr>
 * <tr><td>SUBBASE</td><td>is the submit filename minus the .sub suffix</td></tr>
 * <tr><td>TEMPLATE</td><td>is the submit filename template name</td></tr>
 * <tr><td>TR</td><td>is the combined job@{namespace|name|version}</td></tr>
 * <tr><td>VERSION</td><td>for starters 1.0 will do</td></tr>
 * </table>
 *
 * @param key is the key with the at characters removed. Assumed to be
 * non-empty; an empty key would raise StringIndexOutOfBoundsException.
 * @param job is the job from which to glean additional information.
 * @param submitFilename is the filename of the submit file
 * @return the replacement, which may be an empty string.
 */
private String convertVariable(String key, Job job,
        String submitFilename) {
    String result = null;
    m_log.log("dax2dag", 4, "converting key " + key);

    // dispatch on the first character, then compare the full key
    switch (key.charAt(0)) {
    case 'A':
        if (key.equals("ARGS")) {
            // concatenate all argument leaves into one string
            StringBuffer arglist = new StringBuffer(32);
            for (Iterator i = job.iterateArgument(); i.hasNext();) {
                arglist.append(convertLeaf((Leaf) i.next()));
            }
            result = arglist.toString();
        }
        break;

    case 'C':
        if (key.equals("CONFIG")) {
            // replace the "sub" suffix with "in": x.sub -> x.in
            result = submitFilename.substring(0, submitFilename
                    .length() - 3)
                    + "in";
        }
        break;

    case 'D':
        if (key.equals("DV")) {
            result = Separator.combine(job.getDVNamespace(), job
                    .getDVName(), job.getDVVersion());
        } else if (key.equals("DAXLABEL")) {
            result = this.m_label;
        } else if (key.equals("DAXMTIME")) {
            result = this.m_cooked_stamp;
            // Currently.iso8601(true,false,false,m_timestamp);
        } else if (key.equals("DAGFILE")) {
            result = m_dagname.getPath();
        }
        break;

    case 'G':
        if (key.equals("GENERATOR")) {
            result = "d2d";
        }
        break;

    case 'J':
        if (key.equals("JOBID")) {
            result = job.getID();
        }
        break;

    case 'L':
        if (key.equals("LEVEL")) {
            result = Integer.toString(job.getLevel());
        } else if (key.equals("LOGFILE")) {
            // fall back to a label-derived name if no temp file exists
            result = (m_logfile == null ? m_label + ".log"
                    : m_logfile);
        }
        break;

    case 'M':
        if (key.equals("MAXPEND")) {
            // enforce a minimum of 600 seconds, default 2 hours
            String temp = m_props.getProperty("wf.max.pending",
                    "7200");
            if (Integer.parseInt(temp) >= 600)
                result = temp;
            else
                result = "7200";
        }
        break;

    case 'S':
        if (key.equals("SUBMIT")) {
            result = submitFilename;
        } else if (key.equals("SUBBASE")) {
            // strip the trailing ".sub" suffix
            result = submitFilename.substring(0, submitFilename
                    .length() - 4);
        } else if (key.equals("STDIN")) {
            if (job.getStdin() != null)
                result = convertFilename(job.getStdin());
        } else if (key.equals("STDOUT")) {
            if (job.getStdout() != null)
                result = convertFilename(job.getStdout());
        } else if (key.equals("STDERR")) {
            if (job.getStderr() != null)
                result = convertFilename(job.getStderr());
        }
        break;

    case 'T':
        if (key.equals("TR")) {
            result = Separator.combine(job.getNamespace(), job
                    .getName(), job.getVersion());
        } else if (key.equals("TEMPLATE")) {
            result = m_sftFilename;
        }
        break;

    case 'V':
        if (key.equals("VERSION")) {
            result = m_version;
        }
        break;

    default:
        // FIXME: unknown keys are silently mapped to the empty string
    }

    // guarantee to return a valid string and not null
    return (result == null ? new String() : result);
}
0799:
0800: /**
0801: * Writes the job planner configuration into the submit file. The
0802: * section file contains several configuration sections to ease the
0803: * life of the late planner.
0804: *
0805: * @param prefix is the prefix to use in front of the uses section.
0806: * @param sfw is an opened submit file writer.
0807: * @param job is the job from which to create the config file.
0808: * @throws IOException, if something goes wrong while opening the
0809: * file.
0810: */
/**
 * Writes the job planner configuration into the submit file. The
 * section file contains several configuration sections to ease the
 * life of the late planner: [filenames], [stdio], [profiles], [job].
 * Each emitted line is prefixed so the late planner can find it
 * among regular submit file content.
 *
 * @param prefix is the prefix to use in front of the uses section.
 * @param sfw is an opened submit file writer.
 * @param job is the job from which to create the config file.
 * @throws IOException if something goes wrong while writing the
 * file.
 */
private void writeUsesSection(String prefix, PrintWriter sfw,
        Job job) throws IOException {
    // section filenames, may be empty
    // format per line: <io> <rt> "<lfn>"
    sfw.println(prefix + "[filenames]");
    for (Iterator i = job.iterateUses(); i.hasNext();) {
        Filename f = (Filename) i.next();

        // format: <io> <rt> "<lfn>"
        sfw.print(prefix);
        sfw.print(f.getLink());
        sfw.print(' ');
        sfw.print(assembleRT(f.getDontRegister(), f
                .getDontTransfer()));
        sfw.print(" \"");
        sfw.print(f.getFilename());
        // sfw.print( "\" \"" );
        // String temp = f.getTemporary();
        // if ( temp != null ) sfw.print( temp );
        sfw.println("\"");
    }
    sfw.println(prefix);

    // section stdio, may be empty; each stream only when present
    sfw.println(prefix + "[stdio]");
    if (job.getStdin() != null)
        sfw.println(prefix + "stdin="
                + convertFilename(job.getStdin()));
    if (job.getStdout() != null)
        sfw.println(prefix + "stdout="
                + convertFilename(job.getStdout()));
    if (job.getStderr() != null)
        sfw.println(prefix + "stderr="
                + convertFilename(job.getStderr()));
    sfw.println(prefix);

    // section profile, may be empty; format: ns.key="leaves..."
    sfw.println(prefix + "[profiles]");
    for (Iterator i = job.iterateProfile(); i.hasNext();) {
        Profile p = (Profile) i.next();
        sfw.print(prefix + p.getNamespace() + "." + p.getKey()
                + "=\"");
        for (Iterator j = p.iterateLeaf(); j.hasNext();) {
            sfw.print(convertLeaf((Leaf) j.next()));
        }
        sfw.println("\"");
    }
    sfw.println(prefix);

    // section job, usually not empty
    sfw.println(prefix + "[job]");
    sfw.println(prefix
            + "transformation="
            + Separator.combine(job.getNamespace(), job.getName(),
                    job.getVersion()));
    sfw.println(prefix
            + "derivation="
            + Separator.combine(job.getDVNamespace(), job
                    .getDVName(), job.getDVVersion()));
    sfw.println(prefix + "wf_label=" + this.m_label);
    sfw.println(prefix + "wf_time=" + this.m_cooked_stamp);

    // kickstart V2 or not: flag only when a config template is loaded
    if (m_cft != null && m_cft.size() > 0)
        sfw.println(prefix + "kickstart=v2");

    sfw.println(prefix);
}
0878:
0879: /**
0880: * Writes the .sub Condor submit file. The submit file contains
0881: * semi-planned job information from the generic job template.
0882: *
0883: * @param submit is the location where to create the file at.
0884: * @param job is the job from which to create the submit file.
0885: * @throws IOException, if something goes wrong while opening the
0886: * file.
0887: */
0888: private void writeSubmitFile(File submit, Job job)
0889: throws IOException {
0890: String basename = m_factory.getName(submit);
0891: if (submit.exists())
0892: m_log.log("default", 0, "WARNING: Overwriting file "
0893: + submit);
0894:
0895: PrintWriter sub = new PrintWriter(new FileWriter(submit));
0896: m_log.log("dax2dag", 3, "create sub file " + submit);
0897:
0898: sub.println("# dax2dag " + m_version);
0899: sub.println("# Condor submit file " + basename);
0900: sub.println("# "
0901: + Currently.iso8601(false, true, false, m_timestamp));
0902: sub.println("#");
0903:
0904: // write uses information into submit file with special prefix
0905: String prefix = "#! ";
0906: sub.println("## The section prefixed with \"" + prefix
0907: + "\" passes information to the late planner.");
0908: sub.println("## BEGIN late planning configuration");
0909: writeUsesSection(prefix, sub, job);
0910: sub.println("## END late planning configuration");
0911: sub.println("#");
0912:
0913: // substitute from template file
0914: for (Iterator i = m_sft.iterator(); i.hasNext();) {
0915: StringBuffer line = new StringBuffer((String) i.next());
0916:
0917: // substitute all @@var@@ occurances in this line
0918: // FIXME: Need to introduce string quoting and escape rules eventually
0919: for (int p1 = line.indexOf("@@"); p1 != -1; p1 = line
0920: .indexOf("@@")) {
0921: int p2 = line.indexOf("@@", p1 + 2) + 2;
0922: if (p2 == -1)
0923: throw new IOException("unclosed @@var@@ element");
0924: String key = line.substring(p1 + 2, p2 - 2);
0925: String value = convertVariable(key, job, basename);
0926: m_log.log("dax2dag", 4, key + " => " + value);
0927: line.replace(p1, p2, value);
0928: }
0929:
0930: sub.println(line.toString());
0931: }
0932:
0933: sub.flush();
0934: sub.close();
0935: }
0936:
0937: /**
0938: * Writes the .in kickstart v2 control file. The config file contains
0939: * semi-planned job information from the generic config file template.
0940: *
0941: * @param config is the location where to create the file at.
0942: * @param submit is the name of the corresponding submit file.
0943: * @param job is the job from which to create the config file.
0944: * @throws IOException, if something goes wrong while opening the
0945: * file.
0946: */
0947: private void writeConfigFile(File config, File submit, Job job)
0948: throws IOException {
0949: if (config.exists())
0950: m_log.log("default", 0, "WARNING: Overwriting file "
0951: + config);
0952:
0953: PrintWriter cfg = new PrintWriter(new FileWriter(config));
0954: m_log.log("dax2dag", 3, "create k2 config file " + config);
0955:
0956: cfg.println("# dax2dag " + m_version);
0957: cfg.println("# kickstart config file "
0958: + m_factory.getName(config));
0959: cfg.println("# "
0960: + Currently.iso8601(false, true, false, m_timestamp));
0961: cfg.println("#");
0962:
0963: // substitute from template file
0964: for (Iterator i = m_cft.iterator(); i.hasNext();) {
0965: StringBuffer line = new StringBuffer((String) i.next());
0966:
0967: // substitute all @@var@@ occurances in this line
0968: // FIXME: Need to introduce string quoting and escape rules eventually
0969: for (int p1 = line.indexOf("@@"); p1 != -1; p1 = line
0970: .indexOf("@@")) {
0971: int p2 = line.indexOf("@@", p1 + 2) + 2;
0972: if (p2 == -1)
0973: throw new IOException("unclosed @@var@@ element");
0974: String key = line.substring(p1 + 2, p2 - 2);
0975: String value = convertVariable(key, job, m_factory
0976: .getName(submit));
0977: m_log.log("dax2dag", 4, key + " => " + value);
0978: line.replace(p1, p2, value);
0979: }
0980:
0981: cfg.println(line.toString());
0982: }
0983:
0984: cfg.flush();
0985: cfg.close();
0986: }
0987:
0988: /**
0989: * Ensures that the submit file references the submit host local
0990: * config file. The function will ensure that there is an <code>input</code>
0991: * configuration inside the submit file, which refers to the configuration
0992: * file.
0993: */
0994: public void checkConfigSubmit() {
0995: String linefeed = System.getProperty("line.separator", "\r\n");
0996: boolean flag = false;
0997:
0998: // exchange (or add) a line "input = @@CONFIG@@" to submit file template
0999: for (ListIterator i = m_sft.listIterator(); i.hasNext();) {
1000: String line = ((String) i.next()).trim();
1001: if (line.length() > 5
1002: && line.substring(0, 5).toLowerCase().equals(
1003: "input")) {
1004: flag = true;
1005: i.set("input = @@CONFIG@@" + linefeed);
1006: i.add("transfer_input = mumbojumbo" + linefeed);
1007: }
1008: if (line.length() > 14
1009: && line.substring(0, 14).toLowerCase().equals(
1010: "transfer_input")) {
1011: i.set("transfer_input = true" + linefeed);
1012: }
1013: }
1014:
1015: if (!flag) {
1016: // sigh, not in the list, so prepend
1017: m_sft.add(0, "input = @@CONFIG@@" + linefeed);
1018: m_sft.add(0, "transfer_input = true" + linefeed);
1019: }
1020: }
1021:
1022: /**
1023: * Callback for the job from section 2 jobs. These jobs are completely
1024: * assembled, but each is passed separately. For each job, the submit
1025: * file needs to be created from the submit file template. Furthermore,
1026: * for each submit file, the kickstart control file needs to be written,
1027: * and some other useful files for the late planner.
1028: *
1029: * @param job is the DAX-style job.
1030: */
1031: public void cb_job(Job job) {
1032: String id = job.getID();
1033: m_log.log("dax2dag", 1, "found job " + id);
1034:
1035: // remember job -- to find parents and children
1036: m_job.add(id);
1037:
1038: // create and write submit file
1039: File submit = null;
1040: try {
1041: String fn = id + ".sub";
1042: submit = m_factory.createFile(fn);
1043: writeSubmitFile(submit, job);
1044: } catch (IOException io) {
1045: System.err.println("ERROR: Unable to write submit file "
1046: + submit + ": " + io.getMessage());
1047: System.exit(2);
1048: }
1049:
1050: // write kickstart.v2 config file
1051: if (m_cft != null) {
1052: // do not use factory method -- we need to go into the same dir!
1053: File config = new File(submit.getParentFile(), id + ".in");
1054: try {
1055: writeConfigFile(config, submit, job);
1056: } catch (IOException io) {
1057: System.err
1058: .println("ERROR: Unable to write config file "
1059: + config + ": " + io.getMessage());
1060: System.exit(2);
1061: }
1062: }
1063:
1064: // append dag file
1065: m_log.log("dax2dag", 3, "appending dag file with job");
1066: if (m_prescript == null) {
1067: // String fn = m_props.getProperty("wf.script.pre");
1068: // if ( fn == null )
1069: throw new RuntimeException(
1070: "ERROR: Unable to determine location of pre-script!");
1071: // m_prescript = new File(fn);
1072: }
1073: if (m_postscript == null) {
1074: // String fn = m_props.getProperty("wf.script.post");
1075: // if ( fn == null )
1076: throw new RuntimeException(
1077: "ERROR: Unable to determine location of post-script!");
1078: // m_postscript = new File(fn);
1079: }
1080:
1081: String basename = m_factory.getName(submit);
1082: String suffix = " " + basename + " ";
1083: try {
1084: suffix += m_wfrc.getCanonicalPath();
1085: } catch (IOException ioe) {
1086: m_log.log("default", 0, "ignoring un-canonicalizable "
1087: + m_wfrc.getAbsolutePath());
1088: }
1089: m_dagfile.println("JOB " + id + " " + basename);
1090: m_dagfile.println("SCRIPT PRE " + id + " " + m_prescript
1091: + suffix);
1092: m_dagfile.println("SCRIPT POST " + id + " " + m_postscript
1093: + " -e $RETURN" + suffix);
1094: if (m_retries > 1)
1095: m_dagfile.println("RETRY " + id + " " + m_retries
1096: + " UNLESS-EXIT 42");
1097: }
1098:
1099: public void cb_parents(String child, java.util.List parents) {
1100: m_log
1101: .log("dax2dag", 1, "relationship " + child + " "
1102: + parents);
1103:
1104: // remember parents -- to find later the initial and final jobsets
1105: if (!m_parent.containsKey(child))
1106: m_parent.put(child, new TreeSet());
1107: ((Set) m_parent.get(child)).addAll(parents);
1108:
1109: // write dependency into dag file
1110: //!! m_dagfile.print( "PARENT" );
1111: for (Iterator i = parents.iterator(); i.hasNext();) {
1112: String parent = (String) i.next();
1113:
1114: if (!m_child.containsKey(parent))
1115: m_child.put(parent, new TreeSet());
1116: ((Set) m_child.get(parent)).add(child);
1117:
1118: //!! m_dagfile.print( " " + parent );
1119: }
1120: //!! m_dagfile.println( " CHILD " + child );
1121: }
1122:
1123: /**
1124: * Attempts to find the primeval ancestor of a given job.
1125: *
1126: * @param job is the job to check for ancestors.
1127: * @return all ancestors found for the given job. A job without ancestors
1128: * is the job itself.
1129: */
1130: private Set find_ancestor(String job) {
1131: Set result = new TreeSet();
1132:
1133: if (m_parent.containsKey(job)) {
1134: for (Iterator i = ((Set) m_parent.get(job)).iterator(); i
1135: .hasNext();)
1136: result.addAll(find_ancestor((String) i.next()));
1137: } else {
1138: result.add(job);
1139: }
1140:
1141: return result;
1142: }
1143:
1144: /**
1145: * Attempts to find the youngest distant children of a given job.
1146: *
1147: * @param job is the job to check for children.
1148: * @return all grandchildren found for a given job. A job without children
1149: * is the job itself.
1150: */
1151: private Set find_children(String job) {
1152: Set result = new TreeSet();
1153:
1154: m_log.log("dax2dag", 2, "looking up children for " + job);
1155: if (m_child.containsKey(job)) {
1156: for (Iterator i = ((Set) m_child.get(job)).iterator(); i
1157: .hasNext();)
1158: result.addAll(find_children((String) i.next()));
1159: } else {
1160: result.add(job);
1161: }
1162:
1163: return result;
1164: }
1165:
1166: /**
1167: * Callback when the parsing of the document is done. This callback
1168: * closes and frees the DAG writer.
1169: */
1170: public void cb_done() {
1171: m_log.log("dax2dag", 2, "parent sets " + m_parent);
1172: m_log.log("dax2dag", 2, "child sets " + m_child);
1173:
1174: // print relationship now, since DAGMan likes ordering
1175: TreeSet temp = new TreeSet(m_parent.keySet());
1176: for (Iterator i = temp.iterator(); i.hasNext();) {
1177: String child = (String) i.next();
1178: TreeSet parents = (TreeSet) m_parent.get(child);
1179: if (parents.size() > 0) {
1180: m_dagfile.print("PARENT ");
1181: for (Iterator j = parents.iterator(); j.hasNext();)
1182: m_dagfile.print((String) j.next() + " ");
1183: m_dagfile.println("CHILD " + child);
1184: }
1185: }
1186: temp = null; // free
1187:
1188: // find all initial jobs
1189: Set initial = new TreeSet();
1190: Set cleanup = new TreeSet();
1191: if (m_job.size() <= 0) {
1192: // 0
1193: m_log.log("app", 0, "ERROR: There are no jobs");
1194: } else {
1195: // many: for each job, go to its original ancestor / youngest child
1196: for (Iterator i = m_job.iterator(); i.hasNext();) {
1197: String job = (String) i.next();
1198: initial.addAll(find_ancestor(job));
1199: cleanup.addAll(find_children(job));
1200: }
1201: }
1202:
1203: // for now, just pretend
1204: m_dagfile.print("# PARENT ID000000 CHILD");
1205: for (Iterator i = initial.iterator(); i.hasNext();)
1206: m_dagfile.print(" " + i.next());
1207: m_dagfile.println();
1208:
1209: m_dagfile.print("# PARENT");
1210: for (Iterator i = cleanup.iterator(); i.hasNext();)
1211: m_dagfile.print(" " + i.next());
1212: m_dagfile.println(" CHILD ID999999");
1213:
1214: // done
1215: m_dagfile.flush();
1216: m_dagfile.close();
1217: }
1218:
1219: public void showFinals() {
1220: m_log.log("default", 0, "created " + m_factory.getCount()
1221: + " structured filenames.");
1222: m_log.log("default", 0, "created " + m_factory.getFlatCount()
1223: + " flat filenames.");
1224: }
1225:
1226: // -----------------------------------------------------------------
1227:
1228: public void showUsage() {
1229: String basename = this .getClass().getName();
1230: int p = basename.lastIndexOf('.');
1231: if (p != -1)
1232: basename = basename.substring(p + 1);
1233:
1234: String linefeed = System.getProperty("line.separator", "\r\n");
1235: System.out
1236: .println("$Id: DAX2DAG.java 289 2007-08-06 21:33:27Z vahi $");
1237: System.out
1238: .println("Usage: "
1239: + basename
1240: + " [-d dir] [-V] [-w wfrc] [-P pre] [-p post] [-l min] [-t sft] dax");
1241: System.out
1242: .println(linefeed
1243: + "Mandatory arguments: "
1244: + linefeed
1245: + " dax name of the DAX file to plan."
1246: + linefeed
1247: + linefeed
1248: + "Optional arguments: "
1249: + linefeed
1250: + " -d|--dir dir directory in which to generate the file, default is \".\""
1251: + linefeed
1252: + " -w|--wfrc rcfile workflow properties location, default is $HOME/.wfrc"
1253: + linefeed
1254: + " -P|--prescript fn name of the late-planning DAGMan prescript file."
1255: + linefeed
1256: + " -p|--postscript fn name of the late-planning DAGMan postscript file."
1257: + linefeed
1258: + " -t|--template sft submit file template to use."
1259: + linefeed
1260: + " default: "
1261: + m_sftFilename
1262: + linefeed
1263: + " -l|--levels min minimum number of levels in directory structure (0..3)."
1264: + linefeed
1265: + " -V|--version print version information and exit."
1266: + linefeed
1267: + " -v|--verbose increases output verbosity."
1268: + linefeed);
1269: System.out
1270: .println("It is recommended to always use the dir option with a sensible argument. The"
1271: + linefeed
1272: + "wfrc properties usually specify the location of the pre- and post-script."
1273: + linefeed
1274: + "The number of subdirectory levels is automatically determined from the number"
1275: + linefeed + "of jobs." + linefeed);
1276: }
1277:
1278: /**
1279: * Creates a set of long options to use.
1280: * @return initialized long options.
1281: */
1282: protected LongOpt[] generateValidOptions() {
1283: LongOpt[] lo = new LongOpt[11];
1284:
1285: lo[0] = new LongOpt("prescript", LongOpt.REQUIRED_ARGUMENT,
1286: null, 'P');
1287: lo[1] = new LongOpt("postscript", LongOpt.REQUIRED_ARGUMENT,
1288: null, 'p');
1289: lo[2] = new LongOpt("dir", LongOpt.REQUIRED_ARGUMENT, null, 'd');
1290: lo[3] = new LongOpt("wfrc", LongOpt.REQUIRED_ARGUMENT, null,
1291: 'w');
1292: lo[4] = new LongOpt("template", LongOpt.REQUIRED_ARGUMENT,
1293: null, 't');
1294: lo[5] = new LongOpt("version", LongOpt.NO_ARGUMENT, null, 'V');
1295: lo[6] = new LongOpt("k.2", LongOpt.REQUIRED_ARGUMENT, null, '2');
1296: lo[7] = new LongOpt("k2", LongOpt.REQUIRED_ARGUMENT, null, '2');
1297: lo[8] = new LongOpt("help", LongOpt.NO_ARGUMENT, null, 'h');
1298: lo[9] = new LongOpt("levels", LongOpt.REQUIRED_ARGUMENT, null,
1299: 'l');
1300: lo[10] = new LongOpt("verbose", LongOpt.NO_ARGUMENT, null, 'v');
1301: return lo;
1302: }
1303:
1304: /**
1305: * Point of entry to convert the DAX into DAG with helper and submit files.
1306: * @param args are the commandline arguments.
1307: */
1308: static public void main(String[] args) {
1309: DAX2DAG me = new DAX2DAG();
1310: if (args.length == 0) {
1311: me.showUsage();
1312: return;
1313: }
1314:
1315: Getopt opts = new Getopt("DAX2DAG", args, "2:P:Vd:hp:t:w:v", me
1316: .generateValidOptions());
1317: opts.setOpterr(false);
1318: boolean sftIsSet = false;
1319: boolean cftIsSet = false;
1320: String arg = null;
1321: int option = 0;
1322: while ((option = opts.getopt()) != -1) {
1323: switch (option) {
1324: case '2':
1325: if ((arg = opts.getOptarg()) != null) {
1326: File cft = new File(arg);
1327: if (!cft.exists() || !cft.canRead()) {
1328: System.err
1329: .println("ERROR: Unable to read config template "
1330: + cft);
1331: System.exit(1);
1332: }
1333: me.setConfigFileTemplate(cft);
1334: cftIsSet = true;
1335: }
1336: break;
1337:
1338: case 'P':
1339: if ((arg = opts.getOptarg()) != null)
1340: me.setPrescript(arg);
1341: break;
1342:
1343: case 'V':
1344: System.out
1345: .println("$Id: DAX2DAG.java 289 2007-08-06 21:33:27Z vahi $");
1346: return;
1347:
1348: case 'd':
1349: if ((arg = opts.getOptarg()) != null
1350: && arg.length() > 0)
1351: me.setDirectory(arg);
1352: break;
1353:
1354: case 'l':
1355: if ((arg = opts.getOptarg()) != null
1356: && arg.length() > 0) {
1357: int level;
1358: try {
1359: level = Integer.parseInt(arg);
1360: } catch (NumberFormatException nfe) {
1361: level = -1;
1362: }
1363: if (level >= 0 && level <= 3)
1364: me.setMinimumLevel(level);
1365: else
1366: System.out
1367: .println("Ignoring illegal minimum level of "
1368: + level);
1369: }
1370: break;
1371:
1372: case 'p':
1373: if ((arg = opts.getOptarg()) != null)
1374: me.setPostscript(arg);
1375: break;
1376:
1377: case 't':
1378: if ((arg = opts.getOptarg()) != null) {
1379: File sft = new File(arg);
1380: if (!sft.exists() || !sft.canRead()) {
1381: System.err
1382: .println("ERROR: Cannot read template "
1383: + sft);
1384: System.exit(1);
1385: }
1386: me.setSubmitFileTemplate(sft);
1387: sftIsSet = true;
1388: }
1389: break;
1390:
1391: case 'w':
1392: if ((arg = opts.getOptarg()) != null)
1393: me.setWorkflowPropertyFile(new File(arg));
1394: break;
1395:
1396: case 'v':
1397: me.increaseVerbosity();
1398: break;
1399:
1400: case 'h':
1401: default:
1402: me.showUsage();
1403: return;
1404: }
1405: }
1406:
1407: // post CLI args checks
1408: if (!sftIsSet) {
1409: File sft = new File(me.m_sftFilename);
1410: if (!sft.exists() || !sft.canRead()) {
1411: System.err
1412: .println("ERROR: No valid template file found. Please use -t to point\n"
1413: + "to a valid and accessible submit file template location.");
1414: System.exit(1);
1415: }
1416: me.setSubmitFileTemplate(sft);
1417: sftIsSet = true;
1418: } else {
1419: Logging.instance().log("default", 0, "starting");
1420: }
1421:
1422: // finalize dangling properties
1423: try {
1424: me.finalizeProperties();
1425: } catch (RuntimeException rte) {
1426: System.err.println(rte.getMessage());
1427: System.err
1428: .println("Likely cause: Are your wfrc properties accessible?");
1429: System.exit(1);
1430: }
1431:
1432: // kickstart v2?
1433: if (cftIsSet)
1434: me.checkConfigSubmit();
1435:
1436: if (opts.getOptind() != args.length - 1) {
1437: System.err
1438: .println("ERROR: You need to specify a DAX file as input.");
1439: System.exit(1);
1440: } else {
1441: File dax = new File(args[opts.getOptind()]);
1442: if (dax.exists() && dax.canRead()) {
1443: me.setTimestamp(new Date(dax.lastModified()));
1444: } else {
1445: System.err.println("ERROR: Unable to read dax file "
1446: + dax);
1447: System.exit(1);
1448: }
1449: }
1450:
1451: DAXParser parser = new DAXParser(System
1452: .getProperty("vds.schema.dax"));
1453: parser.setCallback(me);
1454: if (!parser.parse(args[opts.getOptind()]))
1455: System.exit(42);
1456:
1457: me.showFinals();
1458: }
1459: }
|