001: /*
002: * This file or a portion of this file is licensed under the terms of
003: * the Globus Toolkit Public License, found in file GTPL, or at
004: * http://www.globus.org/toolkit/download/license.html. This notice must
005: * appear in redistributions of this file, with or without modification.
006: *
007: * Redistributions of this Software, with or without modification, must
008: * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
009: * some other similar material which is provided with the Software (if
010: * any).
011: *
012: * Copyright 1999-2004 University of Chicago and The University of
013: * Southern California. All rights reserved.
014: */
015:
016: package org.griphyn.vdl.router;
017:
018: import org.griphyn.vdl.classes.*;
019:
020: /**
* Creates a Diamond DAG example structure in memory using VDL classes
022: * and the kanonical executable for GriPhyN aka keg.
023: *
024: * @author Jens-S. Vöckler
025: * @version $Revision: 50 $
026: */
027: public class CreateDiamondKeg {
028:
029: /**
030: * create a 4 node diamond DAG as in-memory data structures employing
031: * the VDL classes. This is a test module for the router, until the
032: * input (SAX) becomes available.
033: *
034: * @return a list of Transformations and Derivations, encapsulated
035: * as Definitions.
036: */
037: public static Definitions create(boolean condor) {
038: // create result vector
039: Definitions result = new Definitions("diamond", "1.0");
040:
041: Profile executable = new Profile("hints", "pfnHint", new Text(
042: condor ? "keg.condor" : "keg"));
043: Profile universe = new Profile("hints", "pfnUniverse",
044: new Text(condor ? "standard" : "vanilla"));
045: try {
046: // create "generate" transformation
047: Transformation t1 = new Transformation("generate");
048: t1.addProfile(executable);
049: t1.addProfile(universe);
050: t1.addDeclare(new Declare("a", Value.SCALAR, LFN.OUTPUT));
051: Argument t1a1 = new Argument();
052: t1a1.addLeaf(new Text("-a generate -o "));
053: t1a1.addLeaf(new Use("a", LFN.OUTPUT));
054: t1.addArgument(t1a1);
055: result.addDefinition(t1);
056:
057: // create "findrange" transformation
058: Transformation t2 = new Transformation("findrange");
059: t2.addProfile(executable);
060: t2.addProfile(universe);
061: t2.addDeclare(new Declare("a", Value.SCALAR, LFN.INPUT));
062: t2.addDeclare(new Declare("b", Value.SCALAR, LFN.OUTPUT));
063: t2
064: .addDeclare(new Declare("p", new Scalar(new Text(
065: "0.0"))));
066: t2
067: .addArgument(new Argument(null, new Text(
068: "-a findrange")));
069: Argument t2a1 = new Argument(null, new Text(" -p "));
070: t2a1.addLeaf(new Use("p"));
071: t2.addArgument(t2a1);
072: Argument t2a2 = new Argument(null, new Text(" -i "));
073: t2a2.addLeaf(new Use("a", LFN.INPUT));
074: t2.addArgument(t2a2);
075: Argument t2a3 = new Argument(null, new Text(" -o "));
076: t2a3.addLeaf(new Use("b", LFN.OUTPUT));
077: t2.addArgument(t2a3);
078: result.addDefinition(t2);
079:
080: // create "analyze" transformation
081: Transformation t3 = new Transformation("analyze");
082: t3.addProfile(executable);
083: t3.addProfile(universe);
084: t3.addArgument(new Argument(null, new Text("-a analyze")));
085: Argument t3a1 = new Argument("files", new Text(" -i "));
086: t3a1.addLeaf(new Use("a", "", " ", ""));
087: t3.addArgument(t3a1);
088: Argument t3a2 = new Argument(null, new Text(" -o "));
089: t3a2.addLeaf(new Use("c", LFN.OUTPUT));
090: t3.addArgument(t3a2);
091: t3.addDeclare(new Declare("a", Value.LIST, LFN.INPUT));
092: t3.addDeclare(new Declare("c", Value.SCALAR, LFN.OUTPUT));
093: result.addDefinition(t3);
094:
095: // create "top" node derivation of "generate"
096: Derivation d1 = new Derivation("top", "generate", new Pass(
097: "a", new Scalar(new LFN("f.a", LFN.OUTPUT))));
098: result.addDefinition(d1);
099:
100: // create "left" node derivation of "findrange"
101: Derivation d2 = new Derivation("left", "findrange");
102: d2.addPass(new Pass("b", new Scalar(new LFN("f.b",
103: LFN.OUTPUT))));
104: d2.addPass(new Pass("a", new Scalar(new LFN("f.a",
105: LFN.INPUT))));
106: d2.addPass(new Pass("p", new Scalar(new Text("0.5"))));
107: result.addDefinition(d2);
108:
109: // create "right" node derivation of "findrange"
110: Derivation d3 = new Derivation("right", "findrange");
111: d3.addPass(new Pass("a", new Scalar(new LFN("f.a",
112: LFN.INPUT))));
113: d3.addPass(new Pass("b", new Scalar(new LFN("f.c",
114: LFN.OUTPUT))));
115: d3.addPass(new Pass("p", new Scalar(new Text("1.0"))));
116: result.addDefinition(d3);
117:
118: // create "bottom" node derivation of "analyze"
119: Derivation d4 = new Derivation("bottom", "analyze");
120: List d4_list1 = new List();
121: d4_list1.addScalar(new Scalar(new LFN("f.b", LFN.INPUT)));
122: d4_list1.addScalar(new Scalar(new LFN("f.c", LFN.INPUT)));
123: d4.addPass(new Pass("a", d4_list1));
124: d4.addPass(new Pass("c", new Scalar(new LFN("f.d",
125: LFN.OUTPUT))));
126: result.addDefinition(d4);
127: } catch (IllegalArgumentException iae) {
128: System.err.println(iae.getMessage());
129: System.exit(1);
130: }
131:
132: // finally
133: return result;
134: }
135: }
|