0001: /*
0002: * This file or a portion of this file is licensed under the terms of
0003: * the Globus Toolkit Public License, found in file GTPL, or at
0004: * http://www.globus.org/toolkit/download/license.html. This notice must
0005: * appear in redistributions of this file, with or without modification.
0006: *
0007: * Redistributions of this Software, with or without modification, must
0008: * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
0009: * some other similar material which is provided with the Software (if
0010: * any).
0011: *
0012: * Copyright 1999-2004 University of Chicago and The University of
0013: * Southern California. All rights reserved.
0014: */
0015: package org.griphyn.vdl.router;
0016:
0017: // java.util.List clashes with org.griphyn.vdl.classes.List
0018: import java.io.*;
0019: import java.util.*;
0020: import java.sql.SQLException;
0021: import org.griphyn.common.util.Separator;
0022: import org.griphyn.vdl.classes.*;
0023: import org.griphyn.vdl.dax.*;
0024: import org.griphyn.vdl.util.*;
0025: import org.griphyn.vdl.router.*;
0026: import org.griphyn.vdl.dbschema.*;
0027:
0028: /**
0029: * This class traverses the dependency tree. The tree is built by
0030: * {@link Derivation} objects. The linkage is data-flow oriented.
0031: * If a derivation requires one or more input files, all derivations
0032: * that produce these files will be considered etc. Thus, a build-style
0033: * directed acyclic graph (DAG) is formed.
0034: *
0035: * @author Jens-S. Vöckler
0036: * @author Yong Zhao
0037: * @version $Revision: 50 $
0038: */
0039: public class Route {
/**
 * Default maximum depth of recursion into the dependency graph before
 * the circuit breaker hits and the routing is aborted. Guards against
 * cyclic or pathologically deep derivation chains.
 */
public static final int MAXIMUM_DEPTH = 256;
0045:
0046: /**
0047: * This is a nested class to obscure it from the outside world.
0048: * It will maintain a stack of database manager backends that
0049: * are used to resolve compound transformations. For this purpose,
0050: * it exposes the required backend database searches, and handles
0051: * stack descend.<p>
0052: * The stack enforces that any schemas put onto it implement the
0053: * <code>VDC</code> interface.
0054: * @see org.griphyn.vdl.dbschema.VDC
0055: */
0056: class DatabaseSchemaStack {
0057: /**
0058: * Stores a reference to the database schema managers.
0059: */
0060: public ArrayList m_stack;
0061:
0062: /**
0063: * C'tor: Creates a new stack instance that contains the bottom-most
0064: * database backend.
0065: * @param schema is the final database backend.
0066: */
0067: public DatabaseSchemaStack(DatabaseSchema schema) {
0068: this .m_stack = new ArrayList();
0069: if (!(schema instanceof VDC))
0070: throw new RuntimeException(
0071: "illegal database schema: Not a VDC");
0072: this .m_stack.add(new StackElement(schema));
0073: }
0074:
0075: /**
0076: * Pushes a new definition onto the top of the stack.
0077: * @param nextSchema is a new database schema handler, usually an
0078: * in-memory one for all except the root element.
0079: * @exception NullPointerException, if the argument passed is null.
0080: */
0081: public void push(DatabaseSchema nextSchema)
0082: throws NullPointerException {
0083: if (nextSchema == null)
0084: throw new NullPointerException();
0085: if (!(nextSchema instanceof VDC))
0086: throw new RuntimeException(
0087: "illegal database schema: Not a VDC");
0088:
0089: Logging.instance().log("stack", 2,
0090: "pushing dbmstack[" + this .m_stack.size() + ']');
0091: this .m_stack.add(new StackElement(nextSchema));
0092: }
0093:
0094: /**
0095: * Removes the TOS, thus making the next-lower definition TOS.
0096: * @return the old top-of-stack.
0097: * @throws EmptyStackException if the stack did not have any elements.
0098: */
0099: public DatabaseSchema pop() {
0100: int size = this .m_stack.size();
0101: if (size == 0)
0102: throw new EmptyStackException();
0103: Logging.instance().log("stack", 2,
0104: "popping dbmstack[" + (size - 1) + ']');
0105:
0106: StackElement item = (StackElement) this .m_stack
0107: .remove(size - 1);
0108: return item.getDatabaseSchema();
0109: }
0110:
0111: /**
0112: * Accessor predicate: Determines if the stack contains any elements.
0113: * @return true, if the stack is empty; false otherwise.
0114: */
0115: public boolean isEmpty() {
0116: return this .m_stack.isEmpty();
0117: }
0118:
0119: /**
0120: * Accessor: Determines the number of database managers on the stack.
0121: * @return number of elements, or 0 for an empty stack.
0122: */
0123: public int size() {
0124: return this .m_stack.size();
0125: }
0126:
0127: /**
0128: * Descends down the stack of database manager, and for each
0129: * it will determine the list of elegible logical filenames. The
0130: * search only searches for Derivations and output filenames. The
0131: * name to search for is the variable.<p>
0132: * FIXME: There might be a bug/feature with default args in TRs!
0133: *
0134: * @param filename is the logical filename to search for as output file.
0135: * @return a list with all derivations that have this output file.
0136: */
0137: public java.util.List derivationsWithOutput(String filename) {
0138: java.util.List result = new ArrayList();
0139: Logging.instance().log("trace", 2,
0140: "derivationsWithOutput(" + filename + ")");
0141: try {
0142: int level = m_stack.size();
0143: ListIterator i = m_stack.listIterator(level);
0144: boolean flag = true;
0145: while (i.hasPrevious() && flag) {
0146: StackElement element = (StackElement) i.previous();
0147:
0148: // check for existence of LFN
0149: Cache cache = element.getLFNCache();
0150: Object item = cache == null ? null : cache
0151: .get(filename);
0152: if (item == null) {
0153: // unknown or expired, check database
0154: Logging.instance().log(
0155: "cache",
0156: 0,
0157: "[" + level + "] LFN cache MISS for "
0158: + filename);
0159: VDC vdc = (VDC) element.getDatabaseSchema();
0160: java.util.List list = vdc.searchFilename(
0161: filename, LFN.OUTPUT);
0162: if (list != null && !list.isEmpty()) {
0163: result.addAll(list);
0164: if (cache != null)
0165: cache.set(filename, result);
0166: flag = false;
0167: }
0168: } else {
0169: // cache hit
0170: Logging.instance().log(
0171: "cache",
0172: 1,
0173: "[" + level + "] LFN cache HIT for "
0174: + filename);
0175: result.addAll((java.util.List) item);
0176: flag = false;
0177: }
0178: level--;
0179: }
0180:
0181: if (flag && result.isEmpty()) {
0182: // negative caching on the way out
0183: if (i.hasNext()) {
0184: StackElement element = (StackElement) i.next();
0185: Cache cache = element.getLFNCache();
0186: if (cache != null)
0187: cache.set(filename, result);
0188: }
0189: }
0190:
0191: } catch (Exception e) {
0192: Logging.instance().log("default", 0,
0193: "caught " + e + ", aborting");
0194: throw new RuntimeException(e.getMessage());
0195: }
0196:
0197: return result;
0198: }
0199:
0200: private String genKey(String usesspace, String uses,
0201: String min, String max) {
0202: StringBuffer result = new StringBuffer(32);
0203: if (usesspace != null) {
0204: result.append(usesspace);
0205: result.append(Separator.NAMESPACE);
0206: }
0207: result.append(uses);
0208: if (min != null || max != null) {
0209: result.append(Separator.NAME);
0210: if (min != null)
0211: result.append(min);
0212: result.append(Separator.VERSION);
0213: if (max != null)
0214: result.append(max);
0215: }
0216: return result.toString();
0217: }
0218:
0219: /**
0220: * Obtain all transformations that exactly match the given secondary
0221: * key triple and version range. Thus, we have to weed out wildcard
0222: * matches with exact/range matches, before we can add transformations.
0223: *
0224: * @param usesspace is the namespace, nullable, not wildcardable
0225: * @param uses is the name, which must be given
0226: * @param min is the version minimum, nullable, not wildcardable
0227: * @param max is the version maximum, nullable, not wildcardable
0228: * @return a list of all matching transformations
0229: */
0230: public java.util.List searchForTR(String usesspace,
0231: String uses, String min, String max) {
0232: java.util.List result = new ArrayList();
0233: Logging.instance().log(
0234: "trace",
0235: 2,
0236: "searchForTR "
0237: + (usesspace == null ? "null" : usesspace)
0238: + Separator.NAMESPACE + uses
0239: + Separator.NAME
0240: + (min == null ? "null" : min)
0241: + Separator.VERSION
0242: + (max == null ? "null" : max));
0243: try {
0244: String key = genKey(usesspace, uses, min, max);
0245: boolean flag = true;
0246: int level = m_stack.size();
0247: ListIterator i = m_stack.listIterator(level);
0248: while (i.hasPrevious() && flag) {
0249: StackElement element = (StackElement) i.previous();
0250:
0251: // check for existence of LFN
0252: Cache cache = element.getTRCache();
0253: Object item = cache == null ? null : cache.get(key);
0254: if (item == null) {
0255: // unknown or expired, check database
0256: Logging.instance().log(
0257: "cache",
0258: 0,
0259: "[" + level + "] TR cache MISS for "
0260: + key);
0261:
0262: VDC vdc = (VDC) element.getDatabaseSchema();
0263: java.util.List intermediary = vdc
0264: .searchDefinition(
0265: /* usesspace==null ? "" : */usesspace,
0266: uses, null,
0267: Definition.TRANSFORMATION);
0268: // postcondition: contains TR, joker version, otherwise matches
0269:
0270: // only add those that exactly match our requirements
0271: for (Iterator j = intermediary.iterator(); j
0272: .hasNext();) {
0273: Definition d = (Definition) j.next();
0274: Logging.instance().log("route", 2,
0275: "looking at TR " + d.identify());
0276: if (Route.matchWithNull(usesspace, d
0277: .getNamespace())
0278: && Route.matchWithNull(uses, d
0279: .getName())
0280: && Derivation.match(min, max, d
0281: .getVersion()))
0282: result.add(d);
0283: }
0284: if (!result.isEmpty()) {
0285: if (cache != null)
0286: cache.set(key, result);
0287: flag = false;
0288: }
0289: } else {
0290: // cache hit
0291: result.addAll((java.util.List) item);
0292: Logging.instance().log(
0293: "cache",
0294: 1,
0295: "[" + level + "] TR cache HIT for "
0296: + key);
0297: flag = false;
0298: }
0299: level--;
0300: }
0301:
0302: if (flag && result.isEmpty()) {
0303: // negative caching on the way out
0304: if (i.hasNext()) {
0305: StackElement element = (StackElement) i.next();
0306: Cache cache = element.getTRCache();
0307: if (cache != null)
0308: cache.set(key, result);
0309: }
0310: }
0311:
0312: } catch (Exception e) {
0313: Logging.instance().log("default", 0,
0314: "caught " + e + ", aborting");
0315: throw new RuntimeException(e.getMessage());
0316: }
0317:
0318: return result;
0319: }
0320:
0321: /**
0322: * Obtain all derivations that wildcard match the given secondary
0323: * key triple.
0324: *
0325: * @param namespace is the namespace, nullable, not wildcardable
0326: * @param name is the name, which must be given
0327: * @param version is the version, nullable, not wildcardable
0328: * @return a list of all matching derivations
0329: */
0330: public java.util.List searchForDV(String namespace,
0331: String name, String version) {
0332: java.util.List result = new ArrayList();
0333: Logging.instance().log(
0334: "trace",
0335: 2,
0336: "searchForDV "
0337: + (namespace == null ? "null" : namespace)
0338: + Separator.NAMESPACE + name
0339: + Separator.NAME
0340: + (version == null ? "null" : version));
0341: try {
0342: for (ListIterator i = m_stack.listIterator(m_stack
0343: .size()); i.hasPrevious();) {
0344: StackElement element = (StackElement) i.previous();
0345: VDC vdc = (VDC) element.getDatabaseSchema();
0346: java.util.List list = vdc.searchDefinition(
0347: namespace, name, version,
0348: Definition.DERIVATION);
0349: if (list != null)
0350: result.addAll(list);
0351: }
0352: } catch (Exception e) {
0353: Logging.instance().log("default", 0,
0354: "caught " + e + ", aborting");
0355: throw new RuntimeException(e.getMessage());
0356: }
0357: return result;
0358: }
0359: };
0360:
/**
 * Stores a reference to the underlying data as top-of-stack.
 * At the bottom of the stack, queries to the database backend
 * are generated. At higher levels in the stack, the stack-existing
 * definitions take precedence.
 */
private DatabaseSchemaStack m_stack;
0368:
/**
 * Stores a reference to the class that manages arbitration in case
 * multiple derivations produce the same file. Defaults to
 * PreferNamespace; replaceable via the arbiter accessors.
 */
private Arbiter m_arbiter;
0374:
/**
 * The constructor initializes the stack of definitions with the
 * default database manager backend, which will serve the various
 * search requests. Conflict arbitration defaults to
 * <code>PreferNamespace</code>.
 *
 * @param backend is the backend database manager.
 */
public Route(DatabaseSchema backend) {
    // bottom of the schema stack is the persistent backend
    m_stack = new DatabaseSchemaStack(backend);
    // default conflict resolution strategy
    m_arbiter = new PreferNamespace();
}
0386:
/**
 * The constructor initializes the stack of definitions with the
 * default database manager backend. Additionally, it will push an
 * in-memory schema constructed from the definitions argument onto the
 * stack, so the supplied definitions take precedence over the backend.
 *
 * @param backend is the backend database manager.
 * @param defs is the root of an in-memory database fragment.
 * @throws NullPointerException if the definitions argument is null.
 * @throws RuntimeException if constructing the in-memory schema fails;
 * the original failure is preserved as the cause.
 * @see org.griphyn.vdl.dbschema.InMemorySchema
 * @see org.griphyn.vdl.dbschema.SingleFileSchema
 */
public Route(DatabaseSchema backend, Definitions defs)
    throws NullPointerException {
    if (defs == null)
        throw new NullPointerException();
    this.m_stack = new DatabaseSchemaStack(backend);
    try {
        this.m_stack.push(new InMemorySchema(defs)); // was: SingleFile(defs)
    } catch (Exception e) {
        // chain the cause instead of discarding it
        throw new RuntimeException(e.getMessage(), e);
    }
    this.m_arbiter = new PreferNamespace();
}
0411:
0412: /**
0413: * Creates a new top of stack with the given definitions. Effectively
0414: * a push() operation. The definitions fragment will be converted
0415: * into an in-memory database.
0416: *
0417: * @param defs is the root an in-memory database fragment.
0418: * @exception NullPointerException, if the argument is null.
0419: * @see org.griphyn.vdl.dbschema.InMemorySchema
0420: * @see org.griphyn.vdl.dbschema.SingleFileSchema
0421: */
0422: public void addDefinitions(Definitions defs)
0423: throws NullPointerException {
0424: if (defs == null)
0425: throw new NullPointerException();
0426: else {
0427: try {
0428: this .m_stack.push(new InMemorySchema(defs)); // was: SingleFile
0429: } catch (Exception e) {
0430: throw new RuntimeException(e.getMessage());
0431: }
0432: }
0433: }
0434:
0435: /**
0436: * Removes the current top of stack. Effective a pop() operation.
0437: * @return the top of stack as it was before the pop().
0438: */
0439: public DatabaseSchema removeDefinitions() {
0440: return this .m_stack.pop();
0441: }
0442:
/**
 * Stores the maximum depth to which the router will descend into the
 * graph before aborting. Defaults to {@link #MAXIMUM_DEPTH}.
 */
private int m_maxDepth = MAXIMUM_DEPTH;
0447:
0448: /**
0449: * Allows to limit the maximum depth that the router is willing to go.
0450: *
0451: * @param depth is the maximum depth. Use Integer.MAX_VALUE for unlimited.
0452: * @see #getMaximumDepth()
0453: */
0454: public void setMaximumDepth(int depth) {
0455: this .m_maxDepth = depth;
0456: }
0457:
0458: /**
0459: * Queries the current maximum depths that the router is willing to go.
0460: *
0461: * @return the current maximum depth, or Integer.MAX_VALUE for unlimited.
0462: * @see #setMaximumDepth( int )
0463: */
0464: public int getMaximumDepth() {
0465: return this .m_maxDepth;
0466: }
0467:
0468: /**
0469: * Queries the current arbitrarion instance.
0470: *
0471: * @return a reference to the current arbiter for conflict resolution.
0472: */
0473: public Arbiter getArbiter() {
0474: return this .m_arbiter;
0475: }
0476:
0477: /**
0478: * Replaces the current arbiter with a new instance.
0479: *
0480: * @param arbiter is a new instance of a conflict resolution class.
0481: */
0482: public void setArbiter(Arbiter arbiter) {
0483: this .m_arbiter = arbiter;
0484: }
0485:
0486: // ================================================================
0487:
0488: /**
0489: * DAX bridge: Helps flattening a VDL leaf tree into a DAX leaf list.
0490: *
0491: * @param s is the VDL scalar to flatten
0492: * @param v is a reference to a list which will be appended with DAX leaves
0493: */
0494: private void appendScalarDaxLeaf(Scalar s, java.util.List v) {
0495: for (Iterator i = s.iterateLeaf(); i.hasNext();) {
0496: org.griphyn.vdl.classes.Leaf leaf = (org.griphyn.vdl.classes.Leaf) i
0497: .next();
0498: if (leaf instanceof Text) {
0499: // do PseudoText
0500: v.add(new PseudoText(((Text) leaf).getContent()));
0501: } else if (leaf instanceof LFN) {
0502: // do Filename
0503: LFN lfn = (LFN) leaf;
0504: v.add(new Filename(lfn.getFilename(), lfn.getLink(),
0505: lfn.getTemporary()));
0506: } else {
0507: // this should not happen
0508: Logging
0509: .instance()
0510: .log(
0511: "default",
0512: 0,
0513: "WARNING: "
0514: + "Illegal argument type in scalar or list");
0515: }
0516: }
0517: }
0518:
0519: /**
0520: * DAX Bridge: This methods translates a <code>Value</code> tree from
0521: * a VDL spec into a flattened <code>Leaf</code> tree for a DAX job
0522: * specification.
0523: *
0524: * @param v is a VDL <code>Value</code> tree. This argument must not be null!
0525: * @param prefix string to be used when rendering a <code>List</code>.
0526: * @param separator string to be used when rendering a <code>List</code>.
0527: * @param suffix string to be used when rendering a <code>List</code>.
0528: * @return a list of <code>Leaf</code> values
0529: *
0530: * @see org.griphyn.vdl.classes.Value
0531: * @see org.griphyn.vdl.dax.Leaf
0532: */
0533: private java.util.List flattenScalar(
0534: org.griphyn.vdl.classes.Value v, String prefix,
0535: String separator, String suffix) {
0536: java.util.List result = new ArrayList();
0537:
0538: // traverse Value tree
0539: switch (v.getContainerType()) {
0540: // this is a regular SCALAR
0541: case org.griphyn.vdl.classes.Value.SCALAR:
0542: appendScalarDaxLeaf((Scalar) v, result);
0543: break;
0544:
0545: // this is a regular LIST
0546: case org.griphyn.vdl.classes.Value.LIST:
0547: org.griphyn.vdl.classes.List list = (org.griphyn.vdl.classes.List) v;
0548: if (prefix != null && prefix.length() > 0)
0549: result.add(new PseudoText(prefix));
0550: for (Iterator i = list.iterateScalar(); i.hasNext();) {
0551: appendScalarDaxLeaf((Scalar) i.next(), result);
0552: if (separator != null && separator.length() > 0
0553: && i.hasNext())
0554: result.add(new PseudoText(separator));
0555: }
0556: if (suffix != null && suffix.length() > 0)
0557: result.add(new PseudoText(suffix));
0558: break;
0559:
0560: default:
0561: // this should not happen
0562: Logging
0563: .instance()
0564: .log("default", 0,
0565: "WARNING: An actual argument is neither SCALAR nor LIST");
0566: break;
0567: }
0568:
0569: return result;
0570: }
0571:
0572: /**
0573: * DAX bridge: Converts a leaf into a mixed form of either the textual
0574: * string, or the value of the bound variable, as passed from a DV
0575: * into a TR. The bound variable values will be flattened out.
0576: *
0577: * @param old is the VDLx leaf class to convert
0578: * @param arguments is a map of all declared variables
0579: * @return a list of DAX leaves, flattened out into one level.
0580: * @throws UndeclaredVariableException if an unknown bound variable is used.
0581: * @throws IllegalArgumentException if the Leaf class is invalid.
0582: */
0583: private java.util.List convertLeaf(
0584: org.griphyn.vdl.classes.Leaf old, Map arguments)
0585: throws UndeclaredVariableException,
0586: IllegalArgumentException {
0587: java.util.List result = new ArrayList();
0588: if (old instanceof org.griphyn.vdl.classes.Text) {
0589: // copy Text into PseudoText (simple)
0590: org.griphyn.vdl.classes.Text t = (org.griphyn.vdl.classes.Text) old;
0591: result.add(new org.griphyn.vdl.dax.PseudoText(t
0592: .getContent()));
0593: } else if (old instanceof org.griphyn.vdl.classes.Use) {
0594: // replace Use element with flattened out argument
0595: org.griphyn.vdl.classes.Use u = (org.griphyn.vdl.classes.Use) old;
0596: if (!arguments.containsKey(u.getName())) {
0597: // FIXME: Another late binding error
0598: throw new UndeclaredVariableException(
0599: "bound variable \"" + u.getName()
0600: + "\" is not known");
0601: }
0602: // java.util.List flat =
0603: // flattenScalar( (org.griphyn.vdl.classes.Value) arguments.get(u.getName()),
0604: // u.getPrefix(), u.getSeparator(), u.getSuffix() );
0605: // for ( Iterator e=flat.iterator(); e.hasNext(); ) {
0606: // result.add( (org.griphyn.vdl.dax.Leaf) e.next() );
0607: // }
0608: result.addAll(flattenScalar(
0609: (org.griphyn.vdl.classes.Value) arguments.get(u
0610: .getName()), u.getPrefix(), u
0611: .getSeparator(), u.getSuffix()));
0612: } else if (old instanceof org.griphyn.vdl.classes.LFN) {
0613: // I suppose we could allow LFNs here after all
0614: throw new IllegalArgumentException(
0615: "LFN is an illegal leaf node");
0616: } else {
0617: throw new IllegalArgumentException(
0618: "unknown class for a leaf node");
0619: }
0620:
0621: return result;
0622: }
0623:
0624: /**
0625: * Finds first TR that matches a DV or Call. If no match is found, an
0626: * exception will be throws. If more than one TR is found, the first
0627: * match will be taken - this may not always be the best match.<p>
0628: *
0629: * @param usesspace is the namespace to look for, or null for any.
0630: * @param uses names the TR. It must not be null.
0631: * @param minInclude is the minimum inclusive version of the range, or null.
0632: * @param maxInclude is the maximum inclusive version of the range, or null.
0633: * @return the matching <code>Transformation</code>.
0634: * @exception TransformationNotFoundException will be thrown, if no
0635: * match is found
0636: * @exception NullPointerException will be thrown, if uses is null.
0637: */
0638: private Transformation findMatchingTransformation(String usesspace,
0639: String uses, String minInclude, String maxInclude)
0640: throws TransformationNotFoundException,
0641: NullPointerException {
0642: // just in case...
0643: if (uses == null)
0644: throw new NullPointerException("Must name a TR");
0645:
0646: // what are we looking for
0647: String trid = (usesspace == null ? "*" : usesspace)
0648: + Separator.NAMESPACE + (uses == null ? "*" : uses)
0649: + Separator.NAME
0650: + (minInclude == null ? "*" : minInclude)
0651: + Separator.VERSION
0652: + (maxInclude == null ? "*" : maxInclude);
0653: Logging.instance().log("route", 0, "looking for TR " + trid);
0654:
0655: java.util.List result = this .m_stack.searchForTR(usesspace,
0656: uses, minInclude, maxInclude);
0657: // POSTCONDITION: result contains matching TR
0658:
0659: // select one TR from result set
0660: int size = result.size();
0661: Logging.instance().log(
0662: "route",
0663: 0,
0664: size + " matching TR" + (size == 1 ? "" : "s")
0665: + " found");
0666: if (size == 0) {
0667: // nothing found that matches
0668: throw new TransformationNotFoundException(
0669: "Aborting route: No matches found for TR " + trid);
0670: }
0671:
0672: return (Transformation) result.get(0); // may throw ClassCastException?
0673: }
0674:
0675: /**
0676: * DAX bridge: Matches <code>Declare</code> (must be complete) with
0677: * <code>Pass</code> (can skip default args). Use defaults from
0678: * <code>Use</code>. Flatten out results into a map which unfolds
0679: * into a vector of DAX leaves, and which are fit for a <code>Job</code>.
0680: *
0681: * @param dv is an implementation of <code>HasPass</code>. Implementing
0682: * classes include <code>Derivation</code> and <code>Call</code>.
0683: * @param tr is the <code>Transformation</code> which supplies the
0684: * formal arugments.
0685: * @return a map from each actual argument or un-overwritten
0686: * formal argument default from the argument name to its <code>Value</code>.
0687: *
0688: * @exception MissingArgumentException is thrown, if a formal argument
0689: * was declared without a default, but no actual argument was supplied
0690: * for it.
0691: */
0692: private Map mapActualToFormal(HasPass dv, Transformation tr) {
0693: Map arguments = new HashMap();
0694: for (Iterator e = tr.iterateDeclare(); e.hasNext();) {
0695: Declare farg = (Declare) e.next();
0696: String name = farg.getName();
0697: Pass aarg = (Pass) dv.getPass(name);
0698:
0699: // FIXME: late type checking performed here. We must have some
0700: // value to use in the job specification:
0701: // o the formal argument default value from TR, or
0702: // o the actual argument supplied value
0703: if (aarg == null && farg.getValue() == null)
0704: throw new MissingArgumentException("argument \"" + name
0705: + "\" has no value");
0706:
0707: // Save the Value structure for now. We can only flatten it,
0708: // once we have the rendering.
0709: if (aarg == null) {
0710: // use the default value from the formal argument
0711: arguments.put(name, farg.getValue());
0712: Logging.instance().log(
0713: "route",
0714: 2,
0715: "adding default value for " + name + '='
0716: + farg.getValue().toString());
0717: } else {
0718: // use the supplied actual argument.
0719: arguments.put(name, aarg.getValue());
0720: Logging.instance().log(
0721: "route",
0722: 2,
0723: "actual argument taken for " + name + '='
0724: + aarg.getValue().toString());
0725: }
0726: }
0727:
0728: // arguments contains all classes.Value for all arguments, or is empty
0729: return arguments;
0730: }
0731:
0732: /**
0733: * Adds all temporary variables declared <code>Local</code> to
0734: * the arguments map. Flatten out results into a map which unfolds
0735: * into a vector of DAX leaves, and which are fit for a <code>Job</code>.
0736: *
0737: * @param arguments is the map of variable bindings.
0738: * @param tr is the <code>Transformation</code> which supplies the
0739: * formal arugments.
0740: * @param state is the book-keeper to use for temporary filenames.
0741: *
0742: * @exception DuplicateIdentifier is thrown, if a bound name already
0743: * exists in the map. This indicates either a re-used name between
0744: * formal args and temporary variables, or the same temporary variable
0745: * was used twice.
0746: */
0747: private void updateArgsFromLocal(Map arguments, Transformation tr,
0748: BookKeeper state) {
0749: // nothing to do
0750: for (Iterator i = tr.iterateLocal(); i.hasNext();) {
0751: Local local = (Local) i.next();
0752: String name = local.getName();
0753:
0754: // FIXME? late type checking?
0755: if (arguments.containsKey(name))
0756: throw new DuplicateIdentifier("identifier \"" + name
0757: + "\" already exists");
0758:
0759: if (local.getValue() == null)
0760: // FIXME: insert Yong's request
0761: throw new NullPointerException(
0762: "you must define a value for " + name);
0763:
0764: // Fix LFNs
0765: Value value = local.getValue();
0766: java.util.List lfnlist = value.getAllLFN(LFN.INOUT);
0767: for (Iterator j = lfnlist.iterator(); j.hasNext();) {
0768: LFN lfn = (LFN) j.next();
0769: Logging.instance().log("route", 4,
0770: " oldfn = " + lfn.getFilename());
0771: lfn.setFilename(state.createTempName(
0772: lfn.getTemporary(), null));
0773: Logging.instance().log("route", 4,
0774: " newfn = " + lfn.getFilename());
0775: }
0776:
0777: // use the supplied parameter
0778: arguments.put(name, value);
0779: Logging.instance().log(
0780: "route",
0781: 2,
0782: "adding temporary variable " + name + '='
0783: + value.toString());
0784: }
0785: }
0786:
0787: /**
0788: * DAX bridge: Adds all filenames referenced by args per job to the
0789: * job "uses" clause note that this is a super-set of filenames, and
0790: * may exceed the union between profile and CLI filenames.
0791: *
0792: * @param job is the job to augment with filenames that are used within.
0793: * @param arguments are the argument from which LFNs are plucked,
0794: * and stuck into the "uses" section of the job.
0795: * @return a set of all logical filenames from the parametric lists
0796: */
0797: private Set augmentJobUses(Job job, Map arguments) {
0798: Scalar scalar = null;
0799: Set result = new HashSet();
0800:
0801: for (Iterator i = arguments.values().iterator(); i.hasNext();) {
0802: Value current = (Value) i.next();
0803: java.util.List list = current.getLFNList(-1);
0804: for (Iterator f = list.iterator(); f.hasNext();) {
0805: LFN lfn = (LFN) f.next();
0806: result.add(lfn);
0807: Filename file = new Filename(lfn);
0808: job.addUses(file);
0809: Logging.instance().log("route", 2,
0810: "adding uses for " + lfn.getFilename());
0811: }
0812: }
0813: return result;
0814: }
0815:
0816: /**
0817: * DAX bridge: Copies VDL profiles into DAX profiles while
0818: * substituting any <code>Use</code> elements.
0819: *
0820: * @param job is the job to augment with profiles.
0821: * @param arguments are the arguments from which any <code>Use</code>
0822: * elements is substituted from. They are just passed through here.
0823: * @param profiles are all known profiles, from outer to inner TR.
0824: */
0825: private void augmentJobProfile(Job job, Map arguments,
0826: java.util.List profiles) {
0827: for (Iterator i = profiles.iterator(); i.hasNext();) {
0828: org.griphyn.vdl.classes.Profile src = (org.griphyn.vdl.classes.Profile) i
0829: .next();
0830: org.griphyn.vdl.dax.Profile dst = new org.griphyn.vdl.dax.Profile(
0831: src.getNamespace(), src.getKey());
0832: for (Iterator e = src.iterateLeaf(); e.hasNext();) {
0833: java.util.List n = convertLeaf(
0834: (org.griphyn.vdl.classes.Leaf) e.next(),
0835: arguments);
0836: for (Iterator f = n.iterator(); f.hasNext();) {
0837: dst.addLeaf((org.griphyn.vdl.dax.Leaf) f.next());
0838: }
0839: }
0840: job.addProfile(dst);
0841: }
0842: }
0843:
/**
 * DAX bridge: Converts VDL <code>Argument</code> items into DAX
 * arguments while substituting any <code>Use</code> elements against
 * their value. Arguments named "stdin", "stdout" or "stderr" are
 * treated specially: they must flatten to exactly one filename, which
 * is then attached to the job's stdio instead of the argument line.
 *
 * @param job is the job to augment with arguments and stdio handles.
 * @param arguments are the arguments from which any <code>Use</code>
 * elements is substituted from. They are just passing through here.
 * @param tr is the Transformation that provides the argument list and
 * the argument separator.
 * @throws IllegalArgumentException if a stdio argument does not
 * flatten to exactly one <code>Filename</code>.
 */
private void augmentJobArguments(Job job, Map arguments,
        Transformation tr) {
    // construct DAX argument line from VDL argument list.
    String separator = tr.getArgumentSeparator();
    for (Iterator e = tr.iterateArgument(); e.hasNext();) {
        org.griphyn.vdl.classes.Argument src =
            (org.griphyn.vdl.classes.Argument) e.next();
        String name = src.getName();
        for (Iterator i = src.iterateLeaf(); i.hasNext();) {
            // each leaf may expand into several DAX leaves
            org.griphyn.vdl.classes.Leaf leaf =
                (org.griphyn.vdl.classes.Leaf) i.next();
            java.util.List neu = convertLeaf(leaf, arguments);
            if (name != null
                && (name.equals("stdin")
                    || name.equals("stdout") || name
                    .equals("stderr"))) {
                // remember the variable name behind the stdio handle;
                // "(null)" is the placeholder for non-Use leaves
                String varname = null;
                if (leaf instanceof Use)
                    varname = ((Use) leaf).getName();
                else
                    varname = "(null)";

                // do stdio parsing. The resulting element must be exactly one
                // Filename element
                if (neu.size() != 1
                    || !(neu.get(0) instanceof Filename))
                    throw new IllegalArgumentException(
                        "invalid spec for stdio: You must use exactly one LFN");
                else {
                    Filename filename = (Filename) neu.get(0);

                    // route the single filename to the matching handle
                    filename.setVariable(varname);
                    if (name.equals("stdin")) {
                        job.setStdin(filename);
                    } else if (name.equals("stdout")) {
                        job.setStdout(filename);
                    } else if (name.equals("stderr")) {
                        job.setStderr(filename);
                    }
                }
            } else {
                // regular arguments, add bits and pieces
                for (Iterator f = neu.iterator(); f.hasNext();) {
                    job.addArgument((org.griphyn.vdl.dax.Leaf) f
                        .next());
                }
            }
        }

        // add default argument separator here, but only between
        // arguments, never after the last one
        if (e.hasNext() && separator != null
            && separator.length() > 0) {
            job.addArgument(new org.griphyn.vdl.dax.PseudoText(
                separator));
        }
    }
}
0911:
0912: /**
0913: * Helper function to implement the "type casting" of contained
0914: * LFNs into the necessary usage type. Not all LFNs can be casted.
0915: *
0916: * @param ul is the provided linkage in the <code>Use</code> element.
0917: * @param lfn is the logical filename to adjust. This argument may
0918: * be adjusted in place, if the cast condition matches
0919: *
0920: * @see #updateScalarFromPreset( String, Scalar, Map )
0921: * @see #updateFromPreset( String, Scalar, Map )
0922: */
0923: private void castLFN(int ul, LFN lfn) {
0924: if (lfn.getLink() == LFN.INOUT
0925: && (ul == LFN.INPUT || ul == LFN.OUTPUT))
0926: lfn.setLink(ul);
0927: }
0928:
0929: /**
0930: * Helper function to update the guts of a <code>Scalar</code>, each
0931: * occurance of a <code>Use</code> by its matching value list from
0932: * the preset map.
0933: *
0934: * @param key is a symbolic representation of the variable that we
0935: * are currently mapping
0936: * @param s is the Scalar to remap
0937: * @param preset is the map with values to replace with.
0938: * @return the new value from the replacement. This method enforces
0939: * the return of a Scalar.
0940: * @see #mapCallToDerivation( String, String, String, Call, Map )
0941: */
0942: private Scalar updateScalarFromPreset(String key, Scalar s,
0943: Map preset) {
0944: Logging.instance().log(
0945: "route",
0946: 5,
0947: "updateScalarFromPreset( " + key + '=' + s.toString()
0948: + " )");
0949: for (int i = 0; i < s.getLeafCount(); ++i) {
0950: org.griphyn.vdl.classes.Leaf leaf = s.getLeaf(i);
0951:
0952: // only work on Use elements, ignore other elements
0953: if (leaf instanceof Use) {
0954: Use use = (Use) leaf;
0955:
0956: // check existence of key
0957: if (!preset.containsKey(use.getName()))
0958: throw new RuntimeException("unable to resolve "
0959: + use.getName());
0960:
0961: Value v2 = (Value) preset.get(use.getName());
0962: if (v2.getContainerType() != Value.SCALAR)
0963: throw new RuntimeException("cannot map list "
0964: + use.getName() + " to scalar " + key);
0965:
0966: // FIXME: worry about linkage
0967:
0968: // remove <code>Use</code> element...
0969: s.removeLeaf(i);
0970:
0971: Scalar s2 = (Scalar) v2;
0972: int ul = use.getLink(); // use linkage
0973: for (int j = 0; j < s2.getLeafCount(); ++j) {
0974: org.griphyn.vdl.classes.Leaf l = s2.getLeaf(j);
0975: if (ul != -1 && l instanceof LFN) {
0976: // adjust linkage for i/o LFN, if use is IN or OUT
0977: LFN lfn = (LFN) ((LFN) l).clone();
0978: castLFN(ul, lfn);
0979: s.addLeaf(i + j, lfn);
0980: } else {
0981: s.addLeaf(i + j, l);
0982: }
0983: }
0984: i += s2.getLeafCount() - 1;
0985: }
0986: }
0987:
0988: return s;
0989: }
0990:
0991: /**
0992: * Helper function to update the guts of a <code>Scalar</code>.
0993: * This function just checks for the one very special case in
0994: * argument passing.
0995: * <ul>
0996: * <li>The Scalar value contains just one Leaf.
0997: * <li>The Leaf is of type Use.
0998: * <li>The Use maps to a List by the presets.
0999: * </ul>
1000: * If all these conditions are true, the List is returned.
1001: * All other cases are handled by {@link #updateScalarFromPreset}.
1002: *
1003: * @param key is a symbolic representation of the variable that we
1004: * are currently mapping
1005: * @param s is the Scalar to remap
1006: * @param preset is the map with values to replace with.
1007: * @return the new value from the replacement. Note that, due to
1008: * the Use element, the result can be a list to be passed.
1009: * @see #mapCallToDerivation( String, String, String, Call, Map )
1010: */
1011: private Value updateFromPreset(String key, Scalar s, Map preset) {
1012: Logging.instance().log("route", 5,
1013: "updateFromPreset( " + key + '=' + s.toString() + " )");
1014:
1015: // one very specific circumstance for passing a variable
1016: // from a use which contains a list
1017: if (s.getLeafCount() == 1 && s.getLeaf(0) instanceof Use) {
1018: Use use = (Use) s.getLeaf(0);
1019: String useName = use.getName();
1020:
1021: // check existence of key
1022: if (!preset.containsKey(useName))
1023: throw new RuntimeException("unable to resolve "
1024: + useName);
1025:
1026: Value v2 = (Value) preset.get(useName);
1027: int ul = use.getLink(); // use linkage
1028: if (v2.getContainerType() == Value.LIST && ul != -1) {
1029: // do the type casting here and now
1030: org.griphyn.vdl.classes.List list = (org.griphyn.vdl.classes.List) v2
1031: .clone(); // FIXME: memory?
1032:
1033: Logging.instance().log("trace", 4,
1034: "mapping list to " + key);
1035: for (Iterator i = list.iterateScalar(); i.hasNext();) {
1036: for (Iterator j = ((Scalar) i.next()).iterateLeaf(); j
1037: .hasNext();) {
1038: org.griphyn.vdl.classes.Leaf l = (org.griphyn.vdl.classes.Leaf) j
1039: .next();
1040: if (l instanceof LFN)
1041: castLFN(ul, (LFN) l);
1042: }
1043: }
1044: return list;
1045: }
1046: }
1047:
1048: // old code
1049: return updateScalarFromPreset(key, s, preset);
1050: }
1051:
1052: /**
1053: * Maps an anonymous <code>Call</code> to a named <code>Derivation</code>.
1054: * The <code>Call</code> may contain references to bound variables as
1055: * <code>Use</code> leaves. These must be translated into their respective
1056: * actual argument value with the help of the arguments hash.
1057: *
1058: * @param namespace is the namespace in which to produce the new Derivation.
1059: * The value may be null.
1060: * @param name is a name prefix to prepend the call id with.
1061: * @param version is the version, which may be null.
1062: * @param me is the <code>Call</code> to translate.
1063: * @param arguments is a map of actual arguments to substitute for
1064: * bound references.
1065: * @return a new <code>Derivation</code> to be used in the stead of the
1066: * <code>Call</code>.
1067: */
1068: private Derivation mapCallToDerivation(String namespace,
1069: String name, String version, Call me, Map arguments) {
1070: Derivation result = new Derivation(namespace, name + '.'
1071: + me.shortID(), version, me.getUsesspace(), me
1072: .getUses(), me.getMinIncludeVersion(), me
1073: .getMaxIncludeVersion());
1074:
1075: Logging.instance().log("trace", 2,
1076: "mapCallToDerivation: " + me.toString());
1077: Logging.instance().log("route", 2,
1078: "creating DV " + result.identify());
1079: for (Iterator f = me.iteratePass(); f.hasNext();) {
1080: Pass pass = (Pass) ((Pass) f.next()).clone();
1081: String key = pass.getBind();
1082: Logging.instance().log("route", 4,
1083: "pold " + key + '=' + pass.getValue().toString());
1084:
1085: Value value = null;
1086: switch (pass.getValue().getContainerType()) {
1087: case Value.SCALAR:
1088: value = updateFromPreset(key, (Scalar) pass.getValue(),
1089: arguments);
1090: break;
1091: case Value.LIST:
1092: org.griphyn.vdl.classes.List list = (org.griphyn.vdl.classes.List) pass
1093: .getValue(); // value;
1094: org.griphyn.vdl.classes.List newlist = new org.griphyn.vdl.classes.List();
1095: Logging.instance().log("trace", 4,
1096: "mapping list to list");
1097: for (int i = 0; i < list.getScalarCount(); ++i)
1098: newlist.addScalar(updateScalarFromPreset(key + "["
1099: + i + "]", list.getScalar(i), arguments));
1100: value = newlist;
1101: break;
1102: default:
1103: throw new RuntimeException("should not happen");
1104: }
1105:
1106: Logging.instance().log("route", 4,
1107: "pnew " + key + '=' + value.toString());
1108: result.addPass(new Pass(key, value));
1109: }
1110: return result;
1111: }
1112:
1113: /**
1114: * This private helper methods uses a single derivation or call node
1115: * and applies the immutable parts stored in the transformation.
1116: * The result is a job description which merges a call or DV actual
1117: * argument with transformation formal argument. The result will
1118: * be stored in the book-keeping DAX.<p>
1119: *
1120: * Compound TR will be sequenced into a number of jobs. Each later job
1121: * will depend on its previous job. The last job in the sequence is
1122: * the replacement job for the calling derivation. FIXME: what about
1123: * the parentship of the first job?<p>
1124: *
1125: * @param dv is the {@link Derivation} to generate the job DAX info for.
1126: * @param state is the book-keeper to extend with transformations.
1127: * @param real are the real derivations to be used as PARENT for the
1128: * current derivation. In case of a simple TR, the result is the
1129: * derivation itself.
1130: * @param level is the recursion depth to record, set by caller
1131: * @return <code>true</code> for a simple transformation,
1132: * <code>false</code> for a compound.
1133: */
1134: private boolean applyTransformation(Derivation dv,
1135: BookKeeper state, TreeSet real, int level) {
1136: // startup
1137: String id = dv.identify();
1138: Logging.instance().log("trace", 3,
1139: "applyTransformation(" + id + ", " + level + ')');
1140:
1141: // find a TR anywhere on the definitions stack, from top to bottom,
1142: // that matches the DV-specified constraints.
1143: Transformation tr = findMatchingTransformation(dv
1144: .getUsesspace(), dv.getUses(), dv
1145: .getMinIncludeVersion(), dv.getMaxIncludeVersion());
1146: Logging.instance().log("route", 2, "taken TR " + tr.identify());
1147:
1148: // map out actual arguments and formal argument defaults with
1149: // their respective bound variable name.
1150: Map arguments = mapActualToFormal(dv, tr);
1151: // POSTCONDITION: arguments contains all classes.Value for all arguments
1152:
1153: // FIXME: Put line below HERE to register IDs for _all_ TRs, simple and compound.
1154: // String nmtoken = state.mapJob( id );
1155: if (tr.isSimple()) {
1156: Logging.instance().log("route", 2, "simple TR");
1157:
1158: // generate DAX job
1159: String nmtoken = state.mapJob(id);
1160: Job job = new Job(tr.getNamespace(), tr.getName(), tr
1161: .getVersion(), nmtoken, dv.getNamespace(), dv
1162: .getName(), dv.getVersion());
1163: job.setLevel(level);
1164:
1165: // add LFNs from arguments to uses section of job.
1166: Collection c = augmentJobUses(job, arguments);
1167: state.addFilenames(c); // and add to DAG files section
1168:
1169: // Convert "use" elements while copying profiles.
1170: java.util.List profiles = state.getAllProfiles();
1171: profiles.addAll(tr.getProfileList());
1172: if (profiles.size() > 0)
1173: augmentJobProfile(job, arguments, profiles);
1174:
1175: // Convert "use" elements while constructing the argument line.
1176: augmentJobArguments(job, arguments, tr);
1177:
1178: // add compound chain
1179: job.setChain(state.getAllTransformations());
1180:
1181: // add job to DAX
1182: state.addJob(job);
1183:
1184: // indicate simple TR
1185: real.add(dv.identify());
1186: } else {
1187: Logging.instance().log("route", 2,
1188: "compound TR " + tr.identify());
1189:
1190: // new: add local variables from compound statement
1191: updateArgsFromLocal(arguments, tr, state);
1192:
1193: // create local derivation map, and populate it with compounds
1194: int position;
1195: Definitions defs = new Definitions();
1196: for (Iterator e = tr.iterateCall(); e.hasNext();) {
1197: Call call = (Call) e.next();
1198: Derivation newdv = mapCallToDerivation(dv
1199: .getNamespace(), dv.getName(), dv.getVersion(),
1200: call, arguments);
1201: Logging.instance().log(
1202: "route",
1203: 3,
1204: "adding DV " + newdv.identify() + " from CALL "
1205: + call.identify());
1206: defs.addDefinition(newdv);
1207:
1208: // Definitions tends to sanitize, and do weird things, so check again
1209: if ((position = defs.positionOfDefinition(newdv)) != -1)
1210: Logging.instance().log(
1211: "route",
1212: 3,
1213: "added DV "
1214: + defs.getDefinition(position)
1215: .identify());
1216: }
1217:
1218: // remember all profiles so far
1219: state.pushProfile(tr.getProfileList());
1220:
1221: // remember from where we got here
1222: //state.pushTransformation( dv.identify() );
1223: state.pushTransformation(tr.shortID());
1224:
1225: // now search through local derivations first by advancing
1226: // the stack frame to the new derivation set.
1227: // this.m_stack.push( defs );
1228: addDefinitions(defs);
1229:
1230: // search in local by going backwards from produced output files
1231: java.util.List output = dv.getLFNList(LFN.OUTPUT);
1232: Logging.instance().log("route", 2,
1233: "output list " + output.toString());
1234:
1235: for (Iterator i = output.iterator(); i.hasNext();) {
1236: String lfn = (String) i.next();
1237: Logging.instance().log("route", 2,
1238: "looking for producers of LFN " + lfn);
1239: // compound TR should not count in levels
1240: Set temp = requestLfn(lfn, state, level - 1, dv
1241: .getNamespace());
1242: Logging.instance().log(
1243: "route",
1244: 2,
1245: "LFN " + lfn + " produced by "
1246: + temp.toString());
1247: real.addAll(temp);
1248: }
1249:
1250: // restore previous state
1251: // this.m_stack.pop();
1252: removeDefinitions();
1253:
1254: // remove level the memorized top transformation from chain.
1255: state.popTransformation();
1256:
1257: // remove remembered profiles for this TR
1258: state.popProfile();
1259: }
1260:
1261: Logging.instance()
1262: .log(
1263: "trace",
1264: 2,
1265: "applyTransformation(" + id + ") := "
1266: + real.toString());
1267: return (tr.isSimple());
1268: }
1269:
1270: /**
1271: * This private helper traverses recursively the derivation dependencies,
1272: * started by querying for a given derivation. FIXME: It is assumed that
1273: * the derivation is part of the instance-central derivation list.
1274: *
1275: * @param dv is the derivation instance that we ask for
1276: * @param state is the book-keeping to protocol the DAG
1277: * @param level is the recursion depth, use 1 when calling from outside.
1278: * @return a set of job identifiers
1279: */
1280: private Set requestDerivation(Derivation dv, BookKeeper state,
1281: int level) {
1282: String id = dv.identify();
1283: Logging.instance().log("trace", 3,
1284: "requestDerivation(" + id + ", " + level + ')');
1285:
1286: TreeSet result = new TreeSet();
1287: if (level <= this .m_maxDepth) {
1288: // ok, let's work
1289: if (state.wasVisited(dv)) {
1290: // already known
1291:
1292: if (level <= 1) {
1293: // OK, if requested directly
1294: Logging.instance().log(
1295: "app",
1296: 1,
1297: "Skipping direct request for "
1298: + "already known DV " + id);
1299: result.addAll(state.getVisited(dv));
1300: } else {
1301: // FIXME: Potential problem, if requested indirectly!
1302: final String m = "requestDerivation("
1303: + id
1304: + ") reached an unreachable branch. "
1305: + "I am not quite prepared for this, so I will rather exit now than "
1306: + "create havoc. Please contact support, supply your data and actions.";
1307: throw new RuntimeException(m);
1308: }
1309: } else {
1310: // DO IT HERE, ...
1311: // disadvant: it is depth-first, but profiles are mucked up.
1312: // advantage: "result" is already set correctly for storing in addVisited
1313: // // new node, generate job
1314: // boolean isSimple = applyTransformation( dv, state, result );
1315:
1316: // store real inner-most simple-TR parentset for this DV
1317: state.addVisited(dv, result);
1318:
1319: TreeSet parents = new TreeSet();
1320: java.util.List input = dv.getLFNList(LFN.INPUT);
1321: for (Iterator i = input.iterator(); i.hasNext();) {
1322: String lfn = (String) i.next();
1323: parents.addAll(requestLfn(lfn, state, level, dv
1324: .getNamespace()));
1325: }
1326:
1327: // ...OR DO IT HERE
1328: // disadvant: "result" will be set after the fact (but it is a reference!)
1329: // advantage: breadth-first, correct profiles
1330: // new node, generate job
1331: boolean isSimple = applyTransformation(dv, state,
1332: result, level);
1333:
1334: // add parents to current node
1335: if (isSimple)
1336: state.addParent(dv, parents);
1337: else
1338: Logging.instance().log(
1339: "route",
1340: 4,
1341: "NOT adding " + id + " PARENTS "
1342: + parents.toString());
1343: }
1344: } else {
1345: Logging.instance().log(
1346: "route",
1347: 0,
1348: "maximum recursion " + m_maxDepth
1349: + " reached, leaving.");
1350: }
1351:
1352: Logging.instance()
1353: .log(
1354: "trace",
1355: 2,
1356: "requestDerivation(" + id + ") := "
1357: + result.toString());
1358: return result;
1359: }
1360:
1361: /**
1362: * This private helper method is used in recursive calls when requesting
1363: * a given logical filename.
1364: *
1365: * @param lfn is the logical filename that must be a product of some
1366: * derivation
1367: * @param state is the book-keeping structure to produce the final DAG.
1368: * @param level is the recursion depth, use 0 when calling from outside.
1369: * @param cwns is the current working namespace, may be null.
1370: * @return a set of job identifiers
1371: */
1372: private Set requestLfn(String lfn, BookKeeper state, int level,
1373: String cwns) {
1374: TreeSet result = new TreeSet();
1375: Logging.instance().log(
1376: "trace",
1377: 3,
1378: "requestLfn(" + lfn + ", " + level + ", "
1379: + (cwns == null ? "null" : cwns) + ')');
1380:
1381: // DON'T. One check in requestDerivation should be enough
1382: // if ( level < this.m_maxDepth )
1383:
1384: // find all derivations that have this file as output
1385: java.util.List match = this .m_stack.derivationsWithOutput(lfn);
1386:
1387: // if such a derivation exists...
1388: if (match.size() > 0) {
1389: Derivation dv = null;
1390: if (match.size() > 1) {
1391: // only create a fuss, if there is anything to chose from
1392: java.util.Map env = new java.util.TreeMap();
1393: env.put("cwns", cwns);
1394: env.put("lfn", lfn);
1395: env.put("level", new Integer(level));
1396:
1397: dv = m_arbiter.arbit(match, env);
1398:
1399: try {
1400: env.clear();
1401: } catch (UnsupportedOperationException e) {
1402: // ignore
1403: }
1404: }
1405: if (dv == null)
1406: dv = (Derivation) match.get(0);
1407:
1408: // if we haven't visited here before, recurse into node
1409: if (state.wasVisited(dv)) {
1410: // seen before, just return known parental relationship
1411: result.addAll(state.getVisited(dv));
1412: } else {
1413: // not seen before, recurse into graph
1414: result.addAll(requestDerivation(dv, state, level + 1));
1415: }
1416: } else {
1417: if (level == 0)
1418: Logging.instance().log("default", 0,
1419: "Unknown output LFN \"" + lfn + "\"");
1420: }
1421:
1422: Logging.instance().log("trace", 2,
1423: "requestLfn(" + lfn + ") := " + result.toString());
1424: return result;
1425: }
1426:
1427: /**
1428: * This helper method is the entry point when requesting a certain
1429: * derivation. As a result, a build-style DAG will be produced and
1430: * maintained in the book-keeping structure. FIXME: It is assumed that
1431: * the derivation is part of the DatabaseSchema that was used with the
1432: * c'tor.
1433: *
1434: * @param dv is the derivation asked for.
1435: * @return a new bookkeeper containing the DAG information.
1436: * @see #requestDerivation( Collection )
1437: * @see BookKeeper
1438: */
1439: public BookKeeper requestDerivation(Derivation dv) {
1440: if (m_stack.isEmpty())
1441: return null; // ???
1442: BookKeeper state = new BookKeeper();
1443: requestDerivation(dv, state, 1);
1444: return state;
1445: }
1446:
1447: /**
1448: * This helper method is the entry point when requesting a set of
1449: * derivations. As a result, a build-style DAG will be produced and
1450: * maintained in the book-keeping structure. Note that the resulting
1451: * graph may be disconnected. FIXME: It is assumed that the derivation
1452: * is part of the DatabaseSchema that was used with the c'tor.
1453: *
1454: * @param list is the set of derivations to asked for.
1455: * @return a new bookkeeper containing the DAG information.
1456: * @see BookKeeper
1457: */
1458: public BookKeeper requestDerivation(Collection list) {
1459: if (m_stack.isEmpty() || list == null)
1460: return null;
1461: BookKeeper state = new BookKeeper();
1462: for (Iterator i = list.iterator(); i.hasNext();) {
1463: Derivation dv = (Derivation) i.next();
1464: Logging.instance().log("route", 0,
1465: "requesting DV " + dv.identify());
1466: requestDerivation(dv, state, 1);
1467: }
1468: return state;
1469: }
1470:
1471: /**
1472: * Compares two strings, each of which may be null. If both are null,
1473: * they are considered equal by this function. This function relies on
1474: * the fact that equals() can deal with null arguments.
1475: *
1476: * @param a a string which may be null
1477: * @param b a string which may be null
1478: * @return true, if the strings equal, or if both are null.
1479: */
1480: private static boolean matchWithNull(String a, String b) {
1481: return (a == null ? b == null : a.equals(b));
1482: }
1483:
1484: /**
1485: * This helper retrieves a build-DAG for running the specified logical
1486: * derivation and all its required predecessors. It assumes a fully
1487: * qualified derivation specification.
1488: *
1489: * @param namespace is the namespace the derivation lives in. A value
1490: * of {@link Separator#DEFAULT} will be assumed for a value of null.
1491: * @param name is the name of the derivation. Although anonymous
1492: * derivations are allowed, this method requires a named derivation.
1493: * @param version is a version string within the minVersion and maxVersion
1494: * range defined by the derivation, which might be null.
1495: * @return a book-keeping structure, or null, if either no matching
1496: * derivation was found.
1497: */
1498: public BookKeeper requestDerivation(String namespace, String name,
1499: String version) {
1500: java.util.List result = m_stack.searchForDV(namespace, name,
1501: version);
1502:
1503: int size = result.size();
1504: if (size >= 1) {
1505: // request all matches
1506: BookKeeper state = new BookKeeper();
1507: for (Iterator i = result.iterator(); i.hasNext();) {
1508: Derivation dv = (Derivation) i.next();
1509: Logging.instance().log("route", 0,
1510: "requesting DV " + dv.identify());
1511: requestDerivation(dv, state, 1);
1512: }
1513:
1514: return state;
1515: }
1516:
1517: // if derivation could not be found.
1518: return null;
1519: }
1520:
1521: /**
1522: * This helper retrieves a DAX for running the specified logical
1523: * derivation match and all its required predecessors. It assumes a fully
1524: * qualified or partial-null derivation specification.
1525: *
1526: * @param namespace is the namespace the derivation lives in. Null is
1527: * the joker value.
1528: * @param name is the name of the derivation. Although anonymous
1529: * derivations are allowed, this method requires a named derivation.
1530: * @param version is a version string within the minVersion and maxVersion
1531: * range defined by the derivation, which might be null.
1532: * @param state is the book-keeping structure
1533: * @return true, if something was found, and false if not.
1534: */
1535: public boolean requestDerivation(String namespace, String name,
1536: String version, BookKeeper state) {
1537: // sanity check first
1538: if (state == null)
1539: return false;
1540:
1541: // what have we got here?
1542: java.util.List result = m_stack.searchForDV(namespace, name,
1543: version);
1544:
1545: int size = result.size();
1546: if (size >= 1) {
1547: // request all matches
1548: for (Iterator i = result.iterator(); i.hasNext();) {
1549: Derivation dv = (Derivation) i.next();
1550: Logging.instance().log("route", 0,
1551: "requesting DV " + dv.identify());
1552: requestDerivation(dv, state, 1);
1553: }
1554:
1555: return true;
1556: }
1557:
1558: // if derivation could not be found.
1559: return false;
1560: }
1561:
1562: /**
1563: * This helper retrieves a DAX for a number of symbolically specified
1564: * derivations. The resulting graph may be disconnected. It assumes a
1565: * fully qualified or partial-null derivation specification.
1566: *
1567: * @param fqdi is a collection of fully-qualified derivation identifiers.
1568: * @param state is the book-keeping structure
1569: * @return true, if something was found, and false if not.
1570: * @see org.griphyn.common.util.Separator#splitFQDI( String )
1571: */
1572: public boolean requestDerivation(Collection fqdi, BookKeeper state) {
1573: // sanity check first
1574: if (state == null)
1575: return false;
1576:
1577: // use a set, so we request each DV only once
1578: java.util.Set result = new HashSet();
1579: for (Iterator i = fqdi.iterator(); i.hasNext();) {
1580: String[] n = Separator.splitFQDI((String) i.next());
1581: result.addAll(m_stack.searchForDV(n[0], n[1], n[2]));
1582: }
1583:
1584: int size = result.size();
1585: if (size >= 1) {
1586: // request all matches
1587: for (Iterator i = result.iterator(); i.hasNext();) {
1588: Derivation dv = (Derivation) i.next();
1589: Logging.instance().log("route", 0,
1590: "requesting DV " + dv.identify());
1591: requestDerivation(dv, state, 1);
1592: }
1593:
1594: return true;
1595: }
1596:
1597: // if derivation could not be found.
1598: return false;
1599: }
1600:
1601: /**
1602: * This method requests a data product logical filename. As a result,
1603: * the complete build-style DAG for producing the requested filename
1604: * will be constructed.
1605: *
1606: * @param lfn is the filename that was requested
1607: * @return the book-keeping information to construct the DAG. Please
1608: * note that it might be empty, if no derivation is known to produce
1609: * the file. It will be null, if the definitionslist is empty.
1610: * @see #requestLfn( Collection, BookKeeper )
1611: * @see #requestLfn( String, BookKeeper, int, String )
1612: */
1613: public BookKeeper requestLfn(String lfn) {
1614: if (m_stack.isEmpty() || lfn == null)
1615: return null;
1616: BookKeeper state = new BookKeeper();
1617: requestLfn(lfn, state, 0, null);
1618: return state;
1619: }
1620:
1621: /**
1622: * This method requests multiple data product logical filenames. As a
1623: * result, the complete build-style DAG for producing the requested
1624: * filename will be constructed. Please note that the result may
1625: * constitute a disconnected graph.
1626: *
1627: * @param list is a collection of logical filename strings.
1628: * @param state is the book-keeping structure
1629: *
1630: * @see #requestLfn( String )
1631: * @see #requestLfn( String, BookKeeper, int, String )
1632: */
1633: public void requestLfn(Collection list, BookKeeper state) {
1634: if (m_stack.isEmpty() || list == null || state == null)
1635: return;
1636: for (Iterator i = list.iterator(); i.hasNext();) {
1637: String lfn = (String) i.next();
1638: Logging.instance().log("route", 0, "requesting LFN " + lfn);
1639: requestLfn(lfn, state, 0, null);
1640: }
1641: }
1642: }
|