001: /*
002: * This file or a portion of this file is licensed under the terms of
003: * the Globus Toolkit Public License, found in file GTPL, or at
004: * http://www.globus.org/toolkit/download/license.html. This notice must
005: * appear in redistributions of this file, with or without modification.
006: *
007: * Redistributions of this Software, with or without modification, must
008: * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
009: * some other similar material which is provided with the Software (if
010: * any).
011: *
012: * Copyright 1999-2004 University of Chicago and The University of
013: * Southern California. All rights reserved.
014: */
015: package org.griphyn.vdl.diagnozer;
016:
017: import java.io.*;
018: import java.util.*;
019: import java.util.regex.*;
020: import org.griphyn.vdl.directive.*;
021: import org.griphyn.vdl.toolkit.FriendlyNudge;
022:
/**
 * Inspects various files in a run directory to help diagnose an error.
 *
 * @author Jin Soon Chang
 * @version $Revision: 50 $
 */
029: public class Diagnozer {
030: /**
031: * Contains the base directory in and underneath which all files reside.
032: */
033: private File m_basedir;
034:
035: /**
036: * Names the .dag file
037: */
038: private File m_dagfile;
039:
040: /**
041: * Maps a job ID to its submit file.
042: */
043: private Map m_job;
044:
045: /**
046: * Remembers job IDs from jobs that were done (successfully).
047: */
048: private Set m_done;
049:
050: /**
051: * Maps a job ID to a JobInfo record.
052: */
053: private Map m_jobRecord;
054:
055: /**
056: * Maps a parent job ID to all dependent children.
057: */
058: private Map m_parents;
059:
060: /**
061: * Maps a child job ID to all dependent parents.
062: */
063: private Map m_children;
064:
065: /**
066: * work directory
067: */
068: String m_workdir;
069:
070: /**
071: * any signal from user
072: */
073: String m_signal;
074:
075: public void parseDAG(File dag) throws IOException {
076: String line, parent, child;
077: StringTokenizer st = null;
078: LineNumberReader lnr = new LineNumberReader(new FileReader(dag));
079: while ((line = lnr.readLine()) != null) {
080: String lower = line.toLowerCase().trim();
081: if (lower.startsWith("job ")) {
082: // JOB ID000001 00/00/ID000001.sub [ DONE ]
083: st = new StringTokenizer(line.trim());
084: st.nextToken(); // JOB
085: String jobid = st.nextToken(); // jobid
086: String subfn = st.nextToken(); // submit file
087: m_job.put(jobid, subfn);
088: /*JobInfo job=new JobInfo();
089: job.m_id=jobid;
090: m_jobRecord.put(jobid,job);*/
091:
092: if (m_done != null && st.hasMoreTokens()
093: && st.nextToken().toLowerCase().equals("done"))
094: m_done.add(jobid);
095: /*
096: else{
097: JobInfos jobs=new JobInfos (jobid);
098: JobInfo job=new JobInfo();
099: job.setId(jobid);
100: job.setRetry("0");
101: jobs.addJobInfo(job,"0");
102: m_jobRecord.put(jobid,jobs);
103: }
104: */
105: } else if (lower.startsWith("parent ")) {
106: // PARENT ID000093 CHILD ID000094
107:
108: TreeSet parents = new TreeSet();
109: TreeSet children = new TreeSet();
110:
111: st = new StringTokenizer(line.trim());
112: st.nextToken(); // PARENT
113: do {
114: parent = st.nextToken();
115: if (parent.toLowerCase().equals("child"))
116: break;
117: parents.add(parent);
118: } while (st.hasMoreTokens());
119: while (st.hasMoreTokens()) {
120: children.add(st.nextToken());
121: }
122:
123: for (Iterator i = parents.iterator(); i.hasNext();) {
124: parent = (String) i.next();
125: if (!m_parents.containsKey(parent))
126: m_parents.put(parent, new TreeSet());
127: ((Set) m_parents.get(parent)).addAll(children);
128: }
129: for (Iterator i = children.iterator(); i.hasNext();) {
130: child = (String) i.next();
131: if (!m_children.containsKey(child))
132: m_children.put(child, new TreeSet());
133: ((Set) m_children.get(child)).addAll(parents);
134: }
135: }
136: }
137: lnr.close();
138: }
139:
140: /**
141: * c'tor.
142: */
143: public Diagnozer(String basedir) throws IOException {
144: m_basedir = new File(basedir);
145: if (!m_basedir.isDirectory())
146: throw new IOException(basedir + " is not a directory");
147: // post-condition: itsa dir
148:
149: File[] dagfiles = m_basedir.listFiles(new FindTheFile(".dag"));
150: // File[] dagfiles = m_basedir.listFiles(new FindTheRegex("\\.dag$"));
151: if (dagfiles.length != 1)
152: throw new RuntimeException("too many dag files in "
153: + basedir);
154: m_dagfile = dagfiles[0];
155: m_workdir = basedir;
156:
157: m_job = new TreeMap();
158: m_parents = new TreeMap();
159: m_children = new TreeMap();
160: m_jobRecord = new HashMap();
161: m_signal = null;
162: // find the rescue dag
163: File rescuedag = new File(m_dagfile.getPath() + ".rescue");
164:
165: if (rescuedag.exists()) {
166: // parse rescue dag instead
167: m_done = new TreeSet();
168: parseDAG(rescuedag);
169: } else {
170: // parse regular dag file
171: m_done = null;
172: parseDAG(m_dagfile);
173: }
174:
175: for (Iterator i = m_job.keySet().iterator(); i.hasNext();) {
176: String jobid = (String) i.next();
177: String subfn = (String) m_job.get(jobid);
178: if (allParentsDone(jobid) && !m_done.contains(jobid)) {
179: JobInfos jobs = new JobInfos(jobid);
180: JobInfo job = new JobInfo();
181: job.setId(jobid);
182: job.setRetry("0");
183: jobs.addJobInfo(job, "0");
184: m_jobRecord.put(jobid, jobs);
185: }
186: }
187:
188: }
189:
190: /**
191: * Dumps knowledge about the DAG for debugging purposes.
192: */
193: public void dump() {
194: for (Iterator i = m_job.keySet().iterator(); i.hasNext();) {
195: String jobid = (String) i.next();
196: String subfn = (String) m_job.get(jobid);
197: String done = m_done == null ? ""
198: : (m_done.contains(jobid) ? "is done" : "NOT done");
199: System.out.println(jobid + " -> " + subfn + ": " + done);
200: }
201:
202: for (Iterator i = m_parents.keySet().iterator(); i.hasNext();) {
203: String parent = (String) i.next();
204: System.out.println("PARENT " + parent + " CHILD "
205: + m_parents.get(parent).toString());
206: }
207: }
208:
209: public void parseDebug(String dbgfile) {
210: String line;
211: File dbg = new File(dbgfile);
212: LineNumberReader lnr = null;
213:
214: try {
215: lnr = new LineNumberReader(new FileReader(dbg));
216: while ((line = lnr.readLine()) != null) {
217: }
218: lnr.close();
219: } catch (IOException ioe) {
220: System.err.println("Warning: Unable to read " + dbgfile);
221: }
222: }
223:
224: public void getDebugInfo() {
225: for (Iterator i = m_job.keySet().iterator(); i.hasNext();) {
226: String jobid = (String) i.next();
227: String subfn = (String) m_job.get(jobid);
228:
229: if (!m_done.contains(jobid) && allParentsDone(jobid)) {
230: String dbgfile = subfn.replaceAll(".sub", "");
231: ParseDbg(m_dagfile.getParent() + "/" + dbgfile.trim()
232: + ".dbg", jobid);
233: }
234: }
235: }
236:
237: public void ParseDbg(String dbgfile, String jobid) {
238: try {
239: File dbg = null;
240: LineNumberReader lnr = null;
241:
242: try {
243: dbg = new File(dbgfile);
244: lnr = new LineNumberReader(new FileReader(dbg));
245: } catch (FileNotFoundException fne) {
246: System.err.println(dbgfile + " doesn't exists");
247: return;
248: }
249:
250: int retries = -1;
251:
252: String line;
253: //20041010T140006.218 [16939] PRE: chose site "term"
254: Pattern site = Pattern.compile(".*chose\\ssite\\s(.*)");
255:
256: //20040901T184209.513 [8299] PRE: starting /home/changjs/vds/contrib/Euryale/prescript.pl
257: Pattern retry = Pattern
258: .compile(".*pre:\\sstarting\\s.*prescript.*");
259:
260: //20041028T143022.783 [4579] PRE: server gsiftp://gainly.uchicago.edu problem: connect: Connection refused
261: Pattern badsite = Pattern
262: .compile(".*pre:\\s(.*)problem:\\s(.*)");
263: //Unable to stage-in "fmri.1129-5_anonymized.img": no replicas found at /home/changjs/vds-1.3.2/contrib/Euryale/prescript.pl line 338.
264: Pattern fe = Pattern.compile("\\s*([a-zA-Z].*)");
265:
266: //20041029T014559.358 [8844] PRE: [transfer|T2] # [0x00004002] 1 1/0 2.477s "fmri.3472-5_anonymized.warp" error: the server sent an error response: 530 530 No local mapping for Globus ID
267: //20040901T173703.111 [30512] PRE: stage-in exit code 42, trying to replan
268:
269: Pattern tr = Pattern
270: .compile(".*\\[transfer\\].*\\s(.*)\\serror:\\s(.*)|.*\\[t2\\].*\\s(.*)\\serror:\\s(.*)");
271:
272: //20041104T100447.069 [4466] PRE: [T2] error: globus_ftp_client: the server responded with an error
273: Pattern tr2 = Pattern
274: .compile(".*\\[t2\\]\\serror:\\s(.*)|.*\\[transfer\\]\\serror:\\s(.*)");
275:
276: String currentSite = null;
277:
278: while ((line = lnr.readLine()) != null) {
279: String lower = line.toLowerCase().trim();
280: Matcher siteM = site.matcher(lower);
281: Matcher badsiteM = badsite.matcher(lower);
282: Matcher feM = fe.matcher(lower);
283: Matcher trM = tr.matcher(lower);
284: Matcher tr2M = tr2.matcher(lower);
285: Matcher retryM = retry.matcher(lower);
286:
287: if (retryM.matches()) {
288: retries = retries + 1;
289: }
290:
291: if (trM.matches()) {
292: String filename = trM.group(1);
293: String error = trM.group(2);
294: String currentRetry = Integer.toString(retries);
295: String errorS = "Transfer Error: " + filename + " "
296: + error + " " + currentSite;
297: //((JobInfo) m_jobRecord.get(jobid)).m_fatalErrorMessages.add(errorS);
298: ((JobInfo) ((JobInfos) m_jobRecord.get(jobid))
299: .getJobInfo(currentRetry))
300: .addFatalErrorMessage(errorS);
301: }
302:
303: if (tr2M.matches()) {
304: String error = tr2M.group(1);
305: String currentRetry = Integer.toString(retries);
306: String errorS = "Transfer Error: " + error + " "
307: + currentSite;
308: ((JobInfo) ((JobInfos) m_jobRecord.get(jobid))
309: .getJobInfo(currentRetry))
310: .addFatalErrorMessage(errorS);
311: }
312:
313: if (badsiteM.matches()) {
314: String server = badsiteM.group(1);
315: String error = badsiteM.group(2);
316: String currentRetry = Integer.toString(retries);
317: ;
318: error = server + " " + error;
319: //System.out.println(error);
320:
321: ((JobInfo) ((JobInfos) m_jobRecord.get(jobid))
322: .getJobInfo(currentRetry))
323: .addFatalErrorMessage(error);
324:
325: } else if (feM.matches()) {
326: String currentRetry = Integer.toString(retries);
327: String feMes = feM.group(1);
328: if (m_jobRecord.get(jobid) == null) {
329: System.out.println("dsadsad");
330: }
331: if ((JobInfo) ((JobInfos) m_jobRecord.get(jobid))
332: .getJobInfo(currentRetry) == null) {
333: System.out.println("dsadsa" + currentRetry);
334: }
335: ((JobInfo) ((JobInfos) m_jobRecord.get(jobid))
336: .getJobInfo(currentRetry))
337: .addFatalErrorMessage(feMes);
338: } else if (siteM.matches()) {
339: String currentRetry = Integer.toString(retries);
340: ;
341: currentSite = siteM.group(1);
342: ((JobInfo) ((JobInfos) m_jobRecord.get(jobid))
343: .getJobInfo(currentRetry))
344: .setPool(currentSite);
345: } else if (lower
346: .matches(".*out\\sof\\ssite\\scandidates.*")) {
347: String currentRetry = Integer.toString(retries);
348: //20041028T143028.143 [4605] PRE: out of site candidates, giving up!
349: ((JobInfo) ((JobInfos) m_jobRecord.get(jobid))
350: .getJobInfo(currentRetry))
351: .setPool(currentSite);
352: }
353: }
354: } catch (Exception e) {
355: e.printStackTrace();
356: }
357: }
358:
359: public void parseDagmanOut() {
360: try {
361: String line;
362: File dagmanout = new File(m_dagfile + ".dagman.out");
363: //10/10 13:47:43 PRE Script of Job ID000003 failed with status 1
364:
365: Map retries = new HashMap();
366: for (Iterator i = m_jobRecord.keySet().iterator(); i
367: .hasNext();) {
368: retries.put(((String) i.next()), "0");
369: }
370:
371: Pattern exit = Pattern
372: .compile(".+\\s(.*)\\sscript\\sof\\sjob\\s(.*)failed\\swith\\sstatus\\s(.*)");
373:
374: //PRE Script of Job ID000001 completed successfully.
375: Pattern success = Pattern
376: .compile(".+\\s(.*)\\sscript\\sof\\sjob\\s(.*)\\scompleted\\ssuccessfully.*");
377: LineNumberReader lnr = new LineNumberReader(new FileReader(
378: dagmanout));
379:
380: //10/29 01:48:02 Retrying node ID000001 (retry #1 of 5)...
381: Pattern retry = Pattern
382: .compile(".+\\sretrying\\snode\\s(.*)\\s\\(retry\\s#(.*)\\sof.*");
383: //compile(".+\\sretrying.*");
384: //11/4 10:33:42 Received SIGUSR1
385: Pattern sig = Pattern.compile(".*\\sreceived\\s(.*).*");
386:
387: //10/29 01:46:17 Event: ULOG_GLOBUS_SUBMIT for Condor Job ID000001 (50119.0.0)
388: Pattern stage = Pattern
389: .compile(".*event:\\s(.*)\\sfor\\scondor\\sjob\\s(.*)\\s.*");
390:
391: while ((line = lnr.readLine()) != null) {
392: String lower = line.toLowerCase().trim();
393: Matcher preExitM = exit.matcher(lower);
394: Matcher successM = success.matcher(lower);
395: Matcher stageM = stage.matcher(lower);
396: Matcher sigM = sig.matcher(lower);
397: Matcher retryM = retry.matcher(lower);
398:
399: if (retryM.matches()) {
400: //System.out.println(lower);
401: String id = retryM.group(1);
402: String retryN = retryM.group(2);
403: if (m_jobRecord
404: .containsKey(id.toUpperCase().trim())) {
405: retries.put(id.trim().toUpperCase(), retryN
406: .trim());
407: JobInfo j = new JobInfo();
408: j.setId(id.toUpperCase().trim());
409: j.setRetry(retryN);
410:
411: ((JobInfos) m_jobRecord.get(id.toUpperCase()
412: .trim())).addJobInfo(j, retryN);
413: }
414: }
415:
416: if (sigM.matches()) {
417: String signal = sigM.group(1);
418: m_signal = signal.toUpperCase().trim();
419: }
420:
421: if (preExitM.matches()) {
422: String prepost = preExitM.group(1);
423: String ID = preExitM.group(2);
424: String exitCode = preExitM.group(3);
425: String currentRetry = (String) retries.get(ID
426: .toUpperCase().trim());
427: if (m_jobRecord
428: .containsKey(ID.toUpperCase().trim())) {
429: if (prepost.equals("pre")) {
430: ((JobInfo) ((JobInfos) m_jobRecord.get(ID
431: .toUpperCase().trim()))
432: .getJobInfo(currentRetry))
433: .setPrescriptErrorCode(exitCode);
434: ((JobInfo) ((JobInfos) m_jobRecord.get(ID
435: .toUpperCase().trim()))
436: .getJobInfo(currentRetry))
437: .setPostcriptErrorCode("N/A");
438: }
439: if (prepost.equals("post")) {
440: ((JobInfo) ((JobInfos) m_jobRecord.get(ID
441: .toUpperCase().trim()))
442: .getJobInfo(currentRetry))
443: .setPostcriptErrorCode(exitCode);
444: String subfn = (String) m_job.get(ID
445: .toUpperCase().trim());
446: File subDir = (new File(m_dagfile
447: .getParent()
448: + "/" + subfn)).getParentFile();
449: File outFile = new File(subDir.getPath()
450: + "/" + ID.toUpperCase().trim()
451: + ".out." + currentRetry);
452: File errFile = new File(subDir.getPath()
453: + "/" + ID.toUpperCase().trim()
454: + ".err." + currentRetry);
455: if (outFile == null) {
456: System.out.println(subDir.getPath()
457: + ".out." + currentRetry
458: + " doesn't exits");
459: }
460: if (errFile == null) {
461: System.out.println(subDir.getPath()
462: + ".err." + currentRetry
463: + " doestn't exites");
464: }
465: ParseOut(outFile, ID.toUpperCase().trim(),
466: currentRetry);
467: ParseError(errFile,
468: ID.toUpperCase().trim(),
469: currentRetry);
470:
471: }
472: }
473: }
474: if (successM.matches()) {
475: String prepost = successM.group(1);
476: String jid = successM.group(2);
477: String currentRetry = (String) retries.get(jid
478: .trim().toUpperCase());
479: prepost.trim();
480: if (m_jobRecord.containsKey(jid.toUpperCase()
481: .trim())) {
482: if (prepost.equals("pre")) {
483: ((JobInfo) ((JobInfos) m_jobRecord.get(jid
484: .toUpperCase().trim()))
485: .getJobInfo(currentRetry))
486: .setPrescriptErrorCode("0");
487: }
488: if (prepost.equals("post")) {
489: ((JobInfo) ((JobInfos) m_jobRecord.get(jid
490: .toUpperCase().trim()))
491: .getJobInfo(currentRetry))
492: .setPostcriptErrorCode("0");
493: }
494: }
495: }
496:
497: if (stageM.matches()) {
498: String jobID = stageM.group(2);
499: String stageS = stageM.group(1);
500: String currentRetry = (String) retries.get(jobID
501: .trim().toUpperCase());
502: if (m_jobRecord.containsKey(jobID.toUpperCase()
503: .trim())) {
504: ((JobInfo) ((JobInfos) m_jobRecord.get(jobID
505: .toUpperCase().trim()))
506: .getJobInfo(currentRetry))
507: .updateLastStage(stageS);
508: }
509: }
510: }
511: } catch (Exception e) {
512: e.printStackTrace();
513: }
514: }
515:
516: public void dumpJobRecords(PrintWriter pw) throws IOException {
517: if (m_signal != null)
518: pw.println("THIS JOB WAS TERMINATED BY SIGNAL " + m_signal);
519: pw.flush();
520: for (Iterator i = m_jobRecord.keySet().iterator(); i.hasNext();) {
521: String ID = (String) i.next();
522: ((JobInfos) m_jobRecord.get(ID)).dump(pw);
523: }
524: pw.flush();
525: }
526:
527: public void ParseError(File errFile, String jobid,
528: String currentRetry) {
529: try {
530: LineNumberReader lnr = null;
531: String line;
532: try {
533: lnr = new LineNumberReader(new FileReader(errFile));
534: } catch (FileNotFoundException fne) {
535: //System.err.println(errFile.getName()+" doesn't exists");
536: return;
537: }
538:
539: while ((line = lnr.readLine()) != null) {
540: JobInfo job = (JobInfo) m_jobRecord.get(jobid);
541: ((JobInfo) ((JobInfos) m_jobRecord.get(jobid))
542: .getJobInfo(currentRetry))
543: .addFatalErrorMessage(line);
544: }
545: } catch (Exception e) {
546: e.printStackTrace();
547: }
548: }
549:
550: public void ParseOut(File outFile, String jobid, String currentRetry) {
551: try {
552: LineNumberReader lnr = null;
553: String line;
554: try {
555: lnr = new LineNumberReader(new FileReader(outFile));
556: } catch (FileNotFoundException fne) {
557: System.err.println(outFile.getName() + "is missing");
558: return;
559: }
560:
561: //<data>/home/changjs/vdldemo/bin/align_warp dsadsa dsadsa dasdsa -m 12 -q
562: //do_align_warp.c: 157: problem with file dsadsa
563: //The specified file does not exist. (AIR_NO_FILE_READ_ERROR)
564: //</data>
565:
566: ParseKickstart pks = new ParseKickstart();
567:
568: String filename = outFile.getPath();
569: try {
570: ((JobInfo) ((JobInfos) m_jobRecord.get(jobid))
571: .getJobInfo(currentRetry)).setOutfileExit(pks
572: .parseFile(filename));
573: } catch (FriendlyNudge fn) {
574: fn.toString();
575: } catch (ClassCastException cce) {
576: System.out.println("class cast exception");
577: cce.printStackTrace();
578: }
579:
580: /*
581: Pattern data = Pattern.
582: compile("<data>(.*)");
583: //<data>/home/changjs/vdldemo/bin/align_warp dsadsa dsadsa dasdsa -m 12 -q
584: //do_align_warp.c: 157: problem with file dsadsa
585: //The specified file does not exist. (AIR_NO_FILE_READ_ERROR)
586: //</data>
587:
588: Pattern endData=Pattern.compile(".*</data>.*");
589:
590: while ( (line = lnr.readLine()) != null ) {
591: //String lower = line.toLowerCase().trim();
592: Matcher dataM = data.matcher(line.trim());
593: Matcher endM=endData.matcher(line.trim());
594: if ( dataM.matches() ) {
595: String dataS=dataM.group(1);
596: //System.out.println(dataS);
597: do{
598: //System.out.println(line);
599: ((JobInfo)((JobInfos) m_jobRecord.get(jobid)).getJobInfo(currentRetry))
600: .setOutfileExit(errorS);
601:
602: line = lnr.readLine();
603: line=line.trim();
604: }while ( !endM.matches()) ;
605: }
606: }*/
607: } catch (Exception e) {
608: e.printStackTrace();
609: }
610: }
611:
612: /*
613: public void getOutFileInfo()
614: {
615: for ( Iterator i=m_job.keySet().iterator(); i.hasNext(); ) {
616: String jobid = (String) i.next();
617: String subfn = (String) m_job.get(jobid);
618: if ( ! m_done.contains(jobid)&&
619: ((JobInfo)m_jobRecord.get(jobid)).getLastStage()!=null) {
620:
621: File subDir=(new File(m_dagfile.getParent()+"/"+subfn)).getParentFile();
622: File[] outFiles=subDir.listFiles(new FindTheRegex(jobid+"\\.out.*"));
623: //System.out.println("the sub "+ subDir.getPath());
624: if(outFiles==null)
625: System.out.println("dsadsa");
626: System.out.println("out files:"+outFiles.length);
627: for(int j=0;j<outFiles.length;++j){
628: System.out.println( outFiles[j].getName());
629: ParseOut(outFiles[j],jobid);
630: }
631: }
632: }
633: }
634: */
635: private boolean allParentsDone(String cid) {
636: TreeSet parents = new TreeSet();
637: parents = (TreeSet) m_children.get(cid);
638: if (parents == null)
639: return true;
640: for (Iterator j = parents.iterator(); j.hasNext();) {
641: if (!m_done.contains((String) j.next())) {
642: return false;
643: }
644: }
645:
646: return true;
647: }
648:
649: /*
650: public void getErrorFileInfo()
651: {
652: for ( Iterator i=m_job.keySet().iterator(); i.hasNext(); ) {
653: String jobid = (String) i.next();
654: String subfn = (String) m_job.get(jobid);
655: if ( !m_done.contains(jobid) &&
656: ((JobInfo)m_jobRecord.get(jobid)).getLastStage()!=null) {
657: //String dbgfile=subfn.replaceAll(".sub","");
658: File subDir=(new File(m_dagfile.getParent()+"/"+subfn)).getParentFile();
659: File[] errFiles=subDir.listFiles(new FindTheRegex(jobid+"\\.err.*"));
660: System.out.println("the sub "+ subDir.getPath());
661: if(errFiles==null)
662: System.out.println("dsadsa");
663: System.out.println("err files:"+errFiles.length);
664: for(int j=0;j<errFiles.length;++j){
665: System.out.println( errFiles[j].getName());
666: ParseError(errFiles[j],jobid);
667: }
668:
669: }
670: }
671: }
672:
673: private String getSignal(){
674: return m_signal;
675: }
676: */
677: public static void main(String args[]) {
678: Diagnozer me = null;
679: int result = 0;
680:
681: if (args.length != 1) {
682: System.err.println("Need the base directory");
683: System.exit(1);
684: }
685:
686: try {
687: me = new Diagnozer(args[0]);
688: me.parseDagmanOut();
689: me.getDebugInfo();
690: // me.getErrorFileInfo();
691: // me.getOutFileInfo();
692: me.dumpJobRecords(new PrintWriter(System.out));
693: //me.dump();
694: } catch (IOException ioe) {
695: System.err.println("ERROR: " + ioe.getMessage());
696: result = 1;
697: } catch (RuntimeException rte) {
698: System.err.println("RTE: " + rte.getMessage());
699: rte.printStackTrace(System.err);
700: result = 1;
701: } catch (Exception e) {
702: System.err.println("FATAL: " + e.getMessage());
703: e.printStackTrace(System.err);
704: result = 2;
705: }
706:
707: if (result != 0)
708: System.exit(result);
709: }
710:
711: }
|