001: package org.antlr.tool;
002:
003: import org.antlr.Tool;
004: import org.antlr.misc.Utils;
005: import org.antlr.codegen.CodeGenerator;
006: import org.antlr.stringtemplate.StringTemplate;
007: import org.antlr.stringtemplate.StringTemplateGroup;
008: import org.antlr.stringtemplate.language.AngleBracketTemplateLexer;
009:
010: import java.util.List;
011: import java.util.ArrayList;
012: import java.io.*;
013:
014: /** Given a grammar file, show the dependencies on .tokens etc...
015: * Using ST, emit a simple "make compatible" list of dependencies.
016: * For example, combined grammar T.g (no token import) generates:
017: *
 * TParser.java : T.g
 * T.tokens : T.g
 * T__.g : T.g
021: *
022: * For tree grammar TP with import of T.tokens:
023: *
024: * TP.g : T.tokens
025: * TP.java : TP.g
026: *
027: * If "-lib libdir" is used on command-line with -depend, then include the
028: * path like
029: *
030: * TP.g : libdir/T.tokens
031: *
032: * Pay attention to -o as well:
033: *
034: * outputdir/TParser.java : T.g
035: *
036: * So this output shows what the grammar depends on *and* what it generates.
037: *
038: * Operate on one grammar file at a time. If given a list of .g on the
039: * command-line with -depend, just emit the dependencies. The grammars
040: * may depend on each other, but the order doesn't matter. Build tools,
041: * reading in this output, will know how to organize it.
042: *
043: * This is a wee bit slow probably because the code generator has to load
044: * all of its template files in order to figure out the file extension
045: * for the generated recognizer.
046: *
047: * This code was obvious until I removed redundant "./" on front of files
048: * and had to escape spaces in filenames :(
049: */
050: public class BuildDependencyGenerator {
051: protected String grammarFileName;
052: protected Tool tool;
053: protected Grammar grammar;
054: protected CodeGenerator generator;
055: protected StringTemplateGroup templates;
056:
057: public BuildDependencyGenerator(Tool tool, String grammarFileName)
058: throws IOException, antlr.TokenStreamException,
059: antlr.RecognitionException {
060: this .tool = tool;
061: this .grammarFileName = grammarFileName;
062: grammar = tool.getGrammar(grammarFileName);
063: String language = (String) grammar.getOption("language");
064: generator = new CodeGenerator(tool, grammar, language);
065: generator.loadTemplates(language);
066: }
067:
068: /** From T.g return a list of File objects that
069: * names files ANTLR will emit from T.g.
070: */
071: public List getGeneratedFileList() {
072: List files = new ArrayList();
073: File outputDir = tool.getOutputDirectory(grammarFileName);
074: if (outputDir.getName().equals(".")) {
075: outputDir = null;
076: } else if (outputDir.getName().indexOf(' ') >= 0) { // has spaces?
077: String escSpaces = Utils.replace(outputDir.toString(), " ",
078: "\\ ");
079: outputDir = new File(escSpaces);
080: }
081: // add generated recognizer; e.g., TParser.java
082: String recognizer = generator.getRecognizerFileName(
083: grammar.name, grammar.type);
084: files.add(new File(outputDir, recognizer));
085: // add output vocab file; e.g., T.tokens
086: files.add(new File(outputDir, generator.getVocabFileName()));
087: // are we generating a .h file?
088: StringTemplate headerExtST = null;
089: if (generator.getTemplates().isDefined("headerFile")) {
090: headerExtST = generator.getTemplates().getInstanceOf(
091: "headerFileExtension");
092: files.add(new File(outputDir, headerExtST.toString()));
093: }
094: if (grammar.type == Grammar.COMBINED) {
095: // add autogenerated lexer; e.g., TLexer.java TLexer.h TLexer.tokens
096: // don't add T__.g (just a temp file)
097: String lexer = generator.getRecognizerFileName(
098: grammar.name, Grammar.LEXER);
099: files.add(new File(outputDir, lexer));
100: // TLexer.h
101: String suffix = Grammar.grammarTypeToFileNameSuffix[Grammar.LEXER];
102: if (headerExtST != null) {
103: String header = grammar.name + suffix
104: + headerExtST.toString();
105: files.add(new File(outputDir, header));
106: }
107: // for combined, don't generate TLexer.tokens
108: }
109:
110: if (files.size() == 0) {
111: return null;
112: }
113: return files;
114: }
115:
116: /** Return a list of File objects that name files ANTLR will read
117: * to process T.g; for now, this can only be .tokens files and only
118: * if they use the tokenVocab option.
119: */
120: public List getDependenciesFileList() {
121: List files = new ArrayList();
122: String vocabName = (String) grammar.getOption("tokenVocab");
123: if (vocabName == null) {
124: return null;
125: }
126: File vocabFile = grammar.getImportedVocabFileName(vocabName);
127: File outputDir = vocabFile.getParentFile();
128: if (outputDir.getName().equals(".")) {
129: files.add(vocabFile.getName());
130: } else if (outputDir.getName().indexOf(' ') >= 0) { // has spaces?
131: String escSpaces = Utils.replace(outputDir.toString(), " ",
132: "\\ ");
133: outputDir = new File(escSpaces);
134: files.add(new File(outputDir, vocabFile.getName()));
135: } else {
136: files.add(vocabFile);
137: }
138:
139: if (files.size() == 0) {
140: return null;
141: }
142: return files;
143: }
144:
145: public StringTemplate getDependencies() {
146: loadDependencyTemplates();
147: StringTemplate dependenciesST = templates
148: .getInstanceOf("dependencies");
149: dependenciesST.setAttribute("in", getDependenciesFileList());
150: dependenciesST.setAttribute("out", getGeneratedFileList());
151: dependenciesST
152: .setAttribute("grammarFileName", grammar.fileName);
153: return dependenciesST;
154: }
155:
156: public void loadDependencyTemplates() {
157: if (templates != null) {
158: return;
159: }
160: String fileName = "org/antlr/tool/templates/depend.stg";
161: ClassLoader cl = Thread.currentThread().getContextClassLoader();
162: InputStream is = cl.getResourceAsStream(fileName);
163: if (is == null) {
164: cl = ErrorManager.class.getClassLoader();
165: is = cl.getResourceAsStream(fileName);
166: }
167: if (is == null) {
168: ErrorManager
169: .internalError("Can't load dependency templates: "
170: + fileName);
171: return;
172: }
173: BufferedReader br = null;
174: try {
175: br = new BufferedReader(new InputStreamReader(is));
176: templates = new StringTemplateGroup(br,
177: AngleBracketTemplateLexer.class);
178: br.close();
179: } catch (IOException ioe) {
180: ErrorManager.internalError(
181: "error reading dependency templates file "
182: + fileName, ioe);
183: } finally {
184: if (br != null) {
185: try {
186: br.close();
187: } catch (IOException ioe) {
188: ErrorManager.internalError(
189: "cannot close dependency templates file "
190: + fileName, ioe);
191: }
192: }
193: }
194: }
195: }
|