package com.canoo.ant.table;

import com.canoo.ant.filter.AllEqualsFilter;
import com.canoo.ant.filter.AllFilter;
import com.canoo.ant.filter.ITableFilter;
import org.apache.log4j.Logger;

import java.io.File;
import java.io.IOException;
import java.util.*;

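/**
 * Base class for property tables: each table is read from a container (file) as a list of
 * {@link java.util.Properties} rows, can be expanded via JOIN metadata and filtered through
 * an {@link ITableFilter}. Concrete subclasses only have to implement {@link #read(String)}.
 * <p>
 * Minimal usage sketch (MyPropertyTable stands for any concrete subclass; the file and
 * table names below are made-up examples):
 * <pre>
 *   APropertyTable table = new MyPropertyTable();
 *   table.setContainer(new File("testdata.xls"));
 *   table.setTable("login");
 *   List rows = table.getPropertiesList("someUser", "login"); // rows of java.util.Properties
 * </pre>
 */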
public abstract class APropertyTable implements IPropertyTable {

    private static final Logger LOG = Logger.getLogger(APropertyTable.class);
    private static final int MAX_DEPTH = 10; // max recursion depth
    // per-thread recursion depth; initialValue guarantees a starting value of 0
    // for every thread, not only for the thread that loaded this class
    private static final ThreadLocal DEPTH = new ThreadLocal() {
        protected Object initialValue() {
            return new Integer(0);
        }
    };

    private File fContainer;
    private String fTable;
    private String fPrefix;
    private ITableFilter fFilter;
    private List fRawTable;
    private List fMetaTable;
    protected static final String EMPTY = "";
    protected static final String KEY_JOIN = "JOIN";

    protected APropertyTable() {
        fFilter = new AllFilter();
    }

    private static void setDepth(int depth) {
        DEPTH.set(new Integer(depth));
    }

    private static int getDepth() {
        return ((Integer) DEPTH.get()).intValue();
    }

    /**
     * @return Map of column name to its expander (values of type {@link IPropertyTable})
     */
    public Map getColumnInfo() {
        List meta = getMetaTable();
        Map result = new HashMap(meta.size()); // smaller is likely
        // find all properties for this table
        List tableSpecificColumnInfo =
                new AllEqualsFilter(TableFactory.KEY_TABLE).filter(meta, getTable());
        for (Iterator eachColumnInfo = tableSpecificColumnInfo.iterator(); eachColumnInfo.hasNext();) {
            Properties colInfo = (Properties) eachColumnInfo.next();

            try {
                // tableClass defaults to the current class
                IPropertyTable table = TableFactory.createTable(colInfo, getClass().getName());
                ITableFilter filter = TableFactory.createFilter(colInfo);
                final File container;
                if (colInfo.getProperty(TableFactory.KEY_CONTAINER) != null) {
                    container = new File(getContainer().getParentFile(),
                            colInfo.getProperty(TableFactory.KEY_CONTAINER));
                    colInfo.remove(TableFactory.KEY_CONTAINER); // to be sure that it does not get used with a wrong path
                } else {
                    container = getContainer();
                }

                String key = colInfo.getProperty(TableFactory.KEY_NAME); // no default possible
                TableFactory.initOrDefault(table, filter, colInfo, container, key);
                result.put(key, table);
            } catch (Exception e) {
                LOG.error("cannot work with property: " + colInfo, e);
            }
        }
        return result;
    }

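    /**
     * Returns the (possibly expanded) rows of this table as a list of {@link Properties}.
     * The raw table is filtered first, then every column that has column info (JOIN metadata)
     * is expanded, the result is filtered a second time so that filters can also match
     * expanded values, and finally all keys are prefixed with <code>prefix + "."</code>
     * if a non-empty prefix is given. Expansion recursion is bounded by MAX_DEPTH.
     *
     * @param filterValue value handed to the configured {@link ITableFilter}
     * @param prefix      optional key prefix; ignored if null or empty
     */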
    public List getPropertiesList(final String filterValue, final String prefix) {
        // start with a copy of the initial table
        // if the current filter concerns extension keys, filter before extending
        // filtering in advance also lowers memory consumption on average
        List result = getFilter().filter(getRawTable(), filterValue);
        if (getDepth() > MAX_DEPTH) {
            LOG.error("expansion stopped due to excessive recursion depth: " + getDepth());
            return result;
        }
        setDepth(getDepth() + 1);

        final Map colInfo = getColumnInfo();
        // only go over entries in the colInfo
        // (property names without column info are not expanded)
        for (Iterator eachExpandable = colInfo.keySet().iterator(); eachExpandable.hasNext();) {
            String expansionName = (String) eachExpandable.next();
            expandName(result, expansionName, colInfo);
        }

        setDepth(getDepth() - 1);

        // filter a second time to allow filters to work on expansions
        result = getFilter().filter(result, filterValue);
        // the prefix is applied after filtering
        if (prefix != null && prefix.length() > 0) {
            result = mapPrefix(result, prefix);
        }

        return result;
    }

    // maps each Properties entry to a copy with prefixed keys (like a ruby map!)
    private List mapPrefix(List result, final String prefix) {
        List collect = new ArrayList(result.size());
        for (Iterator eachProps = result.iterator(); eachProps.hasNext();) {
            Properties props = (Properties) eachProps.next();
            Properties mapped = new Properties();
            for (Iterator eachKey = props.keySet().iterator(); eachKey.hasNext();) {
                String key = (String) eachKey.next();
                String value = props.getProperty(key);
                mapped.setProperty(prefix + "." + key, value);
            }
            collect.add(mapped);
        }
        return collect;
    }

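    /**
     * Expands every entry of <code>result</code> via the table registered for
     * <code>expansionName</code>: entries that yield expansions are replaced by them,
     * entries without expansions are kept unchanged (OUTER-join-like behaviour).
     */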
    protected void expandName(List result, String expansionName, Map colInfo) {
        List expansions = new LinkedList(); // cannot add while iterating; store and add later
        for (Iterator eachProperties = result.iterator(); eachProperties.hasNext();) {
            Properties props = (Properties) eachProperties.next();
            List newExpansions = expandProps(props, expansionName, colInfo);
            // default behaviour: like OUTER join, we do not shrink if nothing found
            if (newExpansions.size() > 0) {
                eachProperties.remove();
                expansions.addAll(newExpansions);
            }
        }
        result.addAll(expansions);
    }

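    /**
     * Asks the expansion table of <code>expansionName</code> for all rows matching the
     * current value of that property (recursive call) and merges each returned row with
     * <code>props</code>; values already present in <code>props</code> take precedence.
     *
     * @return the merged rows, possibly empty
     */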
    protected List expandProps(Properties props, String expansionName, Map colInfo) {
        String value = props.getProperty(expansionName);
        List propExpansions = new LinkedList();
        IPropertyTable expansionTable = (IPropertyTable) colInfo.get(expansionName);
        // recursive call
        List expandWith = expansionTable.getPropertiesList(value, expansionTable.getPrefix());
        for (Iterator eachExpansion = expandWith.iterator(); eachExpansion.hasNext();) {
            Properties expandProps = (Properties) eachExpansion.next();
            // merge expansion with current line
            expandProps.putAll(props);
            // store for later adding
            propExpansions.add(expandProps);
        }
        return propExpansions;
    }

    //-------------- field accessors ------------------

    public File getContainer() {
        return fContainer;
    }

    public void setContainer(File container) {
        fContainer = container;
    }

    public String getTable() {
        return fTable;
    }

    public void setTable(String table) {
        fTable = table;
    }

    public ITableFilter getFilter() {
        return fFilter;
    }

    public void setFilter(ITableFilter filter) {
        fFilter = filter;
    }

    public String getPrefix() {
        return fPrefix;
    }

    public void setPrefix(String prefix) {
        fPrefix = prefix;
    }

    //-------------- how to read specifics ------------------

    /** lazy getter, cached */
    public List getRawTable() {
        fRawTable = getCachedTable(getTable(), fRawTable);
        return fRawTable;
    }

    /** lazy getter, cached */
    public List getMetaTable() {
        if (hasJoinTable()) {
            fMetaTable = getCachedTable(KEY_JOIN, fMetaTable);
        } else {
            fMetaTable = Collections.EMPTY_LIST;
        }
        return fMetaTable;
    }

    /**
     * Indicates if the table container has a JOIN table.
     * @return default is <code>true</code>
     */
    protected boolean hasJoinTable() {
        return true;
    }

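    /**
     * Returns <code>tableCache</code> if it is already filled; otherwise reads the given table
     * via {@link #read(String)}. Read failures are logged and rethrown as a
     * {@link RuntimeException} that names the container and table.
     */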
    protected List getCachedTable(final String table, List tableCache) {
        if (tableCache != null) {
            return tableCache;
        }

        try {
            tableCache = read(table);
        } catch (final IOException e) {
            LOG.error("Cannot read " + getContainer() + " " + table, e);
            String message = "Cannot read container >" + getContainer() + "<";
            if (table != null) {
                message += " (table " + table + ")";
            }
            message += ": " + e.getMessage();
            throw new RuntimeException(message, e);
        }

        if (tableCache.isEmpty()) {
            LOG.debug("no entry in " + getContainer() + "/" + table);
        }
        LOG.debug(tableCache.size() + " entries in " + getContainer() + " " + table);
        return tableCache;
    }

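    /**
     * Reads all rows of the given table from the container as a list of {@link Properties},
     * one entry per row. How the container is parsed is left to the concrete subclass
     * (for example a spreadsheet- or CSV-backed implementation).
     */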
    protected abstract List read(String table) throws IOException;
}