//$Header: /deegreerepository/deegree/src/org/deegree/io/datastore/Datastore.java,v 1.28 2007/01/16 13:58:34 mschneider Exp $
/*----------------    FILE HEADER  ------------------------------------------

 This file is part of deegree.
 Copyright (C) 2001-2008 by:
 EXSE, Department of Geography, University of Bonn
 http://www.giub.uni-bonn.de/deegree/
 lat/lon GmbH
 http://www.lat-lon.de

 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
 License as published by the Free Software Foundation; either
 version 2.1 of the License, or (at your option) any later version.

 This library is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 Lesser General Public License for more details.

 You should have received a copy of the GNU Lesser General Public
 License along with this library; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

 Contact:

 Andreas Poth
 lat/lon GmbH
 Aennchenstraße 19
 53177 Bonn
 Germany
 E-Mail: poth@lat-lon.de

 Prof. Dr. Klaus Greve
 Department of Geography
 University of Bonn
 Meckenheimer Allee 166
 53115 Bonn
 Germany
 E-Mail: greve@giub.uni-bonn.de

 ---------------------------------------------------------------------------*/
package org.deegree.io.datastore;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;

import org.deegree.datatypes.QualifiedName;
import org.deegree.framework.log.ILogger;
import org.deegree.framework.log.LoggerFactory;
import org.deegree.framework.trigger.TriggerProvider;
import org.deegree.i18n.Messages;
import org.deegree.io.datastore.schema.MappedFeatureType;
import org.deegree.io.datastore.schema.MappedGMLSchema;
import org.deegree.io.datastore.schema.content.MappingGeometryField;
import org.deegree.model.crs.CoordinateSystem;
import org.deegree.model.crs.UnknownCRSException;
import org.deegree.model.feature.Feature;
import org.deegree.model.feature.FeatureCollection;
import org.deegree.model.filterencoding.Filter;
import org.deegree.ogcwebservices.wfs.WFService;
import org.deegree.ogcwebservices.wfs.operation.Lock;
import org.deegree.ogcwebservices.wfs.operation.LockFeature;
import org.deegree.ogcwebservices.wfs.operation.Query;

/**
 * Abstract base class that every datastore implementation must extend.
 * <p>
 * A datastore encapsulates the access to a backend, e.g. a database or a file. The accessible
 * objects are {@link Feature} instances. Primarily, datastores are used as persistence layer by
 * the {@link WFService} class.
 *
 * @author <a href="mailto:poth@lat-lon.de">Andreas Poth</a>
 * @author <a href="mailto:tfr@users.sourceforge.net">Torsten Friebe</a>
 * @author <a href="mailto:schneider@lat-lon.de">Markus Schneider</a>
 * @author last edited by: $Author: apoth $
 *
 * @version $Revision: 9342 $, $Date: 2007-12-27 04:32:57 -0800 (Thu, 27 Dec 2007) $
 */
public abstract class Datastore {

    private static final TriggerProvider TP = TriggerProvider.create(Datastore.class);

    protected static final ILogger LOG = LoggerFactory.getLogger(Datastore.class);

    private Collection<MappedGMLSchema> schemas = new ArrayList<MappedGMLSchema>(10);

    private DatastoreConfiguration config;

    /**
     * Returns the datastore specific annotation parser.
     *
     * @return the datastore specific annotation parser
     */
    public abstract AnnotationDocument getAnnotationParser();

    /**
     * Configures the datastore with the supplied configuration.
     *
     * @param config
     *            configuration
     * @throws DatastoreException
     */
    @SuppressWarnings("unused")
    public void configure(DatastoreConfiguration config) throws DatastoreException {
        this.config = config;
    }

    /**
     * Returns the configuration parameters of the datastore.
     *
     * @return the configuration parameters of the datastore
     */
    public DatastoreConfiguration getConfiguration() {
        return this.config;
    }

    /**
     * Adds the given GML application schema to the set of schemas that are handled by this
     * datastore instance.
     * <p>
     * Note that this method may be called several times, once for every GML schema that uses this
     * datastore instance.
     *
     * @param schema
     *            GML application schema to bind
     * @throws DatastoreException
     */
    @SuppressWarnings("unused")
    public void bindSchema(MappedGMLSchema schema) throws DatastoreException {
        this.schemas.add(schema);
    }
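
    /*
     * Illustrative initialization sketch (comment only, not part of the class contract): the WFS
     * configuration layer typically calls configure() once and bindSchema() repeatedly, once per
     * mapped GML application schema that refers to this datastore. The variable names below
     * ("datastore", "config", "schema") are placeholders, not deegree API.
     *
     *   datastore.configure(config);        // DatastoreConfiguration built by the WFS setup
     *   datastore.bindSchema(schema);       // repeated for every MappedGMLSchema
     *   MappedGMLSchema[] bound = datastore.getSchemas();
     */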

    /**
     * Returns the GML application schemas that are handled by this datastore.
     *
     * @return the GML application schemas that are handled by this datastore
     */
    public MappedGMLSchema[] getSchemas() {
        return this.schemas.toArray(new MappedGMLSchema[this.schemas.size()]);
    }

    /**
     * Returns the feature type with the given name.
     *
     * @param ftName
     *            name of the feature type
     * @return the feature type with the given name, or null if the <code>Datastore</code> does
     *         not have this feature type
     */
    public MappedFeatureType getFeatureType(QualifiedName ftName) {
        MappedFeatureType ft = null;
        MappedGMLSchema[] schemas = getSchemas();
        for (int i = 0; i < schemas.length; i++) {
            ft = schemas[i].getFeatureType(ftName);
            if (ft != null) {
                break;
            }
        }
        return ft;
    }

    /**
     * Closes the datastore so it can free dependent resources.
     *
     * @throws DatastoreException
     */
    public abstract void close() throws DatastoreException;

    /**
     * Performs a query against the datastore.
     *
     * @param query
     *            query to be performed
     * @param rootFts
     *            the root feature types that are queried; more than one type means that the types
     *            are joined
     * @return requested feature instances
     * @throws DatastoreException
     * @throws UnknownCRSException
     */
    public abstract FeatureCollection performQuery(final Query query, final MappedFeatureType[] rootFts)
            throws DatastoreException, UnknownCRSException;

    /**
     * Performs a query against the datastore (in the given transaction context).
     *
     * @param query
     *            query to be performed
     * @param rootFts
     *            the root feature types that are queried; more than one type means that the types
     *            are joined
     * @param context
     *            transaction context (used to specify the JDBCConnection, for example)
     * @return requested feature instances
     * @throws DatastoreException
     * @throws UnknownCRSException
     */
    public abstract FeatureCollection performQuery(final Query query, final MappedFeatureType[] rootFts,
                                                   final DatastoreTransaction context)
            throws DatastoreException, UnknownCRSException;
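
    /*
     * Illustrative query sketch (comment only): a caller resolves the root feature types via
     * getFeatureType() and hands them to performQuery() together with the Query. How the Query
     * instance itself is built is outside the scope of this class; 'query' and 'ftName' are
     * placeholders, not deegree API.
     *
     *   MappedFeatureType ft = datastore.getFeatureType(ftName);
     *   FeatureCollection fc = datastore.performQuery(query, new MappedFeatureType[] { ft });
     *
     * Within a transaction, the variant that takes a DatastoreTransaction context is used
     * instead, e.g. so that the query runs on the transaction's JDBC connection.
     */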

    /**
     * Determines the ids of all features to be locked by the given parts of a {@link LockFeature}
     * request; this includes all descendant and super features of the targeted features as well.
     *
     * @param requestParts
     *            the parts of a <code>LockFeature</code> request that this <code>Datastore</code>
     *            is responsible for
     * @return the ids of all features that have to be locked
     * @throws DatastoreException
     */
    public Set<FeatureId> determineFidsToLock(@SuppressWarnings("unused") List<Lock> requestParts)
            throws DatastoreException {
        throw new DatastoreException(Messages.getMessage("DATASTORE_METHOD_UNSUPPORTED",
                                                         this.getClass().getName(),
                                                         "#determineFidsToLock( List<Lock> )"));
    }

    /**
     * Acquires transactional access to the datastore instance. Only one active transaction per
     * datastore is allowed.
     *
     * @return transaction object that allows transaction operations to be performed on the
     *         datastore
     * @throws DatastoreException
     */
    public DatastoreTransaction acquireTransaction() throws DatastoreException {
        throw new DatastoreException(Messages.getMessage("DATASTORE_METHOD_UNSUPPORTED",
                                                         this.getClass().getName(),
                                                         "#acquireTransaction()"));
    }

    /**
     * Returns the transaction to the datastore. This makes the transaction available to other
     * clients again (via {@link #acquireTransaction()}) and frees underlying resources (such as
     * JDBCConnection instances).
     * <p>
     * The transaction must have been terminated before, i.e. {@link DatastoreTransaction#commit()}
     * or {@link DatastoreTransaction#rollback()} must already have been called.
     *
     * @param ta
     *            the DatastoreTransaction to be returned
     * @throws DatastoreException
     */
    public void releaseTransaction(@SuppressWarnings("unused") DatastoreTransaction ta)
            throws DatastoreException {
        throw new DatastoreException(Messages.getMessage("DATASTORE_METHOD_UNSUPPORTED",
                                                         this.getClass().getName(),
                                                         "#releaseTransaction()"));
    }
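
    /*
     * Illustrative transaction lifecycle (comment only), following the javadoc above: the single
     * active transaction is obtained via acquireTransaction(), terminated via commit() or
     * rollback(), and must then be handed back via releaseTransaction(). 'datastore' stands for
     * a concrete subclass instance that supports transactions.
     *
     *   DatastoreTransaction ta = datastore.acquireTransaction();
     *   try {
     *       // ... perform insert/update/delete operations using ta ...
     *       ta.commit();
     *   } catch (DatastoreException e) {
     *       ta.rollback();
     *       throw e;
     *   } finally {
     *       datastore.releaseTransaction(ta);
     *   }
     */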

    /**
     * Transforms the incoming {@link Query} so that the {@link CoordinateSystem} of all spatial
     * arguments (BBOX, etc.) in the {@link Filter} matches the SRS of the targeted
     * {@link MappingGeometryField}s.
     * <p>
     * NOTE: If this transformation can be performed by the backend (e.g. by Oracle Spatial), this
     * method should be overridden to return the original input {@link Query}.
     *
     * @param query
     *            query to be transformed
     * @return query with spatial arguments transformed to target SRS
     */
    protected Query transformQuery(Query query) {
        LOG.logDebug("Transforming query.");
        Object[] result = TP.doPreTrigger(this, query);
        query = (Query) result[0];
        return query;
    }

    /**
     * Transforms the {@link FeatureCollection} so that the geometries of all contained geometry
     * properties use the requested SRS.
     *
     * @param fc
     *            feature collection to be transformed
     * @param targetSRS
     *            requested SRS
     * @return transformed FeatureCollection
     */
    protected FeatureCollection transformResult(FeatureCollection fc, String targetSRS) {
        LOG.logDebug("Transforming result to SRS '" + targetSRS + "'.");
        Object[] result = TP.doPostTrigger(this, fc, targetSRS);
        fc = (FeatureCollection) result[0];
        return fc;
    }
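
    /*
     * Illustrative sketch (comment only) of how a concrete subclass may wire these hooks into its
     * performQuery() implementation; the actual call sites depend on the subclass. 'targetSRS'
     * stands for the SRS requested by the query, and how it is extracted from the Query is not
     * shown here.
     *
     *   Query transformed = transformQuery(query);   // adjust spatial filter arguments to the backend SRS
     *   FeatureCollection fc = ...;                  // fetch the matching features from the backend
     *   fc = transformResult(fc, targetSRS);         // bring geometries into the requested SRS
     */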

    /**
     * Returns whether the datastore is capable of performing a native coordinate transformation
     * (using an SQL function call, for example) into the given SRS.
     * <p>
     * <code>Datastore</code> implementations capable of performing native coordinate
     * transformations must override this method.
     *
     * @param targetSRS
     *            target spatial reference system (usually "EPSG:XYZ")
     * @return true, if the datastore can perform the coordinate transformation, false otherwise
     */
    protected boolean canTransformTo(@SuppressWarnings("unused") String targetSRS) {
        return false;
    }
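
    /*
     * Illustrative override sketch (comment only) for a backend with native reprojection support,
     * e.g. an Oracle Spatial based datastore; whether checking for an "EPSG:" prefix is sufficient
     * depends entirely on the backend, so this is an assumption, not deegree behaviour.
     *
     *   @Override
     *   protected boolean canTransformTo(String targetSRS) {
     *       // the backend can reproject any EPSG-coded SRS itself
     *       return targetSRS != null && targetSRS.startsWith("EPSG:");
     *   }
     */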
}