0001: /*
0002: * Geotools2 - OpenSource mapping toolkit
0003: * http://geotools.org
0004: * (C) 2002-2006, Geotools Project Managment Committee (PMC)
0005: *
0006: * This library is free software; you can redistribute it and/or
0007: * modify it under the terms of the GNU Lesser General Public
0008: * License as published by the Free Software Foundation;
0009: * version 2.1 of the License.
0010: *
0011: * This library is distributed in the hope that it will be useful,
0012: * but WITHOUT ANY WARRANTY; without even the implied warranty of
0013: * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
0014: * Lesser General Public License for more details.
0015: *
0016: */
0017: package org.geotools.arcsde.data;
0018:
0019: import java.io.IOException;
0020: import java.util.ArrayList;
0021: import java.util.Arrays;
0022: import java.util.HashMap;
0023: import java.util.Iterator;
0024: import java.util.LinkedList;
0025: import java.util.List;
0026: import java.util.Map;
0027: import java.util.logging.Level;
0028: import java.util.logging.Logger;
0029:
0030: import net.sf.jsqlparser.statement.select.PlainSelect;
0031:
0032: import org.geotools.arcsde.data.view.QueryInfoParser;
0033: import org.geotools.arcsde.data.view.SelectQualifier;
0034: import org.geotools.arcsde.pool.ArcSDEConnectionPool;
0035: import org.geotools.arcsde.pool.ArcSDEPooledConnection;
0036: import org.geotools.arcsde.pool.UnavailableArcSDEConnectionException;
0037: import org.geotools.data.AbstractDataStore;
0038: import org.geotools.data.AttributeReader;
0039: import org.geotools.data.DataSourceException;
0040: import org.geotools.data.DefaultFeatureReader;
0041: import org.geotools.data.DefaultQuery;
0042: import org.geotools.data.FeatureReader;
0043: import org.geotools.data.FeatureWriter;
0044: import org.geotools.data.Query;
0045: import org.geotools.data.Transaction;
0046: import org.geotools.feature.AttributeType;
0047: import org.geotools.feature.Feature;
0048: import org.geotools.feature.FeatureType;
0049: import org.geotools.feature.GeometryAttributeType;
0050: import org.geotools.feature.IllegalAttributeException;
0051: import org.geotools.feature.SchemaException;
0052: import org.opengis.filter.Filter;
0053: import org.opengis.referencing.crs.CoordinateReferenceSystem;
0054:
0055: import com.esri.sde.sdk.client.SeColumnDefinition;
0056: import com.esri.sde.sdk.client.SeCoordinateReference;
0057: import com.esri.sde.sdk.client.SeException;
0058: import com.esri.sde.sdk.client.SeExtent;
0059: import com.esri.sde.sdk.client.SeLayer;
0060: import com.esri.sde.sdk.client.SeQueryInfo;
0061: import com.esri.sde.sdk.client.SeRegistration;
0062: import com.esri.sde.sdk.client.SeTable;
0063: import com.vividsolutions.jts.geom.Envelope;
0064:
0065: /**
0066: * Implements a DataStore to work upon an ArcSDE spatial database gateway.
0067: * String[] getTypeNames() FeatureType getSchema(String typeName) FeatureReader
0068: * getFeatureReader( typeName ) FeatureWriter getFeatureWriter( typeName )
0069: * Filter getUnsupportedFilter(String typeName, Filter filter) FeatureReader
0070: * getFeatureReader(String typeName, Query query)
0071: *
0072: * <p>
0073: * All remaining functionality is implemented against these methods, including
0074: * Transaction and Locking Support. These implementations will not be optimal
0075: * but they will work.
0076: * </p>
0077: *
0078: * @author Gabriel Roldan, Axios Engineering
0079: * @source $URL:
0080: * http://svn.geotools.org/geotools/trunk/gt/modules/unsupported/arcsde/datastore/src/main/java/org/geotools/arcsde/data/ArcSDEDataStore.java $
0081: * @version $Id: ArcSDEDataStore.java 27863 2007-11-12 20:34:34Z desruisseaux $
0082: */
0083: public class ArcSDEDataStore extends AbstractDataStore {
    /** Shared logger for ArcSDE datastore messages. */
    private static final Logger LOGGER = org.geotools.util.logging.Logging
            .getLogger(ArcSDEDataStore.class.getName());

    /** Namespace URI used when none is supplied to the constructor. */
    private static final String DEFAULT_NAMESPACE = "http://www.geotools.org/sde";

    /** Pool from which all ArcSDE connections used by this datastore are obtained. */
    private ArcSDEConnectionPool connectionPool;

    /** Namespace URI under which this datastore's feature types are published. */
    private String namespace;

    /** <code>Map<typeName/FeatureType></code> of feature type schemas */
    private Map schemasCache = new HashMap();

    /**
     * <code>Map<typeName/FeatureType></code> of inprocess views feature
     * type schemas registered through
     * {@link #registerView(String, PlainSelect)}
     */
    private Map viewSchemasCache = new HashMap();

    /**
     * <code>Map<typeName/PlainSelect></code> of the select statements backing
     * the in-process views registered through
     * {@link #registerView(String, PlainSelect)}
     */
    private Map viewSelectStatements = new HashMap();

    /**
     * <code>Map<typeName/SeQueryInfo></code> of inprocess views
     */
    private Map viewQueryInfos = new HashMap();
0111:
    /**
     * Creates a new ArcSDEDataStore working against the given connection pool,
     * publishing its feature types under the default namespace
     * ("http://www.geotools.org/sde").
     *
     * @param connectionPool
     *            pool providing connections to the ArcSDE instance. Not null.
     */
    public ArcSDEDataStore(ArcSDEConnectionPool connectionPool) {
        this(connectionPool, DEFAULT_NAMESPACE);
    }
0121:
0122: /**
0123: *
0124: * @param connectionPool
0125: * datastore's connection pool. Not null.
0126: * @param nsUri
0127: * datastore's namespace. May be null.
0128: */
0129: public ArcSDEDataStore(ArcSDEConnectionPool connectionPool,
0130: String nsUri) {
0131: super (true);
0132: if (connectionPool == null) {
0133: throw new NullPointerException("connectionPool");
0134: }
0135: if (nsUri == null) {
0136: throw new NullPointerException("namespace");
0137: }
0138: this .connectionPool = connectionPool;
0139: this .namespace = nsUri;
0140: }
0141:
0142: /**
0143: * DOCUMENT ME!
0144: *
0145: * @return DOCUMENT ME!
0146: */
0147: public ArcSDEConnectionPool getConnectionPool() {
0148: return this .connectionPool;
0149: }
0150:
0151: public String getNamespace() {
0152: return this .namespace;
0153: }
0154:
0155: /**
0156: * DOCUMENT ME!
0157: *
0158: * @return the list of full qualified feature class names on the ArcSDE
0159: * database this DataStore works on. An ArcSDE full qualified class
0160: * name is composed of three dot separated strings:
0161: * "DATABASE.USER.CLASSNAME", wich is usefull enough to use it as
0162: * namespace
0163: *
0164: * @throws RuntimeException
0165: * if an exception occurs while retrieving the list of
0166: * registeres feature classes on the backend, or while obtaining
0167: * the full qualified name of one of them
0168: */
0169: public String[] getTypeNames() throws IOException {
0170: List layerNames = new ArrayList(connectionPool
0171: .getAvailableLayerNames());
0172: layerNames.addAll(viewSchemasCache.keySet());
0173: return (String[]) layerNames.toArray(new String[layerNames
0174: .size()]);
0175: }
0176:
0177: /**
0178: * Obtains the schema for the given featuretype name.
0179: *
0180: * <p>
0181: * Just for convenience, if the type name is not full qualified, it will be
0182: * prepended by the "<DATABASE_NAME>.<USER_NAME>." string.
0183: * Anyway, it is strongly recommended that you use <b>only </b> full
0184: * qualified type names. The rational for this is that the actual ArcSDE
0185: * name of a featuretype is full qualified, and more than a single type can
0186: * exist with the same non qualified name, if they pertein to different
0187: * database users. So, if a non qualified name is passed, the user name
0188: * which will be prepended to it is the user used to create the connections
0189: * (i.e., the one you specified with the "user" parameter to create the
0190: * datastore.
0191: * </p>
0192: *
0193: * @param typeName
0194: * DOCUMENT ME!
0195: *
0196: * @return DOCUMENT ME!
0197: *
0198: * @throws java.io.IOException
0199: * DOCUMENT ME!
0200: * @throws NullPointerException
0201: * DOCUMENT ME!
0202: * @throws DataSourceException
0203: * DOCUMENT ME!
0204: */
0205: public synchronized FeatureType getSchema(String typeName)
0206: throws java.io.IOException {
0207: if (typeName == null) {
0208: throw new NullPointerException("typeName is null");
0209: }
0210:
0211: FeatureType schema = (FeatureType) viewSchemasCache
0212: .get(typeName);
0213:
0214: if (schema == null) {
0215: // connection used to retrieve the user name if a non qualified type
0216: // name was passed in
0217: ArcSDEPooledConnection conn = null;
0218:
0219: // check if it is not qualified and prepend it with "instance.user."
0220: if (typeName.indexOf('.') == -1) {
0221: try {
0222: conn = getConnectionPool().getConnection();
0223: LOGGER
0224: .warning("A non qualified type name was given, qualifying it...");
0225: if (conn.getDatabaseName() != null
0226: && conn.getDatabaseName().length() != 0) {
0227: typeName = conn.getDatabaseName() + "."
0228: + conn.getUser() + "." + typeName;
0229: } else {
0230: typeName = conn.getUser() + "." + typeName;
0231: }
0232: LOGGER.info("full qualified name is " + typeName);
0233: } catch (DataSourceException e) {
0234: throw e;
0235: } catch (UnavailableArcSDEConnectionException e) {
0236: throw new DataSourceException(
0237: "A non qualified type name ("
0238: + typeName
0239: + ") was passed and a connection to retrieve the user name "
0240: + " is not available.", e);
0241: } catch (SeException e) {
0242: throw new DataSourceException(
0243: "error obtaining the user name from a connection",
0244: e);
0245: } finally {
0246: conn.close();
0247: }
0248: }
0249:
0250: schema = (FeatureType) schemasCache.get(typeName);
0251:
0252: if (schema == null) {
0253: schema = ArcSDEAdapter.fetchSchema(getConnectionPool(),
0254: typeName, this .namespace);
0255: schemasCache.put(typeName, schema);
0256: }
0257: }
0258:
0259: return schema;
0260: }
0261:
    /**
     * Pass-through to the createSchema method with a config keyword. This
     * method calls createSchema(schema, null);
     *
     * @param schema
     *            the feature type to create in the ArcSDE database
     * @throws IOException
     *             propagated from createSchema(FeatureType, Map)
     * @throws IllegalArgumentException
     *             propagated from createSchema(FeatureType, Map)
     */
    public void createSchema(FeatureType schema) throws IOException,
            IllegalArgumentException {
        createSchema(schema, null);
    }
0271:
0272: /**
0273: * Creates the given featuretype in the underlying ArcSDE database.
0274: *
0275: * <p>
0276: * The common use case to create an ArcSDE layer is to setup the SeTable
0277: * object with all the non-geometry attributes first, then create the
0278: * SeLayer and set the geometry column name and its properties. This
0279: * approach brings a nice problem, since we need to create the attributes in
0280: * exactly the same order as specified in the passed FeatureType, which
0281: * means that the geometry attribute needs not to be the last one.
0282: * </p>
0283: *
0284: * <p>
0285: * To avoid this, the following workaround is performed: instead of creating
0286: * the schema as described above, we will first create the SeTable with a
0287: * single, temporary column, since it is not possible to create a table
0288: * without columns. The, we will iterate over the AttributeTypes and add
0289: * them as they appear using
0290: * <code>SeTable.addColumn(SeColumnDefinition)</code>. But if we found
0291: * that the current AttributeType is geometric, instead of adding the column
0292: * we just create the SeLayer object. This way, the geometric attribute is
0293: * inserted at the end, and then we keep iterating and adding the rest of
0294: * the columns. Finally, the first column is removed, since it was temporary
0295: * (note that I advertise it, it is a _workaround_).
0296: * </p>
0297: *
0298: * <p>
0299: * Sometimes some 'extra' information is required to correctly create the
0300: * underlying ArcSDE SeLayer. For instance, a specific configuration keyword
0301: * might be required to be used (instead of "DEFAULTS"), or a particular
0302: * column might need to be marked as the rowid column for the featuretype.
0303: *
0304: * A non-null <code>hints</code> parameter contains a mapping from a list
0305: * of well-known {@link java.lang.String} keys to values. The possible keys
0306: * are listed in the table below. keys with any other values are ignored.
0307: *
0308: * <table>
0309: * <tr>
0310: * <td>key name</td>
0311: * <td>key value type</td>
0312: * <td>default value (if applicable)</td>
0313: * </tr>
0314: *
0315: * <tr>
0316: * <td>configuration.keyword</td>
0317: * <td>{@link java.lang.String}</td>
0318: * <td>"DEFAULTS"</td>
0319: * </tr>
0320: *
0321: * <tr>
0322: * <td>rowid.column.type</td>
0323: * <td>{@link java.lang.String} - "NONE", "USER" and "SDE" are the only
0324: * valid values</td>
0325: * <td>"NONE"</td>
0326: * </tr>
0327: *
0328: * <tr>
0329: * <td>rowid.column.name</td>
0330: * <td>{@link java.lang.String}</td>
0331: * <td>null</td>
0332: * </tr>
0333: *
0334: * </p>
0335: *
0336: * @param featureType
0337: * the feature type containing the name, attributes and
0338: * coordinate reference system of the new ArcSDE layer.
0339: *
0340: * @param hints
0341: * A map containing extra ArcSDE-specific hints about how to
0342: * create the underlying ArcSDE SeLayer and SeTable objects from
0343: * this FeatureType.
0344: *
0345: * @throws IOException
0346: * see <code>throws DataSourceException</code> bellow
0347: * @throws IllegalArgumentException
0348: * if the passed feature type does not contains at least one
0349: * geometric attribute, or if the type name contains '.' (dots).
0350: * @throws NullPointerException
0351: * if <code>featureType</code> is <code>null</code>
0352: * @throws DataSourceException
0353: * if there is <b>not an available (free) connection </b> to the
0354: * ArcSDE instance(in that case maybe you need to increase the
0355: * maximun number of connections for the connection pool), or an
0356: * SeException exception is catched while creating the feature
0357: * type at the ArcSDE instance (e.g. a table with that name
0358: * already exists).
0359: */
0360: public void createSchema(FeatureType featureType, Map hints)
0361: throws IOException, IllegalArgumentException {
0362: if (featureType == null) {
0363: throw new NullPointerException(
0364: "You have to provide a FeatureType instance");
0365: }
0366: /*
0367: * if(!(featureType instanceof FeatureType)){ throw new
0368: * IllegalArgumentException("ArcSDE datastore supports only
0369: * SimpleFeatureType"); }
0370: */
0371:
0372: if (featureType.getDefaultGeometry() == null) {
0373: throw new IllegalArgumentException(
0374: "FeatureType must have at least a geometry attribute");
0375: }
0376:
0377: final String nonQualifiedTypeName = featureType.getTypeName();
0378:
0379: if (nonQualifiedTypeName.indexOf('.') != -1) {
0380: throw new IllegalArgumentException(
0381: "Please do not use type names that contains '.' (dots)");
0382: }
0383:
0384: // Create a new SeTable/SeLayer with the specified attributes....
0385: ArcSDEPooledConnection connection = null;
0386: SeTable table = null;
0387: SeLayer layer = null;
0388:
0389: // flag to know if the table was created by us when catching an
0390: // exception.
0391: boolean tableCreated = false;
0392:
0393: // table/layer creation hints information
0394: int rowIdType = SeRegistration.SE_REGISTRATION_ROW_ID_COLUMN_TYPE_NONE;
0395: String rowIdColumn = null;
0396: String configKeyword = "DEFAULTS";
0397: if (hints != null) {
0398: if (hints.get("configuration.keyword") instanceof String) {
0399: configKeyword = (String) hints
0400: .get("configuration.keyword");
0401: }
0402: if (hints.get("rowid.column.type") instanceof String) {
0403: String rowIdStr = (String) hints
0404: .get("rowid.column.type");
0405: if (rowIdStr.equalsIgnoreCase("NONE")) {
0406: rowIdType = SeRegistration.SE_REGISTRATION_ROW_ID_COLUMN_TYPE_NONE;
0407: } else if (rowIdStr.equalsIgnoreCase("USER")) {
0408: rowIdType = SeRegistration.SE_REGISTRATION_ROW_ID_COLUMN_TYPE_USER;
0409: } else if (rowIdStr.equalsIgnoreCase("SDE")) {
0410: rowIdType = SeRegistration.SE_REGISTRATION_ROW_ID_COLUMN_TYPE_SDE;
0411: } else {
0412: throw new DataSourceException(
0413: "createSchema hint 'rowid.column.type' must be one of 'NONE', 'USER' or 'SDE'");
0414: }
0415: }
0416: if (hints.get("rowid.column.name") instanceof String) {
0417: rowIdColumn = (String) hints.get("rowid.column.name");
0418: }
0419: }
0420:
0421: // placeholder to a catched exception to know in the finally block
0422: // if we should cleanup the crap we left in the database
0423: Exception error = null;
0424:
0425: try {
0426: connection = connectionPool.getConnection();
0427:
0428: // create a table with provided username
0429: String qualifiedName = null;
0430:
0431: if (nonQualifiedTypeName.indexOf('.') == -1) {
0432: qualifiedName = connection.getUser() + "."
0433: + featureType.getTypeName();
0434: LOGGER.finer("new full qualified type name: "
0435: + qualifiedName);
0436: } else {
0437: qualifiedName = nonQualifiedTypeName;
0438: LOGGER
0439: .finer("full qualified type name provided by user: "
0440: + qualifiedName);
0441: }
0442:
0443: layer = new SeLayer(connection);
0444: layer.setTableName(qualifiedName);
0445: layer.setCreationKeyword(configKeyword);
0446:
0447: final String HACK_COL_NAME = "gt_workaround_col_";
0448:
0449: table = createSeTable(connection, qualifiedName,
0450: HACK_COL_NAME, configKeyword);
0451: tableCreated = true;
0452:
0453: List atts = Arrays.asList(featureType.getAttributeTypes());
0454: AttributeType currAtt;
0455:
0456: for (Iterator it = atts.iterator(); it.hasNext();) {
0457: currAtt = (AttributeType) it.next();
0458:
0459: if (currAtt instanceof GeometryAttributeType) {
0460: GeometryAttributeType geometryAtt = (GeometryAttributeType) currAtt;
0461: createSeLayer(layer, qualifiedName, geometryAtt);
0462: } else {
0463: LOGGER.fine("Creating column definition for "
0464: + currAtt);
0465:
0466: SeColumnDefinition newCol = ArcSDEAdapter
0467: .createSeColumnDefinition(currAtt);
0468:
0469: // /////////////////////////////////////////////////////////////
0470: // HACK!!!!: this hack is just to avoid the error that
0471: // occurs //
0472: // when adding a column wich is not nillable. Need to fix
0473: // this//
0474: // but by now it conflicts with the requirement of creating
0475: // //
0476: // the schema with the correct attribute order. //
0477: // /////////////////////////////////////////////////////////////
0478: newCol = new SeColumnDefinition(newCol.getName(),
0479: newCol.getType(), newCol.getSize(), newCol
0480: .getScale(), true);
0481:
0482: // /////////////////////////////////////////////////////////////
0483: // END of horrible HACK //
0484: // /////////////////////////////////////////////////////////////
0485: LOGGER.fine("Adding column " + newCol.getName()
0486: + " to the actual table.");
0487: table.addColumn(newCol);
0488: }
0489: }
0490:
0491: LOGGER.fine("deleting the 'workaround' column...");
0492: table.dropColumn(HACK_COL_NAME);
0493:
0494: LOGGER.fine("setting up table registration with ArcSDE...");
0495: SeRegistration reg = new SeRegistration(connection, table
0496: .getName());
0497: if (rowIdColumn != null) {
0498: LOGGER.fine("setting rowIdColumnName to " + rowIdColumn
0499: + " in table " + reg.getTableName());
0500: reg.setRowIdColumnName(rowIdColumn);
0501: reg.setRowIdColumnType(rowIdType);
0502: reg.alter();
0503: reg = null;
0504: }
0505:
0506: LOGGER.fine("Schema correctly created: " + featureType);
0507:
0508: } catch (SeException e) {
0509: LOGGER.log(Level.WARNING, e.getSeError().getErrDesc(), e);
0510: throw new DataSourceException(e.getMessage(), e);
0511: } catch (DataSourceException dse) {
0512: LOGGER.log(Level.WARNING, dse.getMessage(), dse);
0513: throw dse;
0514: } finally {
0515: if ((error != null) && tableCreated) {
0516: // TODO: remove table if created and then failed
0517: }
0518: connection.close();
0519: }
0520: }
0521:
    /**
     * Creates, in the ArcSDE database, the SeTable for the given qualified
     * name containing a single temporary placeholder column (a table cannot be
     * created without columns; the real ones are added afterwards and the
     * placeholder is dropped — see the workaround described in
     * {@link #createSchema(FeatureType, Map)}).
     *
     * @param connection
     *            live connection used to create the table
     * @param qualifiedName
     *            full qualified name of the table to create
     * @param hackColName
     *            name of the temporary placeholder column
     * @param configKeyword
     *            DBMS configuration keyword (from the dbtune table) to create
     *            the table with
     * @return the newly created table
     * @throws SeException
     *             if the ArcSDE api fails creating the table
     */
    private SeTable createSeTable(ArcSDEPooledConnection connection,
            String qualifiedName, String hackColName,
            String configKeyword) throws SeException {
        SeTable table;
        final SeColumnDefinition[] tmpCol = { new SeColumnDefinition(
                hackColName, SeColumnDefinition.TYPE_STRING, 4, 0, true) };
        table = new SeTable(connection, qualifiedName);

        // NOTE(review): leftover development aid — this silently DELETES any
        // pre-existing table with the same name, as the warning below already
        // states. It must be removed before production use; removing it will
        // make table.create() fail when the table already exists instead of
        // clobbering it.
        try {
            LOGGER
                    .warning("Remove the line 'table.delete()' for production use!!!");
            table.delete();
        } catch (SeException e) {
            // intentionally ignored: the table may simply not exist yet
        }

        LOGGER.info("creating table " + qualifiedName);

        // create the table using DBMS default configuration keyword.
        // valid keywords are defined in the dbtune table.
        table.create(tmpCol, configKeyword);
        LOGGER.info("table " + qualifiedName + " created...");

        return table;
    }
0558:
    /**
     * Configures and creates the SeLayer (the spatial side of a table being
     * created by createSchema): spatial column name, allowed shape types, grid
     * sizes, coordinate reference and valid coordinate range.
     *
     * @param layer
     *            the SeLayer object, already bound to a connection and table
     *            name by the caller
     * @param qualifiedName
     *            full qualified name of the table, used in log messages
     * @param geometryAtt
     *            geometry attribute the spatial column is derived from
     *
     * @throws SeException
     *             if the ArcSDE api fails configuring or creating the layer
     */
    private void createSeLayer(SeLayer layer, String qualifiedName,
            GeometryAttributeType geometryAtt) throws SeException {
        String spatialColName = geometryAtt.getName();
        LOGGER.info("setting spatial column name: " + spatialColName);
        layer.setSpatialColumnName(spatialColName);

        // Set the shape types that can be inserted into this layer
        int seShapeTypes = ArcSDEAdapter.guessShapeTypes(geometryAtt);
        layer.setShapeTypes(seShapeTypes);
        layer.setGridSizes(1100, 0, 0);
        layer.setDescription("Created with GeoTools");

        // Define the layer's Coordinate Reference; fall back to a generic,
        // wide-range CRS when the attribute supplies none
        CoordinateReferenceSystem crs = geometryAtt
                .getCoordinateSystem();
        SeCoordinateReference coordref = getGenericCoordRef();
        String WKT = null;

        if (crs == null) {
            LOGGER
                    .warning("Creating feature type "
                            + qualifiedName
                            + ": the geometry attribute does not supply a coordinate reference system");
        } else {
            LOGGER
                    .info("Creating the SeCoordRef object for CRS "
                            + crs);
            WKT = crs.toWKT();
            coordref.setCoordSysByDescription(WKT);
        }

        SeExtent validCoordRange = null;

        // geographic CRSs get the full lat/lon range; anything else uses the
        // XY envelope of the (possibly generic) coordinate reference
        if ((WKT != null) && (WKT.indexOf("GEOGCS") != -1)) {
            validCoordRange = new SeExtent(-180, -90, 180, 90);
        } else {
            validCoordRange = coordref.getXYEnvelope();
        }

        layer.setExtent(validCoordRange);

        LOGGER
                .info("Applying CRS "
                        + coordref.getCoordSysDescription());
        layer.setCoordRef(coordref);
        LOGGER.info("CRS applyed to the new layer.");

        // this param is used by ArcSDE for database initialization purposes
        int estInitFeatCount = 4;

        // this param is used by ArcSDE as an estimation of the average number
        // of points the layer's geometries will have
        int estAvgPointsPerFeature = 4;
        LOGGER.info("Creating the layer...");
        layer.create(estInitFeatCount, estAvgPointsPerFeature);
        LOGGER.info("ArcSDE layer created.");
    }
0627:
0628: /**
0629: * Creates and returns a <code>SeCoordinateReference</code> CRS, though
0630: * based on an UNKNOWN CRS, is inclusive enough (in terms of valid
0631: * coordinate range and presicion) to deal with most coordintates.
0632: *
0633: * <p>
0634: * Actually tested to deal with coordinates with 0.0002 units of separation
0635: * as well as with large coordinates such as UTM (values greater than
0636: * 500,000.00)
0637: * </p>
0638: *
0639: * <p>
0640: * This method is driven by the equally named method in TestData.java
0641: * </p>
0642: *
0643: * @return DOCUMENT ME!
0644: *
0645: * @throws SeException
0646: * DOCUMENT ME!
0647: */
0648: private static SeCoordinateReference getGenericCoordRef()
0649: throws SeException {
0650: // create a sde CRS with a huge value range and 5 digits of presission
0651: SeCoordinateReference seCRS = new SeCoordinateReference();
0652: int shift = 600000;
0653: SeExtent validRange = new SeExtent(-shift, -shift, shift, shift);
0654: seCRS.setXYByEnvelope(validRange);
0655: LOGGER.info("CRS: " + seCRS.getXYEnvelope());
0656:
0657: return seCRS;
0658: }
0659:
    /**
     * Returns a FeatureReader over the whole contents of
     * <code>typeName</code>; equivalent to
     * <code>getFeatureReader(typeName, Query.ALL)</code>.
     *
     * @param typeName
     *            name of the feature type to read
     *
     * @return a reader over all features of the type
     *
     * @throws java.io.IOException
     *             propagated from getFeatureReader(String, Query)
     */
    protected FeatureReader getFeatureReader(String typeName)
            throws java.io.IOException {
        return getFeatureReader(typeName, Query.ALL);
    }
0675:
    /**
     * Returns an optimized FeatureReader for <code>typeName</code> that
     * honors the filter and attribute truncation specified in
     * <code>query</code>. Called from inside
     * getFeatureReader(Query, Transaction) by the AbstractDataStore machinery.
     *
     * <p>
     * The server-side query is executed eagerly; if any step after its
     * creation fails, the query is closed before the exception is propagated
     * so no server resources are leaked.
     * </p>
     *
     * @param typeName
     *            name of the feature type to read
     * @param query
     *            filter and attribute selection to apply
     *
     * @return a reader over the matching features
     *
     * @throws IOException
     *             if the schema cannot be obtained or the query fails
     * @throws DataSourceException
     *             if the query results do not match the expected schema, or
     *             any other failure occurs while setting up the reader
     */
    protected FeatureReader getFeatureReader(String typeName,
            Query query) throws IOException {
        ArcSDEQuery sdeQuery = null;
        FeatureReader reader = null;

        try {
            FeatureType schema = getSchema(typeName);
            sdeQuery = ArcSDEQuery.createQuery(this, schema, query);

            sdeQuery.execute();

            AttributeReader attReader = new ArcSDEAttributeReader(
                    sdeQuery);
            // the resulting schema may be a subset of the full one, when the
            // query requested only some attributes
            final FeatureType resultingSchema = sdeQuery.getSchema();
            reader = new DefaultFeatureReader(attReader,
                    resultingSchema) {
                protected Feature readFeature(AttributeReader atts)
                        throws IllegalAttributeException, IOException {
                    ArcSDEAttributeReader sdeAtts = (ArcSDEAttributeReader) atts;
                    // bulk-read all attribute values in one call and copy them
                    // into the reader's reusable attributes buffer
                    Object[] currAtts = sdeAtts.readAll();
                    System.arraycopy(currAtts, 0, this.attributes, 0,
                            currAtts.length);

                    return resultingSchema.create(this.attributes,
                            sdeAtts.readFID());
                }
            };
        } catch (SchemaException ex) {
            // release the server-side query before propagating
            if (sdeQuery != null) {
                sdeQuery.close();
            }
            LOGGER.log(Level.SEVERE, ex.getMessage(), ex);
            throw new DataSourceException("Types do not match: "
                    + ex.getMessage(), ex);
        } catch (IOException e) {
            if (sdeQuery != null) {
                sdeQuery.close();
            }
            throw e;
        } catch (Exception t) {
            if (sdeQuery != null) {
                sdeQuery.close();
            }
            LOGGER.log(Level.SEVERE, t.getMessage(), t);
            throw new DataSourceException(
                    "Problem with feature reader: " + t.getMessage(), t);
        }

        return reader;
    }
0756:
0757: /**
0758: *
0759: */
0760: /*public FeatureReader getFeatureReader(Query query, Transaction transaction) throws IOException {
0761: String typeName = query.getTypeName();
0762:
0763: return getFeatureReader(typeName, query);
0764: }*/
0765:
0766: /**
0767: * GR: if a subclass supports filtering, it should override this method to
0768: * return the unsupported part of the passed filter, so a
0769: * FilteringFeatureReader will be constructed upon it. Otherwise it will
0770: * just return the same filter.
0771: *
0772: * <p>
0773: * If the complete filter is supported, the subclass must return
0774: * <code>Filter.INCLUDE</code>
0775: * </p>
0776: *
0777: * @param typeName
0778: * DOCUMENT ME!
0779: * @param filter
0780: * DOCUMENT ME!
0781: *
0782: * @return DOCUMENT ME!
0783: */
0784: protected org.opengis.filter.Filter getUnsupportedFilter(
0785: String typeName, Filter filter) {
0786: try {
0787: SeLayer layer;
0788: SeQueryInfo qInfo;
0789:
0790: if (isView(typeName)) {
0791: qInfo = getViewQueryInfo(typeName);
0792: String mainLayerName;
0793: try {
0794: mainLayerName = qInfo.getConstruct().getTables()[0];
0795: } catch (SeException e) {
0796: throw new RuntimeException(e.getMessage());
0797: }
0798: layer = connectionPool.getSdeLayer(mainLayerName);
0799: } else {
0800: layer = connectionPool.getSdeLayer(typeName);
0801: qInfo = null;
0802: }
0803:
0804: ArcSDEPooledConnection conn = null;
0805: FIDReader fidReader;
0806: try {
0807: conn = connectionPool.getConnection();
0808: fidReader = FIDReader.getFidReader(conn, layer);
0809: } finally {
0810: if (conn != null)
0811: conn.close();
0812: }
0813:
0814: FeatureType schema = getSchema(typeName);
0815: ArcSDEQuery.FilterSet filters = ArcSDEQuery.createFilters(
0816: layer, schema, filter, qInfo,
0817: getViewSelectStatement(typeName), fidReader);
0818:
0819: Filter result = filters.getUnsupportedFilter();
0820:
0821: if (LOGGER.isLoggable(Level.FINE)) {
0822: LOGGER.fine("Supported filters: "
0823: + filters.getSqlFilter() + " --- "
0824: + filters.getGeometryFilter());
0825: LOGGER.fine("Unsupported filter: " + result.toString());
0826: }
0827:
0828: return result;
0829: } catch (IOException ex) {
0830: LOGGER.log(Level.WARNING, ex.getMessage(), ex);
0831: }
0832:
0833: return filter;
0834: }
0835:
0836: /**
0837: * DOCUMENT ME!
0838: *
0839: * @param typeName
0840: *
0841: * @return FeatureWriter over contents of typeName
0842: *
0843: * @throws IOException
0844: * Subclass may throw IOException
0845: */
0846: protected FeatureWriter getFeatureWriter(String typeName)
0847: throws IOException {
0848: ArcSDEPooledConnection conn;
0849: SeLayer layer;
0850: FIDReader fidStrategy;
0851: try {
0852: conn = connectionPool.getConnection();
0853: } catch (UnavailableArcSDEConnectionException e) {
0854: throw new DataSourceException(e);
0855: }
0856: try {
0857: layer = connectionPool.getSdeLayer(conn, typeName);
0858: fidStrategy = FIDReader.getFidReader(conn, layer);
0859: } finally {
0860: conn.close();
0861: }
0862:
0863: return new ArcSDEFeatureWriter(this , fidStrategy, null, layer);
0864: }
0865:
0866: /**
0867: * Provides a writer that iterates over all of the features.
0868: *
0869: * @param typeName
0870: * @param transaction
0871: *
0872: * @return DOCUMENT ME!
0873: *
0874: * @throws IOException
0875: * DOCUMENT ME!
0876: */
0877: public FeatureWriter getFeatureWriter(String typeName,
0878: Transaction transaction) throws IOException {
0879: FeatureWriter featureWriter = super .getFeatureWriter(typeName,
0880: transaction);
0881:
0882: return featureWriter;
0883: }
0884:
0885: /**
0886: * DOCUMENT ME!
0887: *
0888: * @param typeName
0889: * @param filter
0890: * @param transaction
0891: *
0892: * @return DOCUMENT ME!
0893: *
0894: * @throws IOException
0895: * DOCUMENT ME!
0896: */
0897: public FeatureWriter getFeatureWriter(String typeName,
0898: Filter filter, Transaction transaction) throws IOException {
0899:
0900: FeatureType featureType = getSchema(typeName);
0901: AttributeType[] attributes = featureType.getAttributeTypes();
0902: String[] names = new String[attributes.length];
0903:
0904: // Extract the attribute names for the query, we want them all...
0905: for (int i = 0; i < names.length; i++) {
0906: names[i] = attributes[i].getName();
0907: }
0908:
0909: DefaultQuery query = new DefaultQuery(typeName, filter, 100,
0910: names, "handle");
0911: ArrayList list = new ArrayList();
0912:
0913: // We really don't need any transaction handling here, just keep it
0914: // simple as
0915: // we are going to exhaust this feature reader immediately. Really, this
0916: // could
0917: // consume a great deal of memory based on the query.
0918: // PENDING Jake Fear: Optimize this operation, exhausting the reader in
0919: // this
0920: // case could be a cause of real trouble later on. I need to think
0921: // through
0922: // the consequences of all of this. Really the feature writer should
0923: // delegate to a FeatureReader for the features that are queried. That
0924: // way
0925: // we can stream all of these goodies instead of having big fat
0926: // chunks...
0927: //
0928: // All that said, this works until I get everything else completed....
0929: FeatureReader featureReader = getFeatureReader(query,
0930: Transaction.AUTO_COMMIT);
0931:
0932: while (featureReader.hasNext()) {
0933: try {
0934: list.add(featureReader.next());
0935: } catch (Exception ex) {
0936: LOGGER.log(Level.WARNING, ex.getMessage(), ex);
0937: break;
0938: }
0939: }
0940: featureReader.close();
0941:
0942: // Well, this seems to come prepopulated with a state object,
0943: // but I can't seem to figure out why. As such we check for
0944: // and existing state, and check that states class as well. If
0945: // it is a state we already provided (or at least of a workable
0946: // type) then we will proceed with it. Otherwise, we must remove
0947: // the state and replace it with an appropriate transaction
0948: // state object that we understand. This should not present any
0949: // danger as the default state could not possibly have come from
0950: // us, and as such, no uncommitted changes could be lost.
0951: // Jake Fear 6/25/2004
0952: ArcTransactionState state = null;
0953:
0954: if (Transaction.AUTO_COMMIT != transaction) {
0955: synchronized (this ) {
0956: Transaction.State s = transaction.getState(this );
0957:
0958: if (!(s instanceof ArcTransactionState)) {
0959: if (s != null) {
0960: transaction.removeState(this );
0961: }
0962:
0963: state = new ArcTransactionState(this );
0964: transaction.putState(this , state);
0965: } else {
0966: state = (ArcTransactionState) s;
0967: }
0968: }
0969: }
0970:
0971: ArcSDEPooledConnection connection = connectionPool
0972: .getConnection();
0973: SeLayer layer;
0974: FIDReader fidStrategy;
0975: try {
0976: layer = connectionPool.getSdeLayer(connection, typeName);
0977: fidStrategy = FIDReader.getFidReader(connection, layer);
0978: } finally {
0979: connection.close();
0980: }
0981:
0982: FeatureWriter writer = new ArcSDEFeatureWriter(this ,
0983: fidStrategy, state, layer, list);
0984:
0985: return writer;
0986: }
0987:
0988: /**
0989: * Provides a <code>FeatureWriter</code> in an appropriate state for
0990: * immediately adding new <code>Feature</code> instances to the specified
0991: * layer.
0992: *
0993: * @param typeName
0994: * @param transaction
0995: *
0996: * @return FeatureWriter whose hasNext() call will return false.
0997: *
0998: * @throws IOException
0999: * DOCUMENT ME!
1000: */
1001: public FeatureWriter getFeatureWriterAppend(String typeName,
1002: Transaction transaction) throws IOException {
1003: ArcTransactionState state = null;
1004:
1005: if (Transaction.AUTO_COMMIT != transaction) {
1006: synchronized (this ) {
1007: state = (ArcTransactionState) transaction
1008: .getState(this );
1009:
1010: if (state == null) {
1011: state = new ArcTransactionState(this );
1012: transaction.putState(this , state);
1013: }
1014: }
1015: }
1016:
1017: SeLayer layer;
1018: FIDReader fidStrategy;
1019: ArcSDEPooledConnection conn = connectionPool.getConnection();
1020: try {
1021: layer = connectionPool.getSdeLayer(conn, typeName);
1022: fidStrategy = FIDReader.getFidReader(conn, layer);
1023: } finally {
1024: conn.close();
1025: }
1026: FeatureWriter writer = new ArcSDEFeatureWriter(this ,
1027: fidStrategy, state, layer);
1028:
1029: return writer;
1030: }
1031:
1032: /**
1033: * Gets the number of the features that would be returned by this query for
1034: * the specified feature type.
1035: *
1036: * <p>
1037: * If getBounds(Query) returns <code>-1</code> due to expense consider
1038: * using <code>getFeatures(Query).getCount()</code> as a an alternative.
1039: * </p>
1040: *
1041: * @param query
1042: * Contains the Filter and MaxFeatures to find the bounds for.
1043: *
1044: * @return The number of Features provided by the Query or <code>-1</code>
1045: * if count is too expensive to calculate or any errors or occur.
1046: *
1047: * @throws IOException
1048: * if there are errors getting the count
1049: */
1050: protected int getCount(Query query) throws IOException {
1051: LOGGER.fine("getCount");
1052:
1053: int count = ArcSDEQuery.calculateResultCount(this , query);
1054: LOGGER.fine("count: " + count);
1055:
1056: return count;
1057: }
1058:
1059: /**
1060: * Computes the bounds of the features for the specified feature type that
1061: * satisfy the query provided that there is a fast way to get that result.
1062: *
1063: * <p>
1064: * Will return null if there is not fast way to compute the bounds. Since
1065: * it's based on some kind of header/cached information, it's not guaranteed
1066: * to be real bound of the features
1067: * </p>
1068: *
1069: * @param query
1070: *
1071: * @return the bounds, or null if too expensive
1072: *
1073: * @throws IOException
1074: */
1075: protected Envelope getBounds(Query query) throws IOException {
1076: LOGGER.fine("getBounds");
1077:
1078: Envelope ev;
1079: if (query == null || query.getFilter().equals(Filter.INCLUDE)) {
1080: LOGGER
1081: .fine("getting bounds of entire layer. Using optimized SDE call.");
1082: // we're really asking for a bounds of the WHOLE layer,
1083: // let's just ask SDE metadata for that, rather than doing an
1084: // expensive query
1085: SeLayer this Layer = this .connectionPool.getSdeLayer(query
1086: .getTypeName());
1087: SeExtent extent = this Layer.getExtent();
1088: ev = new Envelope(extent.getMinX(), extent.getMaxX(),
1089: extent.getMinY(), extent.getMaxY());
1090: } else {
1091: ev = ArcSDEQuery.calculateQueryExtent(this , query);
1092: }
1093:
1094: if (LOGGER.isLoggable(Level.FINE)) {
1095: if (ev != null)
1096: LOGGER
1097: .fine("ArcSDE optimized getBounds call returned: "
1098: + ev);
1099: else
1100: LOGGER
1101: .fine("ArcSDE couldn't process all filters in this query, so optimized getBounds() returns null.");
1102: }
1103:
1104: return ev;
1105: }
1106:
1107: /**
1108: * Returns wether <code>typeName</code> refers to a FeatureType registered
1109: * as an in-process view through {@link #registerView(String, PlainSelect)}.
1110: *
1111: * @param typeName
1112: * @return <code>true</code> if <code>typeName</code> is registered as a
1113: * view given a SQL SELECT query, <code>false</code> otherwise.
1114: */
1115: public boolean isView(String typeName) {
1116: return viewSchemasCache.containsKey(typeName);
1117: }
1118:
1119: public SeQueryInfo getViewQueryInfo(String typeName) {
1120: SeQueryInfo qInfo = (SeQueryInfo) viewQueryInfos.get(typeName);
1121: return qInfo;
1122: }
1123:
1124: public PlainSelect getViewSelectStatement(String typeName) {
1125: PlainSelect select = (PlainSelect) viewSelectStatements
1126: .get(typeName);
1127: return select;
1128: }
1129:
1130: /**
1131: * Creates an in-process data view against one or more actual FeatureTypes
1132: * of this DataStore, which will be advertised as <code>typeName</code>
1133: *
1134: * @param typeName
1135: * the name of the view's FeatureType.
1136: * @param sqlQuery
1137: * a full SQL query which will act as the view definition.
1138: * @throws IOException
1139: */
1140: // public void registerView(String typeName, String sqlQuery) throws
1141: // IOException {
1142: // LOGGER.fine("about to register view " + typeName + "=" + sqlQuery);
1143: // SelectBody select = parseSqlQuery(sqlQuery);
1144: // registerView(typeName, select);
1145: // }
1146: // private static SelectBody parseSqlQuery(String selectStatement) throws
1147: // IOException {
1148: // CCJSqlParserManager pm = new CCJSqlParserManager();
1149: // Reader reader = new StringReader(selectStatement);
1150: // Statement statement;
1151: // try {
1152: // statement = pm.parse(reader);
1153: // } catch (Exception e) {
1154: // throw new DataSourceException("parsing select statement: " +
1155: // e.getCause().getMessage(),
1156: // e);
1157: // }
1158: // if (!(statement instanceof Select)) { // either PlainSelect or Union
1159: // throw new IllegalArgumentException("expected select or union statement: "
1160: // + statement);
1161: // }
1162: // SelectBody selectBody = ((Select) statement).getSelectBody();
1163: // return selectBody;
1164: // }
1165: /**
1166: *
1167: * @param typeName
1168: * @param select
1169: * may be a {@link PlainSelect} or a {@link Union}. If it is a
1170: * <code>Union</code> will simply throw an
1171: * UnsupportedOperationException.
1172: * @throws IOException
1173: * @throws UnsupportedOperationException
1174: * if <code>select</code> is a <code>Union</code> or a
1175: * <code>PlainSelect</code> containing a construct not
1176: * supported by ArcSDE
1177: */
1178: // public void registerView(String typeName, SelectBody select) throws
1179: // IOException,
1180: // UnsupportedOperationException {
1181: // if (!(select instanceof PlainSelect)) {
1182: // throw new UnsupportedOperationException("ArcSDE supports only a limited"
1183: // + " set of PlainSelect construct: " + select);
1184: // }
1185: // registerView(typeName, (PlainSelect) select);
1186: // }
1187: /**
1188: * Supported constructs:
1189: * <ul>
1190: * <li>FromItems
1191: * <li>SelectItems
1192: * <li>Top (as in SELECT TOP 10 * FROM...)
1193: * <li>Where
1194: * </ul>
1195: *
1196: * @param typeName
1197: * @param select
1198: * @throws IOException
1199: */
1200: public void registerView(final String typeName,
1201: final PlainSelect select) throws IOException {
1202:
1203: if (typeName == null)
1204: throw new NullPointerException("typeName");
1205: if (select == null)
1206: throw new NullPointerException("select");
1207: if (Arrays.asList(getTypeNames()).contains(typeName)) {
1208: throw new IllegalArgumentException(typeName
1209: + " already exists as a FeatureType");
1210: }
1211:
1212: verifyQueryIsSupported(select);
1213:
1214: ArcSDEPooledConnection conn = connectionPool.getConnection();
1215:
1216: PlainSelect qualifiedSelect = SelectQualifier.qualify(conn,
1217: select);
1218: // System.out.println(qualifiedSelect);
1219:
1220: SeQueryInfo queryInfo;
1221: LOGGER.fine("creating definition query info");
1222: try {
1223: queryInfo = QueryInfoParser.parse(conn, qualifiedSelect);
1224: } catch (SeException e) {
1225: throw new DataSourceException("Parsing select: "
1226: + e.getMessage(), e);
1227: } finally {
1228: conn.close();
1229: }
1230:
1231: FeatureType viewSchema = ArcSDEAdapter.fetchSchema(
1232: connectionPool, typeName, namespace, queryInfo);
1233: LOGGER.fine("view schema: " + viewSchema);
1234:
1235: this .viewQueryInfos.put(typeName, queryInfo);
1236: this .viewSchemasCache.put(typeName, viewSchema);
1237: this .viewSelectStatements.put(typeName, qualifiedSelect);
1238: }
1239:
1240: /**
1241: * Unsupported constructs:
1242: * <ul>
1243: * <li>GroupByColumnReferences
1244: * <li>Joins
1245: * <li>Into
1246: * <li>Limit
1247: * </ul>
1248: * Not yet verified to work:
1249: * <ul>
1250: * <li>Distinct
1251: * <li>Having
1252: * <li>
1253: * </ul>
1254: *
1255: * @param select
1256: * @throws UnsupportedOperationException
1257: * if any of the unsupported constructs are found on
1258: * <code>select</code>
1259: */
1260: private void verifyQueryIsSupported(PlainSelect select)
1261: throws UnsupportedOperationException {
1262: List errors = new LinkedList();
1263: // @TODO errors.add(select.getDistinct());
1264: // @TODO errors.add(select.getHaving());
1265: verifyUnsupportedSqlConstruct(errors, select
1266: .getGroupByColumnReferences());
1267: verifyUnsupportedSqlConstruct(errors, select.getInto());
1268: verifyUnsupportedSqlConstruct(errors, select.getJoins());
1269: verifyUnsupportedSqlConstruct(errors, select.getLimit());
1270: if (errors.size() > 0) {
1271: throw new UnsupportedOperationException(
1272: "The following constructs are not supported: "
1273: + errors);
1274: }
1275: }
1276:
1277: /**
1278: * If construct is not null or an empty list, adds it to the list of errors.
1279: *
1280: * @param errors
1281: * @param construct
1282: */
1283: private void verifyUnsupportedSqlConstruct(List errors,
1284: Object construct) {
1285: if (construct instanceof List) {
1286: List constructsList = (List) construct;
1287: if (constructsList.size() > 0) {
1288: errors.add(constructsList);
1289: }
1290: } else if (construct != null) {
1291: errors.add(construct);
1292: }
1293: }
1294:
1295: }
|