/*
 * GeoTools - OpenSource mapping toolkit
 * http://geotools.org
 * (C) 2002-2006, GeoTools Project Management Committee (PMC)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation;
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 */
package org.geotools.data.postgis;

import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;

import org.geotools.data.DataSourceException;
import org.geotools.data.DataUtilities;
import org.geotools.data.DefaultQuery;
import org.geotools.data.FeatureReader;
import org.geotools.data.Query;
import org.geotools.data.jdbc.JDBCDataStore;
import org.geotools.data.jdbc.JDBCFeatureStore;
import org.geotools.data.jdbc.JDBCUtils;
import org.geotools.data.jdbc.SQLBuilder;
import org.geotools.data.jdbc.fidmapper.FIDMapper;
import org.geotools.factory.FactoryConfigurationError;
import org.geotools.factory.Hints;
import org.geotools.feature.AttributeType;
import org.geotools.feature.Feature;
import org.geotools.feature.FeatureCollection;
import org.geotools.feature.FeatureIterator;
import org.geotools.feature.FeatureType;
import org.geotools.feature.GeometryAttributeType;
import org.geotools.feature.IllegalAttributeException;
import org.geotools.feature.SchemaException;
import org.geotools.filter.FidFilter;
import org.opengis.filter.Filter;
import org.opengis.referencing.FactoryException;
import org.opengis.referencing.crs.CoordinateReferenceSystem;
import org.opengis.referencing.operation.TransformException;
import org.geotools.filter.FilterFactory;
import org.geotools.filter.FilterFactoryFinder;
import org.geotools.filter.SQLEncoderException;
import org.geotools.filter.SQLEncoderPostgis;
import org.geotools.filter.SQLUnpacker;
import org.geotools.geometry.jts.ReferencedEnvelope;

import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.io.ParseException;
import com.vividsolutions.jts.io.WKTReader;
import com.vividsolutions.jts.io.WKTWriter;

/**
 * Implementation of a PostGIS specific FeatureStore.
 * <p>
 * This mostly just rips off code from PostgisDataSource. It could definitely
 * use some nice code reuse with PostgisDataStore, as they have a number of
 * similar if not identical methods right now.
 * <p>
 * Approaching deadlines, however, mean that we're sticking with the code that
 * works, instead of getting all kinds of nice reuse. This'll hopefully change.
 * This bypasses the writers used in JDBCFeatureStore, as I'm just not yet
 * confident in them. We also should do some solid tests to see which is
 * actually faster.
 *
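 * <p>
 * A minimal usage sketch (the type name "roads" and the way the data store is
 * obtained are illustrative only):
 * <pre>
 *   PostgisDataStore dataStore = ...; // e.g. from PostgisDataStoreFactory
 *   FeatureStore roads = (FeatureStore) dataStore.getFeatureSource("roads");
 *   roads.removeFeatures(filter); // delete the features matching a Filter
 * </pre>
 *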
 * @author Chris Holmes, TOPP
 * @source $URL: http://svn.geotools.org/geotools/tags/2.4.1/modules/plugin/postgis/src/main/java/org/geotools/data/postgis/PostgisFeatureStore.java $
 * @version $Id: PostgisFeatureStore.java 29383 2008-02-19 16:39:16Z aaime $
 *
 * @task HACK: too little code reuse with PostgisDataStore.
 * @task TODO: make individual operations truly atomic. If the transaction is
 *       an auto-commit one, then it should make a new jdbc transaction that
 *       rolls back if there are errors while performing its action.
 * @task TODO: don't use encoder at all, only access it through
 *       PostgisSQLBuilder
 */
public class PostgisFeatureStore extends JDBCFeatureStore {
    /** The logger for the postgis module. */
    private static final Logger LOGGER = org.geotools.util.logging.Logging
            .getLogger("org.geotools.data.postgis");

    /** Well Known Text writer (from JTS). */
    protected static WKTWriter geometryWriter = new WKTWriter();

    /** Factory for producing geometries (from JTS). */
    protected static GeometryFactory geometryFactory = new GeometryFactory();

    /** Well Known Text reader (from JTS). */
    protected static WKTReader geometryReader = new WKTReader(geometryFactory);

    /** Error message prefix for sql connection errors */
    protected static final String CONN_ERROR = "Some sort of database connection error: ";

    /** To create the sql where statement */
    protected PostgisSQLBuilder sqlBuilder;
    protected SQLEncoderPostgis encoder;
    protected String tableName;

    /** Maps feature ids to and from the column(s) used for the featureId */
    protected FIDMapper fidMapper;

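    /**
     * Creates the feature store, configuring the SQL builder and the filter
     * encoder (schema, FID mapper and, when a default geometry is present,
     * its SRID) for the given feature type's table.
     *
     * @param postgisDataStore the data store that created this feature store.
     * @param featureType the schema of the table this store operates on.
     *
     * @throws IOException if the SRID of the default geometry cannot be
     *         determined.
     */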
    public PostgisFeatureStore(PostgisDataStore postgisDataStore,
            FeatureType featureType) throws IOException {
        super(postgisDataStore, featureType);
        tableName = featureType.getTypeName();
        fidMapper = postgisDataStore.getFIDMapper(tableName);
        sqlBuilder = (PostgisSQLBuilder) postgisDataStore
                .getSqlBuilder(tableName);

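        // Prime the filter encoder with the schema and fid mapper so that
        // attribute and feature-id filters can be translated into SQL.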
        AttributeType geomType = featureType.getDefaultGeometry();
        encoder = new SQLEncoderPostgis();
        encoder.setFeatureType(featureType);
        encoder.setFIDMapper(postgisDataStore.getFIDMapper(featureType
                .getTypeName()));

        if (geomType != null) {
            //HACK: encoder should be set for each geometry.
            int srid = getSRID(geomType.getName());
            encoder.setDefaultGeometry(geomType.getName());
            encoder.setSRID(srid);
            encoder.setFIDMapper(fidMapper);
        }
    }

//    /**
//     * Returns a feature collection, based on the passed filter. The schema of
//     * the features passed in must match the schema of the datasource.
//     *
//     * @param reader Add features to the PostGIS database.
//     *
//     * @return A set of featureIds of the features added.
//     *
//     * @throws IOException if anything went wrong.
//     * @throws DataSourceException DOCUMENT ME!
//     *
//     * @task REVISIT: Check to make sure features passed in match schema.
//     * @task TODO: get working with the primary key fid column. This will
//     *       currently just insert nulls for the fids if oid is not being used
//     *       as the column. We probably need a sequence to generate the fids.
//     *       Or if the fid is supposed to be part of the insert (which doesn't
//     *       make sense if we return fids), then we should check for
//     *       uniqueness.
//     * @task REVISIT: not sure about previousAutoCommit stuff. We want to make
//     *       sure that each of these actions is atomic if we're not working
//     *       against a Transaction.
//     */
//    public Set addFeatures(FeatureReader reader) throws IOException {
//        boolean fail = false;
//        Set curFids = null;
//        Set newFids = null;
//
//        //Feature[] featureArr = collection.getFeatures();
//        Connection conn = null;
//        Statement statement = null;
//
//        if (reader.hasNext()) {
//            try {
//                conn = getConnection();
//
//                curFids = getFidSet(conn);
//                LOGGER.fine("fids before add: " + curFids);
//                statement = conn.createStatement();
//
//                while (reader.hasNext()) {
//                    String sql = makeInsertSql(tableName, reader.next());
//                    LOGGER.finer("this sql statement = " + sql);
//                    statement.executeUpdate(sql);
//                }
//
//                newFids = getFidSet(conn);
//                LOGGER.fine("fids after add: " + newFids);
//                newFids.removeAll(curFids);
//                LOGGER.fine("to return " + newFids);
//            } catch (SQLException sqle) {
//                fail = true;
//                close(conn, getTransaction(), sqle);
//
//                String message = CONN_ERROR + sqle.getMessage();
//                LOGGER.warning(message);
//                throw new DataSourceException(message, sqle);
//            } catch (IllegalAttributeException iae) {
//                throw new DataSourceException("attribute problem", iae);
//            } finally {
//                reader.close();
//                close(statement);
//                close(conn, getTransaction(), null);
//
//                //finalizeTransactionMethod(previousAutoCommit, fail);
//            }
//        }
//
//        //Set retFids = new HashSet(newFids.size());
//        //for (Iterator i = newFids.iterator(); i.hasNext;){
//        return newFids;
//    }

//    /**
//     * Gets the set of fids for all features in this datasource. Used by
//     * insert to figure out which features it added. There should be a more
//     * efficient way of doing this, I'm just not sure what.
//     *
//     * @param conn The connection to get the fid set with.
//     *
//     * @return a set of strings of the featureIds
//     *
//     * @throws IOException if there were problems connecting to the db backend.
//     * @throws DataSourceException DOCUMENT ME!
//     */
//    private Set getFidSet(Connection conn) throws IOException {
//        Set fids = new HashSet();
//        Statement statement = null;
//
//        try {
//            LOGGER.finer("entering fid set");
//
//            //conn = getConnection();
//            statement = conn.createStatement();
//
//            DefaultQuery query = new DefaultQuery();
//            query.setPropertyNames(new String[0]);
//
//            SQLUnpacker unpacker = new SQLUnpacker(encoder.getCapabilities());
//
//            //REVISIT: redo unpacker-this has to be called first, or it breaks.
//            unpacker.unPackAND(null);
//
//            String sql = makeSql(unpacker, (Query) query);
//            ResultSet result = statement.executeQuery(sql);
//
//            while (result.next()) {
//                //REVISIT: this formatting could be done after the remove,
//                //would speed things up, but also would make that code ugly.
//                fids.add(createFid(result.getString(1)));
//            }
//        } catch (SQLException sqle) {
//            String message = CONN_ERROR + sqle.getMessage();
//            LOGGER.warning(message);
//            close(conn, getTransaction(), sqle);
//            throw new DataSourceException(message, sqle);
//        } finally {
//            close(statement);
//            close(conn, getTransaction(), null);
//        }
//
//        LOGGER.finest("returning fids " + fids);
//
//        return fids;
//    }

//    /**
//     * Creates a sql insert statement. Uses each feature's schema, which makes
//     * it possible to insert out of order, as well as inserting less than all
//     * features.
//     *
//     * @param tableName the name of the feature table being inserted into.
//     * @param feature the feature to add.
//     *
//     * @return an insert sql statement.
//     *
//     * @throws IOException DOCUMENT ME!
//     */
//    private String makeInsertSql(String tableName, Feature feature) throws IOException {
//        String attrValue;
//        StringBuffer sql = new StringBuffer();
//
//        sql.append("INSERT INTO \"");
//        sql.append(tableName);
//        sql.append("\"(");
//
//        FeatureType featureSchema = feature.getFeatureType();
//        AttributeType[] types = featureSchema.getAttributeTypes();
//
//        for (int i = 0; i < types.length; i++) {
//            sql.append("\"");
//            sql.append(types[i].getName());
//            sql.append("\"");
//            sql.append((i < (types.length - 1)) ? ", " : ") ");
//        }
//
//        sql.append("VALUES (");
//
//        Object[] attributes = feature.getAttributes(null);
//
//        for (int j = 0; j < attributes.length; j++) {
//            if (attributes[j] == null) {
//                sql.append("null");
//            } else if (types[j].isGeometry()) {
//                int srid = getSRID(types[j].getName());
//                String geoText = geometryWriter.write((Geometry) attributes[j]);
//                sql.append("GeometryFromText('");
//                sql.append(geoText);
//                sql.append("', ");
//                sql.append(srid);
//                sql.append(")");
//            } else {
//                attrValue = addQuotes(attributes[j]);
//                sql.append(attrValue);
//            }
//
//            if (j < (attributes.length - 1)) {
//                sql.append(", ");
//            }
//
//        }
//
//        sql.append(");");
//
//        LOGGER.fine("insert statement is " + sql);
//        return sql.toString();
//    }

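    /**
     * Looks up the spatial reference identifier (SRID) registered for the
     * named geometry column of this feature store's table.
     *
     * @param geomName the name of the geometry column.
     *
     * @return the SRID recorded for that column.
     *
     * @throws IOException if the SRID lookup fails.
     */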
    protected int getSRID(String geomName) throws IOException {
        return getPostgisDataStore().getSRID(tableName, geomName);
    }

    /**
     * Adds quotes to an object for storage in postgis. The object should be a
     * string or a number. To perform an insert strings need quotes around
     * them, and numbers work fine with quotes, so this method can be called
     * on unknown objects.
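     * Note that no escaping is performed, so values containing single quotes
     * will produce invalid SQL.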
     *
     * @param value The object to add quotes to.
     *
     * @return a string representation of the object with quotes.
     */
    private String addQuotes(Object value) {
        String retString;

        if (value != null) {
            retString = "'" + value.toString() + "'";
        } else {
            retString = "null";
        }

        return retString;
    }

    /**
     * Removes the features specified by the passed filter from the PostGIS
     * database.
     *
     * @param filter An OpenGIS filter; specifies which features to remove.
     *
     * @throws IOException If anything goes wrong or if deleting is not
     *         supported.
     * @throws DataSourceException If there were problems executing the delete
     *         against the database.
     */
    public void removeFeatures(Filter filter) throws IOException {
        String sql = "";
        String whereStmt = null;

        // check locks!
        // (won't do anything if we use our own
        // database locking)
        assertFilter(filter);

        //boolean previousAutoCommit = getAutoCommit();
        //setAutoCommit(false);
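        // Split the filter into the portion that can be encoded as SQL and the
        // portion that cannot; a non-encodable remainder is converted into a
        // FID filter below so the delete can still run in the database.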
        Filter encodableFilter = sqlBuilder.getPreQueryFilter(filter);
        Filter unEncodableFilter = sqlBuilder.getPostQueryFilter(filter);

        // SQLUnpacker unpacker = new SQLUnpacker(encoder.getCapabilities());
        // unpacker.unPackOR(filter);

        Statement statement = null;
        Connection conn = null;

        try {
            conn = getConnection();
            statement = conn.createStatement();

            if (encodableFilter == null && unEncodableFilter != null) {
                encodableFilter = getEncodableFilter(unEncodableFilter);
            }

            if (encodableFilter != null) {
                whereStmt = encoder.encode(encodableFilter);
                sql = "DELETE from " + sqlBuilder.encodeTableName(tableName)
                        + whereStmt + ";";

                //do actual delete
                LOGGER.fine("sql statement is " + sql);
                DefaultQuery query = new DefaultQuery(getSchema().getTypeName(),
                        filter);
                Envelope bounds = bounds(query);
                statement.executeUpdate(sql);

                if (bounds != null && !bounds.isNull())
                    getJDBCDataStore().listenerManager.fireFeaturesRemoved(
                            getSchema().getTypeName(), getTransaction(),
                            bounds, false);

            }

            close(statement);
        } catch (SQLException sqle) {
            close(conn, getTransaction(), sqle);

            String message = CONN_ERROR + sqle.getMessage();
            LOGGER.warning(message);
            throw new DataSourceException(message, sqle);
        } catch (SQLEncoderException ence) {
            String message = "error encoding sql from filter "
                    + ence.getMessage();
            LOGGER.warning(message);

            throw new DataSourceException(message, ence);
        } catch (IllegalAttributeException iae) {
            throw new DataSourceException("attribute problem", iae);
        } finally {
            close(statement);
            close(conn, getTransaction(), null);
        }
    }

    /**
     * Modifies the passed attribute types with the passed objects in all
     * features that correspond to the passed OGC filter.
     *
     * @param type The attributes to modify.
     * @param value The values to put in the attribute types.
     * @param filter An OGC filter specifying which features to modify.
     *
     * @throws IOException If modification is not supported, if the attribute
     *         and object arrays are not of equal length, or if the object
     *         types do not match the attribute types.
     * @throws DataSourceException If there were problems executing the update
     *         against the database.
     *
     * @task REVISIT: validate values with types. Database does this a bit
     *       now, but should be more fully implemented.
     * @task REVISIT: do some nice prepared statement stuff like oracle.
     */
    public void modifyFeatures(AttributeType[] type, Object[] value,
            Filter filter) throws IOException {
        // check locks!
        // (won't do anything if we use our own
        // database locking)
        LOGGER.finer("asserting filter " + filter);
        assertFilter(filter);

        //boolean previousAutoCommit = getAutoCommit();
        //setAutoCommit(false);
        boolean fail = false;
        Connection conn = null;
        Statement statement = null;
        String sql = "";
        String fid = null;

        //check schema with filter???
        // SQLUnpacker unpacker = new SQLUnpacker(encoder.getCapabilities());
        // unpacker.unPackOR(filter);

        String whereStmt = null;
        Filter encodableFilter = sqlBuilder.getPreQueryFilter(filter);
        Filter unEncodableFilter = sqlBuilder.getPostQueryFilter(filter);

        try {
            conn = getConnection();
            statement = conn.createStatement();
            // statement = conn.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY);

            if (encodableFilter == null && unEncodableFilter != null) {
                FidFilter fidFilter = getEncodableFilter(unEncodableFilter);
                encodableFilter = fidFilter;
            }

            if (encodableFilter != null) {
                whereStmt = encoder.encode(encodableFilter);
                sql = makeModifySql(type, value, whereStmt);
                LOGGER.finer("encoded modify is " + sql);
                DefaultQuery query = new DefaultQuery(getSchema().getTypeName(),
                        filter);
                Envelope bounds = bounds(query);
                statement.executeUpdate(sql);
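                // Union the bounds taken before and after the update so that
                // listeners are notified with the full area affected by the
                // change.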
                if (bounds != null && !bounds.isNull()) {
                    Envelope afterBounds = bounds(query);
                    if (afterBounds != null)
                        bounds.expandToInclude(afterBounds);
                } else {
                    bounds = bounds(query);
                }
                if (bounds != null && !bounds.isNull())
                    getJDBCDataStore().listenerManager.fireFeaturesChanged(
                            getSchema().getTypeName(), getTransaction(),
                            bounds, false);
            }

        } catch (SQLException sqle) {
            fail = true;
            close(conn, getTransaction(), sqle);

            String message = CONN_ERROR + sqle.getMessage();
            LOGGER.warning(message);
            throw new DataSourceException(message, sqle);
        } catch (SQLEncoderException ence) {
            fail = true;

            String message = "error encoding sql from filter "
                    + ence.getMessage();
            LOGGER.warning(message);
            throw new DataSourceException(message, ence);
        } catch (IllegalAttributeException iae) {
            throw new DataSourceException("attribute problem", iae);
        } finally {
            close(statement);
            close(conn, getTransaction(), null);
        }
    }

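    /**
     * Converts a filter that cannot be encoded as SQL into a FID filter by
     * running the query and collecting the ids of the matching features.
     */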
    private FidFilter getEncodableFilter(Filter unEncodableFilter)
            throws IOException, FactoryConfigurationError,
            IllegalAttributeException {
        // this is very similar to getFidSet - the reason is so that we
        // don't spend time constructing geometries when we don't need
        // to, but we probably could get some better code reuse.
        DefaultQuery query = new DefaultQuery();
        query.setPropertyNames(new String[0]);
        query.setFilter(unEncodableFilter);

        FeatureCollection features = getFeatures(unEncodableFilter);

        FilterFactory ff = FilterFactoryFinder.createFilterFactory();
        FidFilter fidFilter = ff.createFidFilter();
        FeatureIterator it = features.features();
        try {
            while (it.hasNext()) {
                Feature feature = it.next();
                fidFilter.addFid(feature.getID());
            }
        } finally {
            features.close(it);
        }
        return fidFilter;
    }

    /**
     * Strips the tableName from the fid, for fids in the format
     * featureName.3534. Should maybe just strip out all non-numeric
     * characters.
     *
     * @param feature The feature for which the fid number should be stripped.
     *
     * @return The fid without the leading tableName, wrapped in quotes.
     */
    private String formatFid(Feature feature) {
        String fid = feature.getID();

        if (fid.startsWith(tableName)) {
            //take out the tableName and the .
            fid = fid.substring(tableName.length() + 1);
        }

        return addQuotes(fid);
    }

    /**
     * Modifies the passed attribute type with the passed object in all
     * features that correspond to the passed OGC filter. A convenience
     * method for single attribute modifications.
     *
     * @param type The attribute to modify.
     * @param value The value to put in the attribute type.
     * @param filter An OGC filter specifying which features to modify.
     *
     * @throws IOException If modification is not supported, or if the object
     *         type does not match the attribute type.
     */
    public void modifyFeatures(AttributeType type, Object value,
            Filter filter) throws IOException {
        AttributeType[] singleType = { type };
        Object[] singleVal = { value };
        modifyFeatures(singleType, singleVal, filter);
    }

    /**
     * Creates a sql update statement.
     *
     * @param types the attributes to be changed.
     * @param values the values to change them to.
     * @param whereStmt the WHERE clause selecting the features to update.
     *
     * @return an update sql statement.
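     *         An illustrative result (identifiers and values depend on the
     *         schema and filter):
     *         <code>UPDATE "roads" SET "name" = 'Main St' WHERE ...;</code>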
     *
     * @throws IOException if the lengths of types and values don't match.
     */
    private String makeModifySql(AttributeType[] types,
            Object[] values, String whereStmt) throws IOException {
        int arrLength = types.length;

        if (arrLength == values.length) {
            StringBuffer sqlStatement = new StringBuffer("UPDATE ");
            sqlStatement.append(sqlBuilder.encodeTableName(tableName) + " SET ");

            for (int i = 0; i < arrLength; i++) {
                AttributeType curType = types[i];
                Object curValue = values[i];
                String newValue;

                //check here to make sure object matches attribute type.
                if (curType instanceof GeometryAttributeType) {
                    //create the text to add geometry
                    int srid = getSRID(curType.getName());
                    String geoText = geometryWriter.write((Geometry) curValue);
                    newValue = "GeometryFromText('" + geoText + "', "
                            + srid + ")";
                } else {
                    //or add quotes, covers rest of cases
                    newValue = addQuotes(curValue);
                }

                sqlStatement.append(sqlBuilder.encodeColumnName(curType.getName())
                        + " = " + newValue);

                //sqlStatement.append(curType.getName() + " = " + newValue);
                sqlStatement.append((i < (arrLength - 1)) ? ", " : " ");
            }

            sqlStatement.append(whereStmt + ";");

            return sqlStatement.toString();
        } else {
            throw new IOException("length of value array is not "
                    + "the same as the length of the type array");
        }
    }

    /**
     * Performs the setFeatures operation by removing all features and then
     * adding the full collection. This is not efficient; the add, modify and
     * remove operations should be used instead. This is just to follow the
     * interface.
     *
     * @task REVISIT: to abstract class, same as oracle.
     */

    /*public void setFeatures(FeatureCollection features)
     throws IOException {
     boolean originalAutoCommit = getAutoCommit();
     setAutoCommit(false);
     removeFeatures(null);
     addFeatures(features);
     //commit();
     //setAutoCommit(originalAutoCommit);
     }*/
    protected PostgisDataStore getPostgisDataStore() {
        return (PostgisDataStore) super.getJDBCDataStore();
    }

    /**
     * Creates a SQL statement for the PostGIS database.
     *
     * @deprecated please use makeSql(query)
     * @param unpacker the object to get the encodable filter.
     * @param query the getFeature query - for the tableName, properties and
     *        maxFeatures.
     * @throws IOException if there are problems encoding the sql.
     */
    public String makeSql(SQLUnpacker unpacker, Query query)
            throws IOException {
        //one to one relationship for now, so typeName is not used.
        //String tableName = query.getTypeName();

        boolean useLimit = (unpacker.getUnSupported() == null);
        Filter filter = unpacker.getSupported();
        LOGGER.fine("Filter in making sql is " + filter);

        AttributeType[] attributeTypes = getAttTypes(query);
        int numAttributes = attributeTypes.length;

        StringBuffer sqlStatement = new StringBuffer("SELECT ");
        if (!fidMapper.returnFIDColumnsAsAttributes()) {
            for (int i = 0; i < fidMapper.getColumnCount(); i++) {
                sqlStatement.append(fidMapper.getColumnName(i));
                if (numAttributes > 0 || i < (fidMapper.getColumnCount() - 1))
                    sqlStatement.append(", ");
            }
        }

        LOGGER.finer("making sql for " + numAttributes + " attributes");

        for (int i = 0; i < numAttributes; i++) {
            String curAttName = attributeTypes[i].getName();

            if (Geometry.class.isAssignableFrom(attributeTypes[i].getType())) {
                sqlStatement.append(", AsText(force_2d(\"" + curAttName + "\"))");
            } else {
                sqlStatement.append(", \"" + curAttName + "\"");
            }
        }

        String where = "";

        if (filter != null) {
            try {
                where = encoder.encode(filter);
            } catch (SQLEncoderException sqle) {
                String message = "Encoder error: " + sqle.getMessage();
                LOGGER.warning(message);
                throw new DataSourceException(message, sqle);
            }
        }

        //int limit = HARD_MAX_FEATURES;
        String limit = "";

        if (useLimit) {
            limit = " LIMIT " + query.getMaxFeatures();
        }

        sqlStatement.append(" FROM \"" + tableName + "\" " + where + limit + ";");
        LOGGER.fine("sql statement is " + sqlStatement);

        return sqlStatement.toString();
    }

    /**
     * Creates a SQL statement for the PostGIS database.
     *
     * @return Full SQL statement.
     * @throws IOException if there are problems encoding the sql.
     *
     * @task REVISIT: faithfully use sqlBuilder
     */
    public String makeSql(Query query) throws IOException {
        //one to one relationship for now, so typeName is not used.
        //String tableName = query.getTypeName();

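        // LIMIT can only be pushed down to the database when the whole filter
        // is encodable as SQL; otherwise rows removed by post-filtering would
        // make the limit incorrect.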
        Filter encodableFilter = sqlBuilder.getPreQueryFilter(query.getFilter());
        Filter unEncodableFilter = sqlBuilder.getPostQueryFilter(query.getFilter());
        boolean useLimit = (unEncodableFilter == null || unEncodableFilter
                .equals(Filter.INCLUDE));

        LOGGER.fine("Filter in making sql is " + encodableFilter);

        AttributeType[] attributeTypes = getAttTypes(query);
        int numAttributes = attributeTypes.length;

        StringBuffer sqlStatement = new StringBuffer("SELECT ");
        if (!fidMapper.returnFIDColumnsAsAttributes()) {
            for (int i = 0; i < fidMapper.getColumnCount(); i++) {
                sqlStatement.append(fidMapper.getColumnName(i));
                if (numAttributes > 0 || i < (fidMapper.getColumnCount() - 1))
                    sqlStatement.append(", ");
            }
        }

        LOGGER.finer("making sql for " + numAttributes + " attributes");

        for (int i = 0; i < numAttributes; i++) {
            String curAttName = attributeTypes[i].getName();

            if (Geometry.class.isAssignableFrom(attributeTypes[i].getType())) {
                sqlStatement.append(", AsText(force_2d(\"" + curAttName + "\"))");
            } else {
                sqlStatement.append(", \"" + curAttName + "\"");
            }
        }

        String where = "";

        if (encodableFilter != null) {
            try {
                where = encoder.encode(encodableFilter);
            } catch (SQLEncoderException sqle) {
                String message = "Encoder error: " + sqle.getMessage();
                LOGGER.warning(message);
                throw new DataSourceException(message, sqle);
            }
        }

        //int limit = HARD_MAX_FEATURES;
        String limit = "";

        if (useLimit) {
            limit = " LIMIT " + query.getMaxFeatures();
        }

        sqlBuilder.sqlFrom(sqlStatement, tableName);
        sqlStatement.append(where + limit + ";");
        LOGGER.fine("sql statement is " + sqlStatement);

        return sqlStatement.toString();
    }

//    /**
//     * Prepends the tablename (featureType) on to featureIds that start with
//     * digits.
//     *
//     * @param featureId A featureId string to be prepended with tablename if
//     *        needed.
//     *
//     * @return the prepended feature Id.
//     */
//    protected String createFid(String featureId) {
//        String newFid;
//
//        if (Character.isDigit(featureId.charAt(0))) {
//            //so prepend the table name.
//            newFid = tableName + "." + featureId;
//        } else {
//            newFid = featureId;
//        }
//
//        return newFid;
//    }

    /**
     * Gets the attribute types from the query. If all properties are
     * requested then this returns all attribute types of this feature type's
     * schema. If only certain propertyNames are requested then this returns
     * the corresponding attribute types, throwing an exception if they cannot
     * be found.
     *
     * @param query contains the propertyNames.
     *
     * @return the array of attribute types to be returned by getFeature.
     *
     * @throws IOException if query contains a propertyName that is not a part
     *         of this type's schema.
     */
    private AttributeType[] getAttTypes(Query query) throws IOException {
        AttributeType[] schemaTypes = getSchema().getAttributeTypes();

        if (query.retrieveAllProperties()) {
            return schemaTypes;
        } else {
            List attNames = Arrays.asList(query.getPropertyNames());
            AttributeType[] retAttTypes = new AttributeType[attNames.size()];
            int retPos = 0;

            for (int i = 0, n = schemaTypes.length; i < n; i++) {
                String schemaTypeName = schemaTypes[i].getName();

                if (attNames.contains(schemaTypeName)) {
                    retAttTypes[retPos++] = schemaTypes[i];
                }
            }

            //TODO: better error reporting, and completely test this method.
            if (attNames.size() != retPos) {
                String msg = "attempted to request a property, "
                        + attNames.get(0)
                        + " that is not part of the schema ";
                throw new IOException(msg);
            }

            return retAttTypes;
        }
    }

    public Envelope getBounds() throws IOException {
        return getBounds(Query.ALL);
    }

    /**
     * Retrieve the bounds of the Query results.
     *
     * <p>
     * This implementation computes the bounds directly in the database when
     * the query's filter can be fully encoded as SQL; if post-processing of
     * the filter would be required it returns null, in which case callers
     * should fall back on getFeatures( query ).getBounds() instead.
     * </p>
     *
     * @param query Query we are requesting the bounds of
     *
     * @return the bounds of the query results, or null if they could not be
     *         determined in an optimized fashion.
     *
     * @throws IOException if the bounds could not be retrieved from the
     *         database.
     */
    public Envelope getBounds(Query query) throws IOException {
        return bounds(query);
    }

    // TODO: change this so that it queries for just the bbox instead of the entire sub-query schema columns!
    // (this is harder than you might think because of filter requirements!)
    //
    protected ReferencedEnvelope bounds(Query query) throws IOException {
        Filter filter = query.getFilter();

        if (filter == Filter.EXCLUDE) {
            return new ReferencedEnvelope(new Envelope(),
                    query.getCoordinateSystem());
        }

        FeatureType schema = getSchema();
        JDBCDataStore jdbc = (JDBCDataStore) getJDBCDataStore();
        SQLBuilder sqlBuilder = jdbc.getSqlBuilder(schema.getTypeName());

        Filter postQueryFilter = sqlBuilder.getPostQueryFilter(query.getFilter());
        if (postQueryFilter != null && !postQueryFilter.equals(Filter.INCLUDE)) {
            // this would require postprocessing the filter
            // so we cannot optimize
            return null;
        }

        Connection conn = null;

        try {
            conn = getConnection();

            Envelope retEnv = new Envelope();
            Filter preFilter = sqlBuilder.getPreQueryFilter(query.getFilter());
            AttributeType[] attributeTypes = schema.getAttributeTypes();
            FeatureType schemaNew = schema;
            //DJB: this should ensure that schema has a geometry in it or the bounds query has no chance of working
            if (!query.retrieveAllProperties()) {
                try {
                    schemaNew = DataUtilities.createSubType(schema,
                            query.getPropertyNames());
                    if (schemaNew.getDefaultGeometry() == null) // does the sub-schema have a geometry in it?
                    {
                        //uh-oh better get one!
                        if (schema.getDefaultGeometry() != null) // does the entire schema have a geometry in it?
                        {
                            //buff-up the sub-schema so it has the default geometry in it.
                            ArrayList al = new ArrayList(Arrays.asList(query
                                    .getPropertyNames()));
                            al.add(schema.getDefaultGeometry().getName());
                            schemaNew = DataUtilities.createSubType(schema,
                                    (String[]) al.toArray(new String[1]));
                        }
                    }
                } catch (SchemaException e1) {
                    throw new DataSourceException("Could not create subtype", e1);
                }
            }
            // at this point, the query should have a geometry in it.
            // BUT, if there's no geometry in the table, then the query will not (obviously) have a geometry in it.

            attributeTypes = schemaNew.getAttributeTypes();

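            // Union the extents of every geometry column in the (possibly
            // reduced) schema; if any column's extent cannot be computed, give
            // up on the optimization and return null.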
            for (int j = 0, n = schemaNew.getAttributeCount(); j < n; j++) {
                if (Geometry.class.isAssignableFrom(attributeTypes[j].getType())) // same as .isGeometry() - see new featuretype javadoc
                {
                    String attName = attributeTypes[j].getName();
                    Envelope curEnv = getEnvelope(conn, attName, sqlBuilder,
                            filter);

                    if (curEnv == null) {
                        return null;
                    }

                    retEnv.expandToInclude(curEnv);
                }
            }

            LOGGER.finer("returning bounds " + retEnv);

            // handle reprojection and crs forcing
            CoordinateReferenceSystem base = null;
            if (query.getCoordinateSystem() != null)
                base = query.getCoordinateSystem();
            else if (schemaNew.getDefaultGeometry() != null)
                base = schemaNew.getDefaultGeometry().getCoordinateSystem();
            CoordinateReferenceSystem dest = query.getCoordinateSystemReproject();

            ReferencedEnvelope result = new ReferencedEnvelope(retEnv, base);
            if (base != null && dest != null)
                result = result.transform(dest, true);
            return result;
        } catch (SQLException sqlException) {
            JDBCUtils.close(conn, transaction, sqlException);
            conn = null;
            throw new DataSourceException("Could not get bounds "
                    + query.getHandle(), sqlException);
        } catch (SQLEncoderException e) {
            // could not encode the bounds query,
            // but at least we did not break the connection
            return null;
        } catch (ParseException parseE) {
            String message = "Could not read geometry: " + parseE.getMessage();
            LOGGER.warning(message);

            return null;
        } catch (FactoryException e) {
            throw new DataSourceException("Could not reproject", e);
        } catch (TransformException e) {
            throw new DataSourceException("Could not reproject", e);
        } finally {
            JDBCUtils.close(conn, transaction, null);
        }
    }

    //REVISIT: do we want maxFeatures here too? If we don't have maxFeatures then the answer
    //is still always going to be right (and guaranteed to be right, as opposed to two selects
    //that could be slightly different). And the performance hit shouldn't be all that much.
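    /**
     * Computes the envelope of a single geometry column, using the
     * estimated_extent() statistics when no filter applies (and the data store
     * allows it), or an Extent() aggregate over the filtered rows otherwise.
     * Returns null if the database reports no extent.
     */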
    protected Envelope getEnvelope(Connection conn, String geomName,
            SQLBuilder sqlBuilder, Filter filter) throws SQLException,
            SQLEncoderException, IOException, ParseException {
        String typeName = getSchema().getTypeName();

        StringBuffer sql = new StringBuffer();
        sql.append("SELECT AsText(force_2d(Envelope(");

        //check if we can apply the estimated_extent optimization
        boolean useEstimatedExtent = (filter == null || filter == Filter.INCLUDE)
                && ((PostgisDataStore) getDataStore()).isEstimatedExtent();

        if (useEstimatedExtent) {
            sql.append("estimated_extent(");
            sql.append("'" + typeName + "','" + geomName + "'))));");
        } else {
            sql.append("Extent(\"" + geomName + "\")))) ");
            sqlBuilder.sqlFrom(sql, typeName);
            sqlBuilder.sqlWhere(sql, filter);
        }

        LOGGER.fine("SQL: " + sql);

        Statement statement = conn.createStatement();
        ResultSet results = statement.executeQuery(sql.toString());
        results.next();

        String wkt = results.getString(1);
        Envelope retEnv = null;

        if (wkt == null) {
            return null;
        } else {
            retEnv = geometryReader.read(wkt).getEnvelopeInternal();
        }

        results.close();
        statement.close();

        return retEnv;
    }

}