Source Code Cross Referenced for ArcSDEQuery.java in » GIS » GeoTools-2.4.1 » org » geotools » arcsde » data



0001:        /*
0002:         *    Geotools2 - OpenSource mapping toolkit
0003:         *    http://geotools.org
0004:         *    (C) 2002-2006, Geotools Project Management Committee (PMC)
0005:         *
0006:         *    This library is free software; you can redistribute it and/or
0007:         *    modify it under the terms of the GNU Lesser General Public
0008:         *    License as published by the Free Software Foundation;
0009:         *    version 2.1 of the License.
0010:         *
0011:         *    This library is distributed in the hope that it will be useful,
0012:         *    but WITHOUT ANY WARRANTY; without even the implied warranty of
0013:         *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
0014:         *    Lesser General Public License for more details.
0015:         *
0016:         */
0017:        package org.geotools.arcsde.data;
0018:
0019:        import java.io.ByteArrayOutputStream;
0020:        import java.io.IOException;
0021:        import java.io.PrintStream;
0022:        import java.util.ArrayList;
0023:        import java.util.Arrays;
0024:        import java.util.HashSet;
0025:        import java.util.Iterator;
0026:        import java.util.List;
0027:        import java.util.NoSuchElementException;
0028:        import java.util.Set;
0029:        import java.util.logging.Level;
0030:        import java.util.logging.Logger;
0031:
0032:        import net.sf.jsqlparser.statement.select.PlainSelect;
0033:
0034:        import org.geotools.arcsde.filter.FilterToSQLSDE;
0035:        import org.geotools.arcsde.filter.GeometryEncoderException;
0036:        import org.geotools.arcsde.filter.GeometryEncoderSDE;
0037:        import org.geotools.arcsde.pool.ArcSDEConnectionPool;
0038:        import org.geotools.arcsde.pool.ArcSDEPooledConnection;
0039:        import org.geotools.data.DataSourceException;
0040:        import org.geotools.data.DataUtilities;
0041:        import org.geotools.data.DefaultQuery;
0042:        import org.geotools.data.Query;
0043:        import org.geotools.data.jdbc.FilterToSQLException;
0044:        import org.geotools.feature.AttributeType;
0045:        import org.geotools.feature.FeatureType;
0046:        import org.geotools.feature.SchemaException;
0047:        import org.geotools.filter.visitor.DefaultFilterVisitor;
0048:        import org.geotools.filter.visitor.PostPreProcessFilterSplittingVisitor;
0049:        import org.opengis.filter.Filter;
0050:        import org.opengis.filter.expression.PropertyName;
0051:
0052:        import com.esri.sde.sdk.client.SeException;
0053:        import com.esri.sde.sdk.client.SeExtent;
0054:        import com.esri.sde.sdk.client.SeFilter;
0055:        import com.esri.sde.sdk.client.SeLayer;
0056:        import com.esri.sde.sdk.client.SeQuery;
0057:        import com.esri.sde.sdk.client.SeQueryInfo;
0058:        import com.esri.sde.sdk.client.SeRow;
0059:        import com.esri.sde.sdk.client.SeSqlConstruct;
0060:        import com.esri.sde.sdk.client.SeTable;
0061:        import com.vividsolutions.jts.geom.Envelope;
0062:
0063:        /**
0064:         * Wrapper class for SeQuery to hold a SeConnection until close() is called and
0065:         * provide utility methods.
0066:         *
0067:         * @author Gabriel Roldan, Axios Engineering
0068:         * @source $URL: http://svn.geotools.org/geotools/tags/2.4.1/modules/unsupported/arcsde/datastore/src/main/java/org/geotools/arcsde/data/ArcSDEQuery.java $
0069:         * @version $Id: ArcSDEQuery.java 27863 2007-11-12 20:34:34Z desruisseaux $
0070:         */
0071:        class ArcSDEQuery {
0072:            /** Shared package's logger */
0073:            private static final Logger LOGGER = org.geotools.util.logging.Logging
0074:                    .getLogger(ArcSDEQuery.class.getName());
0075:
0076:            /**
0077:             * The connection to the ArcSDE server, obtained when the SeQuery is
0078:             * first created in <code>getSeQuery</code>. It is retained until
0079:             * <code>close()</code> is called. Do not use it directly, but through
0080:             * <code>getConnection()</code>.
0081:             * <p>
0082:             * NOTE: this member is package visible only for unit testing purposes
0083:             * </p>
0084:             */
0085:            ArcSDEPooledConnection connection = null;
0086:
0087:            /**
0088:             * The exact feature type this query is about to request from the ArcSDE
0089:             * server. It may have fewer attributes than the actual table schema, in
0090:             * which case only those attributes will be requested.
0091:             */
0092:            private FeatureType schema;
0093:
0094:            /**
0095:             * The query built using the constraints given by the geotools Query. It
0096:             * must not be accessed directly, but through <code>getSeQuery()</code>,
0097:             * since it is lazily created
0098:             */
0099:            private SeQuery query;
0100:
0101:            /**
0102:             * Holds the geotools Filter that originated this query, from which the
0103:             * SQL where clause and the set of spatial filters for the ArcSDE Java API
0104:             * are derived
0105:             */
0106:            private ArcSDEQuery.FilterSet filters;
0107:
0108:            /** The lazily calculated result count */
0109:            private int resultCount = -1;
0110:
0111:            /** Strategy used to obtain feature ids for the queried layer */
0112:            private FIDReader fidReader;
0113:
0114:            private Object[] previousRowValues;
0115:
0116:            /**
0117:             * Creates a new SDEQuery object.
0118:             *
0119:             * @param connection the connection attached to the life cycle of this query
0120:             * @param schema the schema with all the attributes as expected.
0121:             * @param filterSet the set of filters (SQL, spatial and unsupported) derived from the query filter
0122:             *
0123:             * @throws DataSourceException DOCUMENT ME!
0124:             *
0125:             * @see prepareQuery
0126:             */
0127:            private ArcSDEQuery(ArcSDEPooledConnection connection,
0128:                    FeatureType schema, FilterSet filterSet, FIDReader fidReader)
0129:                    throws DataSourceException {
0130:                this .connection = connection;
0131:                this .schema = schema;
0132:                this .filters = filterSet;
0133:                this .fidReader = fidReader;
0134:            }
0135:
0136:            /**
0137:             * Creates an ArcSDEQuery for the feature type named in the query.
0138:             *
0139:             * @param store the datastore the query will run against
0140:             * @param query the GeoTools query holding the filter and requested properties
0141:             *
0142:             * @return the new ArcSDEQuery, or null if the query filter is Filter.EXCLUDE
0143:             *
0144:             * @throws IOException if the schema can not be obtained or the query can not be created
0145:             */
0146:            public static ArcSDEQuery createQuery(ArcSDEDataStore store,
0147:                    Query query) throws IOException {
0148:                String typeName = query.getTypeName();
0149:                FeatureType schema = store.getSchema(typeName);
0150:                return createQuery(store, schema, query);
0151:            }
0152:
0153:            /**
0154:             * Creates an ArcSDEQuery for the given schema and query.
0155:             *
0156:             * @param store the datastore the query will run against
0157:             * @param schema the feature type describing the attributes to retrieve
0158:             * @param query the GeoTools query holding the filter and requested properties
0159:             *
0160:             * @return the newly created ArcSDEQuery or null if <code>Filter.EXCLUDE ==
0161:             *         query.getFilter()</code>.
0162:             *
0163:             * @throws IOException see <i>throws DataSourceException</i> below.
0164:             * @throws NullPointerException if any of the arguments is null.
0165:             * @throws DataSourceException if a problem occurs obtaining the connection or building the query
0166:             */
0167:            public static ArcSDEQuery createQuery(ArcSDEDataStore store,
0168:                    FeatureType schema, Query query) throws IOException {
0169:                if ((store == null) || (schema == null) || (query == null)) {
0170:                    throw new NullPointerException("store=" + store
0171:                            + ", schema=" + schema + ", query=" + query);
0172:                }
0173:
0174:                Filter filter = query.getFilter();
0175:
0176:                if (filter == Filter.EXCLUDE) {
0177:                    return null;
0178:                }
0179:
0180:                LOGGER.fine("Creating new ArcSDEQuery");
0181:
0182:                final ArcSDEConnectionPool pool = store.getConnectionPool();
0183:
0184:                final ArcSDEQuery sdeQuery;
0185:                final String typeName = schema.getTypeName();
0186:                ArcSDEPooledConnection conn = null;
0187:                final boolean isInprocessView = store.isView(typeName);
0188:                final FIDReader fidReader;
0189:                final SeLayer sdeLayer;
0190:                final SeQueryInfo definitionQuery;
0191:
0192:                try {
0193:                    if (isInprocessView) {
0194:                        fidReader = FIDReader.NULL_READER;
0195:                        definitionQuery = store.getViewQueryInfo(typeName);
0196:                        //the first table has to be the main layer
0197:                        String layerName;
0198:                        try {
0199:                            layerName = definitionQuery.getConstruct()
0200:                                    .getTables()[0];
0201:                            //@REVISIT: HACK HERE!, look how to get rid of alias in query info, or
0202:                            //better stop using queryinfo as definition query and use the PlainSelect,
0203:                            //then construct the query info dynamically when needed?
0204:                            if (layerName.indexOf(" AS") > 0) {
0205:                                layerName = layerName.substring(0, layerName
0206:                                        .indexOf(" AS"));
0207:                            }
0208:                        } catch (SeException e) {
0209:                            throw new DataSourceException("shouldn't happen: "
0210:                                    + e.getMessage(), e);
0211:                        }
0212:                        conn = pool.getConnection();
0213:                        sdeLayer = pool.getSdeLayer(conn, layerName);
0214:                    } else {
0215:                        definitionQuery = null;
0216:                        //sdeLayer = pool.getSdeLayer(conn, typeName);
0217:                        sdeLayer = pool.getSdeLayer(typeName);
0218:                        conn = pool.getConnection();
0219:                        fidReader = FIDReader.getFidReader(conn, sdeLayer);
0220:                    }
0221:
0222:                    //guess which properties actually need to be retrieved.
0223:                    List queryColumns = getQueryColumns(query, schema);
0224:
0225:                    /*Simple*/FeatureType querySchema = null;
0226:
0227:                    //TODO: create attributes with namespace when switching to GeoAPI FM
0228:                    //        String ns = store.getNamespace() == null? null : store.getNamespace().toString();
0229:                    //        AttributeName[] attNames = new AttributeName[queryColumns.size()];
0230:                    //
0231:                    //        for (int i = 0; i < queryColumns.size(); i++) {
0232:                    //            String colName = (String) queryColumns.get(i);
0233:                    //            attNames[i] = new org.geotools.util.AttributeName(ns, colName);
0234:                    //        }
0235:
0236:                    String[] attNames = (String[]) queryColumns
0237:                            .toArray(new String[queryColumns.size()]);
0238:
0239:                    try {
0240:                        //create the resulting feature type for the real attributes to retrieve
0241:                        querySchema = DataUtilities.createSubType(schema,
0242:                                attNames);
0243:                    } catch (SchemaException ex) {
0244:                        throw new DataSourceException(
0245:                                "Some requested attributes do not match the table schema: "
0246:                                        + ex.getMessage(), ex);
0247:                    }
0248:
0249:                    //create the set of filters to work over
0250:                    ArcSDEQuery.FilterSet filterSet = createFilters(sdeLayer,
0251:                            querySchema, filter, definitionQuery, store
0252:                                    .getViewSelectStatement(typeName),
0253:                            fidReader);
0254:
0255:                    sdeQuery = new ArcSDEQuery(conn, querySchema, filterSet,
0256:                            fidReader);
0257:                } catch (Throwable t) {
0258:                    //something went wrong while creating this connection.  Be sure and close out the
0259:                    //ArcSDE connection that we opened up.
0260:                    if (conn != null)
0261:                        conn.close();
0262:                    if (t instanceof  IOException) {
0263:                        throw (IOException) t;
0264:                    } else {
0265:                        throw new DataSourceException(
0266:                                "Error while creating ArcSDE Connection: "
0267:                                        + t.getMessage(), t);
0268:                    }
0269:                }
0270:                return sdeQuery;
0271:            }
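            /*
             * [Editorial sketch, not part of the original source] Typical read cycle
             * using this class, assuming an ArcSDEDataStore `store` and a GeoTools
             * Query `gtQuery` are at hand (names are illustrative only):
             *
             *     ArcSDEQuery sdeQuery = ArcSDEQuery.createQuery(store, gtQuery);
             *     if (sdeQuery != null) { // null when gtQuery.getFilter() == Filter.EXCLUDE
             *         try {
             *             sdeQuery.execute();
             *             SdeRow row;
             *             while ((row = sdeQuery.fetch()) != null) {
             *                 // process the row values
             *             }
             *         } finally {
             *             sdeQuery.close(); // returns the pooled connection
             *         }
             *     }
             */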
0272:
0273:            /**
0274:             * Returns the FID reading strategy in use.
0275:             *
0276:             * @return the FIDReader used to obtain feature ids for this query
0277:             */
0278:            public FIDReader getFidReader() {
0279:                return this .fidReader;
0280:            }
0281:
0282:            /**
0283:             * Determines which columns actually need to be retrieved for the given query.
0284:             *
0285:             * @param query the query whose property names and filter are inspected
0286:             * @param schema the full feature type, used when the query requests all properties
0287:             *
0288:             * @return the list of column names to retrieve for the query
0289:             *
0290:             * @throws DataSourceException DOCUMENT ME!
0291:             */
0292:            private static List /*<String>*/getQueryColumns(Query query,
0293:                    final FeatureType schema) throws DataSourceException {
0294:                final HashSet columnNames;
0295:
0296:                String[] queryColumns = query.getPropertyNames();
0297:
0298:                if ((queryColumns == null) || (queryColumns.length == 0)) {
0299:                    List attNames = Arrays.asList(schema.getAttributeTypes());
0300:
0301:                    columnNames = new HashSet(attNames.size());
0302:
0303:                    for (Iterator it = attNames.iterator(); it.hasNext();) {
0304:                        AttributeType att = (AttributeType) it.next();
0305:                        String attName = att.getLocalName();
0306:                        //de namespace-ify the names
0307:                        if (attName.indexOf(":") != -1) {
0308:                            attName = attName
0309:                                    .substring(attName.indexOf(":") + 1);
0310:                        }
0311:                        columnNames.add(attName);
0312:                    }
0313:                } else {
0314:                    columnNames = new HashSet();
0315:                    columnNames.addAll(Arrays.asList(queryColumns));
0316:                }
0317:
0318:                Filter f = query.getFilter();
0319:                if (f != null) {
0320:                    Set s = new HashSet();
0321:                    f.accept(new DefaultFilterVisitor() {
0322:                        public Object visit(PropertyName expr, Object data) {
0323:                            String attName = expr.getPropertyName();
0324:                            if (attName.indexOf(":") != -1) {
0325:                                attName = attName.substring(attName
0326:                                        .indexOf(":") + 1);
0327:                            }
0328:                            columnNames.add(attName);
0329:                            return data;
0330:                        }
0331:                    }, s);
0332:                }
0333:
0334:                List ret = new ArrayList();
0335:                ret.addAll(columnNames);
0336:                return ret;
0337:            }
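            /*
             * [Editorial note, not part of the original source] For example, a Query
             * with propertyNames = {"NAME"} and a filter of BBOX(SHAPE, ...) yields
             * the column list {"NAME", "SHAPE"}: a column referenced only by the
             * filter is added so the filter can be evaluated, and namespace prefixes
             * (e.g. "topp:NAME") are stripped before the names are returned.
             */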
0338:
0339:            /**
0340:             * Creates the FilterSet used to split the given filter for the given layer.
0341:             *
0342:             * @param layer the ArcSDE layer the filter applies to
0343:             * @param schema the feature type the filter refers to
0344:             * @param filter the GeoTools filter to split into supported and unsupported parts
0345:             *
0346:             * @return the FilterSet holding the SQL, spatial and unsupported portions of the filter
0347:             *
0348:             * @throws NoSuchElementException DOCUMENT ME!
0349:             * @throws IOException DOCUMENT ME!
0350:             */
0351:            public static ArcSDEQuery.FilterSet createFilters(SeLayer layer,
0352:                    FeatureType schema, Filter filter, SeQueryInfo qInfo,
0353:                    PlainSelect viewSelect, FIDReader fidReader)
0354:                    throws NoSuchElementException, IOException {
0355:
0356:                ArcSDEQuery.FilterSet filters = new ArcSDEQuery.FilterSet(
0357:                        layer, filter, schema, qInfo, viewSelect, fidReader);
0358:
0359:                return filters;
0360:            }
0361:
0362:            /**
0363:             * Returns the stream used to fetch rows, creating it if it was not yet
0364:             * created.
0365:             *
0366:             *
0367:             * @throws SeException
0368:             * @throws IOException
0369:             */
0370:            private SeQuery getSeQuery() throws SeException, IOException {
0371:                if (this .query == null) {
0372:                    try {
0373:                        String[] propsToQuery = fidReader
0374:                                .getPropertiesToFetch(this .schema);
0375:                        this .query = createSeQueryForFetch(connection,
0376:                                propsToQuery);
0377:                    } catch (DataSourceException e) {
0378:                        throw e;
0379:                    } catch (IOException e) {
0380:                        throw e;
0381:                    } catch (SeException e) {
0382:                        e.printStackTrace();
0383:                        throw e;
0384:                    }
0385:                }
0386:                return this .query;
0387:            }
0388:
0389:            /**
0390:             * creates an SeQuery with the filters provided to the constructor and
0391:             * returns it.  Queries created with this method can be used to execute and
0392:             * fetch results.  They cannot be used for other operations, such as
0393:             * calculating layer extents, or result count.
0394:             * <p> 
0395:             * Difference with {@link #createSeQueryForQueryInfo(ArcSDEPooledConnection)}
0396:             * is that this method tells <code>SeQuery.setSpatialConstraints</code> to
0397:             * NOT return geometry based bitmasks, which are needed for calculating the
0398:             * query extent and result count, but not for fetching SeRows.
0399:             * </p>
0400:             *
0401:             * @param connection the connection used to create the query
0402:             * @param propertyNames names of attributes to build the query for,
0403:             *        respecting order
0404:             *
0405:             * @return the prepared SeQuery, ready to be executed and fetched from
0406:             *
0407:             * @throws SeException if the ArcSDE Java API throws it while creating the
0408:             *         SeQuery or setting the spatial constraints on it.
0409:             * @throws DataSourceException if the query info can not be obtained from the filters
0410:             */
0411:            private SeQuery createSeQueryForFetch(
0412:                    ArcSDEPooledConnection connection, String[] propertyNames)
0413:                    throws SeException, DataSourceException {
0414:                if (LOGGER.isLoggable(Level.FINE)) {
0415:                    LOGGER
0416:                            .fine("constructing new sql query with connection: "
0417:                                    + connection
0418:                                    + ", propnames: "
0419:                                    + java.util.Arrays.asList(propertyNames)
0420:                                    + " sqlConstruct where clause: '"
0421:                                    + this .filters.getSeSqlConstruct()
0422:                                            .getWhere() + "'");
0423:                }
0424:
0425:                SeQuery query = new SeQuery(connection);
0426:
0427:                SeQueryInfo qInfo = filters.getQueryInfo(propertyNames);
0428:                if (LOGGER.isLoggable(Level.FINER)) {
0429:                    String msg = "ArcSDE query is: " + toString(qInfo);
0430:                    LOGGER.finer(msg);
0431:                }
0432:                try {
0433:                    query.prepareQueryInfo(qInfo);
0434:                } catch (SeException e) {
0435:                    // HACK: a DATABASE LEVEL ERROR (code -51) occurs when using
0436:                    // prepareQueryInfo but the geometry att is not required in the list
0437:                    // of properties to retrieve, and thus propertyNames contains
0438:                    // SHAPE.fid as a last resort to get a fid
0439:                    if (-51 == e.getSeError().getSdeError()) {
0440:                        query.close();
0441:                        query = new SeQuery(connection, propertyNames, filters
0442:                                .getSeSqlConstruct());
0443:                        query.prepareQuery();
0444:                    } else {
0445:                        throw e;
0446:                    }
0447:                }
0448:
0449:                SeFilter[] spatialConstraints = this .filters
0450:                        .getSpatialFilters();
0451:                if (spatialConstraints.length > 0) {
0452:                    final boolean setReturnGeometryMasks = false;
0453:                    query.setSpatialConstraints(SeQuery.SE_OPTIMIZE,
0454:                            setReturnGeometryMasks, spatialConstraints);
0455:                }
0456:
0457:                return query;
0458:            }
0459:
0460:            private String toString(SeQueryInfo qInfo) {
0461:                StringBuffer sb = new StringBuffer("SeQueryInfo[\n\tcolumns=");
0462:                try {
0463:                    SeSqlConstruct sql = qInfo.getConstruct();
0464:                    String[] tables = sql.getTables();
0465:                    String[] cols = qInfo.getColumns();
0466:                    String by = null;
0467:                    try {
0468:                        by = qInfo.getByClause();
0469:                    } catch (NullPointerException npe) {
0470:                        //no-op
0471:                    }
0472:                    String where = sql.getWhere();
0473:                    for (int i = 0; cols != null && i < cols.length; i++) {
0474:                        sb.append(cols[i]);
0475:                        if (i < cols.length - 1)
0476:                            sb.append(", ");
0477:                    }
0478:                    sb.append("\n\tTables=");
0479:                    for (int i = 0; i < tables.length; i++) {
0480:                        sb.append(tables[i]);
0481:                        if (i < tables.length - 1)
0482:                            sb.append(", ");
0483:                    }
0484:                    sb.append("\n\tWhere=");
0485:                    sb.append(where);
0486:                    sb.append("\n\tOrderBy=");
0487:                    sb.append(by);
0488:                } catch (SeException e) {
0489:                    sb.append("Exception retrieving query info properties: "
0490:                            + e.getMessage());
0491:                }
0492:                sb.append("]");
0493:                return sb.toString();
0494:            }
0495:
0496:            /**
0497:             * creates an SeQuery with the filters provided to the constructor and
0498:             * returns it.  Queries created with this method are to be used for
0499:             * calculating layer extents and result counts.  These queries cannot
0500:             * be executed or used to fetch results.
0501:             * <p> 
0502:             * Difference with {@link #createSeQueryForFetch(ArcSDEPooledConnection, String[])}
0503:             * is that this method tells <code>SeQuery.setSpatialConstraints</code> to
0504:             * return geometry based bitmasks, which are needed for calculating the
0505:             * query extent and result count, but not for fetching SeRows.
0506:             * </p>
0507:             * 
0508:             *
0509:             * @param connection the connection used to create the query
0510:             *
0511:             * @return the prepared SeQuery, suitable for calculating the query
0512:             *         extent and result count, but not for fetching rows
0513:             *
0514:             * @throws SeException if the ArcSDE Java API throws it while creating the
0515:             *         SeQuery or setting the spatial constraints on it.
0516:             *
0517:             * @throws DataSourceException if the spatial constraints can not be obtained from the filter set
0518:             */
0519:            private SeQuery createSeQueryForQueryInfo(
0520:                    ArcSDEPooledConnection connection) throws SeException,
0521:                    DataSourceException {
0522:
0523:                SeQuery query = new SeQuery(connection);
0524:
0525:                SeFilter[] spatialConstraints = this .filters
0526:                        .getSpatialFilters();
0527:
0528:                if (spatialConstraints.length > 0) {
0529:                    final boolean setReturnGeometryMasks = true;
0530:                    query.setSpatialConstraints(SeQuery.SE_OPTIMIZE,
0531:                            setReturnGeometryMasks, spatialConstraints);
0532:                }
0533:
0534:                return query;
0535:            }
0536:
0537:            /**
0538:             * Returns the schema of the originating Query
0539:             *
0540:             * @return the schema of the originating Query
0541:             */
0542:            public FeatureType getSchema() {
0543:                return this .schema;
0544:            }
0545:
0546:            /**
0547:             * Returns the set of filters this query operates on.
0548:             *
0549:             * @return the FilterSet derived from the originating query filter
0550:             */
0551:            public ArcSDEQuery.FilterSet getFilters() {
0552:                return this .filters;
0553:            }
0554:
0555:            /**
0556:             * Convenience method to calculate the result count of a given query.
0557:             *
0558:             * @param ds
0559:             * @param query
0560:             *
0561:             *
0562:             * @throws IOException
0563:             */
0564:            public static int calculateResultCount(ArcSDEDataStore ds,
0565:                    Query query) throws IOException {
0566:                ArcSDEQuery countQuery = createQuery(ds, query);
0567:                int count;
0568:                try {
0569:                    count = countQuery.calculateResultCount();
0570:                } finally {
0571:                    countQuery.close();
0572:                }
0573:                return count;
0574:            }
0575:
0576:            /**
0577:             * Convenience method to calculate the resulting bounding box of a given
0578:             * query.
0579:             *
0580:             * @param ds the datastore the query runs against
0581:             * @param query the query whose extent is to be calculated
0582:             *
0583:             * @return the envelope of the query results, or null if it can not be
0584:             *         calculated because the filter contains unsupported components
0585:             * @throws IOException if an error occurs talking to the ArcSDE server
0586:             */
0587:            public static Envelope calculateQueryExtent(ArcSDEDataStore ds,
0588:                    Query query) throws IOException {
0589:                FeatureType queryFt = ds.getSchema(query.getTypeName());
0590:                HashSet pnames = new HashSet();
0591:
0592:                if (query.getPropertyNames() == null) {
0593:                    //fetch the first attribute?
0594:                    pnames.add(queryFt.getAttributeTypes()[0].getLocalName());
0595:                } else {
0596:                    // add the individual property names, if they're actually spelled out specifically
0597:                    pnames.addAll(Arrays.asList(query.getPropertyNames()));
0598:                }
0599:                if (!pnames.contains(queryFt.getDefaultGeometry()
0600:                        .getLocalName())) {
0601:                    //we're calculating the bounds, so we'd better be sure and add the spatial
0602:                    //column to the query's propertynames
0603:                    pnames.add(queryFt.getDefaultGeometry().getLocalName());
0604:                    DefaultQuery realQuery = new DefaultQuery(query);
0605:                    realQuery.setPropertyNames(Arrays.asList(pnames
0606:                            .toArray(new String[pnames.size()])));
0607:                    query = realQuery;
0608:                }
0609:
0610:                ArcSDEQuery boundsQuery = createQuery(ds, query);
0611:                if (boundsQuery.getFilters().getUnsupportedFilter() != Filter.INCLUDE) {
0612:                    //there's a non-sde-db compatible filter in-play here.  We can't really
0613:                    //do an optimized calcQueryExtent in this case.  Have to return null.
0614:                    boundsQuery.close();
0615:                    return null;
0616:                }
0617:                Envelope queryExtent;
0618:                try {
0619:                    queryExtent = boundsQuery.calculateQueryExtent();
0620:                } finally {
0621:                    boundsQuery.close();
0622:                }
0623:                return queryExtent;
0624:            }
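            /*
             * [Editorial sketch, not part of the original source] Using the two static
             * convenience methods above, assuming `ds` is an ArcSDEDataStore and `q` a
             * GeoTools Query (names are illustrative only):
             *
             *     int count = ArcSDEQuery.calculateResultCount(ds, q);
             *     // count == -1 when the filter has portions ArcSDE can not evaluate
             *
             *     Envelope bounds = ArcSDEQuery.calculateQueryExtent(ds, q);
             *     // bounds == null when the filter has portions ArcSDE can not evaluate
             */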
0625:
0626:            /**
0627:             * If the query filter can be fully translated into SDE constraints (a SQL
0628:             * where clause and/or spatial filters), the result count is obtained with a
0629:             * single <code>count()</code> table statistics request against the server.
0630:             * If any portion of the filter is not natively supported by ArcSDE, the
0631:             * count can not be pre-calculated and <code>-1</code> is returned, leaving
0632:             * it to the caller to count the features once the unsupported filter has
0633:             * been applied in memory.
0634:             *
0635:             * @return the number of matching features, or -1 if it can not be
0636:             *         pre-calculated on the server
0637:             *
0638:             * @throws IOException DOCUMENT ME!
0639:             * @throws DataSourceException DOCUMENT ME!
0640:             */
0641:            public int calculateResultCount() throws IOException {
0642:                LOGGER.fine("about to calculate result count");
0643:
0644:                if (this .resultCount == -1) {
0645:                    String aFieldName = "*";
0646:                    String[] columns = { aFieldName };
0647:
0648:                    SeQuery countQuery = null;
0649:
0650:                    if (filters.getUnsupportedFilter() == Filter.INCLUDE) {
0651:                        //there's nothing to filter post-db, so we're clear to do the result count
0652:                        //by sending a query to the db and completely trusting the result.
0653:
0654:                        try {
0655:                            countQuery = createSeQueryForQueryInfo(connection);
0656:                            SeQueryInfo qInfo = filters.getQueryInfo(columns);
0657:
0658:                            SeTable.SeTableStats tableStats = countQuery
0659:                                    .calculateTableStatistics(
0660:                                            aFieldName,
0661:                                            SeTable.SeTableStats.SE_COUNT_STATS,
0662:                                            qInfo, 0);
0663:
0664:                            this .resultCount = tableStats.getCount();
0665:                        } catch (SeException e) {
0666:                            if (LOGGER.isLoggable(Level.FINE)) {
0667:                                LOGGER
0668:                                        .fine("Error calculating result count with SQL where clause: "
0669:                                                + this .filters
0670:                                                        .getSeSqlConstruct()
0671:                                                        .getWhere());
0672:                            }
0673:                            //why throw an exception here?  Just return -1 and let the caller deal with it...
0674:                            //throw new DataSourceException("Calculating result count: " + e.getSeError().getErrDesc(), e);
0675:                        } finally {
0676:                            close(countQuery);
0677:                        }
0678:
0679:                    } else {
0680:                        //well, we've got to filter the results after the query, so
0681:                        //let's not do that twice.  -1 is the best anyone will get
0682:                        //on this one...
0683:                        LOGGER
0684:                                .fine("Non-supported ArcSDE filters included in this query.  Can't pre-calculate result count.");
0685:                    }
0686:                }
0687:
0688:                return this .resultCount;
0689:            }
0690:
0691:            /**
0692:             * Returns the envelope for all features within the layer that pass any SQL
0693:             * construct, state, or spatial constraints for the stream.
0694:             *
0695:             * @return the envelope of the matching features, or null if it could not be calculated
0696:             *
0697:             * @throws IOException DOCUMENT ME!
0698:             * @throws DataSourceException DOCUMENT ME!
0699:             */
0700:            public Envelope calculateQueryExtent() throws IOException {
0701:                Envelope envelope = null;
0702:                SeQuery extentQuery = null;
0703:
0704:                LOGGER
0705:                        .fine("Building a new SeQuery to consult its resulting envelope");
0706:
0707:                try {
0708:                    SeExtent extent = null;
0709:
0710:                    String[] spatialCol = { schema.getDefaultGeometry()
0711:                            .getLocalName() };
0712:
0713:                    extentQuery = createSeQueryForQueryInfo(connection);
0714:
0715:                    SeQueryInfo sdeQueryInfo = filters.getQueryInfo(spatialCol);
0716:
0717:                    extent = extentQuery.calculateLayerExtent(sdeQueryInfo);
0718:
0719:                    envelope = new Envelope(extent.getMinX(), extent.getMaxX(),
0720:                            extent.getMinY(), extent.getMaxY());
0721:                    LOGGER.fine("got extent: " + extent + ", built envelope: "
0722:                            + envelope);
0723:                } catch (SeException ex) {
0724:                    SeSqlConstruct sqlCons = this .filters.getSeSqlConstruct();
0725:                    String sql = (sqlCons == null) ? null : sqlCons.getWhere();
0726:                    if (ex.getSeError().getSdeError() == -288) {
0727:                        //gah, the dreaded 'LOGFILE SYSTEM TABLES DO NOT EXIST' error.
0728:                        //this error is worthless.  Make it quiet, at least.
0729:                        LOGGER
0730:                                .severe("ArcSDE is complaining that your 'LOGFILE SYSTEM TABLES DO NOT EXIST'.  This is an ignorable error.");
0731:                    } else {
0732:                        LOGGER.log(Level.SEVERE, "***********************\n"
0733:                                + ex.getSeError().getErrDesc() + "\nfilter: "
0734:                                + this .filters.getGeometryFilter() + "\nSQL: "
0735:                                + sql, ex);
0736:                    }
0737:                } finally {
0738:                    close(extentQuery);
0739:                }
0740:
0741:                return envelope;
0742:            }
0743:
0744:            /**
0745:             * Silently closes this query.
0746:             *
0747:             * @param query
0748:             */
0749:            private void close(SeQuery query) {
0750:                if (query == null) {
0751:                    return;
0752:                }
0753:
0754:                try {
0755:                    query.close();
0756:                } catch (SeException e) {
0757:                    LOGGER.warning("Closing query: "
0758:                            + e.getSeError().getErrDesc());
0759:                }
0760:            }
0761:
0762:            // //////////////////////////////////////////////////////////////////////
0763:            // //////////// RELEVANT METHODS WRAPPED FROM SeStreamOp ////////////////
0764:            // //////////////////////////////////////////////////////////////////////
0765:
0766:            /**
0767:             * Closes the query and releases the held connection back to the
0768:             * connection pool. After this method returns the query can no longer be
0769:             * used to fetch rows.
0770:             */
0771:            public void close() {
0772:                close(this .query);
0773:                this .query = null;
0774:                if (connection != null) {
0775:                    connection.close();
0776:                    connection = null;
0777:                }
0778:            }
0779:
0780:            /**
0781:             * Tells the server to execute a stream operation.
0782:             *
0783:             * @throws IOException DOCUMENT ME!
0784:             * @throws DataSourceException DOCUMENT ME!
0785:             */
0786:            public void execute() throws IOException {
0787:                try {
0788:                    getSeQuery().execute();
0789:                } catch (SeException e) {
0790:                    throw new DataSourceException(e.getSeError().getErrDesc(),
0791:                            e);
0792:                }
0793:            }
0794:
0795:            /**
0796:             * Flushes any outstanding insert/update buffers.
0797:             *
0798:             * @throws IOException DOCUMENT ME!
0799:             * @throws DataSourceException DOCUMENT ME!
0800:             */
0801:            public void flushBufferedWrites() throws IOException {
0802:                try {
0803:                    getSeQuery().flushBufferedWrites();
0804:                } catch (SeException e) {
0805:                    throw new DataSourceException(e.getSeError().getErrDesc(),
0806:                            e);
0807:                }
0808:            }
0809:
0810:            /**
0811:             * Cancels the current operation on the stream. If <code>reset</code> is
0812:             * TRUE, the query status is set to INACTIVE. If reset is FALSE the query
0813:             * status is set to CLOSED.
0814:             *
0815:             * @param reset if true the query is reset so it can be reused, else it
0816:             *        is closed
0817:             *
0818:             * @throws IOException DOCUMENT ME!
0819:             * @throws DataSourceException DOCUMENT ME!
0820:             */
0821:            public void cancel(boolean reset) throws IOException {
0822:                try {
0823:                    getSeQuery().cancel(reset);
0824:                } catch (SeException e) {
0825:                    throw new DataSourceException(e.getSeError().getErrDesc(),
0826:                            e);
0827:                }
0828:            }
0829:
0830:            /**
0831:             * Sets state constraints for input and output stream operations. If a
0832:             * differences type is specified, then only features different in the way
0833:             * supplied are returned.
0834:             * 
0835:             * <p>
0836:             * differencesType:
0837:             * 
0838:             * <ul>
0839:             * <li>
0840:             * SeState.SE_STATE_DIFF_NOCHECK Returns all features in the source state.
0841:             * It doesn't check the differences between source state and differences
0842:             * state.
0843:             * </li>
0844:             * <li>
0845:             * SeState.SE_STATE_DIFF_NOCHANGE_UPDATE Returns all features that haven't
0846:             * changed in the source state, but have been updated in the differences
0847:             * state.
0848:             * </li>
0849:             * <li>
0850:             * SeState.SE_STATE_DIFF_NOCHANGE_DELETE Returns all features that haven't
0851:             * changed in the source state, but have been deleted in the differences
0852:             * state.
0853:             * </li>
0854:             * <li>
0855:             * SeState.SE_STATE_DIFF_UPDATE_NOCHANGE Returns all features that have
0856:             * been updated in the source state, but unchanged in the differences
0857:             * state.
0858:             * </li>
0859:             * <li>
0860:             * SeState.SE_STATE_DIFF_UPDATE_UPDATE Returns all features that have been
0861:             * updated in both the source and difference states.
0862:             * </li>
0863:             * <li>
0864:             * SeState.SE_STATE_DIFF_UPDATE_DELETE Returns all features that have been
0865:             * updated in the source state but deleted in the difference states.
0866:             * </li>
0867:             * <li>
0868:             * SeState.SE_STATE_DIFF_INSERT Returns all features that were inserted
0869:             * into the source state and that never existed in the differences state.
0870:             * </li>
0871:             * </ul>
0872:             * </p>
0873:             *
0874:             * @param lockActions The id of the state to direct input into and take
0875:             *        output from
0876:             *
0877:             * @throws IOException DOCUMENT ME!
0878:             * @throws DataSourceException DOCUMENT ME!
0879:             */
0880:
0881:            /*
0882:             * public void setState(SeObjectId sourceId, SeObjectId differencesId, int
0883:             * differencesType) throws SeException { getSeQuery().setState(sourceId,
0884:             * differencesId, differencesType); }
0885:             */
0886:
0887:            /**
0888:             * Sets the row locking environment for a stream.
0889:             * 
0890:             * <p>
0891:             * The row locking environment remains in effect until the stream is closed
0892:             * with reset TRUE or the stream is freed. The row lock types are:
0893:             * 
0894:             * <ul>
0895:             * <li>
0896:             * SE_ROWLOCKING_LOCK_ON_QUERY - Rows selected by a query are locked.
0897:             * </li>
0898:             * <li>
0899:             * SE_ROWLOCKING_LOCK_ON_INSERT - New rows are locked when inserted.
0900:             * </li>
0901:             * <li>
0902:             * SE_ROWLOCKING_LOCK_ON_UPDATE - Updated rows are locked.
0903:             * </li>
0904:             * <li>
0905:             * SE_ROWLOCKING_UNLOCK_ON_QUERY - Locks are removed upon query.
0906:             * </li>
0907:             * <li>
0908:             * SE_ROWLOCKING_UNLOCK_ON_UPDATE - Modified rows are unlocked.
0909:             * </li>
0910:             * <li>
0911:             * SE_ROWLOCKING_FILTER_MY_LOCKS - Only rows locked by the user are
0912:             * returned on query.
0913:             * </li>
0914:             * <li>
0915:             * SE_ROWLOCKING_FILTER_OTHER_LOCKS - Only rows locked by other users are
0916:             * returned on query.
0917:             * </li>
0918:             * <li>
0919:             * SE_ROWLOCKING_FILTER_UNLOCKED - Only unlocked rows are returned.
0920:             * </li>
0921:             * <li>
0922:             * SE_ROWLOCKING_LOCK_ONLY - Query operations lock but don't return rows.
0923:             * </li>
0924:             * </ul>
0925:             * </p>
0926:             *
0927:             * @param lockActions DOCUMENT ME!
0928:             *
0929:             * @throws IOException DOCUMENT ME!
0930:             * @throws DataSourceException DOCUMENT ME!
0931:             */
0932:            public void setRowLocking(int lockActions) throws IOException {
0933:                try {
0934:                    getSeQuery().setRowLocking(lockActions);
0935:                } catch (SeException e) {
0936:                    throw new DataSourceException(e.getSeError().getErrDesc(),
0937:                            e);
0938:                }
0939:            }
0940:
0941:            // //////////////////////////////////////////////////////////////////////
0942:            // /////////////// METHODS WRAPPED FROM SeQuery /////////////////////
0943:            // //////////////////////////////////////////////////////////////////////
0944:
0945:            /**
0946:             * Initializes a stream with a query using a selected set of columns and an
0947:             * SeSqlConstruct object for the where clause. The where clause can't
0948:             * contain any ORDER BY or GROUP BY clauses.
0949:             *
0950:             * @throws IOException DOCUMENT ME!
0951:             * @throws DataSourceException DOCUMENT ME!
0952:             */
0953:            public void prepareQuery() throws IOException {
0954:                try {
0955:                    getSeQuery().prepareQuery();
0956:                } catch (SeException e) {
0957:                    throw new DataSourceException(e.getSeError().getErrDesc(),
0958:                            e);
0959:                }
0960:            }
0961:
0962:            /**
0963:             * Fetches an SeRow of data.
0964:             * 
0965:             * @return DOCUMENT ME!
0966:             * 
0967:             * @throws IOException
0968:             *             (DataSourceException) if the fetching fails
0969:             * @throws IllegalStateException
0970:             *             if the query was already closed or {@link #execute()} hasn't
0971:             *             been called yet
0972:             */
0973:            public SdeRow fetch() throws IOException, IllegalStateException {
0974:                if (this .query == null) {
0975:                    throw new IllegalStateException(
0976:                            "query closed or not yet executed");
0977:                }
0978:
0979:                try {
0980:                    SeQuery seQuery = getSeQuery();
0981:                    SeRow row = seQuery.fetch();
0982:                    SdeRow currentRow = (row == null) ? null : new SdeRow(row,
0983:                            previousRowValues);
0984:                    previousRowValues = currentRow == null ? null : currentRow
0985:                            .getAll();
0986:                    return currentRow;
0987:                } catch (SeException e) {
0988:                    close();
0989:                    throw new DataSourceException(e.getSeError().getErrDesc(),
0990:                            e);
0991:                } catch (Exception e) {
0992:                    close();
0993:                    LOGGER.log(Level.SEVERE, "fetching row: " + e.getMessage(),
0994:                            e);
0995:                    throw new DataSourceException("fetching row: "
0996:                            + e.getMessage(), e);
0997:                }
0998:            }
0999:
1000:            /**
1001:             * Sets the spatial filters on the query using SE_OPTIMIZE as the policy
1002:             * for spatial index search
1003:             *
1004:             * @param filters a set of spatial constraints to filter upon
1005:             *
1006:             * @throws IOException DOCUMENT ME!
1007:             * @throws DataSourceException DOCUMENT ME!
1008:             */
1009:            public void setSpatialConstraints(SeFilter[] filters)
1010:                    throws IOException {
1011:                try {
1012:                    getSeQuery().setSpatialConstraints(SeQuery.SE_OPTIMIZE,
1013:                            false, filters);
1014:                } catch (SeException e) {
1015:                    throw new DataSourceException(e.getSeError().getErrDesc(),
1016:                            e);
1017:                }
1018:            }
1019:
1020:            /**
1021:             * Returns a brief description of this query for logging purposes.
1022:             *
1023:             * @return the schema type name and the wrapped SeQuery
1024:             */
1025:            public String toString() {
1026:                return "Schema: " + this .schema.getTypeName() + ", query: "
1027:                        + this .query;
1028:            }
1029:
1030:            /**
1031:             * Splits a GeoTools Filter into the SQL, spatial and unsupported portions used to build the ArcSDE query.
1032:             *
1033:             * @author $author$
1034:             * @version $Revision: 1.9 $
1035:             */
1036:            public static class FilterSet {
1037:                /** The SeQueryInfo defining the in-process view this filter set applies to, if any */
1038:                private SeQueryInfo definitionQuery;
1039:
1040:                private PlainSelect layerSelectStatement;
1041:
1042:                private FIDReader fidReader;
1043:
1044:                /** The ArcSDE layer the filters apply to */
1045:                private final SeLayer sdeLayer;
1046:
1047:                /** The original GeoTools filter this set was built from */
1048:                private final Filter sourceFilter;
1049:
1050:                /** The portion of the filter that can be encoded as a SQL where clause */
1051:                private Filter sqlFilter;
1052:
1053:                /** The portion of the filter that can be encoded as ArcSDE spatial constraints */
1054:                private Filter geometryFilter;
1055:
1056:                /** The portion of the filter not supported natively, to be applied in memory */
1057:                private Filter unsupportedFilter;
1058:
1059:                private FilterToSQLSDE sqlEncoder;
1060:
1061:                /**
1062:                 * Holds the ArcSDE Java API definition of the geometry related filters
1063:                 * this datastore implementation supports natively.
1064:                 */
1065:                private SeFilter[] sdeSpatialFilters;
1066:
1067:                /**
1068:                 * Holds the ArcSDE Java API definition of the <strong>non</strong>
1069:                 * geometry related filters this datastore implementation supports
1070:                 * natively.
1071:                 */
1072:                private SeSqlConstruct sdeSqlConstruct;
1073:
1074:                private FeatureType featureType;
1075:
1076:                /**
1077:                 * Creates a new FilterSet object.
1078:                 *
1079:                 * @param sdeLayer the ArcSDE layer the filters will apply to
1080:                 * @param sourceFilter the GeoTools filter to split
1081:                 */
1082:                public FilterSet(SeLayer sdeLayer, Filter sourceFilter,
1083:                        FeatureType ft, SeQueryInfo definitionQuery,
1084:                        PlainSelect layerSelectStatement, FIDReader fidReader) {
1085:                    assert sdeLayer != null;
1086:                    assert sourceFilter != null;
1087:                    assert ft != null;
1088:
1089:                    this .sdeLayer = sdeLayer;
1090:                    this .sourceFilter = sourceFilter;
1091:                    this .featureType = ft;
1092:                    this .definitionQuery = definitionQuery;
1093:                    this .layerSelectStatement = layerSelectStatement;
1094:                    this .fidReader = fidReader;
1095:                    createGeotoolsFilters();
1096:                }
1097:
1098:                /**
1099:                 * Given the <code>Filter</code> passed to the constructor, unpacks it
1100:                 * into three different filters: one for the supported SQL based filter,
1101:                 * another for the supported Geometry based filter, and the last one
1102:                 * for the unsupported filter. Each of them can be retrieved from its
1103:                 * corresponding getter.
1104:                 */
1105:                private void createGeotoolsFilters() {
1106:                    FilterToSQLSDE sqlEncoder = getSqlEncoder();
1107:
1108:                    PostPreProcessFilterSplittingVisitor unpacker = new PostPreProcessFilterSplittingVisitor(
1109:                            sqlEncoder.getCapabilities(), featureType, null);
1110:                    sourceFilter.accept(unpacker, null);
1111:
1112:                    this .sqlFilter = unpacker.getFilterPre();
1113:
1114:                    if (LOGGER.isLoggable(Level.FINE) && sqlFilter != null)
1115:                        LOGGER.fine("SQL portion of SDE Query: '" + sqlFilter
1116:                                + "'");
1117:
1118:                    Filter remainingFilter = unpacker.getFilterPost();
1119:
1120:                    unpacker = new PostPreProcessFilterSplittingVisitor(
1121:                            GeometryEncoderSDE.getCapabilities(), featureType,
1122:                            null);
1123:                    remainingFilter.accept(unpacker, null);
1124:
1125:                    this .geometryFilter = unpacker.getFilterPre();
1126:                    if (LOGGER.isLoggable(Level.FINE) && geometryFilter != null)
1127:                        LOGGER.fine("Spatial-Filter portion of SDE Query: '"
1128:                                + geometryFilter + "'");
1129:
1130:                    this .unsupportedFilter = unpacker.getFilterPost();
1131:                    if (LOGGER.isLoggable(Level.FINE)
1132:                            && unsupportedFilter != null)
1133:                        LOGGER
1134:                                .fine("Unsupported (and therefore ignored) portion of SDE Query: '"
1135:                                        + unsupportedFilter + "'");
1136:                }
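                /*
                 * [Editorial sketch, not part of the original source] For instance, a
                 * filter like "BBOX(SHAPE, ...) AND NAME = 'foo' AND someCustomFunction(NAME)"
                 * would typically be split as:
                 *   - sqlFilter:         NAME = 'foo'              (encoded into the SQL where clause)
                 *   - geometryFilter:    BBOX(SHAPE, ...)          (encoded as SeFilter spatial constraints)
                 *   - unsupportedFilter: someCustomFunction(NAME)  (left to be applied in memory)
                 */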
1137:
1138:                /**
1139:                 * Returns an SeQueryInfo that can be used to retrieve a set of SeRows from
1140:                 * an ArcSDE layer or a layer with joins. Unless property names are passed in,
1141:                 * the returned SeQueryInfo lacks the set of column names to fetch, and it is
1142:                 * the responsibility of the calling code to call setColumns(String[]) on the
1143:                 * returned object to specify which properties to fetch.
1144:                 *
1145:                 * @param unqualifiedPropertyNames the unqualified names of the properties to fetch, or null to leave the columns unset
1146:                 * @return a new SeQueryInfo with the query tables and where clause set up
1147:                 * @throws SeException if the ArcSDE Java API fails while building the query info
1148:                 * @throws DataSourceException if the SQL filter cannot be encoded to a where clause
1149:                 */
1150:                public SeQueryInfo getQueryInfo(
1151:                        String[] unqualifiedPropertyNames) throws SeException,
1152:                        DataSourceException {
1153:                    String[] tables;
1154:                    String byClause = null;
1155:
1156:                    final SeSqlConstruct plainSqlConstruct = getSeSqlConstruct();
1157:
1158:                    String where = plainSqlConstruct.getWhere();
1159:
1160:                    if (definitionQuery == null) {
1161:                        tables = new String[] { this.sdeLayer
1162:                                .getQualifiedName() };
1163:                    } else {
1164:                        tables = definitionQuery.getConstruct().getTables();
1165:                        String joinWhere = definitionQuery.getConstruct()
1166:                                .getWhere();
1167:                        if (where == null) {
1168:                            where = joinWhere;
1169:                        } else {
1170:                            where = joinWhere == null ? where : (joinWhere
1171:                                    + " AND " + where);
1172:                        }
1173:                        try {
1174:                            byClause = definitionQuery.getByClause();
1175:                        } catch (NullPointerException e) {
1176:                            // ignore: getByClause() apparently throws a NPE when the view query has no by clause set
1177:                        }
1178:                    }
1179:
1180:                    final SeQueryInfo qInfo = new SeQueryInfo();
1181:                    final SeSqlConstruct sqlConstruct = new SeSqlConstruct();
1182:                    sqlConstruct.setTables(tables);
1183:                    if (where != null && where.length() > 0) {
1184:                        sqlConstruct.setWhere(where);
1185:                    }
1186:
1187:                    final int queriedAttCount = unqualifiedPropertyNames == null ? 0
1188:                            : unqualifiedPropertyNames.length;
1189:
1190:                    if (queriedAttCount > 0) {
1191:                        String[] sdeAttNames = new String[queriedAttCount];
1192:                        FilterToSQLSDE sqlEncoder = getSqlEncoder();
1193:
1194:                        for (int i = 0; i < queriedAttCount; i++) {
1195:                            String attName = unqualifiedPropertyNames[i];
1196:                            String coldef = sqlEncoder
1197:                                    .getColumnDefinition(attName);
1198:                            sdeAttNames[i] = coldef;
1199:                        }
1200:                        qInfo.setColumns(sdeAttNames);
1201:                    }
1202:
1203:                    qInfo.setConstruct(sqlConstruct);
1204:                    if (byClause != null) {
1205:                        qInfo.setByClause(byClause);
1206:                    }
1207:                    return qInfo;
1208:                }
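                // Illustrative sketch (editor addition, not part of the original source):
                // when no property names are passed to getQueryInfo, the caller still has to
                // set the columns on the returned SeQueryInfo before executing it.
                // "filterSet" and the column names below are hypothetical.
                //
                //     SeQueryInfo queryInfo = filterSet.getQueryInfo(null);
                //     queryInfo.setColumns(new String[] { "OBJECTID", "SHAPE" });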
1209:
1210:                /**
1211:                 * Lazily creates the SeSqlConstruct for this layer and its SQL based filter.
1212:                 *
1213:                 * @return the SeSqlConstruct corresponding to the given SeLayer and
1214:                 *         SQL based filter. Should never return null.
1215:                 *
1216:                 * @throws DataSourceException if an error occurs encoding the sql
1217:                 *         filter to a SQL where clause, or creating the
1218:                 *         SeSqlConstruct for the given layer and where clause.
1219:                 */
1220:                public SeSqlConstruct getSeSqlConstruct()
1221:                        throws DataSourceException {
1222:                    if (this.sdeSqlConstruct == null) {
1223:                        final String layerName;
1224:                        try {
1225:                            layerName = this.sdeLayer.getQualifiedName();
1226:                            this.sdeSqlConstruct = new SeSqlConstruct(layerName);
1227:                        } catch (SeException e) {
1228:                            throw new DataSourceException(
1229:                                    "Can't create SQL construct: "
1230:                                            + e.getSeError().getErrDesc(), e);
1231:                        }
1232:
1233:                        Filter sqlFilter = getSqlFilter();
1234:
1235:                        if (!Filter.INCLUDE.equals(sqlFilter)) {
1236:                            String whereClause = null;
1237:                            FilterToSQLSDE sqlEncoder = getSqlEncoder();
1238:
1239:                            try {
1240:                                whereClause = sqlEncoder
1241:                                        .encodeToString(sqlFilter);
1242:                            } catch (FilterToSQLException sqle) {
1243:                                String message = "Geometry encoder error: "
1244:                                        + sqle.getMessage();
1245:                                throw new DataSourceException(message, sqle);
1246:                            }
1247:                            LOGGER.fine("ArcSDE where clause '" + whereClause
1248:                                    + "'");
1249:
1250:                            this.sdeSqlConstruct.setWhere(whereClause);
1251:                        }
1252:                    }
1253:
1254:                    return this.sdeSqlConstruct;
1255:                }
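                // Illustrative sketch (editor addition, not part of the original source):
                // the construct returned above pairs the layer's qualified name with the
                // WHERE clause derived from the SQL-encodable part of the filter.
                // "filterSet" is a hypothetical name.
                //
                //     SeSqlConstruct sql = filterSet.getSeSqlConstruct();
                //     String where = sql.getWhere(); // null when the SQL filter is Filter.INCLUDE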
1256:
1257:                /**
1258:                 * Lazily creates the array of <code>SeShapeFilter</code> objects that
1259:                 * map the corresponding geometry related filters included in the
1260:                 * original  <code>org.geotools.data.Query</code> passed to the
1261:                 * constructor.
1262:                 *
1263:                 * @return an array with the spatial filters to be applied to the
1264:                 *         SeQuery, or null if none.
1265:                 *
1266:                 * @throws DataSourceException if the geometry filters cannot be encoded to SeShapeFilter objects
1267:                 */
1268:                public SeFilter[] getSpatialFilters()
1269:                        throws DataSourceException {
1270:                    if (this.sdeSpatialFilters == null) {
1271:                        GeometryEncoderSDE geometryEncoder = new GeometryEncoderSDE(
1272:                            this.sdeLayer, featureType);
1273:
1274:                        try {
1275:                            geometryEncoder.encode(getGeometryFilter());
1276:                        } catch (GeometryEncoderException e) {
1277:                            throw new DataSourceException(
1278:                                    "Error parsing geometry filters: "
1279:                                            + e.getMessage(), e);
1280:                        }
1281:
1282:                        this.sdeSpatialFilters = geometryEncoder
1283:                                .getSpatialFilters();
1284:                    }
1285:
1286:                    return this.sdeSpatialFilters;
1287:                }
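                // Illustrative sketch (editor addition, not part of the original source):
                // the encoded spatial filters are meant to be registered as the SeQuery's
                // spatial constraints before execution; a caller would first guard against
                // an empty result. "filterSet" and the setter mentioned below are assumptions.
                //
                //     SeFilter[] spatialFilters = filterSet.getSpatialFilters();
                //     if (spatialFilters != null && spatialFilters.length > 0) {
                //         // hand spatialFilters to the SeQuery's spatial constraints setter
                //     }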
1288:
1289:                /**
1290:                 * Returns the supported, non geometry related, portion of the filter.
1291:                 *
1292:                 * @return the subset, non geometry related, of the original filter
1293:                 *         this datastore implementation supports natively, or
1294:                 *         <code>Filter.INCLUDE</code> if the original Query does not
1295:                 *         contain non spatial filters that we can deal with at the
1296:                 *         ArcSDE Java API side.
1297:                 */
1298:                public Filter getSqlFilter() {
1299:                    return (this.sqlFilter == null) ? Filter.INCLUDE
1300:                            : this.sqlFilter;
1301:                }
1302:
1303:                /**
1304:                 * Returns the geometry related portion of the filter supported natively.
1305:                 *
1306:                 * @return the geometry related subset of the original filter this
1307:                 *         datastore implementation supports natively, or
1308:                 *         <code>Filter.INCLUDE</code> if the original Query does not
1309:                 *         contain spatial filters that we can deal with at the
1310:                 *         ArcSDE Java API side.
1311:                 */
1312:                public Filter getGeometryFilter() {
1313:                    return (this.geometryFilter == null) ? Filter.INCLUDE
1314:                            : this.geometryFilter;
1315:                }
1316:
1317:                /**
1318:                 * Returns the portion of the filter that cannot be handled natively.
1319:                 *
1320:                 * @return the part of the original filter this datastore
1321:                 *         implementation does not support natively, or
1322:                 *         <code>Filter.INCLUDE</code> if we support the whole Query
1323:                 *         filter.
1324:                 */
1325:                public Filter getUnsupportedFilter() {
1326:                    return (this.unsupportedFilter == null) ? Filter.INCLUDE
1327:                            : this.unsupportedFilter;
1328:                }
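                // Illustrative sketch (editor addition, not part of the original source):
                // since the unsupported part is never sent to ArcSDE, code that needs exact
                // results would evaluate it in memory against each fetched feature, e.g.
                // with the standard Filter#evaluate(Object) method. "filterSet" and
                // "feature" are hypothetical names.
                //
                //     Filter post = filterSet.getUnsupportedFilter();
                //     boolean matches = post.evaluate(feature);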
1329:
1330:                private FilterToSQLSDE getSqlEncoder() {
1331:                    if (sqlEncoder == null) {
1332:                        final String layerName;
1333:                        try {
1334:                            layerName = sdeLayer.getQualifiedName();
1335:                        } catch (SeException e) {
1336:                            throw (RuntimeException) new RuntimeException(
1337:                                    "error getting layer's qualified name")
1338:                                    .initCause(e);
1339:                        }
1340:                        String fidColumn = fidReader.getFidColumn();
1341:                        sqlEncoder = new FilterToSQLSDE(layerName, fidColumn,
1342:                                featureType, layerSelectStatement);
1343:                    }
1344:                    return sqlEncoder;
1345:                }
1346:            }
1347:        }