0001: /*
0002: * Geotools2 - OpenSource mapping toolkit
0003: * http://geotools.org
0004: * (C) 2002-2006, Geotools Project Management Committee (PMC)
0005: *
0006: * This library is free software; you can redistribute it and/or
0007: * modify it under the terms of the GNU Lesser General Public
0008: * License as published by the Free Software Foundation;
0009: * version 2.1 of the License.
0010: *
0011: * This library is distributed in the hope that it will be useful,
0012: * but WITHOUT ANY WARRANTY; without even the implied warranty of
0013: * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
0014: * Lesser General Public License for more details.
0015: *
0016: */
0017: package org.geotools.arcsde.data;
0018:
0019: import java.io.IOException;
0020: import java.util.ArrayList;
0021: import java.util.HashSet;
0022: import java.util.List;
0023: import java.util.NoSuchElementException;
0024: import java.util.logging.Level;
0025: import java.util.logging.Logger;
0026:
0027: import javax.xml.parsers.SAXParser;
0028: import javax.xml.parsers.SAXParserFactory;
0029:
0030: import junit.extensions.TestSetup;
0031: import junit.framework.Test;
0032: import junit.framework.TestCase;
0033: import junit.framework.TestSuite;
0034:
0035: import org.geotools.arcsde.ArcSDEDataStoreFactory;
0036: import org.geotools.arcsde.pool.ArcSDEConnectionPool;
0037: import org.geotools.data.DataStore;
0038: import org.geotools.data.DataStoreFinder;
0039: import org.geotools.data.DefaultQuery;
0040: import org.geotools.data.FeatureReader;
0041: import org.geotools.data.FeatureSource;
0042: import org.geotools.data.Query;
0043: import org.geotools.data.Transaction;
0044: import org.geotools.factory.CommonFactoryFinder;
0045: import org.geotools.feature.Feature;
0046: import org.geotools.feature.FeatureCollection;
0047: import org.geotools.feature.FeatureIterator;
0048: import org.geotools.feature.FeatureType;
0049: import org.geotools.feature.GeometryAttributeType;
0050: import org.geotools.feature.IllegalAttributeException;
0051: import org.geotools.filter.FilterFilter;
0052: import org.geotools.gml.GMLFilterDocument;
0053: import org.geotools.gml.GMLFilterGeometry;
0054: import org.opengis.filter.And;
0055: import org.opengis.filter.Filter;
0056: import org.opengis.filter.FilterFactory;
0057: import org.opengis.filter.Id;
0058: import org.opengis.filter.identity.FeatureId;
0059: import org.opengis.filter.spatial.BBOX;
0060: import org.opengis.referencing.crs.CoordinateReferenceSystem;
0061: import org.xml.sax.helpers.ParserAdapter;
0062:
0063: import com.esri.sde.sdk.pe.PeFactory;
0064: import com.esri.sde.sdk.pe.PeProjectedCS;
0065: import com.esri.sde.sdk.pe.PeProjectionException;
0066: import com.vividsolutions.jts.geom.Envelope;
0067: import com.vividsolutions.jts.geom.Geometry;
0068:
0069: /**
0070: * ArcSDEDataStore test cases
0071: *
0072: * @author Gabriel Roldan, Axios Engineering
0073: * @source $URL: http://svn.geotools.org/geotools/tags/2.4.1/modules/unsupported/arcsde/datastore/src/test/java/org/geotools/arcsde/data/ArcSDEDataStoreTest.java $
0074: * @version $Id: ArcSDEDataStoreTest.java 27863 2007-11-12 20:34:34Z desruisseaux $
0075: */
0076: public class ArcSDEDataStoreTest extends TestCase {
0077: /** package logger */
0078: private static Logger LOGGER = org.geotools.util.logging.Logging
0079: .getLogger(ArcSDEDataStoreTest.class.getPackage().getName());
0080:
0081: /** test data fixture providing connection properties and test table names */
0082: private static TestData testData;
0083:
0084: /** an ArcSDEDataStore created on setUp() to run tests against */
0085: private DataStore store;
0086:
0087: /** a filter factory for testing */
0088: FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
0089:
0090: /**
0091: * Creates a new ArcSDEDataStoreTest object.
0092: */
0093: public ArcSDEDataStoreTest() {
0094: this("ArcSDE DataStore unit tests");
0095: }
0096:
0097: /**
0098: * Creates a new ArcSDEDataStoreTest object.
0099: *
0100: * @param name a name for the junit test
0101: */
0102: public ArcSDEDataStoreTest(String name) {
0103: super(name);
0104: }
0105:
0106: /**
0107: * Builds a test suite for all of this class' tests, with per-suite
0108: * initialization directed to {@link #oneTimeSetUp()} and per-suite clean up
0109: * directed to {@link #oneTimeTearDown()}
0110: *
0111: * @return the wrapped test suite
0112: */
0113: public static Test suite() {
0114: TestSuite suite = new TestSuite();
0115: suite.addTestSuite(ArcSDEDataStoreTest.class);
0116:
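// wrap the suite in a TestSetup so that oneTimeSetUp()/oneTimeTearDown() run
// once for the whole suite rather than once per test method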
0117: TestSetup wrapper = new TestSetup(suite) {
0118: protected void setUp() throws IOException {
0119: oneTimeSetUp();
0120: }
0121:
0122: protected void tearDown() {
0123: oneTimeTearDown();
0124: }
0125: };
0126: return wrapper;
0127: }
0128:
0129: private static void oneTimeSetUp() throws IOException {
0130: testData = new TestData();
0131: testData.setUp();
0132: if (ArcSDEDataStoreFactory.JSDE_CLIENT_VERSION == ArcSDEDataStoreFactory.JSDE_VERSION_DUMMY)
0133: throw new RuntimeException(
0134: "Don't run the test-suite with the dummy jar. Make sure the real ArcSDE jars are on your classpath.");
0135: }
0136:
0137: private static void oneTimeTearDown() {
0138: boolean cleanTestTable = false;
0139: boolean cleanPool = true;
0140: testData.tearDown(cleanTestTable, cleanPool);
0141: }
0142:
0143: /**
0144: * loads {@code testData/testparams.properties} into a Properties object, which is
0145: * used to obtain the test table names and as the parameter set to find the DataStore
0146: *
0147: * @throws Exception if the test fixture cannot be set up
0148: */
0149: protected void setUp() throws Exception {
0150: super.setUp();
0151: //facilitates running a single test at a time (eclipse lets you do this and it's very useful)
0152: if (testData == null) {
0153: oneTimeSetUp();
0154: }
0155: this.store = testData.getDataStore();
0156: }
0157:
0158: /**
0159: * Releases the DataStore reference obtained in {@link #setUp()}
0160: *
0161: * @throws Exception propagated from the superclass
0162: */
0163: protected void tearDown() throws Exception {
0164: super.tearDown();
0165: this.store = null;
0166: }
0167:
0168: /**
0169: * Tests that the ArcSDE DataStore factory is found by {@link DataStoreFinder}
0170: *
0171: * @throws IOException if the lookup fails
0172: */
0173: public void testFinder() throws IOException {
0174: DataStore sdeDs = null;
0175:
0176: DataStoreFinder.scanForPlugins();
0177: sdeDs = DataStoreFinder.getDataStore(testData.getConProps());
0178: assertNotNull(sdeDs);
0179: String failMsg = sdeDs + " is not an ArcSDEDataStore";
0180: assertTrue(failMsg, (sdeDs instanceof ArcSDEDataStore));
0181: LOGGER.fine("testFinder OK :" + sdeDs.getClass().getName());
0182: }
0183:
0184: /**
0185: * This test is currently broken. It's a placeholder for some logic
0186: * that sfarber wrote which tries to guess the SRS of a featureclass, based on connecting
0187: * to it via an SeLayer.
0188: *
0189: * @throws Throwable
0190: */
0191: public void _testAutoFillSRS() throws Throwable {
0192:
0193: ArcSDEDataStore ds = testData.getDataStore();
0194: CoordinateReferenceSystem sdeCRS = ds.getSchema(
0195: "GISDATA.TOWNS_POLY").getDefaultGeometry()
0196: .getCoordinateSystem();
0197:
0198: LOGGER.info(sdeCRS.toWKT().replaceAll(" ", "").replaceAll("\n",
0199: "").replaceAll("\"", "\\\""));
0200:
0201: //CoordinateReferenceSystem epsgCRS = CRS.decode("EPSG:26986");
0202:
0203: //LOGGER.info("are these two CRS's equal? " + CRS.equalsIgnoreMetadata(sdeCRS, epsgCRS));
0204:
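// NOTE: the test is short-circuited below; the projection-scanning code that
// follows is kept only as a reference for the SRS guessing logic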
0205: if (1 == 1)
0206: return;
0207:
0208: int epsgCode = -1;
0209: int[] projcs = PeFactory.projcsCodelist();
0210: LOGGER.info(projcs.length + " projections available.");
0211: for (int i = 0; i < projcs.length; i++) {
0212: try {
0213: PeProjectedCS candidate = PeFactory.projcs(projcs[i]);
0214: //in ArcSDE 9.2, if the PeFactory doesn't support a projection it claimed
0215: //to support, it returns 'null'. So check for it.
0216: if (candidate != null
0217: && candidate.getName().indexOf("Massachusetts") != -1) {
0218: //LOGGER.info("\n\n" + projcs[i] + " has name " + candidate.getName() + "\ntried to match " + wktName + "\n\n");
0219: epsgCode = projcs[i];
0220: } else if (candidate == null) {
0221: //LOGGER.info(projcs[i] + " was null");
0222: } else if (candidate != null) {
0223: //LOGGER.info(projcs[i] + " wasn't null");
0224: }
0225: } catch (PeProjectionException pe) {
0226: // Strangely SDE includes codes in the projcsCodeList() that
0227: // it doesn't actually support.
0228: // Catch the exception and skip them here.
0229: }
0230: }
0231:
0232: }
0233:
0234: /**
0235: * Exercises the connection pool by repeatedly querying and iterating over a FeatureSource
0236: *
0237: * @throws Exception if any query or pool operation fails
0238: */
0239: public void _testStress() throws Exception {
0240: try {
0241: ArcSDEDataStore ds = testData.getDataStore();
0242:
0243: ArcSDEConnectionPool pool = ds.getConnectionPool();
0244: final int initialAvailableCount = pool.getAvailableCount();
0245: final int initialPoolSize = pool.getPoolSize();
0246:
0247: String typeName = testData.getPoint_table();
0248:
0249: FeatureSource source = ds.getFeatureSource(typeName);
0250:
0251: assertEquals(initialAvailableCount, pool
0252: .getAvailableCount());
0253: assertEquals(initialPoolSize, pool.getPoolSize());
0254:
0255: FeatureType schema = source.getSchema();
0256:
0257: assertEquals("After getSchema()", initialAvailableCount,
0258: pool.getAvailableCount());
0259: assertEquals("After getSchema()", initialPoolSize, pool
0260: .getPoolSize());
0261:
0262: final Envelope layerBounds = source.getBounds();
0263:
0264: assertEquals("After getBounds()", initialAvailableCount,
0265: pool.getAvailableCount());
0266: assertEquals("After getBounds()", initialPoolSize, pool
0267: .getPoolSize());
0268:
0269: source.getCount(Query.ALL);
0270:
0271: assertEquals("After size()", initialAvailableCount, pool
0272: .getAvailableCount());
0273: assertEquals("After size()", initialPoolSize, pool
0274: .getPoolSize());
0275:
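// build a bounding box filter slightly smaller than the full layer bounds,
// expressed against the layer's own coordinate reference system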
0276: BBOX bbox = ff.bbox(schema.getDefaultGeometry()
0277: .getLocalName(), layerBounds.getMinX() + 10,
0278: layerBounds.getMinY() + 10,
0279: layerBounds.getMaxX() - 10,
0280: layerBounds.getMaxY() - 10, schema
0281: .getDefaultGeometry().getCoordinateSystem()
0282: .getName().getCode());
0283:
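// query and iterate repeatedly; each iteration should return its stream and
// connection to the pool once the reader is closed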
0284: for (int i = 0; i < 20; i++) {
0285: LOGGER.fine("Running iteration #" + i);
0286:
0287: FeatureCollection res = source.getFeatures(bbox);
0288: FeatureIterator reader = res.features();
0289:
0290: assertNotNull(reader.next());
0291:
0292: assertTrue(0 < res.size());
0293: assertNotNull(res.getBounds());
0294:
0295: assertNotNull(reader.next());
0296:
0297: assertTrue(0 < res.size());
0298: assertNotNull(res.getBounds());
0299:
0300: assertNotNull(reader.next());
0301:
0302: reader.close();
0303: }
0304:
0305: } catch (Exception e) {
0306: e.printStackTrace();
0307: throw e;
0308: }
0309: }
0310:
0311: /**
0312: * test that an ArcSDEDataStore that connects to the configured test database
0313: * contains the tables defined by the parameters "point_table",
0314: * "line_table" and "polygon_table", whether or not they're defined as
0315: * single table names or as fully qualified sde table names (i.e.
0316: * SDE.SDE.TEST_POINT)
0317: *
0318: * @throws IOException
0319: */
0320: public void testGetTypeNames() throws IOException {
0321: String[] featureTypes = store.getTypeNames();
0322: assertNotNull(featureTypes);
0323:
0324: if (LOGGER.isLoggable(Level.FINE)) {
0325: for (int i = 0; i < featureTypes.length; i++)
0326: System.out.println(featureTypes[i]);
0327: }
0328: testTypeExists(featureTypes, testData.getPoint_table());
0329: testTypeExists(featureTypes, testData.getLine_table());
0330: testTypeExists(featureTypes, testData.getPolygon_table());
0331: }
0332:
0333: /**
0334: * tests that the schemas for the defined test tables are returned.
0335: *
0336: * @throws IOException if fetching a schema fails
0337: */
0338: public void testGetSchema() throws IOException {
0339: FeatureType schema;
0340:
0341: schema = store.getSchema(testData.getPoint_table());
0342: assertNotNull(schema);
0343: assertTrue(schema.getAttributeCount() > 0);
0344: schema = store.getSchema(testData.getLine_table());
0345: assertNotNull(schema);
0346: assertTrue(schema.getAttributeCount() > 0);
0347: schema = store.getSchema(testData.getPolygon_table());
0348: assertNotNull(schema);
0349: assertTrue(schema.getAttributeCount() > 0);
0350: LOGGER.fine("testGetSchema OK: " + schema);
0351: }
0352:
0353: /**
0354: * This method tests the feature reader by opening various simultaneous
0355: * FeatureReaders using the 3 test tables.
0356: *
0357: * <p>
0358: * I found experimentally that up to 24 simultaneous streams can be opened
0359: * by a single connection. Each FeatureReader keeps an ArcSDE stream open
0360: * until its <code>close()</code> method is called or hasNext() returns
0361: * false, which automatically closes the stream. If more than 24
0362: * simultaneous streams are opened upon a single
0363: * SeConnection, an exception is thrown by the Java ArcSDE API saying that
0364: * a "NETWORK I/O OPERATION FAILED"
0365: * </p>
0366: *
0367: * @throws IOException if reading fails
0368: * @throws IllegalAttributeException if a malformed feature is read
0369: */
0370: public void testGetFeatureReader() throws IOException,
0371: IllegalAttributeException {
0372: final int NUM_READERS = Integer.parseInt(testData.getConProps()
0373: .getProperty("pool.maxConnections"));
0374: String[] typeNames = { testData.getPoint_table(),
0375: testData.getLine_table(), testData.getPolygon_table() };
0376: FeatureReader[] readers = new FeatureReader[NUM_READERS];
0377: int[] counts = new int[NUM_READERS];
0378:
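// open NUM_READERS readers, cycling through the three test tables in round-robin order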
0379: for (int i = 0; i < NUM_READERS;) {
0380: for (int j = 0; (j < typeNames.length) && (i < NUM_READERS); j++, i++) {
0381: readers[i] = getReader(typeNames[j]);
0382: }
0383: }
0384:
0385: long t = System.currentTimeMillis();
0386: boolean hasNext = false;
0387:
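// advance all readers in lockstep until every one of them is exhausted, then close them all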
0388: while (true) {
0389: for (int i = 0; i < NUM_READERS; i++) {
0390: if (readers[i].hasNext()) {
0391: hasNext = true;
0392:
0393: break;
0394: }
0395:
0396: hasNext = false;
0397: }
0398:
0399: if (!hasNext) {
0400: for (int i = 0; i < NUM_READERS; i++)
0401: readers[i].close();
0402:
0403: break;
0404: }
0405:
0406: for (int i = 0; i < NUM_READERS; i++) {
0407: if (testNext(readers[i])) {
0408: ++counts[i];
0409: }
0410: }
0411: }
0412:
0413: t = System.currentTimeMillis() - t;
0414:
0415: String scounts = "";
0416:
0417: for (int i = 0; i < NUM_READERS; i++)
0418: scounts += (counts[i] + ", ");
0419:
0420: LOGGER.fine("testGetFeatureReader: traversed " + scounts
0421: + " features simultaneously from " + NUM_READERS
0422: + " different FeatureReaders in " + t + "ms");
0423: }
0424:
0425: /**
0426: * Checks that a query returns only the specified attributes.
0427: *
0428: * @throws IOException
0429: * @throws IllegalAttributeException
0430: */
0431: public void testRestrictsAttributes() throws IOException,
0432: IllegalAttributeException {
0433: final String typeName = testData.getPoint_table();
0434: final DataStore ds = testData.getDataStore();
0435: final FeatureType schema = ds.getSchema(typeName);
0436: final int queriedAttributeCount = schema.getAttributeCount() - 3;
0437: final String[] queryAtts = new String[queriedAttributeCount];
0438:
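// ask for all but the last three attributes declared in the schema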
0439: for (int i = 0; i < queryAtts.length; i++) {
0440: queryAtts[i] = schema.getAttributeType(i).getLocalName();
0441: }
0442:
0443: //build the query asking for a subset of attributes
0444: final Query query = new DefaultQuery(typeName, Filter.INCLUDE,
0445: queryAtts);
0446:
0447: FeatureReader reader = null;
0448: FeatureType resultSchema;
0449: try {
0450: reader = ds
0451: .getFeatureReader(query, Transaction.AUTO_COMMIT);
0452: resultSchema = reader.getFeatureType();
0453: } finally {
0454: if (reader != null) reader.close();
0455: }
0456: // it's conceivable that we didn't add the FID attribute, so be a little lenient.
0457: // Either the result is exactly equal, or one greater
0458: assertTrue(queriedAttributeCount == resultSchema
0459: .getAttributeCount()
0460: || queriedAttributeCount == resultSchema
0461: .getAttributeCount() - 1);
0462: //assertEquals(queriedAttributeCount, resultSchema.getAttributeCount());
0463:
0464: for (int i = 0; i < queriedAttributeCount; i++) {
0465: assertEquals(queryAtts[i], resultSchema.getAttributeType(i)
0466: .getLocalName());
0467: }
0468: }
0469:
0470: /**
0471: * Checks that the ArcSDE datastore returns feature types whose attributes are
0472: * exactly in the requested order.
0473: *
0474: * @throws IOException if the query fails
0475: * @throws IllegalAttributeException if a malformed feature is read
0476: */
0477: public void testRespectsAttributeOrder() throws IOException,
0478: IllegalAttributeException {
0479: final String typeName = testData.getPoint_table();
0480: final DataStore ds = testData.getDataStore();
0481: final FeatureType schema = ds.getSchema(typeName);
0482: final int queriedAttributeCount = schema.getAttributeCount();
0483: final String[] queryAtts = new String[queriedAttributeCount];
0484:
0485: //build the attnames in inverse order
0486: for (int i = queryAtts.length, j = 0; i > 0; j++) {
0487: --i;
0488: queryAtts[j] = schema.getAttributeType(i).getLocalName();
0489: }
0490:
0491: //build the query asking for a subset of attributes
0492: final Query query = new DefaultQuery(typeName, Filter.INCLUDE,
0493: queryAtts);
0494:
0495: FeatureReader reader;
0496: reader = ds.getFeatureReader(query, Transaction.AUTO_COMMIT);
0497: try {
0498:
0499: FeatureType resultSchema = reader.getFeatureType();
0500: assertEquals(queriedAttributeCount, resultSchema
0501: .getAttributeCount());
0502:
0503: for (int i = 0; i < queriedAttributeCount; i++) {
0504: assertEquals(queryAtts[i], resultSchema
0505: .getAttributeType(i).getLocalName());
0506: }
0507: } finally {
0508: reader.close();
0509: }
0510: }
0511:
0512: /**
0513: * Reads and validates the next feature from the given reader, if there is one.
0514: *
0515: * @param r the reader to advance
0516: *
0517: * @return true if a feature was read, false if the reader was exhausted
0518: *
0519: * @throws IOException if reading fails
0520: * @throws IllegalAttributeException if the feature content is invalid
0521: */
0522: private boolean testNext(FeatureReader r) throws IOException,
0523: IllegalAttributeException {
0524: if (r.hasNext()) {
0525: Feature f = r.next();
0526: assertNotNull(f);
0527: assertNotNull(f.getFeatureType());
0528: assertNotNull(f.getBounds());
0529:
0530: Geometry geom = f.getDefaultGeometry();
0531: assertNotNull(geom);
0532:
0533: return true;
0534: }
0535:
0536: return false;
0537: }
0538:
0539: /**
0540: * Opens a FeatureReader over all features of the given type and asserts it has content.
0541: *
0542: * @param typeName the feature type to read
0543: *
0544: * @return an open FeatureReader positioned before its first feature
0545: *
0546: * @throws IOException if the reader cannot be opened
0547: */
0548: private FeatureReader getReader(String typeName) throws IOException {
0549: Query q = new DefaultQuery(typeName, Filter.INCLUDE);
0550: FeatureReader reader = store.getFeatureReader(q,
0551: Transaction.AUTO_COMMIT);
0552: FeatureType retType = reader.getFeatureType();
0553: assertNotNull(retType.getDefaultGeometry());
0554: assertTrue(reader.hasNext());
0555:
0556: return reader;
0557: }
0558:
0559: /**
0560: * tests the datastore behavior when fetching data based on mixed queries.
0561: *
0562: * <p>
0563: * "Mixed queries" refers to mixing alphanumeric and geometry based
0564: * filters, since that is the natural separation of things in the Esri
0565: * Java API for ArcSDE. This is necessary since mixed queries sometimes
0566: * are problematic. So this test ensures that:
0567: *
0568: * <ul>
0569: * <li>
0570: * A mixed query respects all filters
0571: * </li>
0572: * <li>
0573: * A mixed query does not fail when getBounds() is performed
0574: * </li>
0575: * <li>
0576: * A mixed query does not fail when size() is performed
0577: * </li>
0578: * </ul>
0579: * </p>
0580: *
0581: * @throws Exception if any query operation fails
0582: */
0583: public void testMixedQueries() throws Exception {
0584: final int EXPECTED_RESULT_COUNT = 3;
0585: FeatureSource fs = store.getFeatureSource(testData
0586: .getPolygon_table());
0587: Filter bboxFilter = getBBoxfilter(fs);
0588: String sqlFilterUri = getFilterUri("filters.sql.polygons.filter");
0589: Filter sqlFilter = parseDocument(sqlFilterUri);
0590: LOGGER.fine("Geometry filter: " + bboxFilter);
0591: LOGGER.fine("SQL filter: " + sqlFilter);
0592:
0593: And mixedFilter = ff.and(sqlFilter, bboxFilter);
0594:
0595: LOGGER.fine("Mixed filter: " + mixedFilter);
0596:
0597: //verify both filter constraints are met
0598: testFilter(mixedFilter, fs, EXPECTED_RESULT_COUNT);
0599:
0600: final int LOOP_COUNT = 6;
0601:
0602: for (int i = 0; i < LOOP_COUNT; i++) {
0603: LOGGER.info("Running #" + i
0604: + " iteration for mixed query test");
0605:
0606: // check that getBounds and size do function
0607: FeatureIterator reader = null;
0608: FeatureCollection results = fs.getFeatures(mixedFilter);
0609: Envelope bounds = results.getBounds();
0610: assertNotNull(bounds);
0611: LOGGER.fine("results bounds: " + bounds);
0612:
0613: reader = results.features();
0614: try {
0615: /*
0616: * verify that when features are already being fetched,
0617: * getBounds and size still work
0618: */
0619: reader.next();
0620: bounds = results.getBounds();
0621: assertNotNull(bounds);
0622: LOGGER.fine("results bounds when reading: " + bounds);
0623:
0624: int count = results.size();
0625: assertEquals(EXPECTED_RESULT_COUNT, count);
0626: LOGGER.fine("wooohoooo...");
0627:
0628: } finally {
0629: reader.close();
0630: }
0631: }
0632: }
0633:
0634: /**
0635: * to expose GEOT-408, tests that queries in which only non-spatial
0636: * attributes are requested do not fail due to the datastore trying to
0637: * parse the geometry attribute.
0638: *
0639: * @throws Exception
0640: *             if the query or iteration fails
0641: */
0642: public void testAttributeOnlyQuery() throws Exception {
0643: DataStore ds = testData.getDataStore();
0644: FeatureSource fSource = ds.getFeatureSource(testData
0645: .getLine_table());
0646: FeatureType type = fSource.getSchema();
0647: DefaultQuery attOnlyQuery = new DefaultQuery(type.getTypeName());
0648: List propNames = new ArrayList(type.getAttributeCount() - 1);
0649:
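// collect the names of every non-geometry attribute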
0650: for (int i = 0; i < type.getAttributeCount(); i++) {
0651: if (type.getAttributeType(i) instanceof GeometryAttributeType) {
0652: continue;
0653: }
0654:
0655: propNames.add(type.getAttributeType(i).getLocalName());
0656: }
0657:
0658: attOnlyQuery.setPropertyNames(propNames);
0659:
0660: FeatureCollection results = fSource.getFeatures(attOnlyQuery);
0661: FeatureType resultSchema = results.getSchema();
0662: assertEquals(propNames.size(), resultSchema.getAttributeCount());
0663:
0664: for (int i = 0; i < propNames.size(); i++) {
0665: assertEquals(propNames.get(i), resultSchema
0666: .getAttributeType(i).getLocalName());
0667: }
0668:
0669: //the problem described in GEOT-408 arises in attribute reader, so
0670: //we must try fetching features
0671: FeatureIterator iterator = results.features();
0672: Feature feature = iterator.next();
0673: iterator.close();
0674: assertNotNull(feature);
0675:
0676: //the id must be grabbed correctly.
0677: //this exercises the fact that although the geometry is not included
0678: //in the request, it must be fetched anyway to obtain the SeShape.getFeatureId()
0679: //getID() should throw an exception if the feature id was not grabbed (see
0680: // ArcSDEAttributeReader.readFID()).
0681: String id = feature.getID();
0682: assertNotNull(id);
0683: assertFalse(id.endsWith(".-1"));
0684: assertFalse(id.endsWith(".0"));
0685: }
0686:
0687: /**
0688: * Test that FID filters are correctly handled
0689: *
0690: * @throws Exception if grabbing the fids or running the query fails
0691: */
0692: public void testFidFilters() throws Exception {
0693: final DataStore ds = testData.getDataStore();
0694: final String typeName = testData.getPoint_table();
0695:
0696: //grab some fids
0697: FeatureReader reader = ds.getFeatureReader(new DefaultQuery(
0698: typeName), Transaction.AUTO_COMMIT);
0699: List fids = new ArrayList();
0700:
0701: while (reader.hasNext()) {
0702: fids.add(ff.featureId(reader.next().getID()));
0703:
0704: //skip one
0705: if (reader.hasNext()) {
0706: reader.next();
0707: }
0708: }
0709:
0710: reader.close();
0711:
0712: Id filter = ff.id(new HashSet(fids));
0713:
0714: FeatureSource source = ds.getFeatureSource(typeName);
0715: Query query = new DefaultQuery(typeName, filter);
0716: FeatureCollection results = source.getFeatures(query);
0717:
0718: assertEquals(fids.size(), results.size());
0719: FeatureIterator iterator = results.features();
0720:
0721: while (iterator.hasNext()) {
0722: String fid = iterator.next().getID();
0723: assertTrue("a fid not included in query was returned: "
0724: + fid, fids.contains(ff.featureId(fid)));
0725: }
0726: results.close(iterator);
0727: }
0728:
0729: public void testMoreThan1000FidFilters() throws Exception {
0730: final DataStore ds = testData.getDataStore();
0731: final String typeName = testData.getPoint_table();
0732:
0733: //grab some fids
0734: FeatureReader reader = ds.getFeatureReader(new DefaultQuery(
0735: typeName), Transaction.AUTO_COMMIT);
0736: List fids = new ArrayList();
0737:
0738: if (reader.hasNext()) {
0739: fids.add(ff.featureId(reader.next().getID()));
0740: }
0741:
0742: reader.close();
0743:
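// derive an fid template by stripping the last digit of a real fid, then append
// numbers to it to build a filter containing well over 1000 feature ids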
0744: String idTemplate = ((FeatureId) fids.get(0)).getID();
0745: idTemplate = idTemplate.substring(0, idTemplate.length() - 1);
0746:
0747: for (int x = 100; x < 2000; x++) {
0748: fids.add(ff.featureId(idTemplate + x));
0749: }
0750:
0751: Id filter = ff.id(new HashSet(fids));
0752:
0753: FeatureSource source = ds.getFeatureSource(typeName);
0754: Query query = new DefaultQuery(typeName, filter);
0755: FeatureCollection results = source.getFeatures(query);
0756:
0757: assertEquals(1, results.size());
0758: FeatureIterator iterator = results.features();
0759:
0760: while (iterator.hasNext()) {
0761: String fid = iterator.next().getID();
0762: assertTrue("a fid not included in query was returned: "
0763: + fid, fids.contains(ff.featureId(fid)));
0764: }
0765: results.close(iterator);
0766: }
0767:
0768: /**
0769: * test that getFeatureSource over the point_table table works
0770: *
0771: * @throws IOException if the feature source cannot be obtained
0772: */
0773: public void testGetFeatureSourcePoint() throws IOException {
0774: testGetFeatureSource(store.getFeatureSource(testData
0775: .getPoint_table()));
0776: }
0777:
0778: /**
0779: * test that getFeatureSource over the line_table table works
0780: *
0781: * @throws IOException if the feature source cannot be obtained
0782: */
0783: public void testGetFeatureSourceLine() throws IOException {
0784: testGetFeatureSource(store.getFeatureSource(testData
0785: .getLine_table()));
0786: }
0787:
0788: /**
0789: * test that getFeatureSource over the polygon_table table works
0790: *
0791: * @throws IOException if the feature source cannot be obtained
0792: */
0793: public void testGetFeatureSourcePoly() throws IOException {
0794: testGetFeatureSource(store.getFeatureSource(testData
0795: .getPolygon_table()));
0796: }
0797:
0798: /**
0799: * test fetching all features from the point_table table
0800: *
0801: * @throws IOException if the query fails
0802: */
0803: public void testGetFeaturesPoint() throws IOException {
0804: testGetFeatures("points", testData.getPoint_table());
0805: }
0806:
0807: /**
0808: * test fetching all features from the line_table table
0809: *
0810: * @throws IOException if the query fails
0811: */
0812: public void testGetFeaturesLine() throws IOException {
0813: testGetFeatures("lines", testData.getLine_table());
0814: }
0815:
0816: /**
0817: * test fetching all features from the polygon_table table
0818: *
0819: * @throws IOException if the query fails
0820: */
0821: public void testGetFeaturesPolygon() throws IOException {
0822: testGetFeatures("polygons", testData.getPolygon_table());
0823: }
0824:
0825: /**
0826: * test an SQL (attribute) filter against the point_table table
0827: *
0828: * @throws Exception if parsing the filter or querying fails
0829: */
0830: public void testSQLFilterPoints() throws Exception {
0831: String uri = getFilterUri("filters.sql.points.filter");
0832: int expected = getExpectedCount("filters.sql.points.expectedCount");
0833: testFilter(uri, testData.getPoint_table(), expected);
0834: }
0835:
0836: /**
0837: * test an SQL (attribute) filter against the line_table table
0838: *
0839: * @throws Exception if parsing the filter or querying fails
0840: */
0841: public void testSQLFilterLines() throws Exception {
0842: String uri = getFilterUri("filters.sql.lines.filter");
0843: int expected = getExpectedCount("filters.sql.lines.expectedCount");
0844: testFilter(uri, testData.getLine_table(), expected);
0845: }
0846:
0847: /**
0848: * test an SQL (attribute) filter against the polygon_table table
0849: *
0850: * @throws Exception if parsing the filter or querying fails
0851: */
0852: public void testSQLFilterPolygons() throws Exception {
0853: String uri = getFilterUri("filters.sql.polygons.filter");
0854: int expected = getExpectedCount("filters.sql.polygons.expectedCount");
0855: testFilter(uri, testData.getPolygon_table(), expected);
0856: }
0857:
0858: /**
0859: * test a BBOX filter against the point_table table
0860: *
0861: * @throws Exception if the query fails
0862: */
0863: public void testBBoxFilterPoints() throws Exception {
0864: //String uri = getFilterUri("filters.bbox.points.filter");
0865: //int expected = getExpectedCount("filters.bbox.points.expectedCount");
0866: int expected = 6;
0867: testBBox(testData.getPoint_table(), expected);
0868: }
0869:
0870: /**
0871: * test a BBOX filter against the line_table table
0872: *
0873: * @throws Exception if the query fails
0874: */
0875: public void testBBoxFilterLines() throws Exception {
0876: //String uri = getFilterUri("filters.bbox.lines.filter");
0877: //int expected = getExpectedCount("filters.bbox.lines.expectedCount");
0878: int expected = 22;
0879: testBBox(testData.getLine_table(), expected);
0880: }
0881:
0882: /**
0883: * test a BBOX filter against the polygon_table table
0884: *
0885: * @throws Exception if the query fails
0886: */
0887: public void testBBoxFilterPolygons() throws Exception {
0888: //String uri = getFilterUri("filters.bbox.polygons.filter");
0889: //int expected = getExpectedCount("filters.bbox.polygons.expectedCount");
0890: int expected = 8;
0891: testBBox(testData.getPolygon_table(), expected);
0892: }
0893:
0894: /////////////////// HELPER FUNCTIONS ////////////////////////
0895:
0896: /**
0897: * for a given FeatureSource, makes the following assertions:
0898: *
0899: * <ul>
0900: * <li>
0901: * it's not null
0902: * </li>
0903: * <li>
0904: * .getDataStore() != null
0905: * </li>
0906: * <li>
0907: * .getDataStore() == the datastore obtained in setUp()
0908: * </li>
0909: * <li>
0910: * .getSchema() != null
0911: * </li>
0912: * <li>
0913: * .getBounds() != null
0914: * </li>
0915: * <li>
0916: * .getBounds().isNull() == false
0917: * </li>
0918: * <li>
0919: * .getFeatures().size() > 0
0920: * </li>
0921: * <li>
0922: * .getFeatures().features().hasNext() == true
0923: * </li>
0924: * <li>
0925: * .getFeatures().features().next() != null
0926: * </li>
0927: * </ul>
0928: *
0929: *
0930: * @param fsource the FeatureSource to exercise
0931: *
0932: * @throws IOException if any of the FeatureSource operations fail
0933: */
0934: private void testGetFeatureSource(FeatureSource fsource)
0935: throws IOException {
0936: assertNotNull(fsource);
0937: assertNotNull(fsource.getDataStore());
0938: assertEquals(fsource.getDataStore(), store);
0939: assertNotNull(fsource.getSchema());
0940:
0941: FeatureCollection results = fsource.getFeatures();
0942: int count = results.size();
0943: assertTrue("size returns " + count, count > 0);
0944: LOGGER.fine("feature count: " + count);
0945:
0946: Envelope env1;
0947: Envelope env2;
0948: env1 = fsource.getBounds();
0949: assertNotNull(env1);
0950: assertFalse(env1.isNull());
0951: env2 = fsource.getBounds(Query.ALL);
0952: assertNotNull(env2);
0953: assertFalse(env2.isNull());
0954: env1 = results.getBounds();
0955: assertNotNull(env1);
0956: assertFalse(env1.isNull());
0957:
0958: FeatureIterator reader = results.features();
0959: assertTrue(reader.hasNext());
0960:
0961: try {
0962: assertNotNull(reader.next());
0963: } catch (NoSuchElementException ex) {
0964: ex.printStackTrace();
0965: fail(ex.getMessage());
0966: }
0967:
0968: reader.close();
0969: }
0970:
0971: /**
0972: * Resolves the filter file named by the given test property to a URI.
0973: *
0974: * @param filterKey the test property holding the filter file name
0975: *
0976: * @return the URI of the filter document
0977: */
0978: private String getFilterUri(String filterKey) throws IOException {
0979: String filterFileName = testData.getConProps().getProperty(
0980: filterKey);
0981:
0982: if (filterFileName == null) {
0983: fail(filterKey
0984: + " param not found in tests configurarion properties file");
0985: }
0986:
0987: String uri = org.geotools.test.TestData.url(null,
0988: filterFileName).toString();
0989:
0990: return uri;
0991: }
0992:
0993: /**
0994: * Returns the expected feature count stored under the given test property key.
0995: *
0996: * @param key the test property holding the expected count
0997: *
0998: * @return the expected count; fails the test if the property is missing or not an integer
0999: */
1000: private int getExpectedCount(String key) {
1001: try {
1002: return Integer.parseInt(testData.getConProps().getProperty(
1003: key));
1004: } catch (NumberFormatException ex) {
1005: fail(key
1006: + " parameter not found or not an integer in testParams.properties");
1007: }
1008:
1009: return -1;
1010: }
1011:
1012: /**
1013: * Parses the filter document at the given URI and runs it against the given table.
1014: *
1015: * @param filterUri location of the XML filter document
1016: * @param table the feature type to query
1017: * @param expected the expected number of matching features
1018: *
1019: * @throws Exception if parsing or querying fails
1020: */
1021: private void testFilter(String filterUri, String table, int expected)
1022: throws Exception {
1023: Filter filter = parseDocument(filterUri);
1024: FeatureSource fsource = store.getFeatureSource(table);
1025: testFilter(filter, fsource, expected);
1026: }
1027:
1028: /**
1029: * Runs the given filter against the FeatureSource and checks the result size.
1030: *
1031: * @param filter the filter to apply
1032: * @param fsource the source to query
1033: * @param expected the expected number of matching features
1034: *
1035: * @throws IOException if the query fails
1036: */
1037: private void testFilter(Filter filter, FeatureSource fsource,
1038: int expected) throws IOException {
1039: FeatureCollection fc = fsource.getFeatures(filter);
1040: int fCount = fc.size();
1041: LOGGER.info("collection size: " + fCount);
1042:
1043: FeatureIterator fi = fc.features();
1044: int numFeat = 0;
1045: while (fi.hasNext()) {
1046: fi.next();
1047: numFeat++;
1048: }
1049:
1050: String failMsg = "Fully fetched feature count and estimated feature count do not match";
1051: assertEquals(failMsg, fCount, numFeat);
1052: fc.close(fi);
1053: }
1054:
1055: /**
1056: * Runs the standard BBOX filter against the given table.
1057: *
1058: * @param table the feature type to query
1059: * @param expected the expected number of matching features
1060: *
1061: * @throws Exception if the query fails
1062: */
1063: private void testBBox(String table, int expected) throws Exception {
1064: FeatureSource fs = store.getFeatureSource(table);
1065: Filter bboxFilter = getBBoxfilter(fs);
1066: testFilter(bboxFilter, fs, expected);
1067: }
1068:
1069: /**
1070: * Builds a BBOX filter over the default geometry of the given FeatureSource.
1071: *
1072: * @param fs the source whose schema provides the geometry attribute and CRS
1073: *
1074: * @return a BBOX filter over a fixed envelope
1075: *
1076: * @throws Exception if the schema cannot be obtained
1077: */
1078: private Filter getBBoxfilter(FeatureSource fs) throws Exception {
1079: FeatureType schema = fs.getSchema();
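// the envelope is presumably chosen to overlap the test data; the expected hit
// counts are asserted by the testBBoxFilter* methods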
1080: BBOX bbe = ff.bbox(schema.getDefaultGeometry().getLocalName(),
1081: -60, -55, -40, -20, schema.getDefaultGeometry()
1082: .getCoordinateSystem().getName().getCode());
1083: return bbe;
1084: }
1085:
1086: /**
1087: * checks for the existence of <code>table</code> in
1088: * <code>featureTypes</code>. <code>table</code> must be a fully qualified
1089: * sde feature type name (i.e. "SDE.SDE.TEST_POINT" rather than just "TEST_POINT")
1090: *
1091: * @param featureTypes the type names returned by the DataStore
1092: * @param table the fully qualified table name to look for
1093: */
1094: private void testTypeExists(String[] featureTypes, String table) {
1095: for (int i = 0; i < featureTypes.length; i++) {
1096: if (featureTypes[i].equalsIgnoreCase(table.toUpperCase())) {
1097: LOGGER.fine("testTypeExists OK: " + table);
1098:
1099: return;
1100: }
1101: }
1102:
1103: fail("table " + table + " not found in getFeatureTypes results");
1104: }
1105:
1106: /**
1107: * Fetches all features from the given table and checks the count against the expected value.
1108: *
1109: * @param wich logical layer name ("points", "lines" or "polygons") used to look up the expected count
1110: * @param table the feature type to query
1111: *
1112: * @throws IOException if the query fails
1113: */
1114: private void testGetFeatures(String wich, String table)
1115: throws IOException {
1116: LOGGER.fine("getting all features from " + table);
1117:
1118: FeatureSource source = store.getFeatureSource(table);
1119: int expectedCount = getExpectedCount("getfeatures." + wich
1120: + ".expectedCount");
1121: int fCount = source.getCount(Query.ALL);
1122: String failMsg = "Expected and returned result count does not match";
1123: assertEquals(failMsg, expectedCount, fCount);
1124:
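// fetching the full collection should report the same size as getCount(Query.ALL)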
1125: FeatureCollection fresults = source.getFeatures();
1126: FeatureCollection features = fresults;
1127: failMsg = "FeatureResults.size and .collection().size thoes not match";
1128: assertEquals(failMsg, fCount, features.size());
1129: LOGGER.fine("fetched " + fCount + " features for " + wich
1130: + " layer, OK");
1131: }
1132:
1133: /**
1134: * Parses an OGC filter document into a Filter; stolen from the filter module tests.
1135: *
1136: * @param uri location of the XML filter document
1137: *
1138: * @return the parsed Filter
1139: *
1140: * @throws Exception if parsing fails
1141: */
1142: private Filter parseDocument(String uri) throws Exception {
1143: LOGGER.finest("about to create parser");
1144:
1145: // chains all the appropriate filters together (in correct order)
1146: // and initiates parsing
1147: TestFilterHandler filterHandler = new TestFilterHandler();
1148: FilterFilter filterFilter = new FilterFilter(filterHandler,
1149: null);
1150: GMLFilterGeometry geometryFilter = new GMLFilterGeometry(
1151: filterFilter);
1152: GMLFilterDocument documentFilter = new GMLFilterDocument(
1153: geometryFilter);
1154: SAXParserFactory fac = SAXParserFactory.newInstance();
1155: SAXParser parser = fac.newSAXParser();
1156: ParserAdapter p = new ParserAdapter(parser.getParser());
1157: p.setContentHandler(documentFilter);
1158: LOGGER.finer("just made parser, " + uri);
1159: p.parse(uri);
1160: LOGGER.finest("just parsed: " + uri);
1161:
1162: Filter filter = filterHandler.getFilter();
1163:
1164: return filter;
1165: }
1166: }
|