Source Code Cross Referenced for ArcSDEFeatureStoreTest.java in » GIS » GeoTools-2.4.1 » org.geotools.arcsde.data



001:        package org.geotools.arcsde.data;
002:
003:        import java.io.IOException;
004:        import java.util.HashMap;
005:        import java.util.HashSet;
006:        import java.util.Map;
007:        import java.util.logging.Logger;
008:
009:        import junit.framework.TestCase;
010:
011:        import org.geotools.arcsde.ArcSDEDataStoreFactory;
012:        import org.geotools.data.DataStore;
013:        import org.geotools.data.DefaultQuery;
014:        import org.geotools.data.DefaultTransaction;
015:        import org.geotools.data.FeatureReader;
016:        import org.geotools.data.FeatureSource;
017:        import org.geotools.data.FeatureWriter;
018:        import org.geotools.data.Query;
019:        import org.geotools.data.Transaction;
020:        import org.geotools.factory.CommonFactoryFinder;
021:        import org.geotools.feature.AttributeType;
022:        import org.geotools.feature.AttributeTypeFactory;
023:        import org.geotools.feature.Feature;
024:        import org.geotools.feature.FeatureCollection;
025:        import org.geotools.feature.FeatureIterator;
026:        import org.geotools.feature.FeatureType;
027:        import org.geotools.feature.FeatureTypeBuilder;
028:        import org.geotools.feature.SchemaException;
029:        import org.geotools.feature.SimpleFeature;
030:        import org.geotools.feature.type.GeometricAttributeType;
031:        import org.opengis.filter.Filter;
032:        import org.opengis.filter.FilterFactory;
033:        import org.opengis.filter.Or;
034:        import org.opengis.filter.PropertyIsEqualTo;
035:
036:        import com.vividsolutions.jts.geom.Coordinate;
037:        import com.vividsolutions.jts.geom.Geometry;
038:        import com.vividsolutions.jts.geom.GeometryFactory;
039:        import com.vividsolutions.jts.geom.LineString;
040:        import com.vividsolutions.jts.geom.MultiLineString;
041:        import com.vividsolutions.jts.geom.MultiPoint;
042:        import com.vividsolutions.jts.geom.MultiPolygon;
043:        import com.vividsolutions.jts.geom.Point;
044:        import com.vividsolutions.jts.geom.Polygon;
045:
046:        /**
047:         * Unit tests for transaction support
048:         *
049:         * @author Gabriel Roldan, Axios Engineering
050:         * @source $URL: http://svn.geotools.org/geotools/tags/2.4.1/modules/unsupported/arcsde/datastore/src/test/java/org/geotools/arcsde/data/ArcSDEFeatureStoreTest.java $
051:         * @version $Id: ArcSDEFeatureStoreTest.java 27863 2007-11-12 20:34:34Z desruisseaux $
052:         */
053:        public class ArcSDEFeatureStoreTest extends TestCase {
054:            /** package logger */
055:            private static Logger LOGGER = org.geotools.util.logging.Logging
056:                    .getLogger(ArcSDEFeatureStoreTest.class.getPackage()
057:                            .getName());
058:
059:            /** test fixture providing the connection parameters and temp table helpers */
060:            private TestData testData;
061:
062:            /**
063:             * Loads {@code test-data/testparams.properties} into a Properties object, which is
064:             * used to obtain the test table names and as the parameter set to find the DataStore.
065:             *
066:             * @throws Exception if the test fixture cannot be set up
067:             */
068:            protected void setUp() throws Exception {
069:                super.setUp();
070:                this.testData = new TestData();
071:                this.testData.setUp();
072:                if (ArcSDEDataStoreFactory.JSDE_CLIENT_VERSION == ArcSDEDataStoreFactory.JSDE_VERSION_DUMMY)
073:                    throw new RuntimeException(
074:                            "Don't run the test-suite with the dummy jar.  Make sure the real ArcSDE jars are on your classpath.");
075:            }
076:
077:            /**
078:             * Tears down the test fixture created by {@link #setUp()}.
079:             *
080:             * @throws Exception if the test data cannot be cleaned up
081:             */
082:            protected void tearDown() throws Exception {
083:                testData.tearDown(true, false);
084:                testData = null;
085:                super.tearDown();
086:            }
087:
088:            /**
089:             * Tests that a single feature fetched by its FID can be removed through a FeatureWriter.
090:             *
091:             * @throws Exception if an ArcSDE error occurs
092:             */
093:            public void testDeleteByFID() throws Exception {
094:                testData.createTempTable(true);
095:
096:                DataStore ds = testData.getDataStore();
097:                String typeName = testData.getTemp_table();
098:
099:                //get a fid
100:                FeatureReader reader = ds.getFeatureReader(new DefaultQuery(
101:                        typeName), Transaction.AUTO_COMMIT);
102:                String fid = reader.next().getID();
103:                reader.close();
104:
105:                FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
106:                HashSet ids = new HashSet();
107:                ids.add(ff.featureId(fid));
108:                Filter fidFilter = ff.id(ids);
109:
110:                FeatureWriter writer = ds.getFeatureWriter(typeName, fidFilter,
111:                        Transaction.AUTO_COMMIT);
112:
113:                assertTrue(writer.hasNext());
114:
115:                Feature feature = writer.next();
116:                assertEquals(fid, feature.getID());
117:                writer.remove();
118:                assertFalse(writer.hasNext());
119:                writer.close();
120:
121:                //was it really removed?
122:                reader = ds.getFeatureReader(new DefaultQuery(typeName,
123:                        fidFilter), Transaction.AUTO_COMMIT);
124:                assertFalse(reader.hasNext());
125:                reader.close();
126:            }
127:
128:            /**
129:             * Tests that all the features matching an attribute-only (i.e. non-spatial)
130:             * filter are deleted correctly. This test
131:             * assumes that there are no duplicate values in the test data.
132:             *
133:             * @throws Exception
134:             */
135:            public void testDeleteByAttOnlyFilter() throws Exception {
136:                testData.createTempTable(true);
137:
138:                DataStore ds = this.testData.getDataStore();
139:                String typeName = this.testData.getTemp_table();
140:
141:                //get 2 features and build an OR'ed PropertyIsEqualTo filter
142:                FeatureSource fs = ds.getFeatureSource(typeName);
143:                FeatureType schema = fs.getSchema();
144:                AttributeType att = schema.getAttributeType(0);
145:                String attName = att.getLocalName();
146:
147:                FeatureIterator reader = fs.getFeatures().features();
148:                Object val1 = reader.next().getAttribute(0);
149:                Object val2 = reader.next().getAttribute(0);
150:                reader.close();
151:
152:                FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
153:                PropertyIsEqualTo eq1 = ff.equals(ff.literal(val1), ff
154:                        .property(attName));
155:                PropertyIsEqualTo eq2 = ff.equals(ff.literal(val2), ff
156:                        .property(attName));
157:                Or or = ff.or(eq1, eq2);
158:
159:                FeatureWriter writer = ds.getFeatureWriter(typeName, or,
160:                        Transaction.AUTO_COMMIT);
161:
162:                assertTrue(writer.hasNext());
163:
164:                Feature feature = writer.next();
165:                assertEquals(val1, feature.getAttribute(0));
166:                writer.remove();
167:
168:                feature = writer.next();
169:                assertEquals(val2, feature.getAttribute(0));
170:                writer.remove();
171:
172:                assertFalse(writer.hasNext());
173:                writer.close();
174:
175:                //were they really removed?
176:                FeatureReader read = ds.getFeatureReader(new DefaultQuery(
177:                        typeName, or), Transaction.AUTO_COMMIT);
178:                assertFalse(read.hasNext());
179:                read.close();
180:            }
181:
182:            /**
183:             * Tests the creation of new feature types, with CRS and all.
184:             * 
185:             * <p>
186:             * This test also ensures that the ArcSDE datastore is able to create
187:             * schemas where the geometry attribute is not the last one. This is
188:             * important because, to do so, the ArcSDE datastore must break the usual way
189:             * of creating schemas with the ArcSDE Java API, in which one first
190:             * creates the (non spatially enabled) "table" with all the non spatial
191:             * attributes and finally
192:             * creates the "layer", adding the spatial attribute to the previously
193:             * created table. So, this test ensures the datastore correctly works
194:             * around this limitation.
195:             * </p>
196:             *
197:             * @throws IOException if an ArcSDE error occurs
198:             * @throws SchemaException if the test schema cannot be built
199:             */
200:            public void testCreateSchema() throws IOException, SchemaException {
201:                FeatureType type;
202:                AttributeType[] atts = new AttributeType[4];
203:                String typeName = this.testData.getTemp_table();
204:                if (typeName.indexOf('.') != -1) {
205:                    LOGGER.fine("Unqualifying type name to create schema.");
206:                    typeName = typeName
207:                            .substring(typeName.lastIndexOf('.') + 1);
208:                }
209:
210:                atts[0] = AttributeTypeFactory.newAttributeType("FST_COL",
211:                        String.class, false);
212:                atts[1] = AttributeTypeFactory.newAttributeType("SECOND_COL",
213:                        Double.class, false);
214:                atts[2] = AttributeTypeFactory.newAttributeType("GEOM",
215:                        Point.class, false);
216:                atts[3] = AttributeTypeFactory.newAttributeType("FOURTH_COL",
217:                        Integer.class, false);
218:                type = FeatureTypeBuilder.newFeatureType(atts, typeName);
219:
220:                DataStore ds = this.testData.getDataStore();
221:
222:                this.testData.deleteTempTable(((ArcSDEDataStore) ds)
223:                        .getConnectionPool());
224:                Map hints = new HashMap();
225:                hints.put("configuration.keyword", testData.getConfigKeyword());
226:                ((ArcSDEDataStore) ds).createSchema(type, hints);
227:                this.testData.deleteTempTable(((ArcSDEDataStore) ds)
228:                        .getConnectionPool());
229:            }
230:
231:            /**
232:             * Runs {@link #testFeatureWriterAutoCommit(Class)} with {@code Geometry.class}.
233:             *
234:             * @throws Exception if an ArcSDE error occurs
235:             */
236:            public void testWriterGeometry() throws Exception {
237:                testFeatureWriterAutoCommit(Geometry.class);
238:            }
239:
240:            /**
241:             * Runs {@link #testFeatureWriterAutoCommit(Class)} with {@code Point.class}.
242:             *
243:             * @throws Exception if an ArcSDE error occurs
244:             */
245:            public void testWriterPoint() throws Exception {
246:                testFeatureWriterAutoCommit(Point.class);
247:            }
248:
249:            /**
250:             * Runs {@link #testFeatureWriterAutoCommit(Class)} with {@code MultiPoint.class}.
251:             *
252:             * @throws Exception if an ArcSDE error occurs
253:             */
254:            public void testWriterMultiPoint() throws Exception {
255:                testFeatureWriterAutoCommit(MultiPoint.class);
256:            }
257:
258:            /**
259:             * Runs {@link #testFeatureWriterAutoCommit(Class)} with {@code LineString.class}.
260:             *
261:             * @throws Exception if an ArcSDE error occurs
262:             */
263:            public void testWriterLineString() throws Exception {
264:                testFeatureWriterAutoCommit(LineString.class);
265:            }
266:
267:            /**
268:             * Runs {@link #testFeatureWriterAutoCommit(Class)} with {@code MultiLineString.class}.
269:             *
270:             * @throws Exception if an ArcSDE error occurs
271:             */
272:            public void testWriterMultiLineString() throws Exception {
273:                testFeatureWriterAutoCommit(MultiLineString.class);
274:            }
275:
276:            /**
277:             * Runs {@link #testFeatureWriterAutoCommit(Class)} with {@code Polygon.class}.
278:             *
279:             * @throws Exception if an ArcSDE error occurs
280:             */
281:            public void testWriterPolygon() throws Exception {
282:                testFeatureWriterAutoCommit(Polygon.class);
283:            }
284:
285:            /**
286:             * Runs {@link #testFeatureWriterAutoCommit(Class)} with {@code MultiPolygon.class}.
287:             *
288:             * @throws Exception if an ArcSDE error occurs
289:             */
290:            public void testWriterMultiPolygon() throws Exception {
291:                testFeatureWriterAutoCommit(MultiPolygon.class);
292:            }
293:
294:            /**
295:             * Tests the writing of features with an autocommit transaction.
296:             *
297:             * @param geometryClass the geometry binding used to create the test features
298:             *
299:             * @throws Exception if an ArcSDE error occurs
300:             * @throws IllegalArgumentException if an unexpected feature event type is received
301:             */
302:            private void testFeatureWriterAutoCommit(Class geometryClass)
303:                    throws Exception {
304:                //the table created here is test-friendly since it can hold
305:                //any kind of geometry.
306:                this.testData.createTempTable(true);
307:
308:                String typeName = this.testData.getTemp_table();
309:                FeatureCollection features = this.testData.createTestFeatures(
310:                        geometryClass, 10);
311:
312:                DataStore ds = this.testData.getDataStore();
313:                FeatureSource fsource = ds.getFeatureSource(typeName);
314:
315:                //incremented on each feature added event to
316:                //ensure events are being raised as expected
317:                //(the count is wrapped inside an array so the variable can be
318:                //declared final and still be accessed from inside the anonymous
319:                //inner class)
320:                /*
321:                final int[] featureAddedEventCount = { 0 };
322:
323:                fsource.addFeatureListener(new FeatureListener() {
324:                        public void changed(FeatureEvent evt) {
325:                            if (evt.getEventType() != FeatureEvent.FEATURES_ADDED) {
326:                                throw new IllegalArgumentException(
327:                                    "Expected FEATURES_ADDED event, got "
328:                                    + evt.getEventType());
329:                            }
330:
331:                            ++featureAddedEventCount[0];
332:                        }
333:                    });
334:                 */
335:
336:                final int initialCount = fsource.getCount(Query.ALL);
337:
338:                FeatureWriter writer = ds.getFeatureWriterAppend(typeName,
339:                        Transaction.AUTO_COMMIT);
340:
341:                Feature source;
342:                SimpleFeature dest;
343:
344:                for (FeatureIterator fi = features.features(); fi.hasNext();) {
345:                    source = fi.next();
346:                    dest = (SimpleFeature) writer.next();
347:                    dest.setAttributes(source.getAttributes((Object[]) null));
348:                    writer.write();
349:                }
350:
351:                writer.close();
352:
353:                //were the features really inserted?
354:                int fcount = fsource.getCount(Query.ALL);
355:                assertEquals(features.size() + initialCount, fcount);
356:
357:                /*String msg = "a FEATURES_ADDED event should have been called "
358:                    + features.size() + " times";
359:                assertEquals(msg, features.size(), featureAddedEventCount[0]);*/
360:            }
361:
362:            public void testCreateNillableShapeSchema() throws IOException,
363:                    SchemaException {
364:                FeatureType type;
365:                AttributeType[] atts = new AttributeType[2];
366:                String typeName = this.testData.getTemp_table();
367:                if (typeName.indexOf('.') != -1) {
368:                    LOGGER.fine("Unqualifying type name to create schema.");
369:                    typeName = typeName
370:                            .substring(typeName.lastIndexOf('.') + 1);
371:                }
372:
373:                atts[0] = AttributeTypeFactory.newAttributeType("OBJECTID",
374:                        Integer.class, false);
375:                atts[1] = AttributeTypeFactory.newAttributeType("SHAPE",
376:                        MultiLineString.class, true);
377:
378:                type = FeatureTypeBuilder.newFeatureType(atts, typeName);
379:
380:                ArcSDEDataStore ds = this.testData.getDataStore();
381:
382:                this.testData.deleteTempTable(ds.getConnectionPool());
383:                Map hints = new HashMap();
384:                hints.put("configuration.keyword", this.testData
385:                        .getConfigKeyword());
386:                ds.createSchema(type, hints);
387:                this .testData.deleteTempTable(ds.getConnectionPool());
388:            }
389:
390:            public void testWriteAndUpdateNullShapes() throws IOException,
391:                    SchemaException {
392:                FeatureType type;
393:                AttributeType[] atts = new AttributeType[2];
394:                String typeName = this.testData.getTemp_table();
395:                if (typeName.indexOf('.') != -1) {
396:                    LOGGER.fine("Unqualifying type name to create schema.");
397:                    typeName = typeName
398:                            .substring(typeName.lastIndexOf('.') + 1);
399:                }
400:
401:                atts[0] = AttributeTypeFactory.newAttributeType("OBJECTID",
402:                        Integer.class, false);
403:                atts[1] = new GeometricAttributeType("SHAPE",
404:                        MultiLineString.class, true, null, null, null);
405:
406:                type = FeatureTypeBuilder.newFeatureType(atts, typeName);
407:
408:                DataStore ds = this.testData.getDataStore();
409:
410:                this.testData.deleteTempTable(((ArcSDEDataStore) ds)
411:                        .getConnectionPool());
412:
413:                Map hints = new HashMap();
414:                hints.put("configuration.keyword", this.testData
415:                        .getConfigKeyword());
416:                hints.put("rowid.column.type", "SDE");
417:                hints.put("rowid.column.name", "OBJECTID");
418:
419:                ((ArcSDEDataStore) ds).createSchema(type, hints);
420:                LOGGER.info("Created null-geom sde layer");
421:
422:                try {
423:                    FeatureWriter writer = ds.getFeatureWriter(this.testData
424:                            .getTemp_table(), Transaction.AUTO_COMMIT);
425:                    Feature f = writer.next();
426:                    f.setAttribute(0, new Integer(1));
427:
428:                    writer.write();
429:                    writer.close();
430:                    LOGGER.info("Wrote null-geom feature to sde");
431:
432:                    FeatureReader r = ds.getFeatureReader(new DefaultQuery(
433:                            this.testData.getTemp_table(), Filter.INCLUDE),
434:                            Transaction.AUTO_COMMIT);
435:                    assertTrue(r.hasNext());
436:                    f = r.next();
437:                    LOGGER.info("recovered geometry " + f.getDefaultGeometry()
438:                            + " from single inserted feature.");
439:                    assertTrue(f.getDefaultGeometry().isEmpty());
440:                    //save the ID to update the feature later
441:                    String newId = f.getID();
442:                    assertFalse(r.hasNext());
443:                    r.close();
444:                    LOGGER
445:                            .info("Confirmed exactly one feature in new sde layer");
446:
447:                    FilterFactory ff = CommonFactoryFinder
448:                            .getFilterFactory(null);
449:                    HashSet ids = new HashSet();
450:                    ids.add(ff.featureId(newId));
451:                    Filter idFilter = ff.id(ids);
452:
453:                    writer = ds.getFeatureWriter(this.testData.getTemp_table(),
454:                            idFilter, Transaction.AUTO_COMMIT);
455:
456:                    assertTrue(writer.hasNext());
457:
458:                    LOGGER
459:                            .info("Confirmed feature is fetchable via its API-determined FID");
460:
461:                    GeometryFactory gf = new GeometryFactory();
462:                    int index = 10;
463:                    Coordinate[] coords1 = { new Coordinate(0, 0),
464:                            new Coordinate(++index, ++index) };
465:                    Coordinate[] coords2 = { new Coordinate(0, index),
466:                            new Coordinate(index, 0) };
467:                    LineString[] lines = { gf.createLineString(coords1),
468:                            gf.createLineString(coords2) };
469:                    MultiLineString sampleMultiLine = gf
470:                            .createMultiLineString(lines);
471:
472:                    Feature toBeUpdated = writer.next();
473:                    toBeUpdated.setAttribute(1, sampleMultiLine);
474:                    writer.write();
475:                    writer.close();
476:
477:                    LOGGER
478:                            .info("Null-geom feature updated with a sample geometry.");
479:
480:                    DefaultQuery query = new DefaultQuery(this.testData
481:                            .getTemp_table(), idFilter);
482:                    r = ds.getFeatureReader(query, Transaction.AUTO_COMMIT);
483:                    assertTrue(r.hasNext());
484:                    f = r.next();
485:                    MultiLineString recoveredMLS = (MultiLineString) f
486:                            .getDefaultGeometry();
487:                    assertTrue(!recoveredMLS.isEmpty());
488:                    //I tried to compare the recovered MLS to the sampleMultiLineString, but they're
489:                    // slightly different.  SDE does some rounding, and winds up giving me 0.0000002 for zero,
490:                    // and 11.9992 for 12.  Meh.
491:                    r.close();
492:
493:                } catch (Exception e) {
494:                    throw new RuntimeException(e);
495:                }
496:                this.testData.deleteTempTable(((ArcSDEDataStore) ds)
497:                        .getConnectionPool());
498:            }
499:
500:            /**
501:             * Tests the writing of features with real transactions
502:             *
503:             * @throws Exception if an ArcSDE error occurs
504:             */
505:            public void testFeatureWriterTransaction() throws Exception {
506:                //the table created here is test-friendly since it can hold
507:                //any kind of geometry.
508:                this.testData.createTempTable(true);
509:
510:                String typeName = this.testData.getTemp_table();
511:
512:                DataStore ds = this.testData.getDataStore();
513:                FeatureSource fsource = ds.getFeatureSource(typeName);
514:
515:                final int initialCount = fsource.getCount(Query.ALL);
516:                final int writeCount = initialCount + 2;
517:                FeatureCollection features = this.testData.createTestFeatures(
518:                        LineString.class, writeCount);
519:
520:                //incremented on each feature added event to
521:                //ensure events are being raised as expected
522:                //(the count is wrapped inside an array so the variable can be
523:                //declared final and still be accessed from inside the anonymous
524:                //inner class)
525:                //final int[] featureAddedEventCount = { 0 };
526:
527:                Transaction transaction = new DefaultTransaction();
528:                FeatureWriter writer = ds.getFeatureWriter(typeName,
529:                        Filter.INCLUDE, transaction);
530:
531:                Feature source;
532:                SimpleFeature dest;
533:
534:                int count = 0;
535:                for (FeatureIterator fi = features.features(); fi.hasNext(); count++) {
536:                    if (count < initialCount) {
537:                        assertTrue("at index " + count, writer.hasNext());
538:                    } else {
539:                        assertFalse("at index " + count, writer.hasNext());
540:                    }
541:
542:                    source = fi.next();
543:                    dest = (SimpleFeature) writer.next();
544:                    dest.setAttributes(source.getAttributes((Object[]) null));
545:                    writer.write();
546:                }
547:
548:                transaction.commit();
549:                writer.close();
550:
551:                //were the features really inserted?
552:                int fcount = fsource.getCount(Query.ALL);
553:                assertEquals(writeCount, fcount);
554:
555:                /*
556:                String msg = "a FEATURES_ADDED event should have been called "
557:                    + features.size() + " times";
558:                assertEquals(msg, features.size(), featureAddedEventCount[0]);
559:                 */
560:            }
561:
562:            /**
563:             * Tests appending new features with an autocommit transaction.
564:             *
565:             * @throws Exception if an ArcSDE error occurs
566:             */
567:            public void testFeatureWriterAppend() throws Exception {
568:                //the table created here is test-friendly since it can hold
569:                //any kind of geometry.
570:                this.testData.createTempTable(true);
571:
572:                String typeName = this.testData.getTemp_table();
573:                FeatureCollection features = this.testData.createTestFeatures(
574:                        LineString.class, 2);
575:
576:                DataStore ds = this.testData.getDataStore();
577:                FeatureSource fsource = ds.getFeatureSource(typeName);
578:
579:                final int initialCount = fsource.getCount(Query.ALL);
580:
581:                FeatureWriter writer = ds.getFeatureWriterAppend(typeName,
582:                        Transaction.AUTO_COMMIT);
583:
584:                Feature source;
585:                SimpleFeature dest;
586:
587:                for (FeatureIterator fi = features.features(); fi.hasNext();) {
588:                    assertFalse(writer.hasNext());
589:                    source = fi.next();
590:                    dest = (SimpleFeature) writer.next();
591:                    dest.setAttributes(source.getAttributes((Object[]) null));
592:                    writer.write();
593:                }
594:
595:                writer.close();
596:
597:                //were the features really inserted?
598:                int fcount = fsource.getCount(Query.ALL);
599:                assertEquals(features.size() + initialCount, fcount);
600:            }
601:
602:            /**
603:             * Runs this test case with the text UI test runner.
604:             *
605:             * @param args ignored
606:             */
607:            public static void main(String[] args) {
608:                junit.textui.TestRunner.run(ArcSDEFeatureStoreTest.class);
609:            }
610:        }
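
Usage note (illustrative, not part of the cross-referenced source above): the FID-based delete exercised by testDeleteByFID() uses only the generic GeoTools 2.4 DataStore API, nothing ArcSDE specific. The sketch below extracts that pattern under the assumption that a connected DataStore ("dataStore") and an existing feature type name ("typeName") are already available from your own setup; both names are placeholders, and the imports are the same ones declared at the top of the listing.

    // Minimal sketch (assumptions noted above): delete a single feature by FID,
    // mirroring testDeleteByFID(). "dataStore" and "typeName" are placeholders.
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);

    // read the id of one feature to delete
    FeatureReader reader = dataStore.getFeatureReader(
            new DefaultQuery(typeName), Transaction.AUTO_COMMIT);
    String fid = reader.next().getID();
    reader.close();

    // build an Id filter that matches only that feature
    HashSet ids = new HashSet();
    ids.add(ff.featureId(fid));
    Filter fidFilter = ff.id(ids);

    // position a writer over the matching feature and remove it
    FeatureWriter writer = dataStore.getFeatureWriter(typeName, fidFilter,
            Transaction.AUTO_COMMIT);
    while (writer.hasNext()) {
        writer.next();
        writer.remove();
    }
    writer.close();

    // re-query with the same filter; the feature should no longer be returned
    reader = dataStore.getFeatureReader(new DefaultQuery(typeName, fidFilter),
            Transaction.AUTO_COMMIT);
    boolean deleted = !reader.hasNext();
    reader.close();

As in the tests, the sketch relies on Transaction.AUTO_COMMIT, so each write is flushed immediately; wrap the writer in a DefaultTransaction (as testFeatureWriterTransaction() does) when several changes must commit or roll back together.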