Source Code Cross Referenced for IndexedShapefileDataStore.java in GIS » GeoTools-2.4.1 » org.geotools.data.shapefile.indexed



0001:        /*
0002:         *    GeoTools - OpenSource mapping toolkit
0003:         *    http://geotools.org
0004:         *    (C) 2003-2006, GeoTools Project Management Committee (PMC)
0005:         * 
0006:         *    This library is free software; you can redistribute it and/or
0007:         *    modify it under the terms of the GNU Lesser General Public
0008:         *    License as published by the Free Software Foundation;
0009:         *    version 2.1 of the License.
0010:         *
0011:         *    This library is distributed in the hope that it will be useful,
0012:         *    but WITHOUT ANY WARRANTY; without even the implied warranty of
0013:         *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
0014:         *    Lesser General Public License for more details.
0015:         */
0016:        package org.geotools.data.shapefile.indexed;
0017:
0018:        import java.io.File;
0019:        import java.io.IOException;
0020:        import java.net.MalformedURLException;
0021:        import java.net.URI;
0022:        import java.net.URL;
0023:        import java.nio.channels.FileChannel;
0024:        import java.nio.channels.ReadableByteChannel;
0025:        import java.nio.charset.Charset;
0026:        import java.util.ArrayList;
0027:        import java.util.Arrays;
0028:        import java.util.Collection;
0029:        import java.util.HashSet;
0030:        import java.util.Iterator;
0031:        import java.util.List;
0032:        import java.util.Set;
0033:        import java.util.logging.Level;
0034:
0035:        import org.geotools.data.AbstractAttributeIO;
0036:        import org.geotools.data.AbstractFeatureLocking;
0037:        import org.geotools.data.AbstractFeatureSource;
0038:        import org.geotools.data.AbstractFeatureStore;
0039:        import org.geotools.data.AttributeReader;
0040:        import org.geotools.data.DataSourceException;
0041:        import org.geotools.data.DataStore;
0042:        import org.geotools.data.DataUtilities;
0043:        import org.geotools.data.EmptyFeatureReader;
0044:        import org.geotools.data.FeatureListener;
0045:        import org.geotools.data.FeatureReader;
0046:        import org.geotools.data.FeatureSource;
0047:        import org.geotools.data.FeatureWriter;
0048:        import org.geotools.data.InProcessLockingManager;
0049:        import org.geotools.data.Query;
0050:        import org.geotools.data.Transaction;
0051:        import org.geotools.data.TransactionStateDiff;
0052:        import org.geotools.data.shapefile.ShapefileDataStore;
0053:        import org.geotools.data.shapefile.ShapefileDataStoreFactory;
0054:        import org.geotools.data.shapefile.dbf.DbaseFileException;
0055:        import org.geotools.data.shapefile.dbf.DbaseFileHeader;
0056:        import org.geotools.data.shapefile.dbf.DbaseFileReader;
0057:        import org.geotools.data.shapefile.dbf.DbaseFileWriter;
0058:        import org.geotools.data.shapefile.dbf.IndexedDbaseFileReader;
0059:        import org.geotools.data.shapefile.shp.IndexFile;
0060:        import org.geotools.data.shapefile.shp.JTSUtilities;
0061:        import org.geotools.data.shapefile.shp.ShapeHandler;
0062:        import org.geotools.data.shapefile.shp.ShapeType;
0063:        import org.geotools.data.shapefile.shp.ShapefileException;
0064:        import org.geotools.data.shapefile.shp.ShapefileReader;
0065:        import org.geotools.data.shapefile.shp.ShapefileWriter;
0066:        import org.geotools.data.shapefile.shp.ShapefileReader.Record;
0067:        import org.geotools.feature.AttributeType;
0068:        import org.geotools.feature.Feature;
0069:        import org.geotools.feature.FeatureType;
0070:        import org.geotools.feature.FeatureTypes;
0071:        import org.geotools.feature.GeometryAttributeType;
0072:        import org.geotools.feature.IllegalAttributeException;
0073:        import org.geotools.feature.SchemaException;
0074:        import org.geotools.feature.type.BasicFeatureTypes;
0075:        import org.geotools.filter.FidFilter;
0076:        import org.geotools.filter.FilterAttributeExtractor;
0077:        import org.geotools.filter.Filters;
0078:        import org.geotools.index.Data;
0079:        import org.geotools.index.DataDefinition;
0080:        import org.geotools.index.LockTimeoutException;
0081:        import org.geotools.index.TreeException;
0082:        import org.geotools.index.UnsupportedFilterException;
0083:        import org.geotools.index.quadtree.QuadTree;
0084:        import org.geotools.index.quadtree.StoreException;
0085:        import org.geotools.index.quadtree.fs.FileSystemIndexStore;
0086:        import org.geotools.index.rtree.FilterConsumer;
0087:        import org.geotools.index.rtree.RTree;
0088:        import org.geotools.index.rtree.fs.FileSystemPageStore;
0089:        import org.opengis.filter.Filter;
0090:
0091:        import com.vividsolutions.jts.geom.Envelope;
0092:        import com.vividsolutions.jts.geom.Geometry;
0093:        import com.vividsolutions.jts.geom.LineString;
0094:        import com.vividsolutions.jts.geom.MultiLineString;
0095:        import com.vividsolutions.jts.geom.MultiPoint;
0096:        import com.vividsolutions.jts.geom.MultiPolygon;
0097:        import com.vividsolutions.jts.geom.Point;
0098:        import com.vividsolutions.jts.geom.Polygon;
0099:
0100:        /**
0101:         * A DataStore implementation which allows reading from and writing to Shapefiles.
0102:         *
0103:         * @author Ian Schneider
0104:         * @author Tommaso Nolli
0105:         *
0106:         * @todo fix file creation bug
0107:         * @source $URL:
0108:         *         http://svn.geotools.org/geotools/branches/constantTimeFid/src/org/geotools/data/shapefile/indexed/IndexedShapefileDataStore.java $
0109:         */
0110:        public class IndexedShapefileDataStore extends ShapefileDataStore {
0111:            public static final byte TREE_NONE = 0;
0112:
0113:            public static final byte TREE_GRX = 1;
0114:
0115:            public static final byte TREE_QIX = 2;
0116:
0117:            private static final Object FIX_LOCK = new Object();
0118:
0119:            private static final Object GRX_LOCK = new Object();
0120:
0121:            private static final Object QIX_LOCK = new Object();
0122:
0123:            final URL treeURL;
0124:
0125:            public URL fixURL;
0126:
0127:            byte treeType;
0128:
0129:            boolean createIndex;
0130:
0131:            final boolean useIndex;
0132:
0133:            private RTree rtree;
0134:
0135:            int maxDepth;
0136:
0137:            /**
0138:             * Creates a new instance of ShapefileDataStore.
0139:             *
0140:             * @param url
0141:             *            The URL of the shp file to use for this DataSource.
0142:             */
0143:            public IndexedShapefileDataStore(URL url)
0144:                    throws java.net.MalformedURLException {
0145:                this (url, null, false, true, TREE_QIX);
0146:            }
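            // A minimal usage sketch, assuming a local "roads.shp" with its .dbf and
            // .shx companions on disk (the file name and variable names here are
            // illustrative only):
            //
            //     URL url = new File("roads.shp").toURI().toURL();
            //     IndexedShapefileDataStore store = new IndexedShapefileDataStore(url);
            //     String typeName = store.getTypeNames()[0];
            //     FeatureSource source = store.getFeatureSource(typeName);
            //
            // This constructor delegates to the full form below with memory mapping
            // disabled, index creation enabled, and a QIX (quadtree) index.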
0147:
0148:            /**
0149:             * Creates a new instance of ShapefileDataStore.
0150:             *
0151:             * @param url
0152:             *            The URL of the shp file to use for this DataSource.
0153:             * @param namespace
0154:             *            the namespace URI to use for the created FeatureType
0155:             */
0156:            public IndexedShapefileDataStore(URL url, URI namespace)
0157:                    throws java.net.MalformedURLException {
0158:                this (url, namespace, false, true, TREE_QIX);
0159:            }
0160:
0161:            /**
0162:             * Creates a new instance of ShapefileDataStore.
0163:             *
0164:             * @param url
0165:             *            The URL of the shp file to use for this DataSource.
0166:             * @param namespace
0167:             *            the namespace URI to use for the created FeatureType
0168:             * @param useMemoryMappedBuffer
0169:             *            enable/disable memory mapping of files
0170:             */
0171:            public IndexedShapefileDataStore(URL url, URI namespace,
0172:                    boolean useMemoryMappedBuffer)
0173:                    throws java.net.MalformedURLException {
0174:                this (url, namespace, useMemoryMappedBuffer, true, TREE_QIX);
0175:            }
0176:
0177:            /**
0178:             * Creates a new instance of ShapefileDataStore.
0179:             *
0180:             * @param url
0181:             *            The URL of the shp file to use for this DataSource.
0182:             * @param useMemoryMappedBuffer
0183:             *            enable/disable memory mapping of files
0184:             */
0185:            public IndexedShapefileDataStore(URL url,
0186:                    boolean useMemoryMappedBuffer)
0187:                    throws java.net.MalformedURLException {
0188:                this (url, (URI) null, useMemoryMappedBuffer, true, TREE_QIX);
0189:            }
0190:
0191:            /**
0192:             * Creates a new instance of ShapefileDataStore.
0193:             *
0194:             * @param url
0195:             *            The URL of the shp file to use for this DataSource.
0196:             * @param useMemoryMappedBuffer
0197:             *            enable/disable memory mapping of files
0198:             * @param createIndex
0199:             *            enable/disable automatic index creation if needed
0200:             */
0201:            public IndexedShapefileDataStore(URL url,
0202:                    boolean useMemoryMappedBuffer, boolean createIndex)
0203:                    throws java.net.MalformedURLException {
0204:                this (url, null, useMemoryMappedBuffer, createIndex, TREE_QIX);
0205:            }
0206:
0207:            /**
0208:             * Creates a new instance of ShapefileDataStore.
0209:             *
0210:             * @param url
0211:             *            The URL of the shp file to use for this DataSource.
0212:             * @param namespace
0213:             *            the namespace URI to use for the created FeatureType
0214:             * @param useMemoryMappedBuffer
0215:             *            enable/disable memory mapping of files
0216:             * @param createIndex
0217:             *            enable/disable automatic index creation if needed
0218:             * @param treeType
0219:             *            the type of spatial index to use: TREE_NONE, TREE_GRX or TREE_QIX
0220:             *
0221:             * @throws NullPointerException
0222:             *             if <code>url</code> is null
0223:             * @throws MalformedURLException if <code>url</code> is not a valid shapefile URL
0224:             */
0225:            public IndexedShapefileDataStore(URL url, URI namespace,
0226:                    boolean useMemoryMappedBuffer, boolean createIndex,
0227:                    byte treeType) throws MalformedURLException {
0228:                this (url, namespace, useMemoryMappedBuffer, createIndex,
0229:                        treeType, DEFAULT_STRING_CHARSET);
0230:            }
0231:
0232:            /**
0233:             * Creates a new instance of ShapefileDataStore.
0234:             *
0235:             * @param url
0236:             *            The URL of the shp file to use for this DataSource.
0237:             * @param namespace
0238:             *            the namespace URI to use for the created FeatureType
0239:             * @param useMemoryMappedBuffer
0240:             *            enable/disable memory mapping of files
0241:             * @param createIndex
0242:             *            enable/disable automatic index creation if needed
0243:             * @param treeType
0244:             *            the type of spatial index to use: TREE_NONE, TREE_GRX or TREE_QIX
0245:             * @param dbfCharset {@link Charset} used to decode strings from the DBF
0246:             *
0247:             * @throws NullPointerException
0248:             *             if <code>url</code> is null
0249:             * @throws MalformedURLException if <code>url</code> is not a valid shapefile URL
0250:             */
0251:            public IndexedShapefileDataStore(URL url, URI namespace,
0252:                    boolean useMemoryMappedBuffer, boolean createIndex,
0253:                    byte treeType, Charset dbfCharset)
0254:                    throws java.net.MalformedURLException {
0255:                super (url, namespace, true, dbfCharset);
0256:                // test that the shx file can be accessed
0257:
0258:                this .treeType = treeType;
0259:                this .useMemoryMappedBuffer = new File(shpURL.getFile())
0260:                        .exists()
0261:                        && useMemoryMappedBuffer;
0262:                this .useIndex = treeType != TREE_NONE && isLocal();
0263:
0264:                if (this .isLocal()) {
0265:                    fixURL = ShapefileDataStoreFactory.toFixURL(url);
0266:                    if (treeType == TREE_QIX) {
0267:                        treeURL = ShapefileDataStoreFactory.toQixURL(url);
0268:                        this .treeType = TREE_QIX;
0269:                        LOGGER.fine("Using qix tree");
0270:                    } else if (treeType == TREE_GRX) {
0271:                        treeURL = ShapefileDataStoreFactory.toGrxURL(url);
0272:                        LOGGER.fine("Using grx tree");
0273:                    } else {
0274:                        treeURL = ShapefileDataStoreFactory.toQixURL(url);
0275:                        this .treeType = TREE_NONE;
0276:                    }
0277:                    this .createIndex = new File(new File(treeURL.getFile())
0278:                            .getParent()).canWrite()
0279:                            && createIndex && useIndex;
0280:                } else {
0281:                    treeURL = ShapefileDataStoreFactory.toQixURL(url);
0282:                    this .treeType = TREE_NONE;
0283:                    this .createIndex = false;
0284:                    fixURL = null;
0285:                }
0286:
0287:            }
0288:
0289:            protected void finalize() throws Throwable {
0290:                if (rtree != null) {
0291:                    try {
0292:                        rtree.close();
0293:                    } catch (Exception e) {
0294:                        e.printStackTrace();
0295:                        LOGGER
0296:                                .severe("org.geotools.data.shapefile.indexed.IndexedShapefileDataStore#finalize(): Error closing rtree. "
0297:                                        + e.getLocalizedMessage());
0298:                    }
0299:                }
0300:            }
0301:
0302:            /**
0303:             * Determine if the location of this shape is local or remote.
0304:             *
0305:             * @return true if local, false if remote
0306:             */
0307:            public boolean isLocal() {
0308:                return shpURL.getProtocol().equals("file");
0309:            }
0310:
0311:            protected Filter getUnsupportedFilter(String typeName, Filter filter) {
0312:
0313:                if (filter instanceof  FidFilter && fixURL != null)
0314:                    return Filter.INCLUDE;
0315:
0316:                return filter;
0317:            }
0318:
0319:            public FeatureWriter getFeatureWriterAppend(String typeName,
0320:                    Transaction transaction) throws IOException {
0321:                if (transaction == null) {
0322:                    throw new NullPointerException(
0323:                            "getFeatureWriter requires Transaction: "
0324:                                    + "did you mean to use Transaction.AUTO_COMMIT?");
0325:                }
0326:
0327:                FeatureWriter writer;
0328:
0329:                if (transaction == Transaction.AUTO_COMMIT) {
0330:                    return super .getFeatureWriterAppend(typeName, transaction);
0331:                } else {
0332:                    writer = state(transaction)
0333:                            .writer(typeName, Filter.EXCLUDE);
0334:                }
0335:
0336:                if (getLockingManager() != null) {
0337:                    // subclass has not provided locking so we will
0338:                    // fake it with InProcess locks
0339:                    writer = ((InProcessLockingManager) getLockingManager())
0340:                            .checkedWriter(writer, transaction);
0341:                }
0342:
0343:                while (writer.hasNext())
0344:                    writer.next();
0345:                return writer;
0346:            }
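            // A sketch of appending one feature through this writer under an explicit
            // transaction; "store", "typeName" and "newGeometry" are placeholders and
            // error handling is elided:
            //
            //     Transaction t = new DefaultTransaction("append");
            //     FeatureWriter writer = store.getFeatureWriterAppend(typeName, t);
            //     try {
            //         Feature feature = writer.next();  // positioned past existing features
            //         feature.setDefaultGeometry(newGeometry);
            //         writer.write();
            //         t.commit();
            //     } finally {
            //         writer.close();
            //         t.close();
            //     }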
0347:
0348:            private TransactionStateDiff state(Transaction transaction) {
0349:                synchronized (transaction) {
0350:                    TransactionStateDiff state = (TransactionStateDiff) transaction
0351:                            .getState(this );
0352:
0353:                    if (state == null) {
0354:                        state = new TransactionStateDiff(this );
0355:                        transaction.putState(this , state);
0356:                    }
0357:
0358:                    return state;
0359:                }
0360:            }
0361:
0362:            /**
0363:             * Uses the spatial index if available and adds a small optimization: if no
0364:             * attributes are going to be read, the dbf file is not opened and read
0365:             * needlessly.
0366:             *
0367:             * @see org.geotools.data.AbstractDataStore#getFeatureReader(java.lang.String,
0368:             *      org.geotools.data.Query)
0369:             */
0370:            protected FeatureReader getFeatureReader(String typeName,
0371:                    Query query) throws IOException {
0372:                if (query.getFilter() == Filter.EXCLUDE)
0373:                    return new EmptyFeatureReader(getSchema());
0374:
0375:                String[] propertyNames = query.getPropertyNames() == null ? new String[0]
0376:                        : query.getPropertyNames();
0377:                String defaultGeomName = schema.getDefaultGeometry().getName();
0378:
0379:                FilterAttributeExtractor fae = new FilterAttributeExtractor();
0380:                query.getFilter().accept(fae, null);
0381:
0382:                Set attributes = new HashSet(Arrays.asList(propertyNames));
0383:                attributes.addAll(fae.getAttributeNameSet());
0384:
0385:                FeatureType newSchema = schema;
0386:                boolean readDbf = true;
0387:                boolean readGeometry = true;
0388:
0389:                propertyNames = (String[]) attributes
0390:                        .toArray(new String[attributes.size()]);
0391:
0392:                try {
0393:                    if (((query.getPropertyNames() != null)
0394:                            && (propertyNames.length == 1) && propertyNames[0]
0395:                            .equals(defaultGeomName))) {
0396:                        readDbf = false;
0397:                        newSchema = DataUtilities.createSubType(schema,
0398:                                propertyNames);
0399:                    } else if ((query.getPropertyNames() != null)
0400:                            && (propertyNames.length == 0)) {
0401:                        readDbf = false;
0402:                        readGeometry = false;
0403:                        newSchema = DataUtilities.createSubType(schema,
0404:                                propertyNames);
0405:                    }
0406:
0407:                    return createFeatureReader(typeName, getAttributesReader(
0408:                            readDbf, readGeometry, query.getFilter()),
0409:                            newSchema);
0410:                } catch (SchemaException se) {
0411:                    throw new DataSourceException("Error creating schema", se);
0412:                }
0413:            }
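            // A sketch of a query that triggers the optimization above by asking for
            // the geometry column only, so the dbf file is never opened; DefaultQuery
            // (org.geotools.data.DefaultQuery) and the "store" variable are assumed:
            //
            //     String typeName = store.getTypeNames()[0];
            //     String geomName = store.getSchema(typeName).getDefaultGeometry().getName();
            //     Query query = new DefaultQuery(typeName, Filter.INCLUDE,
            //             new String[] { geomName });
            //     FeatureReader reader = store.getFeatureReader(query, Transaction.AUTO_COMMIT);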
0414:
0415:            /**
0416:             * Creates the FeatureReader, wrapping the given attribute reader with FID handling.
0417:             *
0418:             * @param typeName
0419:             * @param r
0420:             * @param readerSchema
0421:             *
0422:             *
0423:             * @throws SchemaException
0424:             * @throws IOException
0425:             */
0426:            protected FeatureReader createFeatureReader(String typeName,
0427:                    Reader r, FeatureType readerSchema) throws SchemaException,
0428:                    IOException {
0429:
0430:                if (isLocal() && fixURL != null) {
0431:                    if (!(new File(fixURL.getFile()).exists()))
0432:                        fixURL = FidIndexer.generate(shpURL);
0433:
0434:                    if (fixURL == null)
0435:                        return new org.geotools.data.FIDFeatureReader(r,
0436:                                new ShapeFIDReader(getCurrentTypeName(), r),
0437:                                readerSchema);
0438:
0439:                    return new org.geotools.data.FIDFeatureReader(r,
0440:                            new IndexedFidReader(getCurrentTypeName(), r,
0441:                                    getReadChannel(fixURL)), readerSchema);
0442:                } else {
0443:                    return new org.geotools.data.FIDFeatureReader(r,
0444:                            new ShapeFIDReader(getCurrentTypeName(), r),
0445:                            readerSchema);
0446:                }
0447:            }
0448:
0449:            /**
0450:             * Returns the attribute reader, allowing for a pure shape reader, or a
0451:             * combined dbf/shp reader.
0452:             *
0453:             * @param readDbf -
0454:             *            if true, the dbf file will be opened and read
0455:             * @param readGeometry
0456:             *            if true, the geometry will be read from the shp file
0457:             * @param filter -
0458:             *            a Filter to use
0459:             *
0460:             *
0461:             * @throws IOException
0462:             */
0463:            protected Reader getAttributesReader(boolean readDbf,
0464:                    boolean readGeometry, Filter filter) throws IOException {
0465:                Envelope bbox = null;
0466:
0467:                Collection goodRecs = null;
0468:                if (filter instanceof  FidFilter && fixURL != null) {
0469:                    FidFilter fidFilter = (FidFilter) filter;
0470:                    goodRecs = queryFidIndex(fidFilter.getFids());
0471:                } else {
0472:                    if (filter != null) {
0473:                        FilterConsumer fc = new FilterConsumer();
0474:                        Filters.accept(filter, fc);
0475:                        bbox = fc.getBounds();
0476:                    }
0477:
0478:                    if ((bbox != null) && this .useIndex) {
0479:                        try {
0480:                            goodRecs = this .queryTree(bbox);
0481:                        } catch (TreeException e) {
0482:                            throw new IOException("Error querying index: "
0483:                                    + e.getMessage());
0484:                        }
0485:                    }
0486:                }
0487:
0488:                AttributeType[] atts = (schema == null) ? readAttributes()
0489:                        : schema.getAttributeTypes();
0490:
0491:                IndexedDbaseFileReader dbfR = null;
0492:
0493:                if (!readDbf) {
0494:                    LOGGER
0495:                            .fine("The DBF file won't be opened since no attributes "
0496:                                    + "will be read from it");
0497:                    atts = new AttributeType[] { schema.getDefaultGeometry() };
0498:
0499:                    if (!readGeometry) {
0500:                        atts = new AttributeType[0];
0501:                    }
0502:                } else {
0503:                    dbfR = (IndexedDbaseFileReader) openDbfReader();
0504:                }
0505:
0506:                return new Reader(atts, openShapeReader(), dbfR, goodRecs);
0507:            }
0508:
0509:            /**
0510:             * Uses the Fid index to quickly look up the shp offset and the record number
0511:             * for the list of fids
0512:             *
0513:             * @param fids
0514:             *            the fids of the features to find.
0515:             * @return a list of Data objects
0516:             * @throws IOException
0517:             * @throws TreeException
0518:             */
0519:            private List queryFidIndex(String[] fids) throws IOException {
0520:                Arrays.sort(fids);
0521:                IndexedFidReader reader = null;
0522:                try {
0523:                    File indexFile = new File(fixURL.getFile());
0524:                    if (isLocal()) {
0525:                        synchronized (FIX_LOCK) {
0526:
0527:                            // remove index file if it is out of date.
0528:                            if (indexFile.exists() && !isIndexed(fixURL)) {
0529:                                if (!indexFile.delete()) {
0530:                                    indexFile.deleteOnExit();
0531:                                    fixURL = null;
0532:                                    return null;
0533:                                }
0534:                            }
0535:
0536:                            if (!(indexFile.exists()))
0537:                                FidIndexer.generate(shpURL);
0538:                        }
0539:                    } else {
0540:                        return null;
0541:                    }
0542:                    if (!(indexFile.exists())) {
0543:                        fixURL = null;
0544:                        return null;
0545:                    }
0546:
0547:                    reader = new IndexedFidReader(getCurrentTypeName(),
0548:                            getReadChannel(fixURL));
0549:                    if (reader.getRemoves() >= reader.getCount() / 2) {
0550:                        indexFile.deleteOnExit();
0551:                    }
0552:
0553:                } catch (Exception e) {
0554:                    fixURL = null;
0555:                    return null;
0556:                }
0557:
0558:                List records = new ArrayList(fids.length);
0559:                try {
0560:                    IndexFile shx = openIndexFile(shxURL);
0561:                    try {
0562:
0563:                        DataDefinition def = new DataDefinition("US-ASCII");
0564:                        def.addField(Integer.class);
0565:                        def.addField(Long.class);
0566:                        for (int i = 0; i < fids.length; i++) {
0567:                            long recno = reader.findFid(fids[i]);
0568:                            if (recno == -1)
0569:                                continue;
0570:                            try {
0571:                                Data data = new Data(def);
0572:                                data.addValue(new Integer((int) recno + 1));
0573:                                data.addValue(new Long(shx
0574:                                        .getOffsetInBytes((int) recno)));
0575:                                records.add(data);
0576:                            } catch (Exception e) {
0577:                                IOException exception = new IOException();
0578:                                exception.initCause(e);
0579:                                throw exception;
0580:                            }
0581:                        }
0582:                    } finally {
0583:                        shx.close();
0584:                    }
0585:                } finally {
0586:                    reader.close();
0587:                }
0588:
0589:                return records;
0590:            }
0591:
0592:            private boolean isIndexed(URL indexURL) {
0593:                if (!isLocal())
0594:                    return false;
0595:                File indexFile = new File(indexURL.getFile());
0596:                File shpFile = new File(shpURL.getPath());
0597:                return indexFile.exists()
0598:                        && indexFile.lastModified() >= shpFile.lastModified();
0599:            }
0600:
0601:            /**
0602:             * Returns true if the indices already exist and do not need to be regenerated.
0603:             *
0604:             * @return true if the indices already exist and do not need to be regenerated.
0605:             */
0606:            public boolean isIndexed() {
0607:                return isIndexed(fixURL) && isIndexed(treeURL);
0608:            }
0609:
0610:            /**
0611:             * Queries the spatial index
0612:             *
0613:             * @param bbox
0614:             *
0615:             * @return a List of <code>Data</code> objects
0616:             */
0617:            private Collection queryTree(Envelope bbox)
0618:                    throws DataSourceException, IOException, TreeException {
0619:                if (this .treeType == TREE_GRX) {
0620:                    return this .queryRTree(bbox);
0621:                } else if (this .treeType == TREE_QIX) {
0622:                    return this .queryQuadTree(bbox);
0623:                } else {
0624:                    // Should not happen
0625:                    return null;
0626:                }
0627:            }
0628:
0629:            /**
0630:             * RTree query
0631:             *
0632:             * @param bbox
0633:             *
0634:             *
0635:             * @throws DataSourceException
0636:             * @throws IOException
0637:             */
0638:            private List queryRTree(Envelope bbox) throws DataSourceException,
0639:                    IOException {
0640:                List goodRecs = null;
0641:                RTree rtree = this .openRTree();
0642:
0643:                try {
0644:                    if ((rtree != null) && (rtree.getBounds() != null)
0645:                            && !bbox.contains(rtree.getBounds())) {
0646:                        goodRecs = rtree.search(bbox);
0647:                    }
0648:                } catch (LockTimeoutException le) {
0649:                    throw new DataSourceException("Error querying RTree", le);
0650:                } catch (TreeException re) {
0651:                    throw new DataSourceException("Error querying RTree", re);
0652:                }
0653:
0654:                return goodRecs;
0655:            }
0656:
0657:            /**
0658:             * QuadTree Query
0659:             *
0660:             * @param bbox
0661:             *
0662:             *
0663:             * @throws DataSourceException
0664:             * @throws IOException
0665:             * @throws TreeException
0666:             *             if an error occurs accessing the QuadTree index
0667:             */
0668:            private Collection queryQuadTree(Envelope bbox)
0669:                    throws DataSourceException, IOException, TreeException {
0670:                Collection tmp = null;
0671:
0672:                try {
0673:                    QuadTree quadTree = openQuadTree();
0674:                    if ((quadTree != null)
0675:                            && !bbox.contains(quadTree.getRoot().getBounds())) {
0676:                        tmp = quadTree.search(bbox);
0677:
0678:                        if (tmp == null || !tmp.isEmpty())
0679:                            return tmp;
0680:                    }
0681:                    if (quadTree != null)
0682:                        quadTree.close();
0683:                } catch (Exception e) {
0684:                    throw new DataSourceException("Error querying QuadTree", e);
0685:                }
0686:
0687:                return null;
0688:            }
0689:
0690:            /**
0691:             * Convenience method for opening a DbaseFileReader.
0692:             *
0693:             * @return A new DbaseFileReader
0694:             *
0695:             * @throws IOException
0696:             *             If an error occurs during creation.
0697:             */
0698:            protected DbaseFileReader openDbfReader() throws IOException {
0699:                ReadableByteChannel rbc = getReadChannel(dbfURL);
0700:
0701:                if (rbc == null) {
0702:                    return null;
0703:                }
0704:
0705:                return new IndexedDbaseFileReader(rbc, false, dbfCharset);
0706:            }
0707:
0708:            /**
0709:             * Convenience method for opening an RTree index.
0710:             *
0711:             * @return A new RTree.
0712:             *
0713:             * @throws IOException
0714:             *             If an error occurs during creation.
0715:             * @throws DataSourceException
0716:             *             if the RTree index file cannot be opened
0717:             */
0718:            protected RTree openRTree() throws IOException {
0719:                if (rtree == null) {
0720:                    if (!this .isLocal()) {
0721:                        return null;
0722:                    }
0723:
0724:                    File treeFile = new File(treeURL.getPath());
0725:                    synchronized (GRX_LOCK) {
0726:
0727:                        // remove index file if it is out of date.
0728:                        if (treeFile.exists() && !isIndexed(treeURL)) {
0729:                            if (!treeFile.delete()) {
0730:                                treeFile.deleteOnExit();
0731:                                createIndex = false;
0732:                                treeType = TREE_NONE;
0733:                                return null;
0734:                            }
0735:                        }
0736:
0737:                        if (!treeFile.exists() || (treeFile.length() == 0)) {
0738:                            if (this .createIndex) {
0739:                                try {
0740:                                    this .buildRTree();
0741:                                } catch (TreeException e) {
0742:                                    createIndex = false;
0743:                                    return null;
0744:                                }
0745:                            } else {
0746:                                return null;
0747:                            }
0748:                        }
0749:
0750:                        if (!treeFile.exists() || (treeFile.length() == 0)) {
0751:                            createIndex = false;
0752:                            treeType = TREE_NONE;
0753:                            return null;
0754:                        }
0755:                    }
0756:                    try {
0757:                        FileSystemPageStore fps = new FileSystemPageStore(
0758:                                treeFile);
0759:                        rtree = new RTree(fps);
0760:                    } catch (TreeException re) {
0761:                        throw new DataSourceException("Error opening RTree", re);
0762:                    }
0763:                }
0764:
0765:                return rtree;
0766:            }
0767:
0768:            /**
0769:             * Convenience method for opening a QuadTree index.
0770:             *
0771:             * @return A new QuadTree
0772:             *
0773:             * @throws StoreException
0774:             */
0775:            protected QuadTree openQuadTree() throws StoreException {
0776:                QuadTree quadTree = null;
0777:                if (quadTree == null) {
0778:                    File treeFile = new File(treeURL.getPath());
0779:                    synchronized (QIX_LOCK) {
0780:
0781:                        // remove index file if it is out of date.
0782:                        if (treeFile.exists() && !isIndexed(treeURL)) {
0783:                            if (!treeFile.delete()) {
0784:                                createIndex = false;
0785:                                treeType = TREE_NONE;
0786:                                return null;
0787:                            }
0788:                        }
0789:
0790:                        if (!treeFile.exists() || (treeFile.length() == 0)) {
0791:                            if (this .createIndex) {
0792:                                try {
0793:                                    this .buildQuadTree(maxDepth);
0794:                                } catch (Throwable e) {
0795:                                    createIndex = false;
0796:                                    treeType = TREE_NONE;
0797:                                    return null;
0798:                                }
0799:                            } else {
0800:                                return null;
0801:                            }
0802:                        }
0803:
0804:                        if (!treeFile.exists() || (treeFile.length() == 0)) {
0805:                            createIndex = false;
0806:                            treeType = TREE_NONE;
0807:                            return null;
0808:                        }
0809:                    }
0810:
0811:                    FileSystemIndexStore store = new FileSystemIndexStore(
0812:                            treeFile);
0813:                    try {
0814:                        quadTree = store.load(openIndexFile(shxURL));
0815:                    } catch (IOException e) {
0816:                        throw new StoreException(e);
0817:                    }
0818:                }
0819:
0820:                return quadTree;
0821:            }
0822:
0823:            /**
0824:             * Get an array of type names this DataStore holds.<BR/>ShapefileDataStore
0825:             * will always return a single name.
0826:             *
0827:             * @return An array of length one containing the single type held.
0828:             */
0829:            public String[] getTypeNames() {
0830:                return new String[] { getCurrentTypeName(), };
0831:            }
0832:
0833:            /**
0834:             * Create the type name of the single FeatureType this DataStore represents.<BR/>
0835:             * For example, if the url's path is file:///home/billy/mytheme.shp, the type
0836:             * name will be mytheme.
0837:             *
0838:             * @return A name based upon the last path component of the url minus the
0839:             *         extension.
0840:             */
0841:            protected String createFeatureTypeName() {
0842:                String path = shpURL.getPath();
0843:                int slash = Math.max(0, path.lastIndexOf('/') + 1);
0844:                int dot = path.indexOf('.', slash);
0845:
0846:                if (dot < 0) {
0847:                    dot = path.length();
0848:                }
0849:
0850:                return path.substring(slash, dot);
0851:            }
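            // Worked example of the derivation above: for the path
            // "/home/billy/mytheme.shp", lastIndexOf('/') + 1 is 12, indexOf('.', 12)
            // is 19, and substring(12, 19) yields "mytheme".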
0852:
0853:            protected String getCurrentTypeName() {
0854:                return (schema == null) ? createFeatureTypeName() : schema
0855:                        .getTypeName();
0856:            }
0857:
0858:            /**
0859:             * A convenience method to check if a type name is correct.
0860:             *
0861:             * @param requested
0862:             *            The type name requested.
0863:             *
0864:             * @throws IOException
0865:             *             If the type name is not available
0866:             */
0867:            protected void typeCheck(String requested) throws IOException {
0868:                if (!getCurrentTypeName().equals(requested)) {
0869:                    throw new IOException("No such type : " + requested);
0870:                }
0871:            }
0872:
0873:            /**
0874:             * Create a FeatureWriter for the given type name.
0875:             *
0876:             * @param typeName
0877:             *            The typeName of the FeatureType to write
0878:             * @param transaction
0879:             *            the transaction the writer will operate under
0880:             *
0881:             * @return A new FeatureWriter.
0882:             *
0883:             * @throws IOException
0884:             *             If the typeName is not available or some other error occurs.
0885:             */
0886:            protected FeatureWriter createFeatureWriter(String typeName,
0887:                    Transaction transaction) throws IOException {
0888:                typeCheck(typeName);
0889:
0890:                return new Writer(typeName);
0891:            }
0892:
0893:            /**
0894:             * Obtain the FeatureType of the given name. ShapefileDataStore contains
0895:             * only one FeatureType.
0896:             *
0897:             * @param typeName
0898:             *            The name of the FeatureType.
0899:             *
0900:             * @return The FeatureType that this DataStore contains.
0901:             *
0902:             * @throws IOException
0903:             *             If a type by the requested name is not present.
0904:             */
0905:            public FeatureType getSchema(String typeName) throws IOException {
0906:                typeCheck(typeName);
0907:
0908:                return getSchema();
0909:            }
0910:
0911:            public FeatureType getSchema() throws IOException {
0912:                if (schema == null) {
0913:                    try {
0914:                        AttributeType[] types = readAttributes();
0915:                        FeatureType parent = null;
0916:                        Class geomType = types[0].getType();
0917:
0918:                        if ((geomType == Point.class)
0919:                                || (geomType == MultiPoint.class)) {
0920:                            parent = BasicFeatureTypes.POINT;
0921:                        } else if ((geomType == Polygon.class)
0922:                                || (geomType == MultiPolygon.class)) {
0923:                            parent = BasicFeatureTypes.POLYGON;
0924:                        } else if ((geomType == LineString.class)
0925:                                || (geomType == MultiLineString.class)) {
0926:                            parent = BasicFeatureTypes.LINE;
0927:                        }
0928:
0929:                        if (parent != null) {
0930:                            schema = FeatureTypes.newFeatureType(
0931:                                    readAttributes(), createFeatureTypeName(),
0932:                                    namespace, false,
0933:                                    new FeatureType[] { parent });
0934:                        } else {
0935:                            if (namespace != null) {
0936:                                schema = FeatureTypes.newFeatureType(
0937:                                        readAttributes(),
0938:                                        createFeatureTypeName(), namespace,
0939:                                        false);
0940:                            } else {
0941:                                schema = FeatureTypes.newFeatureType(
0942:                                        readAttributes(),
0943:                                        createFeatureTypeName(),
0944:                                        FeatureTypes.DEFAULT_NAMESPACE, false);
0945:                            }
0946:                        }
0947:                    } catch (SchemaException se) {
0948:                        throw new DataSourceException(
0949:                                "Error creating FeatureType", se);
0950:                    }
0951:                }
0952:
0953:                return schema;
0954:            }
0955:
0956:            /**
0957:             * @see org.geotools.data.AbstractDataStore#getBounds(org.geotools.data.Query)
0958:             */
0959:            protected Envelope getBounds(Query query) throws IOException {
0960:                Envelope ret = null;
0961:
0962:                Set records = new HashSet();
0963:                if (query.getFilter() == Filter.INCLUDE || query == Query.ALL) {
0964:                    return getBounds();
0965:                } else if (this .useIndex) {
0966:                    if (treeType == TREE_GRX) {
0967:                        return getBoundsRTree(query);
0968:                    }
0969:                }
0970:
0971:                FidFilterParserVisitor visitor = new FidFilterParserVisitor();
0972:                Filters.accept(query.getFilter(), visitor);
0973:                if (!visitor.fids.isEmpty()) {
0974:                    List recordsFound = queryFidIndex((String[]) visitor.fids
0975:                            .toArray(new String[0]));
0976:                    if (recordsFound != null)
0977:                        records.addAll(recordsFound);
0978:                }
0979:
0980:                if (records.isEmpty())
0981:                    return null;
0982:
0983:                ShapefileReader reader = new ShapefileReader(
0984:                        getReadChannel(shpURL), this .readWriteLock);
0985:                try {
0986:                    ret = new Envelope();
0987:                    for (Iterator iter = records.iterator(); iter.hasNext();) {
0988:                        Data data = (Data) iter.next();
0989:                        reader.goTo(((Long) data.getValue(1)).intValue());
0990:                        Record record = reader.nextRecord();
0991:                        ret.expandToInclude(new Envelope(record.minX,
0992:                                record.maxX, record.minY, record.maxY));
0993:                    }
0994:                    return ret;
0995:                } finally {
0996:                    reader.close();
0997:                }
0998:            }
0999:
1000:            private Envelope getBoundsRTree(Query query) throws IOException {
1001:                Envelope ret = null;
1002:
1003:                RTree rtree = this .openRTree();
1004:
1005:                if (rtree != null) {
1006:                    try {
1007:                        ret = rtree.getBounds(query.getFilter());
1008:                    } catch (TreeException e) {
1009:                        LOGGER.log(Level.SEVERE, e.getMessage(), e);
1010:                    } catch (UnsupportedFilterException e) {
1011:                        // Ignoring...
1012:                    } finally {
1013:                        try {
1014:                            rtree.close();
1015:                        } catch (Exception ee) {
1016:                        }
1017:                    }
1018:                }
1019:                return ret;
1020:            }
1021:
1022:            /**
1023:             * @see org.geotools.data.DataStore#getFeatureSource(java.lang.String)
1024:             */
1025:            public FeatureSource getFeatureSource(final String typeName)
1026:                    throws IOException {
1027:                final FeatureType featureType = getSchema(typeName);
1028:
1029:                if (isWriteable) {
1030:                    if (getLockingManager() != null) {
1031:                        return new AbstractFeatureLocking() {
1032:                            public DataStore getDataStore() {
1033:                                return IndexedShapefileDataStore.this ;
1034:                            }
1035:
1036:                            public void addFeatureListener(
1037:                                    FeatureListener listener) {
1038:                                listenerManager.addFeatureListener(this ,
1039:                                        listener);
1040:                            }
1041:
1042:                            public void removeFeatureListener(
1043:                                    FeatureListener listener) {
1044:                                listenerManager.removeFeatureListener(this ,
1045:                                        listener);
1046:                            }
1047:
1048:                            public FeatureType getSchema() {
1049:                                return featureType;
1050:                            }
1051:
1052:                            public Envelope getBounds(Query query)
1053:                                    throws IOException {
1054:                                return IndexedShapefileDataStore.this 
1055:                                        .getBounds(query);
1056:                            }
1057:                        };
1058:                    } else {
1059:                        return new AbstractFeatureStore() {
1060:                            public DataStore getDataStore() {
1061:                                return IndexedShapefileDataStore.this ;
1062:                            }
1063:
1064:                            public void addFeatureListener(
1065:                                    FeatureListener listener) {
1066:                                listenerManager.addFeatureListener(this ,
1067:                                        listener);
1068:                            }
1069:
1070:                            public void removeFeatureListener(
1071:                                    FeatureListener listener) {
1072:                                listenerManager.removeFeatureListener(this ,
1073:                                        listener);
1074:                            }
1075:
1076:                            public FeatureType getSchema() {
1077:                                return featureType;
1078:                            }
1079:
1080:                            public Envelope getBounds(Query query)
1081:                                    throws IOException {
1082:                                return IndexedShapefileDataStore.this 
1083:                                        .getBounds(query);
1084:                            }
1085:                        };
1086:                    }
1087:                } else {
1088:                    return new AbstractFeatureSource() {
1089:                        public DataStore getDataStore() {
1090:                            return IndexedShapefileDataStore.this ;
1091:                        }
1092:
1093:                        public void addFeatureListener(FeatureListener listener) {
1094:                            listenerManager.addFeatureListener(this , listener);
1095:                        }
1096:
1097:                        public void removeFeatureListener(
1098:                                FeatureListener listener) {
1099:                            listenerManager.removeFeatureListener(this ,
1100:                                    listener);
1101:                        }
1102:
1103:                        public FeatureType getSchema() {
1104:                            return featureType;
1105:                        }
1106:
1107:                        public Envelope getBounds(Query query)
1108:                                throws IOException {
1109:                            return IndexedShapefileDataStore.this 
1110:                                    .getBounds(query);
1111:                        }
1112:                    };
1113:                }
1114:            }
1115:
1116:            /**
1117:             * Builds the RTree index
1118:             *
1119:             * @throws TreeException
1120:             *             if an error occurs while building the index
1121:             */
1122:            private void buildRTree() throws TreeException {
1123:                if (isLocal()) {
1124:                    LOGGER.fine("Creating spatial index for "
1125:                            + shpURL.getPath());
1126:
1127:                    synchronized (this ) {
1128:                        if (rtree != null) {
1129:                            rtree.close();
1130:                        }
1131:
1132:                        rtree = null;
1133:                    }
1134:
1135:                    ShapeFileIndexer indexer = new ShapeFileIndexer();
1136:                    indexer.setIdxType(ShapeFileIndexer.RTREE);
1137:                    indexer.setShapeFileName(shpURL.getPath());
1138:
1139:                    try {
1140:                        indexer.index(false, readWriteLock);
1141:                    } catch (MalformedURLException e) {
1142:                        throw new TreeException(e);
1143:                    } catch (LockTimeoutException e) {
1144:                        throw new TreeException(e);
1145:                    } catch (Exception e) {
1146:                        File f = new File(treeURL.getPath());
1147:
1148:                        if (f.exists()) {
1149:                            f.delete();
1150:                        }
1151:
1152:                        if (e instanceof  TreeException) {
1153:                            throw (TreeException) e;
1154:                        } else {
1155:                            throw new TreeException(e);
1156:                        }
1157:                    }
1158:                }
1159:            }
1160:
1161:            /**
1162:             * Builds the QuadTree index. Usually not necessary, since reading features will build the index when required.
1163:             *
1164:             * @param maxDepth depth of the tree; if < 0, a best guess is made.
1165:             * @throws TreeException
1166:             */
1167:            public void buildQuadTree(int maxDepth) throws TreeException {
1168:                if (isLocal()) {
1169:                    LOGGER.fine("Creating spatial index for "
1170:                            + shpURL.getPath());
1171:
1172:                    ShapeFileIndexer indexer = new ShapeFileIndexer();
1173:                    indexer.setIdxType(ShapeFileIndexer.QUADTREE);
1174:                    indexer.setShapeFileName(shpURL.getPath());
1175:                    indexer.setMax(maxDepth);
1176:
1177:                    try {
1178:                        indexer.index(false, readWriteLock);
1179:                    } catch (MalformedURLException e) {
1180:                        throw new TreeException(e);
1181:                    } catch (LockTimeoutException e) {
1182:                        throw new TreeException(e);
1183:                    } catch (Exception e) {
1184:                        File f = new File(treeURL.getPath());
1185:
1186:                        if (f.exists()) {
1187:                            f.delete();
1188:                        }
1189:
1190:                        if (e instanceof  TreeException) {
1191:                            throw (TreeException) e;
1192:                        } else {
1193:                            throw new TreeException(e);
1194:                        }
1195:                    }
1196:                }
1197:            }
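            /*
             * A minimal usage sketch for buildQuadTree, assuming the URL-taking
             * constructor declared earlier in this file; "roads.shp" is only a
             * placeholder path. A negative maxDepth lets the indexer pick a
             * best-guess depth, as documented above.
             *
             *     URL shpURL = new File("roads.shp").toURI().toURL();
             *     IndexedShapefileDataStore store =
             *             new IndexedShapefileDataStore(shpURL);
             *     try {
             *         store.buildQuadTree(-1); // best-guess depth, writes a .qix file
             *     } catch (TreeException te) {
             *         // the spatial index could not be (re)built
             *     }
             */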
1198:
1199:            public boolean isMemoryMapped() {
1200:                return useMemoryMappedBuffer;
1201:            }
1202:
1203:            /**
1204:             * An AttributeReader implementation for shape. Pretty straightforward.
1205:             * <BR/>The default geometry is at position 0, and all dbf columns follow.
1206:             * <BR/>The dbf file is optional; if it is not needed, pass null as the
1207:             * DbaseFileReader. (A usage sketch follows this class.)
1208:             */
1209:            protected static class Reader extends AbstractAttributeIO implements 
1210:                    AttributeReader, RecordNumberTracker {
1211:
1212:                protected ShapefileReader shp;
1213:
1214:                protected IndexedDbaseFileReader dbf;
1215:
1216:                protected IndexedDbaseFileReader.Row row;
1217:
1218:                protected ShapefileReader.Record record;
1219:
1220:                protected Iterator goodRecs;
1221:
1222:                private int recno;
1223:
1224:                private Data next;
1225:
1226:                /**
1227:                 * Create the shape reader.
1228:                 *
1229:                 * @param atts -
1230:                 *            the attributes that we are going to read.
1231:                 * @param shp -
1232:                 *            the shape reader, required
1233:                 * @param dbf -
1234:                 *            the dbf file reader. May be null; in this case no
1235:                 *            attributes will be read from the dbf file
1236:                 * @param goodRecs -
1237:                 *            the index records matching the query; may be null, in which case all records are read sequentially
1238:                 */
1239:                public Reader(AttributeType[] atts, ShapefileReader shp,
1240:                        IndexedDbaseFileReader dbf, Collection goodRecs) {
1241:                    super(atts);
1242:                    this.shp = shp;
1243:                    this.dbf = dbf;
1244:                    if (goodRecs != null)
1245:                        this.goodRecs = goodRecs.iterator();
1246:
1247:                    this.recno = 0;
1248:                }
1249:
1250:                public void close() throws IOException {
1251:                    try {
1252:                        if (shp != null)
1253:                            shp.close();
1254:
1255:                        if (dbf != null) {
1256:                            dbf.close();
1257:                        }
1258:                    } finally {
1259:                        row = null;
1260:                        record = null;
1261:                        shp = null;
1262:                        dbf = null;
1263:                        goodRecs = null;
1264:                    }
1265:                }
1266:
1267:                public boolean hasNext() throws IOException {
1268:                    if (this.goodRecs != null) {
1269:                        if (next != null)
1270:                            return true;
1271:                        if (this.goodRecs.hasNext()) {
1272:
1273:                            next = (Data) goodRecs.next();
1274:                            this.recno = ((Integer) next.getValue(0))
1275:                                    .intValue();
1276:                            return true;
1277:                        }
1278:                        return false;
1279:                    }
1280:
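                    // No index: probe both files. shp contributes 1 and dbf
                    // contributes 2, so n == 3 (or n == 1 with no dbf) means a
                    // complete record remains, n == 0 means end of input, and
                    // anything else means the shp and dbf are out of sync.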
1281:                    int n = shp.hasNext() ? 1 : 0;
1282:
1283:                    if (dbf != null) {
1284:                        n += (dbf.hasNext() ? 2 : 0);
1285:                    }
1286:
1287:                    if ((n == 3) || ((n == 1) && (dbf == null))) {
1288:                        return true;
1289:                    }
1290:
1291:                    if (n == 0) {
1292:                        return false;
1293:                    }
1294:
1295:                    throw new IOException(((n == 1) ? "Shp" : "Dbf")
1296:                            + " has extra record");
1297:                }
1298:
1299:                public void next() throws IOException {
1300:                    if (!hasNext())
1301:                        throw new IndexOutOfBoundsException(
1302:                                "No more features in reader");
1303:                    if (this.goodRecs != null) {
1304:                        this.recno = ((Integer) next.getValue(0)).intValue();
1305:
1306:                        if (dbf != null) {
1307:                            dbf.goTo(this.recno);
1308:                        }
1309:
1310:                        Long l = (Long) next.getValue(1);
1311:                        shp.goTo((int) l.longValue());
1312:                        next = null;
1313:                    } else {
1314:                        this.recno++;
1315:                    }
1316:
1317:                    record = shp.nextRecord();
1318:
1319:                    if (dbf != null) {
1320:                        row = dbf.readRow();
1321:                    }
1322:                }
1323:
1324:                public int getRecordNumber() {
1325:                    return this.recno;
1326:                }
1327:
1328:                public Object read(int param) throws IOException,
1329:                        java.lang.ArrayIndexOutOfBoundsException {
1330:                    switch (param) {
1331:                    case 0:
1332:                        return record.shape();
1333:
1334:                    default:
1335:
1336:                        if (row != null) {
1337:                            return row.read(param - 1);
1338:                        } else {
1339:                            return null;
1340:                        }
1341:                    }
1342:                }
1343:            }
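            /*
             * Consumption sketch for the Reader above (hypothetical caller,
             * assuming at least one dbf column): position 0 yields the geometry
             * and positions 1..n map to dbf columns 0..n-1, exactly as read()
             * implements.
             *
             *     while (reader.hasNext()) {
             *         reader.next();
             *         Geometry geom = (Geometry) reader.read(0);
             *         Object firstAttribute = reader.read(1); // null when no dbf row
             *         int recno = reader.getRecordNumber();
             *     }
             *     reader.close();
             */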
1344:
1345:            /**
1346:             * A FeatureWriter for ShapefileDataStore. Uses a write-and-annotate
1347:             * technique to avoid buffering attributes and geometries. Because the shape
1348:             * and dbf files require header information which can only be obtained by
1349:             * reading the entire series of Features, the headers are updated after the
1350:             * initial write completes. A usage sketch follows this class.
1351:             */
1352:            protected class Writer implements FeatureWriter {
1353:                // store current time here as flag for temporary write
1354:                private long temp;
1355:
1356:                // the FeatureReader to obtain the current Feature from
1357:                protected FeatureReader featureReader;
1358:
1359:                // the AttributeReader
1360:                protected Reader attReader;
1361:
1362:                // the current Feature
1363:                private Feature currentFeature;
1364:
1365:                // the FeatureType we are representing
1366:                private FeatureType featureType;
1367:
1368:                // an array for reuse in Feature creation
1369:                private Object[] emptyAtts;
1370:
1371:                // an array for reuse in writing to dbf.
1372:                private Object[] transferCache;
1373:
1374:                private ShapeType shapeType;
1375:
1376:                private ShapeHandler handler;
1377:
1378:                // keep track of shape length during write, starts at 100 bytes for
1379:                // required header
1380:                private int shapefileLength = 100;
1381:
1382:                // keep track of the number of records written
1383:                private int records = 0;
1384:
1385:                // hold 1 if dbf should write the attribute at the index, 0 if not
1386:                private byte[] writeFlags;
1387:
1388:                private ShapefileWriter shpWriter;
1389:
1390:                private DbaseFileWriter dbfWriter;
1391:
1392:                private DbaseFileHeader dbfHeader;
1393:
1394:                private FileChannel dbfChannel;
1395:
1396:                // keep track of bounds during write
1397:                private Envelope bounds = new Envelope();
1398:
1399:                private IndexedFidWriter indexedFidWriter;
1400:
1401:                public Writer(String typeName) throws IOException {
1402:                    // set up reader
1403:                    try {
1404:                        temp = System.currentTimeMillis();
1405:                        attReader = getAttributesReader(true, true, null);
1406:                        featureReader = createFeatureReader(typeName,
1407:                                attReader, schema);
1408:                    } catch (Exception e) {
1409:                        FeatureType schema = getSchema(typeName);
1410:
1411:                        if (schema == null) {
1412:                            throw new IOException(
1413:                                    "To create a shape, you must first call createSchema()");
1414:                        }
1415:
1416:                        featureReader = new EmptyFeatureReader(schema);
1417:                        temp = 0;
1418:                    }
1419:
1420:                this.featureType = featureReader.getFeatureType();
1421:
1422:                    // set up buffers and write flags
1423:                    emptyAtts = new Object[featureType.getAttributeCount()];
1424:                    writeFlags = new byte[featureType.getAttributeCount()];
1425:
1426:                    int cnt = 0;
1427:
1428:                    for (int i = 0, ii = featureType.getAttributeCount(); i < ii; i++) {
1429:                        // if its a geometry, we don't want to write it to the dbf...
1430:                        if (!(featureType.getAttributeType(i) instanceof GeometryAttributeType)) {
1431:                            cnt++;
1432:                            writeFlags[i] = (byte) 1;
1433:                        }
1434:                    }
1435:
1436:                    // dbf transfer buffer
1437:                    transferCache = new Object[cnt];
1438:
1439:                    // open underlying writers
1440:                    shpWriter = new ShapefileWriter(
1441:                            (FileChannel) getWriteChannel(getStorageURL(shpURL,
1442:                                    temp)),
1443:                            (FileChannel) getWriteChannel(getStorageURL(shxURL,
1444:                                    temp)), readWriteLock);
1445:
1446:                    dbfChannel = (FileChannel) getWriteChannel(getStorageURL(
1447:                            dbfURL, temp));
1448:                    dbfHeader = createDbaseHeader();
1449:                    dbfWriter = new DbaseFileWriter(dbfHeader, dbfChannel);
1450:
1451:                    FileChannel fidIndexChannel = (FileChannel) getWriteChannel(getStorageURL(
1452:                            fixURL, temp));
1453:
1454:                    indexedFidWriter = new IndexedFidWriter(fidIndexChannel,
1455:                            new IndexedFidReader(getCurrentTypeName(),
1456:                                    temp != 0 ? getReadChannel(fixURL)
1457:                                            : fidIndexChannel));
1458:
1459:                    if (attReader != null && attReader.hasNext()) {
1460:                        shapeType = attReader.shp.getHeader().getShapeType();
1461:                        handler = shapeType.getShapeHandler();
1462:                        shpWriter.writeHeaders(bounds, shapeType, records,
1463:                                shapefileLength);
1464:                    }
1465:                }
1466:
1467:                /**
1468:                 * Go back and update the headers with the required info.
1469:                 *
1470:                 * @throws IOException
1471:                 *             if the shp or dbf headers cannot be rewritten
1472:                 */
1473:                protected void flush() throws IOException {
1474:                    if ((records <= 0) && (shapeType == null)) {
1475:                        GeometryAttributeType geometryAttributeType = featureType
1476:                                .getDefaultGeometry();
1477:
1478:                        Class gat = geometryAttributeType.getType();
1479:                        shapeType = JTSUtilities.getShapeType(gat);
1480:                    }
1481:
1482:                    shpWriter.writeHeaders(bounds, shapeType, records,
1483:                            shapefileLength);
1484:
1485:                    dbfHeader.setNumRecords(records);
1486:                    dbfChannel.position(0);
1487:                    dbfHeader.writeHeader(dbfChannel);
1488:                }
1489:
1490:                /**
1491:                 * Attempt to create a DbaseFileHeader for the FeatureType. Note, we
1492:                 * cannot set the number of records until the write has completed.
1493:                 * A summary of the type mapping implemented here follows the method.
1494:                 * @return a header describing the non-geometry attributes
1495:                 *
1496:                 * @throws IOException
1497:                 *             if an attribute type cannot be mapped to a dbf column
1498:                 * @throws DbaseFileException
1499:                 *             if a column cannot be added to the header
1500:                 */
1501:                protected DbaseFileHeader createDbaseHeader()
1502:                        throws IOException, DbaseFileException {
1503:                    DbaseFileHeader header = new DbaseFileHeader();
1504:
1505:                    for (int i = 0, ii = featureType.getAttributeCount(); i < ii; i++) {
1506:                        AttributeType type = featureType.getAttributeType(i);
1507:
1508:                        Class colType = type.getType();
1509:                        String colName = type.getName();
1510:                        int fieldLen = FeatureTypes.getFieldLength(type);
1511:
1512:                        if (fieldLen <= 0) {
1513:                            fieldLen = 255;
1514:                        }
1515:
1516:                        // @todo respect field length
1517:                        if ((colType == Integer.class)
1518:                                || (colType == Short.class)
1519:                                || (colType == Byte.class)) {
1520:                            header.addColumn(colName, 'N', Math
1521:                                    .min(fieldLen, 9), 0);
1522:                        } else if (colType == Long.class) {
1523:                            header.addColumn(colName, 'N', Math.min(fieldLen,
1524:                                    19), 0);
1525:                        } else if ((colType == Double.class)
1526:                                || (colType == Float.class)
1527:                                || (colType == Number.class)) {
1528:                            int l = Math.min(fieldLen, 33);
1529:                            int d = Math.max(l - 2, 0);
1530:                            header.addColumn(colName, 'N', l, d);
1531:                        } else if (java.util.Date.class
1532:                                .isAssignableFrom(colType)) {
1533:                            header.addColumn(colName, 'D', fieldLen, 0);
1534:                        } else if (colType == Boolean.class) {
1535:                            header.addColumn(colName, 'L', 1, 0);
1536:                        } else if (CharSequence.class.isAssignableFrom(colType)) {
1537:                            // Possible fix for GEOT-42: ArcExplorer doesn't like
1538:                            // 0-length fields, so ensure the length is at least 1
1539:                            // (fieldLen was already defaulted to 255 if it was <= 0)
1540:                            header.addColumn(colName, 'C', Math.min(254,
1541:                                    fieldLen), 0);
1542:                        } else if (Geometry.class.isAssignableFrom(colType)) {
1543:                            continue;
1544:                        } else {
1545:                            throw new IOException("Unable to write : "
1546:                                    + colType.getName());
1547:                        }
1548:                    }
1549:
1550:                    return header;
1551:                }
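                /*
                 * Summary of the Class-to-dbf mapping implemented above:
                 *
                 *     Integer/Short/Byte   -> 'N', width min(fieldLen, 9),  0 decimals
                 *     Long                 -> 'N', width min(fieldLen, 19), 0 decimals
                 *     Double/Float/Number  -> 'N', width min(fieldLen, 33), (width - 2) decimals
                 *     java.util.Date       -> 'D', width fieldLen
                 *     Boolean              -> 'L', width 1
                 *     CharSequence         -> 'C', width min(254, fieldLen)
                 *     Geometry             -> skipped (written to the shp file instead)
                 *     anything else        -> IOException
                 */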
1552:
1553:                /**
1554:                 * In case someone doesn't close me.
1555:                 *
1556:                 * @throws Throwable
1557:                 *             if closing the writer fails
1558:                 */
1559:                protected void finalize() throws Throwable {
1560:                    if (featureReader != null) {
1561:                        try {
1562:                            close();
1563:                        } catch (Exception e) {
1564:                            // oh well, we tried
1565:                        }
1566:                    }
1567:                }
1568:
1569:                /**
1570:                 * Clean up our temporary write, if there was one.
1571:                 *
1572:                 * @throws IOException
1573:                 *             if the temporary files cannot be copied over the originals
1574:                 */
1575:                protected void clean() throws IOException {
1576:                    if (temp == 0) {
1577:                        return;
1578:                    }
1579:
1580:                    copyAndDelete(shpURL, temp);
1581:                    copyAndDelete(shxURL, temp);
1582:                    copyAndDelete(dbfURL, temp);
1583:                    if (fixURL != null)
1584:                        copyAndDelete(fixURL, temp);
1585:                }
1586:
1587:                /**
1588:                 * Release resources and flush the header information.
1589:                 *
1590:                 * @throws IOException
1591:                 *             if the underlying writers cannot be flushed or closed
1592:                 */
1593:                public void close() throws IOException {
1594:                    if (featureReader == null) {
1595:                        throw new IOException("Writer closed");
1596:                    }
1597:
1598:                    // make sure to write the last feature...
1599:                    if (currentFeature != null) {
1600:                        write();
1601:                    }
1602:
1603:                    // if the attribute reader is here, that means we may have some
1604:                    // additional tail-end file flushing to do if the Writer was closed
1605:                    // before the end of the file
1606:                    if (attReader != null && attReader.hasNext()) {
1607:                        shapeType = attReader.shp.getHeader().getShapeType();
1608:                        handler = shapeType.getShapeHandler();
1609:
1610:                        // handle the case where zero records have been written, but the
1611:                        // stream is closed and the headers still need to be written
1612:                        if (records == 0) {
1613:                            shpWriter.writeHeaders(bounds, shapeType, 0, 0);
1614:                        }
1615:
1616:                        // copy array for bounds
1617:                        double[] env = new double[4];
1618:
1619:                        while (attReader.hasNext()) {
1620:                            // transfer bytes from shape
1621:                            shapefileLength += attReader.shp.transferTo(
1622:                                    shpWriter, ++records, env);
1623:
1624:                            // bounds update
1625:                            bounds.expandToInclude(env[0], env[1]);
1626:                            bounds.expandToInclude(env[2], env[3]);
1627:
1628:                            // transfer dbf bytes
1629:                            attReader.dbf.transferTo(dbfWriter);
1630:                        }
1631:                    }
1632:
1633:                    // close reader, flush headers, and copy temp files, if any
1634:                    try {
1635:                        featureReader.close();
1636:                    } finally {
1637:                        try {
1638:                            flush();
1639:                        } finally {
1640:                            shpWriter.close();
1641:                            dbfWriter.close();
1642:                            dbfChannel.close();
1643:                            if (indexedFidWriter != null)
1644:                                indexedFidWriter.close();
1645:                        }
1646:
1647:                        featureReader = null;
1648:                        shpWriter = null;
1649:                        dbfWriter = null;
1650:                        dbfChannel = null;
1651:                        indexedFidWriter = null;
1652:                        clean();
1653:
1654:                        /*
1655:                         * TODO This is added here for simplicity... index geometry
1656:                         * during shp record writes
1657:                         */
1658:                        try {
1659:                            String filename = shpURL.getFile().substring(0,
1660:                                    shpURL.getFile().length() - 4);
1661:                            File file = new File(filename + ".qix");
1662:
1663:                            if (file.exists()) {
1664:                                file.delete();
1665:                            }
1666:
1667:                            file = new File(filename + ".grx");
1668:
1669:                            if (file.exists()) {
1670:                                file.delete();
1671:                            }
1672:
1673:                            if (createIndex) {
1674:                                if (treeType == TREE_GRX) {
1675:                                    buildRTree();
1676:                                    filename = shpURL.getFile().substring(0,
1677:                                            shpURL.getFile().length() - 4);
1678:
1679:                                    File toDelete = new File(filename + ".qix");
1680:
1681:                                    if (toDelete.exists()) {
1682:                                        toDelete.delete();
1683:                                    }
1684:                                } else if (treeType == TREE_QIX) {
1685:                                    buildQuadTree(maxDepth);
1686:                                    filename = shpURL.getFile().substring(0,
1687:                                            shpURL.getFile().length() - 4);
1688:
1689:                                    File otherIndex = new File(filename
1690:                                            + ".grx");
1691:
1692:                                    if (otherIndex.exists()) {
1693:                                        otherIndex.delete();
1694:                                    }
1695:                                }
1696:                            }
1697:                        } catch (Throwable e) {
1698:                            createIndex = false;
1699:                            treeType = TREE_NONE;
1700:                            LOGGER
1701:                                    .log(Level.WARNING, "Error creating RTree",
1702:                                            e);
1703:                        }
1704:                    }
1705:                }
1706:
1707:                public org.geotools.feature.FeatureType getFeatureType() {
1708:                    return featureType;
1709:                }
1710:
1711:                public boolean hasNext() throws IOException {
1712:                    if (featureReader == null) {
1713:                        throw new IOException("Writer closed");
1714:                    }
1715:
1716:                    return featureReader.hasNext();
1717:                }
1718:
1719:                public org.geotools.feature.Feature next() throws IOException {
1720:                    // closed already, error!
1721:                    if (featureReader == null) {
1722:                        throw new IOException("Writer closed");
1723:                    }
1724:
1725:                    // we have to write the current feature back into the stream
1726:                    if (currentFeature != null) {
1727:                        write();
1728:                    }
1729:
1730:                    // is there another? If so, return it
1731:                    if (featureReader.hasNext()) {
1732:                        try {
1733:                            if (indexedFidWriter != null)
1734:                                indexedFidWriter.next();
1735:                            return currentFeature = featureReader.next();
1736:                        } catch (IllegalAttributeException iae) {
1737:                            throw new DataSourceException("Error in reading",
1738:                                    iae);
1739:                        }
1740:                    }
1741:
1742:                    long id;
1743:                    if (indexedFidWriter != null)
1744:                        id = indexedFidWriter.next();
1745:                    else
1746:                        id = this.records + 1;
1747:                    // reader has no more (now we are adding to the file),
1748:                    // so return an empty feature
1749:                    try {
1750:                        return currentFeature = DataUtilities.template(
1751:                                getFeatureType(), getCurrentTypeName() + "."
1752:                                        + id, emptyAtts);
1753:                    } catch (IllegalAttributeException iae) {
1754:                        throw new DataSourceException(
1755:                                "Error creating empty Feature", iae);
1756:                    }
1757:                }
1758:
1759:                public void remove() throws IOException {
1760:                    if (featureReader == null) {
1761:                        throw new IOException("Writer closed");
1762:                    }
1763:
1764:                    if (currentFeature == null) {
1765:                        throw new IOException("Current feature is null");
1766:                    }
1767:                    if (indexedFidWriter != null) {
1768:                        if (indexedFidWriter.isClosed()) {
1769:                            throw new IOException("Writer closed");
1770:                        }
1771:                        indexedFidWriter.remove();
1772:                    }
1773:
1774:                    // mark the current feature as null, this will result in it not
1775:                    // being rewritten to the stream
1776:                    currentFeature = null;
1777:                }
1778:
1779:                public void write() throws IOException {
1780:                    if (currentFeature == null) {
1781:                        throw new IOException("Current feature is null");
1782:                    }
1783:
1784:                    if (featureReader == null) {
1785:                        throw new IOException("Writer closed");
1786:                    }
1787:
1788:                    if (indexedFidWriter != null) {
1789:                        if (indexedFidWriter.isClosed()) {
1790:                            throw new IOException("FID Writer closed");
1791:                        }
1792:                        indexedFidWriter.write();
1793:                    }
1794:                    // writing of Geometry
1795:                    Geometry g = currentFeature.getDefaultGeometry();
1796:
1797:                    // if this is the first Geometry, find the shapeType and handler
1798:                    if (shapeType == null) {
1799:                        int dims = JTSUtilities.guessCoorinateDims(g
1800:                                .getCoordinates());
1801:
1802:                        try {
1803:                            shapeType = JTSUtilities.getShapeType(g, dims);
1804:
1805:                            // we must go back and annotate this after writing
1806:                            shpWriter.writeHeaders(new Envelope(), shapeType,
1807:                                    0, 0);
1808:                            handler = shapeType.getShapeHandler();
1809:                        } catch (ShapefileException se) {
1810:                            throw new RuntimeException("Unexpected Error", se);
1811:                        }
1812:                    }
1813:
1814:                    // convert geometry
1815:                    g = JTSUtilities.convertToCollection(g, shapeType);
1816:
1817:                    // bounds calculations
1818:                    Envelope b = g.getEnvelopeInternal();
1819:
1820:                    if (!b.isNull()) {
1821:                        bounds.expandToInclude(b);
1822:                    }
1823:
1824:                    // file length update
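                    // each record adds its geometry payload plus an 8-byte
                    // record header (record number and content length)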
1825:                    shapefileLength += (handler.getLength(g) + 8);
1826:
1827:                    // write it
1828:                    shpWriter.writeGeometry(g);
1829:
1830:                    // writing of attributes
1831:                    int idx = 0;
1832:
1833:                    for (int i = 0, ii = featureType.getAttributeCount(); i < ii; i++) {
1834:                        // skip geometries
1835:                        if (writeFlags[i] > 0) {
1836:                            transferCache[idx++] = currentFeature
1837:                                    .getAttribute(i);
1838:                        }
1839:                    }
1840:
1841:                    dbfWriter.write(transferCache);
1842:
1843:                    // one more down...
1844:                    records++;
1845:
1846:                    // clear the currentFeature
1847:                    currentFeature = null;
1848:                }
1849:            }
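            /*
             * Usage sketch for the Writer above, assuming it is obtained through
             * the standard DataStore API (for example getFeatureWriter or
             * getFeatureWriterAppend on the enclosing store). The cycle is
             * next() -> modify the returned Feature -> write(); close() flushes
             * the headers and rebuilds the spatial index.
             *
             *     FeatureWriter writer = ...; // obtained from the data store
             *     try {
             *         while (writer.hasNext()) {
             *             Feature f = writer.next();
             *             // change f here, or call writer.remove() to drop it
             *             writer.write();
             *         }
             *     } finally {
             *         writer.close();
             *     }
             */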
1850:        }