Source Code Cross Referenced for ShapefileDataStore.java in  » GIS » GeoTools-2.4.1 » org » geotools » data » shapefile » Java Source Code / Java DocumentationJava Source Code and Java Documentation

Java Source Code / Java Documentation
1. 6.0 JDK Core
2. 6.0 JDK Modules
3. 6.0 JDK Modules com.sun
4. 6.0 JDK Modules com.sun.java
5. 6.0 JDK Modules sun
6. 6.0 JDK Platform
7. Ajax
8. Apache Harmony Java SE
9. Aspect oriented
10. Authentication Authorization
11. Blogger System
12. Build
13. Byte Code
14. Cache
15. Chart
16. Chat
17. Code Analyzer
18. Collaboration
19. Content Management System
20. Database Client
21. Database DBMS
22. Database JDBC Connection Pool
23. Database ORM
24. Development
25. EJB Server geronimo
26. EJB Server GlassFish
27. EJB Server JBoss 4.2.1
28. EJB Server resin 3.1.5
29. ERP CRM Financial
30. ESB
31. Forum
32. GIS
33. Graphic Library
34. Groupware
35. HTML Parser
36. IDE
37. IDE Eclipse
38. IDE Netbeans
39. Installer
40. Internationalization Localization
41. Inversion of Control
42. Issue Tracking
43. J2EE
44. JBoss
45. JMS
46. JMX
47. Library
48. Mail Clients
49. Net
50. Parser
51. PDF
52. Portal
53. Profiler
54. Project Management
55. Report
56. RSS RDF
57. Rule Engine
58. Science
59. Scripting
60. Search Engine
61. Security
62. Servlet Container
63. Source Control
64. Swing Library
65. Template Engine
66. Test Coverage
67. Testing
68. UML
69. Web Crawler
70. Web Framework
71. Web Mail
72. Web Server
73. Web Services
74. Web Services apache cxf 2.0.1
75. Web Services AXIS2
76. Wiki Engine
77. Workflow Engines
78. XML
79. XML UI
Java
Java Tutorial
Java Open Source
Jar File Download
Java Articles
Java Products
Java by API
Photoshop Tutorials
Maya Tutorials
Flash Tutorials
3ds-Max Tutorials
Illustrator Tutorials
GIMP Tutorials
C# / C Sharp
C# / CSharp Tutorial
C# / CSharp Open Source
ASP.Net
ASP.NET Tutorial
JavaScript DHTML
JavaScript Tutorial
JavaScript Reference
HTML / CSS
HTML CSS Reference
C / ANSI-C
C Tutorial
C++
C++ Tutorial
Ruby
PHP
Python
Python Tutorial
Python Open Source
SQL Server / T-SQL
SQL Server / T-SQL Tutorial
Oracle PL / SQL
Oracle PL/SQL Tutorial
PostgreSQL
SQL / MySQL
MySQL Tutorial
VB.Net
VB.Net Tutorial
Flash / Flex / ActionScript
VBA / Excel / Access / Word
XML
XML Tutorial
Microsoft Office PowerPoint 2007 Tutorial
Microsoft Office Excel 2007 Tutorial
Microsoft Office Word 2007 Tutorial
Java Source Code / Java Documentation » GIS » GeoTools 2.4.1 » org.geotools.data.shapefile 
Source Cross Referenced  Class Diagram Java Document (Java Doc) 


0001:        /*
0002:         *    GeoTools - OpenSource mapping toolkit
0003:         *    http://geotools.org
0004:         *    (C) 2002-2006, Geotools Project Managment Committee (PMC)
0005:         *
0006:         *    This library is free software; you can redistribute it and/or
0007:         *    modify it under the terms of the GNU Lesser General Public
0008:         *    License as published by the Free Software Foundation; either
0009:         *    version 2.1 of the License, or (at your option) any later version.
0010:         *
0011:         *    This library is distributed in the hope that it will be useful,
0012:         *    but WITHOUT ANY WARRANTY; without even the implied warranty of
0013:         *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
0014:         *    Lesser General Public License for more details.
0015:         */
0016:        package org.geotools.data.shapefile;
0017:
0018:        import java.io.File;
0019:        import java.io.FileInputStream;
0020:        import java.io.FileNotFoundException;
0021:        import java.io.FileOutputStream;
0022:        import java.io.FileWriter;
0023:        import java.io.IOException;
0024:        import java.io.InputStream;
0025:        import java.io.OutputStream;
0026:        import java.io.RandomAccessFile;
0027:        import java.math.BigInteger;
0028:        import java.net.URI;
0029:        import java.net.URL;
0030:        import java.nio.ByteBuffer;
0031:        import java.nio.channels.Channels;
0032:        import java.nio.channels.FileChannel;
0033:        import java.nio.channels.ReadableByteChannel;
0034:        import java.nio.channels.WritableByteChannel;
0035:        import java.util.Arrays;
0036:        import java.util.Collections;
0037:        import java.util.HashMap;
0038:        import java.util.HashSet;
0039:        import java.util.Map;
0040:        import java.util.Set;
0041:        import java.nio.charset.Charset;
0042:        import org.geotools.data.AbstractAttributeIO;
0043:        import org.geotools.data.AbstractFeatureLocking;
0044:        import org.geotools.data.AbstractFeatureSource;
0045:        import org.geotools.data.AbstractFeatureStore;
0046:        import org.geotools.data.AbstractFileDataStore;
0047:        import org.geotools.data.AttributeReader;
0048:        import org.geotools.data.DataSourceException;
0049:        import org.geotools.data.DataStore;
0050:        import org.geotools.data.DataUtilities;
0051:        import org.geotools.data.DefaultFIDReader;
0052:        import org.geotools.data.EmptyFeatureReader;
0053:        import org.geotools.data.FeatureListener;
0054:        import org.geotools.data.FeatureReader;
0055:        import org.geotools.data.FeatureSource;
0056:        import org.geotools.data.FeatureWriter;
0057:        import org.geotools.data.Query;
0058:        import org.geotools.data.Transaction;
0059:        import org.geotools.data.shapefile.dbf.DbaseFileException;
0060:        import org.geotools.data.shapefile.dbf.DbaseFileHeader;
0061:        import org.geotools.data.shapefile.dbf.DbaseFileReader;
0062:        import org.geotools.data.shapefile.dbf.DbaseFileWriter;
0063:        import org.geotools.data.shapefile.prj.PrjFileReader;
0064:        import org.geotools.data.shapefile.shp.IndexFile;
0065:        import org.geotools.data.shapefile.shp.JTSUtilities;
0066:        import org.geotools.data.shapefile.shp.ShapeHandler;
0067:        import org.geotools.data.shapefile.shp.ShapeType;
0068:        import org.geotools.data.shapefile.shp.ShapefileException;
0069:        import org.geotools.data.shapefile.shp.ShapefileHeader;
0070:        import org.geotools.data.shapefile.shp.ShapefileReader;
0071:        import org.geotools.data.shapefile.shp.ShapefileWriter;
0072:        import org.geotools.data.shapefile.shp.xml.ShpXmlFileReader;
0073:        import org.geotools.factory.Hints;
0074:        import org.geotools.feature.AttributeType;
0075:        import org.geotools.feature.AttributeTypeFactory;
0076:        import org.geotools.feature.Feature;
0077:        import org.geotools.feature.FeatureType;
0078:        import org.geotools.feature.FeatureTypes;
0079:        import org.geotools.feature.GeometryAttributeType;
0080:        import org.geotools.feature.IllegalAttributeException;
0081:        import org.geotools.feature.SchemaException;
0082:        import org.geotools.feature.type.BasicFeatureTypes;
0083:        import org.geotools.filter.CompareFilter;
0084:        import org.opengis.filter.Filter;
0085:        import org.opengis.filter.PropertyIsLessThan;
0086:        import org.opengis.filter.PropertyIsLessThanOrEqualTo;
0087:        import org.geotools.filter.FilterType;
0088:        import org.geotools.filter.Filters;
0089:        import org.geotools.filter.LengthFunction;
0090:        import org.geotools.filter.LiteralExpression;
0091:        import org.geotools.geometry.jts.JTS;
0092:        import org.geotools.geometry.jts.ReferencedEnvelope;
0093:        import org.geotools.referencing.CRS;
0094:        import org.geotools.referencing.crs.AbstractCRS;
0095:        import org.geotools.referencing.crs.DefaultGeographicCRS;
0096:        import org.geotools.styling.StyleAttributeExtractor;
0097:        import org.opengis.referencing.FactoryException;
0098:        import org.opengis.referencing.crs.CoordinateReferenceSystem;
0099:
0100:        import com.vividsolutions.jts.geom.Envelope;
0101:        import com.vividsolutions.jts.geom.Geometry;
0102:        import com.vividsolutions.jts.geom.LineString;
0103:        import com.vividsolutions.jts.geom.MultiLineString;
0104:        import com.vividsolutions.jts.geom.MultiPoint;
0105:        import com.vividsolutions.jts.geom.MultiPolygon;
0106:        import com.vividsolutions.jts.geom.Point;
0107:        import com.vividsolutions.jts.geom.Polygon;
0108:
0109:        /**
0110:         * A DataStore implementation which allows reading and writing from Shapefiles.
0111:         *
0112:         * @author Ian Schneider
0113:         *
0114:         * @todo fix file creation bug
0115:         * @source $URL: http://svn.geotools.org/geotools/tags/2.4.1/modules/plugin/shapefile/src/main/java/org/geotools/data/shapefile/ShapefileDataStore.java $
0116:         */
public class ShapefileDataStore extends AbstractFileDataStore {
    /** Charset used to decode DBF strings when the caller supplies none (ISO-8859-1). */
    public static final Charset DEFAULT_STRING_CHARSET = Charset
            .forName("ISO-8859-1");

    /**
     * The query hints we do support
     */
    private static final Set HINTS = Collections
            .unmodifiableSet(new HashSet(Arrays
                    .asList(new Object[] { Hints.FEATURE_DETACHED })));

    // URLs of the files that make up the shapefile data set, all derived
    // from the single URL handed to the constructor.
    protected final URL shpURL; // geometry (.shp)
    protected final URL dbfURL; // attributes (.dbf)
    protected final URL shxURL; // shape index (.shx)
    protected final URL prjURL; // projection definition (.prj)
    protected final URL xmlURL; // metadata (.shp.xml)
    // Shared lock handed to readers/writers to coordinate file access.
    protected Lock readWriteLock = new Lock();
    protected URI namespace = null; // namespace provided by the constructor's map
    protected FeatureType schema; // read only
    // When true, local files are read through memory-mapped NIO buffers.
    protected boolean useMemoryMappedBuffer = true;
    protected Charset dbfCharset; // charset used to decode DBF strings
    /**
     * Creates a new instance of ShapefileDataStore using memory mapped
     * buffers and the default DBF string charset (ISO-8859-1).
     *
     * @param url The URL of the shp file to use for this DataSource.
     *
     * @throws java.net.MalformedURLException If computation of the related
     *         URLs (dbf, shx, prj, xml) fails.
     */
    public ShapefileDataStore(URL url)
            throws java.net.MalformedURLException {
        this (url, true);
    }
0151:
    /**
     * Creates a new instance of ShapefileDataStore using the default DBF
     * string charset (ISO-8859-1).
     *
     * @param url The URL of the shp file to use for this DataSource.
     * @param useMemoryMappedBuffer whether memory mapped buffers should be
     *        used when reading local files
     *
     * @throws java.net.MalformedURLException If computation of the related
     *         URLs (dbf, shx, prj, xml) fails.
     */
    public ShapefileDataStore(URL url, boolean useMemoryMappedBuffer)
            throws java.net.MalformedURLException {
        this (url, useMemoryMappedBuffer, DEFAULT_STRING_CHARSET);
    }
0156:
0157:            public ShapefileDataStore(URL url, boolean useMemoryMappedBuffer,
0158:                    Charset dbfCharset) throws java.net.MalformedURLException {
0159:                String filename = null;
0160:                shpURL = ShapefileDataStoreFactory.toShpURL(url);
0161:                dbfURL = ShapefileDataStoreFactory.toDbfURL(url);
0162:                shxURL = ShapefileDataStoreFactory.toShxURL(url);
0163:                prjURL = ShapefileDataStoreFactory.toPrjURL(url);
0164:                xmlURL = ShapefileDataStoreFactory.toXmlURL(url);
0165:                this .dbfCharset = dbfCharset;
0166:            }
0167:
    /**
     * this sets the datastore's namespace during construction (so the schema -
     * FeatureType - will have the correct value) You can call this with
     * namespace = null, but I suggest you give it an actual namespace.
     *
     * @param url The URL of the shp file to use for this DataSource.
     * @param namespace the namespace the schema (FeatureType) will be created
     *        in; may be null
     */
    public ShapefileDataStore(URL url, URI namespace)
            throws java.net.MalformedURLException {
        this (url);
        this .namespace = namespace;
    }
0181:
    /**
     * this sets the datastore's namespace during construction (so the schema -
     * FeatureType - will have the correct value) You can call this with
     * namespace = null, but I suggest you give it an actual namespace.
     *
     * @param url The URL of the shp file to use for this DataSource.
     * @param namespace the namespace the schema (FeatureType) will be created
     *        in; may be null
     * @param useMemoryMapped whether memory mapped buffers should be used
     *        when reading local files
     * @param dbfCharset the {@link Charset} used to decode strings from the
     *        DBF file
     */
    public ShapefileDataStore(URL url, URI namespace,
            boolean useMemoryMapped, Charset dbfCharset)
            throws java.net.MalformedURLException {
        this (url);
        this .namespace = namespace;
        this .useMemoryMappedBuffer = useMemoryMapped;
        this .dbfCharset = dbfCharset;
    }
0200:
    /**
     * this sets the datastore's namespace during construction (so the schema -
     * FeatureType - will have the correct value) You can call this with
     * namespace = null, but I suggest you give it an actual namespace.
     *
     * @param url The URL of the shp file to use for this DataSource.
     * @param namespace the namespace the schema (FeatureType) will be created
     *        in; may be null
     * @param useMemoryMapped whether memory mapped buffers should be used
     *        when reading local files
     */
    public ShapefileDataStore(URL url, URI namespace,
            boolean useMemoryMapped)
            throws java.net.MalformedURLException {
        this (url);
        this .namespace = namespace;
        this .useMemoryMappedBuffer = useMemoryMapped;
    }
0217:
    /**
     * Set this if you need DBF strings to be decoded in a {@link Charset} other than ISO-8859-1
     * @param stringCharset the charset used to decode strings from the DBF file
     * @since 2.3.3
     */
    public void setStringCharset(Charset stringCharset) {
        this .dbfCharset = stringCharset;
    }
0226:
    /**
     * Returns the {@link Charset} used to decode strings in the DBF file
     * @return the current DBF string charset
     */
    public Charset getStringCharset() {
        return dbfCharset;
    }
0234:
0235:            /**
0236:             * Latch onto xmlURL if it is there, we may be able to get out of
0237:             * calculating the bounding box!
0238:             * 
0239:             * <p>
0240:             * This method is called by the createTypeEntry anonymous inner class
0241:             * DefaultTypeEntry.
0242:             * </p>
0243:             *
0244:             * @param typeName DOCUMENT ME!
0245:             *
0246:             * @return Map with xmlURL parsed, or an EMPTY_MAP.
0247:             */
0248:            protected Map createMetadata(String typeName) {
0249:                if (xmlURL == null) {
0250:                    return Collections.EMPTY_MAP;
0251:                }
0252:
0253:                try {
0254:                    //System.out.println("found metadata = " + xmlURL);
0255:
0256:                    ShpXmlFileReader reader = new ShpXmlFileReader(xmlURL);
0257:
0258:                    Map map = new HashMap();
0259:                    map.put("shp.xml", reader.parse());
0260:                    //System.out.println("parsed ..." + xmlURL);
0261:
0262:                    return map;
0263:                } catch (Throwable t) {
0264:                    LOGGER.warning("Could not parse " + xmlURL + ":"
0265:                            + t.getLocalizedMessage());
0266:
0267:                    return Collections.EMPTY_MAP;
0268:                }
0269:            }
0270:
0271:            /**
0272:             * Determine if the location of this shapefile is local or remote.
0273:             *
0274:             * @return true if local, false if remote
0275:             */
0276:            public boolean isLocal() {
0277:                return shpURL.getProtocol().equals("file");
0278:            }
0279:
0280:            /**
0281:             * Delete existing files.
0282:             */
0283:            private void clear() {
0284:                if (isLocal()) {
0285:                    delete(shpURL);
0286:                    delete(dbfURL);
0287:                    delete(shxURL);
0288:                    delete(prjURL);
0289:                    delete(xmlURL);
0290:                }
0291:            }
0292:
0293:            /**
0294:             * Delete a URL (file)
0295:             *
0296:             * @param u DOCUMENT ME!
0297:             */
0298:            private void delete(URL u) {
0299:                File f = DataUtilities.urlToFile(u);
0300:                f.delete();
0301:            }
0302:
0303:            /**
0304:             * Obtain a ReadableByteChannel from the given URL. If the url protocol is
0305:             * file, a FileChannel will be returned. Otherwise a generic channel will
0306:             * be obtained from the urls input stream.
0307:             *
0308:             * @param url DOCUMENT ME!
0309:             *
0310:             * @return DOCUMENT ME!
0311:             *
0312:             * @throws IOException DOCUMENT ME!
0313:             */
0314:            protected ReadableByteChannel getReadChannel(URL url)
0315:                    throws IOException {
0316:                ReadableByteChannel channel = null;
0317:
0318:                if (url.getProtocol().equals("file")) { // && useMemoryMappedBuffer) {
0319:
0320:                    File file = null;
0321:
0322:                    file = DataUtilities.urlToFile(url);
0323:
0324:                    if (!file.exists()) {
0325:                        throw new FileNotFoundException(file.toString());
0326:                    }
0327:
0328:                    if (!file.canRead()) {
0329:                        throw new IOException("File is unreadable : " + file);
0330:                    }
0331:
0332:                    FileInputStream in = new FileInputStream(file);
0333:                    channel = in.getChannel();
0334:                } else {
0335:                    InputStream in = url.openConnection().getInputStream();
0336:                    channel = Channels.newChannel(in);
0337:                }
0338:
0339:                return channel;
0340:            }
0341:
0342:            /**
0343:             * Obtain a WritableByteChannel from the given URL. If the url protocol is
0344:             * file, a FileChannel will be returned. Currently, this method will
0345:             * return a generic channel for remote urls, however both shape and dbf
0346:             * writing can only occur with a local FileChannel channel.
0347:             *
0348:             * @param url DOCUMENT ME!
0349:             *
0350:             * @return DOCUMENT ME!
0351:             *
0352:             * @throws IOException DOCUMENT ME!
0353:             */
0354:            protected WritableByteChannel getWriteChannel(URL url)
0355:                    throws IOException {
0356:                WritableByteChannel channel;
0357:
0358:                if (url.getProtocol().equals("file")) { // && useMemoryMappedBuffer) {
0359:
0360:                    File file = DataUtilities.urlToFile(url);
0361:
0362:                    RandomAccessFile raf = new RandomAccessFile(file, "rw");
0363:                    channel = raf.getChannel();
0364:
0365:                    ((FileChannel) channel).lock();
0366:
0367:                } else {
0368:                    OutputStream out = url.openConnection().getOutputStream();
0369:                    channel = Channels.newChannel(out);
0370:                }
0371:
0372:                return channel;
0373:            }
0374:
    /**
     * Create a FeatureReader for the provided type name.
     *
     * @param typeName The name of the FeatureType to create a reader for.
     *
     * @return A new FeatureReader.
     *
     * @throws IOException If the type name does not match the single type
     *         served by this store, or an error occurs during creation.
     */
    protected FeatureReader getFeatureReader(String typeName)
            throws IOException {
        typeCheck(typeName);

        return getFeatureReader();
    }
0390:
0391:            protected FeatureReader getFeatureReader() throws IOException {
0392:                try {
0393:                    return createFeatureReader(getSchema().getTypeName(),
0394:                            getAttributesReader(true), schema);
0395:                } catch (SchemaException se) {
0396:                    throw new DataSourceException("Error creating schema", se);
0397:                }
0398:            }
0399:
    /**
     * Just like the basic version, but adds a small optimization: if no
     * attributes are going to be read, don't uselessly open and read the dbf
     * file. Makes sure to consider also attributes in the query.
     *
     * <p>NOTE(review): this dereferences the {@code schema} field directly,
     * so it assumes the schema has already been initialized — TODO confirm
     * callers always go through {@code getSchema()} first.</p>
     *
     * @see org.geotools.data.AbstractDataStore#getFeatureReader(java.lang.String,
     *      org.geotools.data.Query)
     */
    protected FeatureReader getFeatureReader(String typeName,
            Query query) throws IOException {
        String[] propertyNames = query.getPropertyNames();
        String defaultGeomName = schema.getDefaultGeometry().getName();

        // gather attributes needed by the query tool, they will be used by the query filter
        StyleAttributeExtractor extractor = new StyleAttributeExtractor();
        Filter filter = query.getFilter();
        filter.accept(extractor, null);
        String[] filterAttnames = extractor.getAttributeNames();

        // check if the geometry is the one and only attribute needed
        // to return attribute _and_ to run the query filter
        if ((propertyNames != null)
                && (propertyNames.length == 1)
                && propertyNames[0].equals(defaultGeomName)
                && (filterAttnames.length == 0 || (filterAttnames.length == 1 && filterAttnames[0]
                        .equals(defaultGeomName)))) {
            try {
                // Geometry-only request: build a reduced schema and skip
                // opening the dbf file entirely (readDbf = false).
                FeatureType newSchema = DataUtilities.createSubType(
                        schema, propertyNames);

                return createFeatureReader(typeName,
                        getAttributesReader(false), newSchema);
            } catch (SchemaException se) {
                throw new DataSourceException("Error creating schema",
                        se);
            }
        }

        // Attributes beyond the geometry are needed: fall back to the
        // generic implementation, which reads the dbf as well.
        return super .getFeatureReader(typeName, query);
    }
0440:
    /**
     * Wraps the low-level attribute reader into a FeatureReader whose
     * feature ids are generated by a DefaultFIDReader seeded with the
     * given type name.
     *
     * @param typeName type name used to seed the FID reader
     * @param r the attribute reader providing raw attribute values
     * @param readerSchema the FeatureType of the features to produce
     *
     * @throws SchemaException if the reader cannot be built for the schema
     */
    protected FeatureReader createFeatureReader(String typeName,
            Reader r, FeatureType readerSchema) throws SchemaException {
        return new org.geotools.data.FIDFeatureReader(r,
                new DefaultFIDReader(typeName), readerSchema);
    }
0446:
0447:            /**
0448:             * Returns the attribute reader, allowing for a pure shapefile reader, or a
0449:             * combined dbf/shp reader.
0450:             *
0451:             * @param readDbf - if true, the dbf fill will be opened and read
0452:             *
0453:             *
0454:             * @throws IOException
0455:             */
0456:            protected Reader getAttributesReader(boolean readDbf)
0457:                    throws IOException {
0458:                AttributeType[] atts = (schema == null) ? readAttributes()
0459:                        : schema.getAttributeTypes();
0460:
0461:                if (!readDbf) {
0462:                    LOGGER
0463:                            .fine("The DBF file won't be opened since no attributes will be read from it");
0464:                    atts = new AttributeType[] { schema.getDefaultGeometry() };
0465:
0466:                    return new Reader(atts, openShapeReader(), null);
0467:                }
0468:
0469:                return new Reader(atts, openShapeReader(), openDbfReader());
0470:            }
0471:
0472:            /**
0473:             * Convenience method for opening a ShapefileReader.
0474:             *
0475:             * @return A new ShapefileReader.
0476:             *
0477:             * @throws IOException If an error occurs during creation.
0478:             * @throws DataSourceException DOCUMENT ME!
0479:             */
0480:            protected ShapefileReader openShapeReader() throws IOException {
0481:                ReadableByteChannel rbc = getReadChannel(shpURL);
0482:
0483:                if (rbc == null) {
0484:                    return null;
0485:                }
0486:
0487:                try {
0488:                    return new ShapefileReader(rbc, true,
0489:                            useMemoryMappedBuffer, readWriteLock);
0490:                } catch (ShapefileException se) {
0491:                    throw new DataSourceException(
0492:                            "Error creating ShapefileReader", se);
0493:                }
0494:            }
0495:
0496:            /**
0497:             * Convenience method for opening a DbaseFileReader.
0498:             *
0499:             * @return A new DbaseFileReader
0500:             *
0501:             * @throws IOException If an error occurs during creation.
0502:             */
0503:            protected DbaseFileReader openDbfReader() throws IOException {
0504:                ReadableByteChannel rbc = getReadChannel(dbfURL);
0505:
0506:                if (rbc == null) {
0507:                    return null;
0508:                }
0509:
0510:                return new DbaseFileReader(rbc, useMemoryMappedBuffer,
0511:                        dbfCharset);
0512:            }
0513:
    /**
     * Convenience method for opening an index file.
     *
     * @param shxURL URL of the shx index file to open
     *
     * @return An IndexFile, or null when no read channel could be obtained
     *
     * @throws IOException if the index file cannot be opened
     */
    protected IndexFile openIndexFile(URL shxURL) throws IOException {
        ReadableByteChannel rbc = getReadChannel(shxURL);

        if (rbc == null) {
            return null;
        }

        // Memory mapping is deliberately disabled for the index file:
        // return new IndexFile(rbc, this.useMemoryMappedBuffer);
        return new IndexFile(rbc, false);
    }
0532:
0533:            /**
0534:             * Convenience method for opening a DbaseFileReader.
0535:             *
0536:             * @return A new DbaseFileReader
0537:             *
0538:             * @throws IOException If an error occurs during creation.
0539:             * @throws FactoryException DOCUMENT ME!
0540:             */
0541:            protected PrjFileReader openPrjReader() throws IOException,
0542:                    FactoryException {
0543:                ReadableByteChannel rbc = null;
0544:
0545:                try {
0546:                    rbc = getReadChannel(prjURL);
0547:                } catch (IOException e) {
0548:                    LOGGER.fine("projection (.prj) for shapefile: "
0549:                            + shpURL.toString() + " is not available");
0550:                }
0551:
0552:                if (rbc == null) {
0553:                    return null;
0554:                }
0555:
0556:                PrjFileReader prj = null;
0557:                try {
0558:                    prj = new PrjFileReader(rbc);
0559:                } catch (Exception e) {
0560:                    rbc.close();
0561:                }
0562:                return prj;
0563:            }
0564:
0565:            /**
0566:             * Get an array of type names this DataStore holds.<BR/>ShapefileDataStore
0567:             * will always return a single name.
0568:             *
0569:             * @return An array of length one containing the single type held.
0570:             */
0571:            public String[] getTypeNames() {
0572:                return new String[] { getCurrentTypeName(), };
0573:            }
0574:
0575:            /**
0576:             * Create the type name of the single FeatureType this DataStore
0577:             * represents.<BR/> For example, if the urls path is
0578:             * file:///home/billy/mytheme.shp, the type name will be mytheme.
0579:             *
0580:             * @return A name based upon the last path component of the url minus the
0581:             *         extension.
0582:             */
0583:            protected String createFeatureTypeName() {
0584:                String path = shpURL.getPath();
0585:                int slash = Math.max(0, path.lastIndexOf('/') + 1);
0586:                int dot = path.indexOf('.', slash);
0587:
0588:                if (dot < 0) {
0589:                    dot = path.length();
0590:                }
0591:
0592:                return path.substring(slash, dot);
0593:            }
0594:
0595:            protected String getCurrentTypeName() {
0596:                return (schema == null) ? createFeatureTypeName() : schema
0597:                        .getTypeName();
0598:            }
0599:
0600:            /**
0601:             * A convenience method to check if a type name is correct.
0602:             *
0603:             * @param requested The type name requested.
0604:             *
0605:             * @throws IOException If the type name is not available
0606:             */
0607:            protected void typeCheck(String requested) throws IOException {
0608:                if (!getCurrentTypeName().equals(requested)) {
0609:                    throw new IOException("No such type : " + requested);
0610:                }
0611:            }
0612:
    /**
     * Create a FeatureWriter for the given type name.
     *
     * @param typeName The typeName of the FeatureType to write
     * @param transaction the transaction the writer operates under (not
     *        consulted here; handled by the caller)
     *
     * @return A new FeatureWriter.
     *
     * @throws IOException If the typeName is not available or some other error
     *         occurs.
     */
    protected FeatureWriter createFeatureWriter(String typeName,
            Transaction transaction) throws IOException {
        typeCheck(typeName);

        return new Writer(typeName);
    }
0630:
    /**
     * Obtain the FeatureType of the given name. ShapefileDataStore contains
     * only one FeatureType.
     *
     * @param typeName The name of the FeatureType.
     *
     * @return The FeatureType that this DataStore contains.
     *
     * @throws IOException If a type by the requested name is not present.
     */
    public FeatureType getSchema(String typeName) throws IOException {
        typeCheck(typeName);

        return getSchema();
    }
0646:
0647:            public FeatureType getSchema() throws IOException {
0648:                if (schema == null) {
0649:                    try {
0650:                        AttributeType[] types = readAttributes();
0651:                        FeatureType parent = null;
0652:                        Class geomType = types[0].getType();
0653:
0654:                        if ((geomType == Point.class)
0655:                                || (geomType == MultiPoint.class)) {
0656:                            parent = BasicFeatureTypes.POINT;
0657:                        } else if ((geomType == Polygon.class)
0658:                                || (geomType == MultiPolygon.class)) {
0659:                            parent = BasicFeatureTypes.POLYGON;
0660:                        } else if ((geomType == LineString.class)
0661:                                || (geomType == MultiLineString.class)) {
0662:                            parent = BasicFeatureTypes.LINE;
0663:                        }
0664:
0665:                        if (parent != null) {
0666:                            schema = FeatureTypes.newFeatureType(
0667:                                    readAttributes(), createFeatureTypeName(),
0668:                                    namespace, false,
0669:                                    new FeatureType[] { parent });
0670:                        } else {
0671:                            if (namespace != null) {
0672:                                schema = FeatureTypes.newFeatureType(
0673:                                        readAttributes(),
0674:                                        createFeatureTypeName(), namespace,
0675:                                        false);
0676:                            } else {
0677:                                schema = FeatureTypes.newFeatureType(
0678:                                        readAttributes(),
0679:                                        createFeatureTypeName(),
0680:                                        FeatureTypes.DEFAULT_NAMESPACE, false);
0681:                            }
0682:                        }
0683:                    } catch (SchemaException se) {
0684:                        throw new DataSourceException(
0685:                                "Error creating FeatureType", se);
0686:                    }
0687:                }
0688:
0689:                return schema;
0690:            }
0691:
0692:            /**
0693:             * Create the AttributeTypes contained within this DataStore.
0694:             *
0695:             * @return An array of new AttributeTypes
0696:             *
0697:             * @throws IOException If AttributeType reading fails
0698:             */
0699:            protected AttributeType[] readAttributes() throws IOException {
0700:                ShapefileReader shp = openShapeReader();
0701:                DbaseFileReader dbf = openDbfReader();
0702:                AbstractCRS cs = null;
0703:
0704:                PrjFileReader prj = null;
0705:                try {
0706:                    prj = openPrjReader();
0707:
0708:                    if (prj != null) {
0709:                        cs = (AbstractCRS) prj.getCoodinateSystem();
0710:                    }
0711:                } catch (FactoryException fe) {
0712:                    cs = null;
0713:                } finally {
0714:                    if (prj != null)
0715:                        prj.close();
0716:                }
0717:
0718:                try {
0719:                    GeometryAttributeType geometryAttribute = (GeometryAttributeType) AttributeTypeFactory
0720:                            .newAttributeType("the_geom", JTSUtilities
0721:                                    .findBestGeometryClass(shp.getHeader()
0722:                                            .getShapeType()), true, 0, null, cs);
0723:
0724:                    AttributeType[] atts;
0725:
0726:                    // take care of the case where no dbf and query wants all => geometry only
0727:                    if (dbf != null) {
0728:                        DbaseFileHeader header = dbf.getHeader();
0729:                        atts = new AttributeType[header.getNumFields() + 1];
0730:                        atts[0] = geometryAttribute;
0731:
0732:                        for (int i = 0, ii = header.getNumFields(); i < ii; i++) {
0733:                            Class clazz = header.getFieldClass(i);
0734:                            atts[i + 1] = AttributeTypeFactory
0735:                                    .newAttributeType(header.getFieldName(i),
0736:                                            clazz, true, header
0737:                                                    .getFieldLength(i));
0738:                        }
0739:                    } else {
0740:                        atts = new AttributeType[] { geometryAttribute };
0741:                    }
0742:
0743:                    return atts;
0744:                } finally {
0745:                    try {
0746:                        shp.close();
0747:                    } catch (IOException ioe) {
0748:                        // do nothing
0749:                    }
0750:
0751:                    try {
0752:                        dbf.close();
0753:                    } catch (IOException ioe) {
0754:                        // do nothing
0755:                    }
0756:                }
0757:            }
0758:
0759:            /**
0760:             * This method is used to force the creation of a .prj file.
0761:             * <p>
0762:             * The internally cached FeatureType will be removed, so the next call to
0763:             * getSchema() will read in the created file. This method is not thread safe
0764:             * and will have dire consequences for any other thread making use of the
0765:             * shapefile.
0766:             * <p>
0767:             * @param crs
0768:             */
0769:            public void forceSchemaCRS(CoordinateReferenceSystem crs)
0770:                    throws IOException {
0771:                if (crs == null)
0772:                    throw new NullPointerException("CRS required for .prj file");
0773:
0774:                long temp = System.currentTimeMillis();
0775:
0776:                String s = crs.toWKT();
0777:                s = s.replaceAll("\n", "").replaceAll("  ", "");
0778:                FileWriter out = new FileWriter(getStorageFile(prjURL, temp));
0779:
0780:                try {
0781:                    out.write(s);
0782:                } finally {
0783:                    out.close();
0784:                }
0785:                copyAndDelete(prjURL, temp);
0786:                schema = null;
0787:            }
0788:
0789:            /**
0790:             * Set the FeatureType of this DataStore. This method will delete any
0791:             * existing local resources or throw an IOException if the DataStore is
0792:             * remote.
0793:             *
0794:             * @param featureType The desired FeatureType.
0795:             *
0796:             * @throws IOException If the DataStore is remote.
0797:             */
0798:            public void createSchema(FeatureType featureType)
0799:                    throws IOException {
0800:                if (!isLocal()) {
0801:                    throw new IOException(
0802:                            "Cannot create FeatureType on remote shapefile");
0803:                }
0804:
0805:                clear();
0806:                schema = featureType;
0807:
0808:                CoordinateReferenceSystem cs = featureType.getDefaultGeometry()
0809:                        .getCoordinateSystem();
0810:
0811:                long temp = System.currentTimeMillis();
0812:
0813:                if (isLocal()) {
0814:                    Class geomType = featureType.getDefaultGeometry().getType();
0815:                    ShapeType shapeType;
0816:
0817:                    if (Point.class.isAssignableFrom(geomType)) {
0818:                        shapeType = ShapeType.POINT;
0819:                    } else if (MultiPoint.class.isAssignableFrom(geomType)) {
0820:                        shapeType = ShapeType.MULTIPOINT;
0821:                    } else if (LineString.class.isAssignableFrom(geomType)
0822:                            || MultiLineString.class.isAssignableFrom(geomType)) {
0823:                        shapeType = ShapeType.ARC;
0824:                    } else if (Polygon.class.isAssignableFrom(geomType)
0825:                            || MultiPolygon.class.isAssignableFrom(geomType)) {
0826:                        shapeType = ShapeType.POLYGON;
0827:                    } else {
0828:                        // can't determine what type because type is Geometry so just return.
0829:                        return;
0830:                    }
0831:
0832:                    FileChannel shpChannel = (FileChannel) getWriteChannel(getStorageURL(
0833:                            shpURL, temp));
0834:                    FileChannel shxChannel = (FileChannel) getWriteChannel(getStorageURL(
0835:                            shxURL, temp));
0836:
0837:                    ShapefileWriter writer = null;
0838:
0839:                    try {
0840:                        writer = new ShapefileWriter(shpChannel, shxChannel,
0841:                                readWriteLock);
0842:                        ReferencedEnvelope env = new ReferencedEnvelope(
0843:                                new Envelope(-179, 179, -89, 89),
0844:                                DefaultGeographicCRS.WGS84);
0845:                        ReferencedEnvelope transformedBounds;
0846:
0847:                        if (cs != null) {
0848:                            try {
0849:                                transformedBounds = env.transform(cs, true);
0850:                            } catch (Exception e) {
0851:                                cs = null;
0852:                                transformedBounds = env;
0853:                            }
0854:                        } else {
0855:                            transformedBounds = env;
0856:                        }
0857:
0858:                        writer.writeHeaders(transformedBounds, shapeType, 0,
0859:                                100);
0860:                    } finally {
0861:                        if (writer != null)
0862:                            writer.close();
0863:                    }
0864:
0865:                    DbaseFileHeader dbfheader = createDbaseHeader(featureType);
0866:
0867:                    dbfheader.setNumRecords(0);
0868:
0869:                    WritableByteChannel writeChannel = getWriteChannel(getStorageURL(
0870:                            dbfURL, temp));
0871:
0872:                    try {
0873:                        dbfheader.writeHeader(writeChannel);
0874:                    } finally {
0875:                        writeChannel.close();
0876:                    }
0877:                }
0878:
0879:                if (cs != null) {
0880:                    String s = cs.toWKT();
0881:                    //.prj files should have no carriage returns in them, this messes up
0882:                    //ESRI's ArcXXX software, so we'll be compatible
0883:                    s = s.replaceAll("\n", "").replaceAll("  ", "");
0884:
0885:                    FileWriter out = new FileWriter(
0886:                            getStorageFile(prjURL, temp));
0887:                    try {
0888:                        out.write(s);
0889:                    } finally {
0890:                        out.close();
0891:                    }
0892:                }
0893:
0894:                copyAndDelete(shpURL, temp);
0895:                copyAndDelete(shxURL, temp);
0896:                copyAndDelete(dbfURL, temp);
0897:                if (!prjURL.equals("")) {
0898:                    try {
0899:                        copyAndDelete(prjURL, temp);
0900:                    } catch (FileNotFoundException e) {
0901:                        LOGGER.warning(".prj could not be created.");
0902:                    }
0903:                }
0904:            }
0905:
0906:            /**
0907:             * Gets the bounding box of the file represented by this data store as a
0908:             * whole (that is, off all of the features in the shapefile)
0909:             * 
0910:             * @return The bounding box of the datasource or null if unknown and too
0911:             *         expensive for the method to calculate.
0912:             * 
0913:             * @throws DataSourceException
0914:             *             DOCUMENT ME!
0915:             */
0916:            protected Envelope getBounds() throws DataSourceException {
0917:                // This is way quick!!!
0918:                ReadableByteChannel in = null;
0919:
0920:                try {
0921:                    ByteBuffer buffer = ByteBuffer.allocate(100);
0922:                    in = getReadChannel(shpURL);
0923:                    in.read(buffer);
0924:                    buffer.flip();
0925:
0926:                    ShapefileHeader header = new ShapefileHeader();
0927:                    header.read(buffer, true);
0928:
0929:                    Envelope env = new Envelope(header.minX(), header.maxX(),
0930:                            header.minY(), header.maxY());
0931:
0932:                    if (schema != null) {
0933:                        return new ReferencedEnvelope(env, schema
0934:                                .getDefaultGeometry().getCoordinateSystem());
0935:                    }
0936:
0937:                    return new ReferencedEnvelope(env, null);
0938:                } catch (IOException ioe) {
0939:                    // What now? This seems arbitrarily appropriate !
0940:                    throw new DataSourceException("Problem getting Bbox", ioe);
0941:                } finally {
0942:                    try {
0943:                        if (in != null) {
0944:                            in.close();
0945:                        }
0946:                    } catch (IOException ioe) {
0947:                        // do nothing
0948:                    }
0949:                }
0950:            }
0951:
0952:            protected Envelope getBounds(Query query) throws IOException {
0953:                if (query.getFilter().equals(Filter.INCLUDE)) {
0954:                    return getBounds();
0955:                }
0956:
0957:                return null; // too expensive
0958:
0959:                // TODO should we just return the layer? matches the javadocs
0960:            }
0961:
0962:            /**
0963:             * @see org.geotools.data.DataStore#getFeatureSource(java.lang.String)
0964:             */
0965:            public FeatureSource getFeatureSource(final String typeName)
0966:                    throws IOException {
0967:                final FeatureType featureType = getSchema(typeName);
0968:
0969:                if (isWriteable) {
0970:                    if (getLockingManager() != null) {
0971:                        return new AbstractFeatureLocking(HINTS) {
0972:                            public DataStore getDataStore() {
0973:                                return ShapefileDataStore.this ;
0974:                            }
0975:
0976:                            public void addFeatureListener(
0977:                                    FeatureListener listener) {
0978:                                listenerManager.addFeatureListener(this ,
0979:                                        listener);
0980:                            }
0981:
0982:                            public void removeFeatureListener(
0983:                                    FeatureListener listener) {
0984:                                listenerManager.removeFeatureListener(this ,
0985:                                        listener);
0986:                            }
0987:
0988:                            public FeatureType getSchema() {
0989:                                return featureType;
0990:                            }
0991:
0992:                            public Envelope getBounds(Query query)
0993:                                    throws IOException {
0994:                                return ShapefileDataStore.this .getBounds(query);
0995:                            }
0996:                        };
0997:                    }
0998:
0999:                    return new AbstractFeatureStore(HINTS) {
1000:                        public DataStore getDataStore() {
1001:                            return ShapefileDataStore.this ;
1002:                        }
1003:
1004:                        public void addFeatureListener(FeatureListener listener) {
1005:                            listenerManager.addFeatureListener(this , listener);
1006:                        }
1007:
1008:                        public void removeFeatureListener(
1009:                                FeatureListener listener) {
1010:                            listenerManager.removeFeatureListener(this ,
1011:                                    listener);
1012:                        }
1013:
1014:                        public FeatureType getSchema() {
1015:                            return featureType;
1016:                        }
1017:
1018:                        public Envelope getBounds(Query query)
1019:                                throws IOException {
1020:                            return ShapefileDataStore.this .getBounds(query);
1021:                        }
1022:                    };
1023:                }
1024:
1025:                return new AbstractFeatureSource(HINTS) {
1026:                    public DataStore getDataStore() {
1027:                        return ShapefileDataStore.this ;
1028:                    }
1029:
1030:                    public void addFeatureListener(FeatureListener listener) {
1031:                        listenerManager.addFeatureListener(this , listener);
1032:                    }
1033:
1034:                    public void removeFeatureListener(FeatureListener listener) {
1035:                        listenerManager.removeFeatureListener(this , listener);
1036:                    }
1037:
1038:                    public FeatureType getSchema() {
1039:                        return featureType;
1040:                    }
1041:
1042:                    public Envelope getBounds(Query query) throws IOException {
1043:                        return ShapefileDataStore.this .getBounds(query);
1044:                    }
1045:                };
1046:            }
1047:
1048:            /**
1049:             * @see org.geotools.data.AbstractDataStore#getCount(org.geotools.data.Query)
1050:             */
1051:            public int getCount(Query query) throws IOException {
1052:                if (query.getFilter() == Filter.INCLUDE) {
1053:                    try {
1054:                        IndexFile file = openIndexFile(shxURL);
1055:                        try {
1056:                            return file.getRecordCount();
1057:                        } finally {
1058:                            file.close();
1059:                        }
1060:                    } catch (FileNotFoundException fnfe) {
1061:
1062:                        // no Index file so use the number of shapefile records
1063:                        ShapefileReader reader = new ShapefileReader(
1064:                                getReadChannel(shpURL), readWriteLock);
1065:                        int count = -1;
1066:
1067:                        try {
1068:                            count = reader.getCount(count);
1069:                        } catch (IOException e) {
1070:                            throw e;
1071:                        } finally {
1072:                            try {
1073:                                if (reader != null) {
1074:                                    reader.close();
1075:                                }
1076:                            } catch (IOException ioe) {
1077:                                // do nothing
1078:                            }
1079:                        }
1080:
1081:                        return count;
1082:                    }
1083:
1084:                }
1085:
1086:                return super .getCount(query);
1087:            }
1088:
1089:            /**
1090:             * Attempt to create a DbaseFileHeader for the FeatureType. Note, we cannot
1091:             * set the number of records until the write has completed.
1092:             * 
1093:             * @param featureType
1094:             *            DOCUMENT ME!
1095:             * 
1096:             * @return DOCUMENT ME!
1097:             * 
1098:             * @throws IOException
1099:             *             DOCUMENT ME!
1100:             * @throws DbaseFileException
1101:             *             DOCUMENT ME!
1102:             */
1103:            protected static DbaseFileHeader createDbaseHeader(
1104:                    FeatureType featureType) throws IOException,
1105:                    DbaseFileException {
1106:                DbaseFileHeader header = new DbaseFileHeader();
1107:
1108:                for (int i = 0, ii = featureType.getAttributeCount(); i < ii; i++) {
1109:                    AttributeType type = featureType.getAttributeType(i);
1110:
1111:                    Class colType = type.getType();
1112:                    String colName = type.getName();
1113:
1114:                    int fieldLen = -1;
1115:                    Filter f = type.getRestriction();
1116:
1117:                    if (f != null
1118:                            && f != Filter.EXCLUDE
1119:                            && f != Filter.INCLUDE
1120:                            && ((f instanceof  PropertyIsLessThan) || (f instanceof  PropertyIsLessThanOrEqualTo))) {
1121:                        try {
1122:                            CompareFilter cf = (CompareFilter) f;
1123:
1124:                            if (cf.getLeftValue() instanceof  LengthFunction) {
1125:                                fieldLen = Integer
1126:                                        .parseInt(((LiteralExpression) cf
1127:                                                .getRightValue()).getLiteral()
1128:                                                .toString());
1129:                            } else {
1130:                                if (cf.getRightValue() instanceof  LengthFunction) {
1131:                                    fieldLen = Integer
1132:                                            .parseInt(((LiteralExpression) cf
1133:                                                    .getLeftValue())
1134:                                                    .getLiteral().toString());
1135:                                }
1136:                            }
1137:                        } catch (NumberFormatException e) {
1138:                            fieldLen = 256;
1139:                        }
1140:                    } else {
1141:                        fieldLen = 256;
1142:                    }
1143:
1144:                    if (fieldLen <= 0) {
1145:                        fieldLen = 255;
1146:                    }
1147:
1148:                    // @todo respect field length
1149:                    if ((colType == Integer.class) || (colType == Short.class)
1150:                            || (colType == Byte.class)) {
1151:                        header
1152:                                .addColumn(colName, 'N', Math.min(fieldLen, 9),
1153:                                        0);
1154:                    } else if (colType == Long.class) {
1155:                        header.addColumn(colName, 'N', Math.min(fieldLen, 19),
1156:                                0);
1157:                    } else if (colType == BigInteger.class) {
1158:                        header.addColumn(colName, 'N', Math.min(fieldLen, 33),
1159:                                0);
1160:                    } else if (Number.class.isAssignableFrom(colType)) {
1161:                        int l = Math.min(fieldLen, 33);
1162:                        int d = Math.max(l - 2, 0);
1163:                        header.addColumn(colName, 'N', l, d);
1164:                    } else if (java.util.Date.class.isAssignableFrom(colType)) {
1165:                        header.addColumn(colName, 'D', fieldLen, 0);
1166:                    } else if (colType == Boolean.class) {
1167:                        header.addColumn(colName, 'L', 1, 0);
1168:                    } else if (CharSequence.class.isAssignableFrom(colType)) {
1169:                        // Possible fix for GEOT-42 : ArcExplorer doesn't like 0 length
1170:                        // ensure that maxLength is at least 1
1171:                        header.addColumn(colName, 'C', Math.min(254, fieldLen),
1172:                                0);
1173:                    } else if (Geometry.class.isAssignableFrom(colType)) {
1174:                        continue;
1175:                    } else {
1176:                        throw new IOException("Unable to write : "
1177:                                + colType.getName());
1178:                    }
1179:                }
1180:
1181:                return header;
1182:            }
1183:
1184:            /**
1185:             * Get a temporary URL for storage based on the one passed in
1186:             *
1187:             * @param url DOCUMENT ME!
1188:             * @param temp DOCUMENT ME!
1189:             *
1190:             * @return DOCUMENT ME!
1191:             */
1192:            protected URL getStorageURL(URL url, long temp)
1193:                    throws java.net.MalformedURLException {
1194:                return (temp == 0) ? url : getStorageFile(url, temp).toURL();
1195:            }
1196:
1197:            /**
1198:             * Get a temproray File based on the URL passed in
1199:             *
1200:             * @param url DOCUMENT ME!
1201:             * @param temp DOCUMENT ME!
1202:             *
1203:             * @return DOCUMENT ME!
1204:             */
1205:            protected File getStorageFile(URL url, long temp) {
1206:                String f = url.getFile();
1207:                f = temp + f.substring(f.lastIndexOf("/") + 1);
1208:
1209:                File tf = new File(System.getProperty("java.io.tmpdir"), f);
1210:
1211:                return tf;
1212:            }
1213:
1214:            /**
1215:             * Copy the file at the given URL to the original
1216:             *
1217:             * @param src DOCUMENT ME!
1218:             * @param temp DOCUMENT ME!
1219:             *
1220:             * @throws IOException DOCUMENT ME!
1221:             */
1222:            protected void copyAndDelete(URL src, long temp) throws IOException {
1223:                File storage = getStorageFile(src, temp);
1224:
1225:                File dest = DataUtilities.urlToFile(src);
1226:                FileChannel in = null;
1227:                FileChannel out = null;
1228:
1229:                if (storage.equals(dest))
1230:                    return;
1231:
1232:                try {
1233:                    readWriteLock.lockWrite();
1234:
1235:                    if (dest.exists()) {
1236:                        if (!dest.delete())
1237:                            throw new IOException(
1238:                                    "Unable to delete original file: " + src);
1239:                    }
1240:
1241:                    if (storage.exists() && !storage.renameTo(dest)) {
1242:                        in = new FileInputStream(storage).getChannel();
1243:                        out = new FileOutputStream(dest).getChannel();
1244:
1245:                        long len = in.size();
1246:                        long copied = out.transferFrom(in, 0, in.size());
1247:
1248:                        if (len != copied) {
1249:                            throw new IOException("unable to complete write: "
1250:                                    + src);
1251:                        }
1252:
1253:                    }
1254:                } finally {
1255:                    readWriteLock.unlockWrite();
1256:
1257:                    if (in != null) {
1258:                        in.close();
1259:                    }
1260:
1261:                    if (out != null) {
1262:                        out.close();
1263:                    }
1264:                    storage.delete();
1265:                }
1266:            }
1267:
1268:            /**
1269:             * An AttributeReader implementation for Shapefile. Pretty straightforward.
1270:             * <BR/>The default geometry is at position 0, and all dbf columns follow.
1271:             * <BR/>The dbf file may not be necessary, if not, just pass null as the
1272:             * DbaseFileReader
1273:             */
1274:            protected static class Reader extends AbstractAttributeIO implements 
1275:                    AttributeReader {
1276:                protected ShapefileReader shp;
1277:                protected DbaseFileReader dbf;
1278:                protected DbaseFileReader.Row row;
1279:                protected ShapefileReader.Record record;
1280:                int cnt;
1281:
1282:                /**
1283:                 * Create the shapefile reader
1284:                 *
1285:                 * @param atts - the attributes that we are going to read.
1286:                 * @param shp - the shapefile reader, required
1287:                 * @param dbf - the dbf file reader. May be null, in this case no
1288:                 *        attributes will be read from the dbf file
1289:                 */
1290:                public Reader(AttributeType[] atts, ShapefileReader shp,
1291:                        DbaseFileReader dbf) {
1292:                    super (atts);
1293:                    this .shp = shp;
1294:                    this .dbf = dbf;
1295:                }
1296:
1297:                public void close() throws IOException {
1298:                    try {
1299:                        if (shp != null) {
1300:                            shp.close();
1301:                        }
1302:
1303:                        if (dbf != null) {
1304:                            dbf.close();
1305:                        }
1306:                    } finally {
1307:                        row = null;
1308:                        record = null;
1309:                        shp = null;
1310:                        dbf = null;
1311:                    }
1312:                }
1313:
1314:                public boolean hasNext() throws IOException {
1315:                    int n = shp.hasNext() ? 1 : 0;
1316:
1317:                    if (dbf != null) {
1318:                        n += (dbf.hasNext() ? 2 : 0);
1319:                    }
1320:
1321:                    if ((n == 3) || ((n == 1) && (dbf == null))) {
1322:                        return true;
1323:                    }
1324:
1325:                    if (n == 0) {
1326:                        return false;
1327:                    }
1328:
1329:                    throw new IOException(((n == 1) ? "Shp" : "Dbf")
1330:                            + " has extra record");
1331:                }
1332:
1333:                public void next() throws IOException {
1334:                    record = shp.nextRecord();
1335:
1336:                    if (dbf != null) {
1337:                        row = dbf.readRow();
1338:                    }
1339:                }
1340:
1341:                public Object read(int param) throws IOException,
1342:                        java.lang.ArrayIndexOutOfBoundsException {
1343:                    switch (param) {
1344:                    case 0:
1345:                        return record.shape();
1346:
1347:                    default:
1348:
1349:                        if (row != null) {
1350:                            return row.read(param - 1);
1351:                        }
1352:
1353:                        return null;
1354:                    }
1355:                }
1356:            }
1357:
            /**
             * A FeatureWriter for ShapefileDataStore. Uses a write and annotate
             * technique to avoid buffering attributes and geometries: each Feature
             * is streamed straight to disk as it is written. Because the shapefile
             * and dbf formats require header information (record count, bounds,
             * byte length) which can only be obtained by reading the entire series
             * of Features, the headers are rewritten via {@link #flush()} after the
             * initial write completes.
             */
            protected class Writer implements  FeatureWriter {
                // store current time here as flag for temporary write; 0 means a
                // brand-new shapefile is being created and no temp copy is needed
                private long temp;

                // the FeatureReader to obtain the current Feature from
                protected FeatureReader featureReader;

                // the AttributeReader over the pre-existing files; used in close()
                // to transfer verbatim any records the caller did not iterate over
                protected Reader attReader;

                // the current Feature, handed out by next() and persisted by write()
                private Feature currentFeature;

                // the FeatureType we are representing
                private FeatureType featureType;

                // an array for reuse in Feature creation (all-null attributes)
                private Object[] emptyAtts;

                // an array for reuse in writing to dbf (non-geometry values only)
                private Object[] transferCache;
                private ShapeType shapeType;
                private ShapeHandler handler;

                // keep track of shapefile length during write, starts at 100 bytes for
                // required header
                private int shapefileLength = 100;

                // keep track of the number of records written
                private int records = 0;

                // hold 1 if dbf should write the attribute at the index, 0 if not
                // (geometries go to the shp file, never to the dbf)
                private byte[] writeFlags;
                private ShapefileWriter shpWriter;
                private DbaseFileWriter dbfWriter;
                private DbaseFileHeader dbfHeader;
                private FileChannel dbfChannel;

                // keep track of bounds during write
                private Envelope bounds = new Envelope();

                /**
                 * Opens a writer over the store's shp/shx/dbf files. When the files
                 * already hold data, output goes to temporary storage (keyed by the
                 * current time, see getStorageURL) and later replaces the originals
                 * in {@link #clean()}; otherwise a fresh shapefile is started from
                 * the schema.
                 *
                 * @param typeName the feature type name being written
                 * @throws IOException if no schema exists (createSchema() was never
                 *         called) or the output channels cannot be opened
                 */
                public Writer(String typeName) throws IOException {
                    // set up reader
                    try {
                        attReader = getAttributesReader(true);
                        featureReader = createFeatureReader(typeName,
                                attReader, schema);
                        temp = System.currentTimeMillis();
                    } catch (Exception e) {
                        // NOTE(review): any failure here is treated as "no existing
                        // data" and we fall through to creating a new file; the
                        // original cause is intentionally not propagated
                        getSchema(); // load it

                        if (schema == null) {
                            throw new IOException(
                                    "To create a shapefile, you must first call createSchema()");
                        }

                        featureReader = new EmptyFeatureReader(schema);
                        temp = 0;
                    }

                    this .featureType = featureReader.getFeatureType();

                    // set up buffers and write flags
                    emptyAtts = new Object[featureType.getAttributeCount()];
                    writeFlags = new byte[featureType.getAttributeCount()];

                    int cnt = 0;

                    for (int i = 0, ii = featureType.getAttributeCount(); i < ii; i++) {
                        // if its a geometry, we don't want to write it to the dbf...
                        if (!(featureType.getAttributeType(i) instanceof  GeometryAttributeType)) {
                            cnt++;
                            writeFlags[i] = (byte) 1;
                        }
                    }

                    // dbf transfer buffer
                    transferCache = new Object[cnt];

                    // open underlying writers
                    shpWriter = new ShapefileWriter(
                            (FileChannel) getWriteChannel(getStorageURL(shpURL,
                                    temp)),
                            (FileChannel) getWriteChannel(getStorageURL(shxURL,
                                    temp)), readWriteLock);

                    dbfChannel = (FileChannel) getWriteChannel(getStorageURL(
                            dbfURL, temp));
                    dbfHeader = createDbaseHeader(featureType);
                    dbfWriter = new DbaseFileWriter(dbfHeader, dbfChannel);

                    // if there is existing data, adopt its shape type up front and
                    // write a placeholder header (rewritten with final values in
                    // flush())
                    if (attReader != null && attReader.hasNext()) {
                        shapeType = attReader.shp.getHeader().getShapeType();
                        handler = shapeType.getShapeHandler();
                        shpWriter.writeHeaders(bounds, shapeType, records,
                                shapefileLength);
                    }

                }

                /**
                 * Go back and update the headers with the required info: final
                 * bounds, record count and byte length for the shp/shx, and the
                 * record count for the dbf.
                 *
                 * @throws IOException if rewriting either header fails
                 */
                protected void flush() throws IOException {
                    // not sure the check for records <=0 is necessary,
                    // but if records > 0 and shapeType is null there's probably
                    // another problem.
                    if ((records <= 0) && (shapeType == null)) {
                        // nothing was written: derive a shape type from the schema's
                        // default geometry so the header is still well-formed
                        GeometryAttributeType geometryAttributeType = featureType
                                .getDefaultGeometry();

                        Class gat = geometryAttributeType.getType();
                        shapeType = JTSUtilities.getShapeType(gat);
                    }

                    shpWriter.writeHeaders(bounds, shapeType, records,
                            shapefileLength);

                    dbfHeader.setNumRecords(records);
                    dbfChannel.position(0);
                    dbfHeader.writeHeader(dbfChannel);
                }

                /**
                 * In case someone doesn't close me: best-effort close() from the
                 * finalizer, with failures deliberately ignored.
                 *
                 * @throws Throwable propagated from Object.finalize
                 */
                protected void finalize() throws Throwable {
                    if (featureReader != null) {
                        try {
                            close();
                        } catch (Exception e) {
                            // oh well, we tried
                        }
                    }
                }

                /**
                 * Clean up our temporary write if there was one: promote the temp
                 * shp/shx/dbf files over the originals. A no-op when a brand-new
                 * file was being written (temp == 0).
                 *
                 * @throws IOException if a copy/delete fails
                 */
                protected void clean() throws IOException {
                    if (temp == 0) {
                        return;
                    }

                    copyAndDelete(shpURL, temp);
                    copyAndDelete(shxURL, temp);
                    copyAndDelete(dbfURL, temp);
                }

                /**
                 * Release resources and flush the header information. Any records
                 * the caller never iterated over are first copied through verbatim
                 * from the underlying reader so no data is lost; then headers are
                 * rewritten and temp files are promoted.
                 *
                 * @throws IOException if already closed, or flushing/closing fails
                 */
                public void close() throws IOException {
                    if (featureReader == null) {
                        throw new IOException("Writer closed");
                    }

                    // make sure to write the last feature...
                    if (currentFeature != null) {
                        write();
                    }

                    // if the attribute reader is here, that means we may have some
                    // additional tail-end file flushing to do if the Writer was closed
                    // before the end of the file
                    if (attReader != null && attReader.hasNext()) {
                        shapeType = attReader.shp.getHeader().getShapeType();
                        handler = shapeType.getShapeHandler();

                        // handle the case where zero records have been written, but the
                        // stream is closed and the headers
                        if (records == 0) {
                            shpWriter.writeHeaders(bounds, shapeType, 0, 0);
                        }

                        // copy array for bounds
                        double[] env = new double[4];

                        while (attReader.hasNext()) {
                            // transfer bytes from shapefile
                            shapefileLength += attReader.shp.transferTo(
                                    shpWriter, ++records, env);

                            // bounds update
                            bounds.expandToInclude(env[0], env[1]);
                            bounds.expandToInclude(env[2], env[3]);

                            // transfer dbf bytes
                            attReader.dbf.transferTo(dbfWriter);
                        }
                    }

                    // close reader, flush headers, and copy temp files, if any;
                    // the nested finally blocks make each step run even when an
                    // earlier one throws
                    try {
                        featureReader.close();
                    } finally {
                        try {
                            flush();
                        } finally {
                            shpWriter.close();
                            dbfWriter.close();
                            dbfChannel.close();
                        }

                        // null out references so a second close() fails fast and
                        // finalize() becomes a no-op
                        featureReader = null;
                        shpWriter = null;
                        dbfWriter = null;
                        dbfChannel = null;
                        clean();
                    }
                }

                /** Returns the FeatureType this writer produces. */
                public org.geotools.feature.FeatureType getFeatureType() {
                    return featureType;
                }

                /**
                 * True while the underlying reader still has existing features to
                 * hand out for rewriting.
                 *
                 * @throws IOException if the writer has been closed
                 */
                public boolean hasNext() throws IOException {
                    if (featureReader == null) {
                        throw new IOException("Writer closed");
                    }

                    return featureReader.hasNext();
                }

                /**
                 * Returns the next feature to (re)write: an existing feature while
                 * the reader has more, then blank template features for appending.
                 * Implicitly writes the previous feature unless it was remove()d.
                 *
                 * @throws IOException if closed, or reading/templating fails
                 */
                public org.geotools.feature.Feature next() throws IOException {
                    // closed already, error!
                    if (featureReader == null) {
                        throw new IOException("Writer closed");
                    }

                    // we have to write the current feature back into the stream
                    if (currentFeature != null) {
                        write();
                    }

                    // is there another? If so, return it
                    if (featureReader.hasNext()) {
                        try {
                            return currentFeature = featureReader.next();
                        } catch (IllegalAttributeException iae) {
                            throw new DataSourceException("Error in reading",
                                    iae);
                        }
                    }

                    // reader has no more (now we are adding to the file)
                    // so return an empty feature
                    try {
                        return currentFeature = DataUtilities.template(
                                getFeatureType(), emptyAtts);
                    } catch (IllegalAttributeException iae) {
                        throw new DataSourceException(
                                "Error creating empty Feature", iae);
                    }
                }

                /**
                 * Drops the current feature: it will simply not be written back to
                 * the output stream.
                 *
                 * @throws IOException if closed or there is no current feature
                 */
                public void remove() throws IOException {
                    if (featureReader == null) {
                        throw new IOException("Writer closed");
                    }

                    if (currentFeature == null) {
                        throw new IOException("Current feature is null");
                    }

                    // mark the current feature as null, this will result in it not
                    // being rewritten to the stream
                    currentFeature = null;
                }

                /**
                 * Persists the current feature: geometry to the shp (updating
                 * bounds and running byte length), non-geometry attributes to the
                 * dbf.
                 *
                 * @throws IOException if closed, no current feature, or a write
                 *         fails
                 */
                public void write() throws IOException {
                    if (currentFeature == null) {
                        throw new IOException("Current feature is null");
                    }

                    if (featureReader == null) {
                        throw new IOException("Writer closed");
                    }

                    // writing of Geometry
                    Geometry g = currentFeature.getDefaultGeometry();

                    // if this is the first Geometry, find the shapeType and handler
                    if (shapeType == null) {
                        int dims = JTSUtilities.guessCoorinateDims(g
                                .getCoordinates());

                        try {
                            shapeType = JTSUtilities.getShapeType(g, dims);

                            // we must go back and annotate this after writing
                            shpWriter.writeHeaders(new Envelope(), shapeType,
                                    0, 0);
                            handler = shapeType.getShapeHandler();
                        } catch (ShapefileException se) {
                            throw new RuntimeException("Unexpected Error", se);
                        }
                    }

                    // convert geometry to the collection form the shape type expects
                    g = JTSUtilities.convertToCollection(g, shapeType);

                    // bounds calculations
                    Envelope b = g.getEnvelopeInternal();

                    if (!b.isNull()) {
                        bounds.expandToInclude(b);
                    }

                    // file length update: record body plus 8-byte record header
                    shapefileLength += (handler.getLength(g) + 8);

                    // write it
                    shpWriter.writeGeometry(g);

                    // writing of attributes: pack non-geometry values densely into
                    // the transfer cache in schema order
                    int idx = 0;

                    for (int i = 0, ii = featureType.getAttributeCount(); i < ii; i++) {
                        // skip geometries
                        if (writeFlags[i] > 0) {
                            transferCache[idx++] = currentFeature
                                    .getAttribute(i);
                        }
                    }

                    dbfWriter.write(transferCache);

                    // one more down...
                    records++;

                    // clear the currentFeature
                    currentFeature = null;
                }
            }
1708:        }
www.java2java.com | Contact Us
Copyright 2009 - 12 Demo Source and Support. All rights reserved.
All other trademarks are property of their respective owners.