/*
 *    GeoTools - OpenSource mapping toolkit
 *    http://geotools.org
 *    (C) 2004-2006, GeoTools Project Management Committee (PMC)
 *
 *    This library is free software; you can redistribute it and/or
 *    modify it under the terms of the GNU Lesser General Public
 *    License as published by the Free Software Foundation;
 *    version 2.1 of the License.
 *
 *    This library is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 *    Lesser General Public License for more details.
 */
package org.geotools.data.jdbc;

import java.io.IOException;
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.geotools.data.DataSourceException;
import org.geotools.data.DataUtilities;
import org.geotools.data.FeatureListenerManager;
import org.geotools.data.FeatureReader;
import org.geotools.data.FeatureWriter;
import org.geotools.data.Transaction;
import org.geotools.data.jdbc.fidmapper.FIDMapper;
import org.geotools.feature.DefaultFeatureType;
import org.geotools.feature.Feature;
import org.geotools.feature.FeatureType;
import org.geotools.feature.IllegalAttributeException;

import com.vividsolutions.jts.geom.Envelope;

/**
 * JDBCDataStore implementation of the FeatureWriter interface.
 *
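 * <p>
 * A minimal usage sketch (illustrative only; the writer is normally obtained
 * through {@code DataStore.getFeatureWriter(typeName, filter, transaction)}
 * rather than constructed directly, the type name and attribute below are
 * hypothetical, and exception handling is omitted):
 * </p>
 * <pre><code>
 * // assuming a DataStore, Filter and Transaction are already in hand
 * FeatureWriter writer = dataStore.getFeatureWriter("roads", filter, transaction);
 * try {
 *     while (writer.hasNext()) {
 *         Feature feature = writer.next();        // existing feature to update
 *         feature.setAttribute("name", "renamed");
 *         writer.write();                         // pushes the change to the database
 *     }
 *     Feature added = writer.next();              // past the end: a new, blank feature
 *     added.setAttribute("name", "new road");
 *     writer.write();                             // inserts the new row
 * } finally {
 *     writer.close();
 * }
 * </code></pre>
 *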
 * @author aaime
 * @source $URL: http://svn.geotools.org/geotools/tags/2.4.1/modules/library/jdbc/src/main/java/org/geotools/data/jdbc/JDBCFeatureWriter.java $
 */
public class JDBCFeatureWriter implements FeatureWriter {
    /** The logger for the jdbc module. */
    private static final Logger LOGGER = org.geotools.util.logging.Logging
            .getLogger("org.geotools.data.jdbc");

    protected QueryData queryData;
    protected FeatureReader reader;
    protected Feature live; // current for FeatureWriter
    protected Feature current; // copy of live returned to user
    protected boolean closed;
    protected Object[] fidAttributes;

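    /**
     * Creates a writer that wraps an underlying {@link FeatureReader} over the
     * features being edited and a {@link QueryData} holding the JDBC
     * connection, result set and transaction state used to update, delete and
     * insert rows.
     *
     * @param reader reader over the features this writer will modify
     * @param queryData connection, result set and transaction state backing the writer
     */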
    public JDBCFeatureWriter(FeatureReader reader, QueryData queryData) {
        this.reader = reader;
        this.queryData = queryData;
    }

    /**
     * @see org.geotools.data.FeatureWriter#getFeatureType()
     */
    public FeatureType getFeatureType() {
        return reader.getFeatureType();
    }

    /**
     * @see org.geotools.data.FeatureWriter#next()
     */
    public Feature next() throws IOException {
        if (reader == null) {
            throw new IOException("FeatureWriter has been closed");
        }

        FeatureType featureType = getFeatureType();

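        // While the underlying reader still has content, next() steps to an
        // existing feature and hands back a copy for modification; once the
        // reader is exhausted, it creates a blank template feature instead, so
        // that write() will perform an insert rather than an update.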
        if (hasNext()) {
            try {
                live = reader.next();
                current = featureType.duplicate(live);
                LOGGER.finer("Calling next on writer");
            } catch (IllegalAttributeException e) {
                throw new DataSourceException("Unable to edit " + live.getID()
                        + " of " + featureType.getTypeName(), e);
            }
        } else {
            // new content
            live = null;

            try {
                Feature temp = DataUtilities.template(featureType);

                /* Here we create a Feature with a mutable FID.
                 * We use DataUtilities to create a default set of attributes
                 * for the feature, and these are copied into a new
                 * MutableFIDFeature. This can probably be improved later;
                 * there is also a dependency on DefaultFeatureType here, since
                 * DefaultFeature depends on it and MutableFIDFeature extends
                 * DefaultFeature. This may be an issue if someone reimplements
                 * the Feature interfaces. It could be addressed by providing a
                 * full implementation of Feature in MutableFIDFeature at a
                 * later date.
                 */
                current = new MutableFIDFeature((DefaultFeatureType) featureType,
                        temp.getAttributes(new Object[temp.getNumberOfAttributes()]),
                        null);

                if (useQueryDataForInsert()) {
                    queryData.startInsert();
                }
            } catch (IllegalAttributeException e) {
                throw new DataSourceException("Unable to add additional Features of "
                        + featureType.getTypeName(), e);
            } catch (SQLException e) {
                throw new DataSourceException("Unable to move to insert row. "
                        + e.getMessage(), e);
            }
        }

        return current;
    }

    /**
     * Returns true if QueryData is used to insert rows, false if some other
     * means is used.
     *
     * @return true if the shared QueryData handles inserts for this writer
     */
    protected boolean useQueryDataForInsert() {
        return true;
    }

    /**
     * @see org.geotools.data.FeatureWriter#remove()
     */
    public void remove() throws IOException {
        if (closed) {
            throw new IOException("FeatureWriter has been closed");
        }

        if (current == null) {
            throw new IOException("No feature available to remove");
        }

        if (live != null) {
            LOGGER.fine("Removing " + live);

            Envelope bounds = live.getBounds();
            live = null;
            current = null;

            Transaction transaction = queryData.getTransaction();
            try {
                queryData.deleteCurrentRow();
                queryData.fireChangeRemoved(bounds, false);
            } catch (SQLException sqle) {
                String message = "problem deleting row";

                if (transaction != Transaction.AUTO_COMMIT) {
                    transaction.rollback();
                    message += " (transaction canceled)";
                }

                throw new DataSourceException(message, sqle);
            }
        } else {
            // cancel the pending insert of new content
            current = null;
        }
    }

    /**
     * @see org.geotools.data.FeatureWriter#write()
     */
    public void write() throws IOException {
        if (closed) {
            throw new IOException("FeatureWriter has been closed");
        }

        if (current == null) {
            throw new IOException("No feature available to write");
        }

        LOGGER.fine("write called, live is " + live + " and cur is " + current);

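        // If live is non-null we are positioned on an existing row: compare it
        // with the (possibly modified) copy handed to the caller and issue an
        // update only when something actually changed. If live is null, the
        // current feature was created past the end of the reader and must be
        // inserted as a new row.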
        if (live != null) {
            if (live.equals(current)) {
                // no modifications made to current
                live = null;
                current = null;
            } else {
                try {
                    doUpdate(live, current);

                    Envelope bounds = new Envelope();
                    bounds.expandToInclude(live.getBounds());
                    bounds.expandToInclude(current.getBounds());

                    queryData.fireFeaturesChanged(bounds, false);
                } catch (SQLException sqlException) {
                    queryData.close(sqlException);
                    throw new DataSourceException("Error updating row", sqlException);
                }

                live = null;
                current = null;
            }
        } else {
            LOGGER.fine("doing insert in jdbc featurewriter");

            try {
                doInsert((MutableFIDFeature) current);
                queryData.fireFeaturesAdded(current.getBounds(), false);
            } catch (SQLException e) {
                throw new DataSourceException("Row adding failed.", e);
            }

            current = null;
        }
    }

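    /**
     * Writes the attributes of <code>current</code> that differ from
     * <code>live</code> into the current row of the backing result set and
     * then updates that row.
     *
     * @param live the unmodified feature as read from the database
     * @param current the possibly modified copy to persist
     *
     * @throws IOException if writing an attribute fails; the transaction is
     *         rolled back first when it is not AUTO_COMMIT
     * @throws SQLException if the row update fails
     */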
    protected void doUpdate(Feature live, Feature current)
            throws IOException, SQLException {
        try {
            // Can we fetch the attribute arrays more efficiently?
            for (int i = 0; i < current.getNumberOfAttributes(); i++) {
                Object currAtt = current.getAttribute(i);
                Object liveAtt = live.getAttribute(i);

                if (!DataUtilities.attributesEqual(liveAtt, currAtt)) {
                    if (LOGGER.isLoggable(Level.INFO)) {
                        LOGGER.info("modifying att# " + i + " to " + currAtt);
                    }

                    queryData.write(i, currAtt);
                }
            }
        } catch (IOException ioe) {
            String message = "problem modifying row";

            if (queryData.getTransaction() != Transaction.AUTO_COMMIT) {
                queryData.getTransaction().rollback();
                message += " (transaction canceled)";
            }

            throw new DataSourceException(message, ioe);
        }

        queryData.updateRow();
    }

    /**
     * Inserts a feature into the database.
     *
     * <p>
     * This method should both insert the Feature and update its FID, in case
     * the FIDMapper works over database-generated ids such as auto-increment
     * fields, sequences, and object ids.
     * </p>
     *
     * <p>
     * PostGIS needs to do this separately. With updates it can just override
     * the geometry values using a direct SQL update statement, but for
     * inserts it cannot update a row that does not exist yet.
     * </p>
     *
     * @param mutable the new feature to insert; its FID may be rewritten
     *
     * @throws IOException
     * @throws SQLException
     */
    protected void doInsert(MutableFIDFeature mutable)
            throws IOException, SQLException {
        queryData.startInsert();

        // primary key generation
        FIDMapper mapper = queryData.getMapper();

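        // Two strategies for the primary key: either the FIDMapper can generate
        // the key up front (e.g. from a sequence), in which case it is written
        // into the FID columns before the insert, or the key comes from
        // auto-increment columns filled in by the database, in which case those
        // columns are skipped here and the generated values are read back after
        // the insert.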
        // read the new fid into the Feature
        Set autoincrementColumns = null;
        if ((mapper.getColumnCount() > 0)
                && !mapper.returnFIDColumnsAsAttributes()) {
            autoincrementColumns = Collections.EMPTY_SET;
            String ID = mapper.createID(queryData.getConnection(), mutable, null);
            fidAttributes = mapper.getPKAttributes(ID);

            if (fidAttributes != null) {
                mutable.setID(ID);

                for (int i = 0; i < fidAttributes.length; i++) {
                    Object fidAttribute = fidAttributes[i];

                    // if a column is of type auto increment, we should not update it
                    if (!mapper.isAutoIncrement(i)) {
                        queryData.writeFidColumn(i, fidAttribute);
                    }
                }
            }
        } else {
            autoincrementColumns = new HashSet();

            for (int i = 0; i < mapper.getColumnCount(); i++) {
                if (mapper.isAutoIncrement(i)) {
                    autoincrementColumns.add(mapper.getColumnName(i));
                }
            }
        }

        // set up attributes and write the row
        for (int i = 0; i < current.getNumberOfAttributes(); i++) {
            Object currAtt = current.getAttribute(i);
            String attName = current.getFeatureType().getAttributeType(i).getName();

            if (!autoincrementColumns.contains(attName)) {
                queryData.write(i, currAtt);
            }
        }

        queryData.doInsert();

        // if the ID is generated by the database during the insert, read it
        // back and set it into the feature
        if ((mapper.getColumnCount() > 0) && mapper.hasAutoIncrementColumns()) {
            fidAttributes = new Object[mapper.getColumnCount()];

            for (int i = 0; i < fidAttributes.length; i++) {
                fidAttributes[i] = queryData.readFidColumn(i);
            }

            mutable.setID(mapper.getID(fidAttributes));
        }
    }

    /**
     * @see org.geotools.data.FeatureWriter#hasNext()
     */
    public boolean hasNext() throws IOException {
        if (queryData.isClosed()) {
            throw new IOException("Feature writer is closed");
        }

        return reader.hasNext();
    }

    /**
     * @see org.geotools.data.FeatureWriter#close()
     */
    public void close() throws IOException {
        // Changed from throwing an exception when already closed to just
        // issuing a warning: MySQL was having trouble with the exception, and
        // there is no great harm in being lenient here since this only happens
        // during clean-up.
        if (queryData.isClosed()) {
            LOGGER.warning("Feature writer calling close when queryData is already closed");
        } else {
            reader.close();
        }
    }

}