/*
 * Geotools2 - OpenSource mapping toolkit
 * http://geotools.org
 * (C) 2002-2006, Geotools Project Management Committee (PMC)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation;
 * version 2.1 of the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 */
package org.geotools.arcsde.data;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.geotools.arcsde.pool.ArcSDEPooledConnection;
import org.geotools.arcsde.pool.UnavailableArcSDEConnectionException;
import org.geotools.data.DataSourceException;
import org.geotools.data.FeatureWriter;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.feature.AttributeType;
import org.geotools.feature.DefaultFeatureType;
import org.geotools.feature.Feature;
import org.geotools.feature.FeatureType;
import org.geotools.feature.IllegalAttributeException;
import org.opengis.filter.FilterFactory;

import com.esri.sde.sdk.client.SeColumnDefinition;
import com.esri.sde.sdk.client.SeConnection;
import com.esri.sde.sdk.client.SeCoordinateReference;
import com.esri.sde.sdk.client.SeDelete;
import com.esri.sde.sdk.client.SeException;
import com.esri.sde.sdk.client.SeInsert;
import com.esri.sde.sdk.client.SeLayer;
import com.esri.sde.sdk.client.SeObjectId;
import com.esri.sde.sdk.client.SeRow;
import com.esri.sde.sdk.client.SeShape;
import com.esri.sde.sdk.client.SeTable;
import com.esri.sde.sdk.client.SeUpdate;
import com.vividsolutions.jts.geom.Geometry;

/**
 * Implementation of the FeatureWriter interface for use with the
 * ArcSDEDataStore class.
 *
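 * <p>
 * A minimal usage sketch (hedged: writers are normally obtained through the
 * DataStore API rather than instantiated directly; the <code>dataStore</code>
 * variable, type name and attribute name are illustrative assumptions, and
 * exception handling is omitted):
 *
 * <pre>
 * FeatureWriter writer = dataStore.getFeatureWriterAppend("MY_TABLE",
 *         Transaction.AUTO_COMMIT);
 * Feature feature = writer.next(); // hasNext() is false, so a new feature is created
 * feature.setAttribute("NAME", "a new record");
 * writer.write(); // inserts the new row into the ArcSDE layer
 * writer.close();
 * </pre>
 *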
 * @author Jake Fear, jfear@polexis.com
 * @source $URL: http://svn.geotools.org/geotools/tags/2.4.1/modules/unsupported/arcsde/datastore/src/main/java/org/geotools/arcsde/data/ArcSDEFeatureWriter.java $
 * @version
 */
class ArcSDEFeatureWriter implements FeatureWriter {
    private static final Logger LOGGER = org.geotools.util.logging.Logging
            .getLogger(ArcSDEFeatureWriter.class.getPackage().getName());

    private ArcSDEDataStore dataStore;

    private FIDReader fidStrategy;

    private ArcTransactionState transactionState;

    private SeLayer layer;

    private SeColumnDefinition[] columnDefinitions;

    private List features;

    // Pointer into the current List of features
    private int currentIndex;

    // When true, the current feature has not yet been inserted into the
    // database.
    private boolean notInserted;

    // Not all attributes are mutable, so these two parallel arrays record
    // the column names and the FeatureType attribute indexes of the values
    // that can actually be written. They are consulted before a row is
    // inserted or updated.
    private String[] columns;

    // "Pointers" into the FeatureType for the attributes that are mutable.
    private Integer[] mutableAttributeIndexes;

    /**
     * The 'id' column for this featureType.
     */
    private String rowIdColumnName;

    private FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);

    /**
     * Creates a new ArcSDEFeatureWriter.
     *
     * @param store
     *            the data store this writer works against
     * @param fidStrategy
     *            the strategy used to figure out the feature id (row id)
     *            column
     * @param state
     *            the transaction state in use, or <code>null</code> if the
     *            writer operates in auto-commit mode
     * @param layer
     *            the ArcSDE layer to write to
     * @param features
     *            the features to operate on, or <code>null</code> to start
     *            with an empty list
     * @throws DataSourceException
     */
    public ArcSDEFeatureWriter(ArcSDEDataStore store,
            FIDReader fidStrategy, ArcTransactionState state,
            SeLayer layer, List features) throws DataSourceException {
        transactionState = state;
        this.fidStrategy = fidStrategy;

        if (features != null) {
            this.features = features;
        } else {
            this.features = new ArrayList();
        }

        this.dataStore = store;
        this.layer = layer;
        this.rowIdColumnName = fidStrategy.getFidColumn();
        this.currentIndex = -1;
    }

    /**
     * Creates a new ArcSDEFeatureWriter that starts with an empty list of
     * features.
     *
     * @param store
     *            the data store this writer works against
     * @param fidStrategy
     *            the strategy used to figure out the feature id (row id)
     *            column
     * @param state
     *            the transaction state in use, or <code>null</code> if the
     *            writer operates in auto-commit mode
     * @param layer
     *            the ArcSDE layer to write to
     * @throws DataSourceException
     */
    public ArcSDEFeatureWriter(ArcSDEDataStore store,
            FIDReader fidStrategy, ArcTransactionState state,
            SeLayer layer) throws DataSourceException {
        this(store, fidStrategy, state, layer, null);
    }

    /**
     * Provides the <code>FeatureType</code> that is acceptable for features
     * handled by this <code>FeatureWriter</code>.
     *
     * @return the schema of the ArcSDE layer this writer works against
     *
     * @throws RuntimeException
     *             if the schema cannot be fetched from the ArcSDE server
     */
    public FeatureType getFeatureType() {
        try {
            return ArcSDEAdapter.fetchSchema(this.dataStore
                    .getConnectionPool(), this.layer.getQualifiedName(),
                    this.dataStore.getNamespace());
        } catch (SeException e) {
            LOGGER.log(Level.WARNING, e.getMessage(), e);
            throw new RuntimeException(e.getMessage());
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, e.getMessage(), e);
            throw new RuntimeException(e.getMessage());
        }
    }

    /**
     * Implements an operation similar to next in java.util.Iterator. This
     * allows the caller to iterate over features obtained from the backing
     * store and to instantiate new features and add them to the layer in the
     * backing store.
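     * <p>
     * Iteration sketch (hedged: acquisition of the writer is not shown,
     * exception handling is omitted, and the attribute name is an
     * illustrative assumption):
     *
     * <pre>
     * while (writer.hasNext()) {
     *     Feature existing = writer.next(); // an existing feature from the backing store
     * }
     * Feature added = writer.next(); // no next feature, so a new empty one is returned
     * added.setAttribute("NAME", "value");
     * writer.write();
     * </pre>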
     *
     * @return the next feature in the list if one is available; otherwise a
     *         new feature object is created and returned to the caller so
     *         that they may modify its contents and later write it to the
     *         backing store.
     *
     * @throws IOException
     */
    public synchronized Feature next() throws IOException {
        Feature feature;

        if (!hasNext()) {
            // In this case we must instantiate a new feature and add it
            // to our internal list, thus allowing it to be modified...
            // It is not clear that this cast will always be safe, but it
            // seems to be a safe approach for the default implementation
            // provided.
            DefaultFeatureType featureType = (DefaultFeatureType) getFeatureType();
            Object[] attributes = new Object[featureType.getAttributeCount()];

            try {
                feature = featureType.create(attributes);
            } catch (IllegalAttributeException iae) {
                LOGGER.log(Level.WARNING, iae.getMessage(), iae);
                throw new IOException(iae.getMessage());
            }

            this.features.add(feature);
            this.currentIndex++;
            this.notInserted = true;
        } else {
            // Simply return the next feature in the list...
            feature = (Feature) this.features.get(++this.currentIndex);
        }

        return feature;
    }

    /**
     * Removes the current feature from the backing store.
     *
     * @throws IOException
     *             If there are no features or if the backing store throws an
     *             exception.
     */
    public synchronized void remove() throws IOException {
        if ((this.features == null)
                || (this.currentIndex >= this.features.size())) {
            throw new IOException("No current feature available.");
        }

        if (this.notInserted) {
            this.features.remove(this.currentIndex--);
            this.notInserted = false;
        } else {
            Feature feature = (Feature) this.features.get(this.currentIndex);
            ArcSDEPooledConnection connection = null;

            try {
                connection = getConnection();

                SeDelete seDelete = new SeDelete(connection);

                long featureId = ArcSDEAdapter.getNumericFid(ff
                        .featureId(feature.getID()));
                SeObjectId objectID = new SeObjectId(featureId);
                seDelete.byId(this.layer.getQualifiedName(), objectID);
                // this.dataStore.fireRemoved(feature);
            } catch (Exception e) {
                LOGGER.log(Level.SEVERE, e.getMessage(), e);
                throw new IOException(e.getMessage());
            } finally {
                releaseConnection(connection);
            }
        }
    }

    /**
     * Writes the feature at the current index to the backing store. If this
     * feature is not yet in the backing store it will be inserted.
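     * <p>
     * Update sketch (hedged: the writer is assumed to have been obtained over
     * existing content, exception handling is omitted, and the attribute name
     * is an illustrative assumption):
     *
     * <pre>
     * while (writer.hasNext()) {
     *     Feature feature = writer.next();
     *     feature.setAttribute("NAME", "updated value");
     *     writer.write(); // issues an SeUpdate matched on the feature's row id
     * }
     * writer.close();
     * </pre>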
     *
     * @throws IOException
     *             In the case that it is not possible to write the feature to
     *             the backing store because it either does not exist or the
     *             backing store throws its own exception that indicates an
     *             error.
     */
    public synchronized void write() throws IOException {
        if ((this.features == null) || (this.features.size() == 0)) {
            throw new IOException("No feature to be written.");
        }

        ArcSDEPooledConnection connection = null;

        try {
            Feature feature = (Feature) this.features.get(this.currentIndex);
            FeatureType featureType = feature.getFeatureType();
            AttributeType[] attributeTypes = featureType.getAttributeTypes();
            connection = getConnection();

            if (this.notInserted) {
                // We must insert the record into ArcSDE
                SeInsert insert = new SeInsert(connection);
                String[] cols = getColumns(attributeTypes, connection);
                insert.intoTable(this.layer.getQualifiedName(), cols);
                insert.setWriteMode(true);

                SeRow row = insert.getRowToSet();

                // Now set the values for the new row here...
                for (int i = 0; i < cols.length; i++) {
                    setRowValue(row, i, feature
                            .getAttribute(this.mutableAttributeIndexes[i]
                                    .intValue()));
                }

                // Now "commit" the changes.
                insert.execute();
                insert.close();
                // this.dataStore.fireAdded(feature);
            } else {
                // The record is already inserted, so we will be updating
                // the values associated with the given record.
                SeUpdate update = new SeUpdate(connection);
                String[] cols = getColumns(attributeTypes, connection);
                String featureId = feature.getID().substring(
                        feature.getID().lastIndexOf('.') + 1,
                        feature.getID().length());
                update.toTable(this.layer.getQualifiedName(), cols,
                        this.rowIdColumnName + " = " + featureId);
                update.setWriteMode(true);

                SeRow row = update.getRowToSet();

                // Set values on rows here.....
                for (int i = 0; i < cols.length; i++) {
                    Object value = feature
                            .getAttribute(this.mutableAttributeIndexes[i]
                                    .intValue());
                    setRowValue(row, i, value);
                }

                update.execute();
                update.close();
            }
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, e.getMessage(), e);
            if (LOGGER.isLoggable(Level.FINE)) {
                e.printStackTrace();
            }
            throw new DataSourceException(e.getMessage(), e);
        } finally {
            releaseConnection(connection);
        }
    }

    /**
     * Indicates whether this <code>FeatureWriter</code> contains more
     * feature instances.
     *
     * @return true if the next call to <code>next()</code> will return an
     *         already existing feature from the backing store.
     *
     * @throws IOException
     */
    public boolean hasNext() throws IOException {
        // Guard against a null feature list (for instance after close())
        // before asking for its size.
        if (this.features == null) {
            return false;
        }

        final int size = this.features.size();

        return (size > 0) && ((this.currentIndex + 1) < size);
    }

    /**
     * Releases this writer's references to the data store, layer and feature
     * list and resets the iteration index. Connections are not closed here;
     * they are released after each operation or by the transaction state.
     *
     * @throws IOException
     */
    public void close() throws IOException {
        this.dataStore = null;
        this.layer = null;
        this.features = null;
        this.currentIndex = 0;
    }

    /**
     * Returns the names of the columns that can actually be written to,
     * lazily inspecting the table's column definitions to exclude the FID
     * attribute and any ArcSDE managed (read only) columns. As a side effect
     * it fills the mutableAttributeIndexes array with the corresponding
     * FeatureType attribute indexes.
     *
     * @param attributeTypes
     *            the attributes of the FeatureType being written
     * @param connection
     *            the connection used to describe the underlying table
     *
     * @return the names of the mutable columns, in upper case
     *
     * @throws SeException
     *             if the ArcSDE table cannot be described
     */
    private synchronized String[] getColumns(
            AttributeType[] attributeTypes, SeConnection connection)
            throws SeException {
        if (this.columnDefinitions == null) {
            SeTable table = new SeTable(connection, this.layer
                    .getQualifiedName());

            // We are going to inspect the column definitions in order to
            // determine which attributes are actually mutable...
            this.columnDefinitions = table.describe();

            ArrayList columnList = new ArrayList();
            ArrayList indexes = new ArrayList();

            for (int i = 0; i < attributeTypes.length; i++) {
                if (attributeTypes[i].getName().equals(
                        this.layer
                                .getShapeAttributeName(SeLayer.SE_SHAPE_ATTRIBUTE_FID))) {
                    // This is an attribute added to the feature type solely
                    // to support FIDs. It isn't an actual attribute on the
                    // underlying SDE table, and as such it can't be written
                    // to. Skip it!
                    continue;
                }
                // We need to exclude read only types from the set of
                // "mutable" column names. See the ArcSDE documentation for
                // the explanation of "1"; if they provided a symbolic
                // constant I would use it... As it is, I think this is
                // easier to understand along with their documentation. 1
                // indicates an ArcSDE managed field.
                if (this.columnDefinitions[i].getRowIdType() != 1) {
                    columnList.add(attributeTypes[i].getName().toUpperCase());
                    indexes.add(new Integer(i));
                }
            }

            this.columns = new String[columnList.size()];
            this.mutableAttributeIndexes = new Integer[indexes.size()];
            columnList.toArray(this.columns);
            indexes.toArray(this.mutableAttributeIndexes);
        }

        return this.columns;
    }

    /**
     * Used to set a value on an SeRow object. The value is converted to the
     * appropriate type based on an inspection of the SeColumnDefinition
     * object.
     *
     * @param row
     *            the row being populated
     * @param index
     *            the index of the column to set
     * @param value
     *            the value to set, possibly <code>null</code>
     *
     * @throws SeException
     *             if the ArcSDE API rejects the value
     * @throws IOException
     *             if a geometry value cannot be converted to an SeShape
     */
    private void setRowValue(SeRow row, int index, Object value)
            throws SeException, IOException {
        SeColumnDefinition seColumnDefinition = row.getColumnDef(index);

        final int colType = seColumnDefinition.getType();

        if (colType == SeColumnDefinition.TYPE_INTEGER) {
            if (value != null) {
                row.setInteger(index, new Integer(value.toString()));
            } else {
                row.setInteger(index, null);
            }
        } else if (colType == SeColumnDefinition.TYPE_SMALLINT) {
            if (value != null) {
                row.setShort(index, new Short(value.toString()));
            } else {
                row.setShort(index, null);
            }
        } else if (colType == SeColumnDefinition.TYPE_FLOAT) {
            if (value != null) {
                row.setFloat(index, new Float(value.toString()));
            } else {
                row.setFloat(index, null);
            }
        } else if (colType == SeColumnDefinition.TYPE_DOUBLE) {
            if (value != null) {
                row.setDouble(index, new Double(value.toString()));
            } else {
                row.setDouble(index, null);
            }
        } else if (colType == SeColumnDefinition.TYPE_STRING) {
            if (value != null) {
                row.setString(index, value.toString());
            } else {
                row.setString(index, null);
            }
        } else if (colType == SeColumnDefinition.TYPE_DATE) {
            if (value != null) {
                Calendar calendar = Calendar.getInstance();
                calendar.setTime((Date) value);
                row.setTime(index, calendar);
            } else {
                row.setTime(index, null);
            }
        } else if (colType == SeColumnDefinition.TYPE_SHAPE) {
            if (value != null) {
                try {
                    ArcSDEGeometryBuilder geometryBuilder = ArcSDEGeometryBuilder
                            .builderFor(value.getClass());
                    SeCoordinateReference coordRef = this.layer.getCoordRef();
                    Geometry geom = (Geometry) value;
                    SeShape shape = geometryBuilder.constructShape(geom,
                            coordRef);
                    row.setShape(index, shape);
                } catch (Exception e) {
                    String msg = e instanceof SeException ? ((SeException) e)
                            .getSeError().getErrDesc() : e.getMessage();
                    LOGGER.log(Level.WARNING, msg, e);
                    throw new DataSourceException(msg, e);
                }
            } else {
                row.setShape(index, null);
            }
        }
    }

    /**
     * Returns the connection to use: the transaction's connection if a
     * transaction is in progress, otherwise a connection taken from the data
     * store's connection pool.
     *
     * @return the connection to issue ArcSDE commands over
     *
     * @throws DataSourceException
     *             if the connection pool cannot provide a connection
     * @throws UnavailableArcSDEConnectionException
     *             if no connection is currently available from the pool
     */
    private synchronized ArcSDEPooledConnection getConnection()
            throws DataSourceException, UnavailableArcSDEConnectionException {
        if (this.transactionState != null) {
            return this.transactionState.getConnection();
        }
        return this.dataStore.getConnectionPool().getConnection();
    }

    /**
     * Returns the given connection to the pool, unless a transaction is in
     * progress, in which case the transaction state keeps the connection
     * until it commits or rolls back.
     *
     * @param connection
     *            the connection to release, as obtained from getConnection()
     */
    private synchronized void releaseConnection(
            ArcSDEPooledConnection connection) {
        if (this.transactionState != null) {
            // NO-OP, the transactionState object will release the connection
            // after it commits or rolls back the operations.
        } else if (connection != null) {
            // The null check guards against a failed getConnection() leaving
            // the caller's local variable unassigned.
            connection.close();
        }
    }
}