Source code for DeleteHandler.java (deegree, package org.deegree.io.datastore.sql.transaction.delete)
//$HeadURL: https://svn.wald.intevation.org/svn/deegree/base/trunk/src/org/deegree/io/datastore/sql/transaction/delete/DeleteHandler.java $
/*----------------    FILE HEADER  ------------------------------------------

 This file is part of deegree.
 Copyright (C) 2001-2008 by:
 EXSE, Department of Geography, University of Bonn
 http://www.giub.uni-bonn.de/deegree/
 lat/lon GmbH
 http://www.lat-lon.de

 This library is free software; you can redistribute it and/or
 modify it under the terms of the GNU Lesser General Public
 License as published by the Free Software Foundation; either
 version 2.1 of the License, or (at your option) any later version.

 This library is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 Lesser General Public License for more details.

 You should have received a copy of the GNU Lesser General Public
 License along with this library; if not, write to the Free Software
 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

 Contact:

 Andreas Poth
 lat/lon GmbH
 Aennchenstraße 19
 53177 Bonn
 Germany
 E-Mail: poth@lat-lon.de

 Prof. Dr. Klaus Greve
 Department of Geography
 University of Bonn
 Meckenheimer Allee 166
 53115 Bonn
 Germany
 E-Mail: greve@giub.uni-bonn.de

 ---------------------------------------------------------------------------*/
package org.deegree.io.datastore.sql.transaction.delete;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.deegree.datatypes.Types;
import org.deegree.framework.log.ILogger;
import org.deegree.framework.log.LoggerFactory;
import org.deegree.i18n.Messages;
import org.deegree.io.datastore.Datastore;
import org.deegree.io.datastore.DatastoreException;
import org.deegree.io.datastore.FeatureId;
import org.deegree.io.datastore.schema.MappedFeaturePropertyType;
import org.deegree.io.datastore.schema.MappedFeatureType;
import org.deegree.io.datastore.schema.MappedGMLSchema;
import org.deegree.io.datastore.schema.MappedPropertyType;
import org.deegree.io.datastore.schema.TableRelation;
import org.deegree.io.datastore.schema.content.MappingField;
import org.deegree.io.datastore.sql.AbstractRequestHandler;
import org.deegree.io.datastore.sql.StatementBuffer;
import org.deegree.io.datastore.sql.TableAliasGenerator;
import org.deegree.io.datastore.sql.transaction.SQLTransaction;
import org.deegree.io.datastore.sql.transaction.UpdateHandler;
import org.deegree.model.feature.schema.FeatureType;
import org.deegree.model.feature.schema.PropertyType;
import org.deegree.model.filterencoding.Filter;
import org.deegree.ogcwebservices.wfs.operation.transaction.Delete;
import org.deegree.ogcwebservices.wfs.operation.transaction.Transaction;

/**
 * Handler for {@link Delete} operations (which usually occur as parts of {@link Transaction}
 * requests).
 * <p>
 * When a {@link Delete} operation is performed, the following actions are taken:
 * <ul>
 * <li>the {@link FeatureId}s of all (root) feature instances that match the associated
 * {@link Filter} are determined</li>
 * <li>the {@link FeatureGraph} is built in order to determine which features may be deleted
 * without removing subfeatures of independent features</li>
 * <li>the {@link TableGraph} is built that contains explicit information on all table rows that
 * have to be deleted (and their dependencies)</li>
 * <li>the {@link TableNode}s of the {@link TableGraph} are sorted in topological order, i.e. they
 * may be deleted in that order without violating any foreign key constraints</li>
 * </ul>
 *
 * @see FeatureGraph
 * @see TableGraph
 *
 * @author <a href="mailto:schneider@lat-lon.de">Markus Schneider</a>
 * @author last edited by: $Author: apoth $
 *
 * @version $Revision: 9342 $, $Date: 2007-12-27 04:32:57 -0800 (Thu, 27 Dec 2007) $
 */
public class DeleteHandler extends AbstractRequestHandler {

    private static final ILogger LOG = LoggerFactory.getLogger(DeleteHandler.class);

    private String lockId;

    /**
     * Creates a new <code>DeleteHandler</code> from the given parameters.
     *
     * @param dsTa
     *            datastore transaction (provides access to the {@link Datastore})
     * @param aliasGenerator
     *            generator for unique table aliases
     * @param conn
     *            JDBC connection to be used
     * @param lockId
     *            optional id of associated lock (may be null)
     */
    public DeleteHandler(SQLTransaction dsTa, TableAliasGenerator aliasGenerator, Connection conn,
            String lockId) {
        super(dsTa.getDatastore(), aliasGenerator, conn);
        this.lockId = lockId;
    }

    /**
     * Deletes the features from the {@link Datastore} that have a certain type and are matched by
     * the given filter.
     *
     * @param ft
     *            non-abstract feature type of the features to be deleted
     * @param filter
     *            constrains the feature instances to be deleted
     * @return number of deleted feature instances
     * @throws DatastoreException
     */
    public int performDelete(MappedFeatureType ft, Filter filter)
            throws DatastoreException {

        assert !ft.isAbstract();

        if (!ft.isDeletable()) {
            String msg = Messages.getMessage("DATASTORE_FT_NOT_DELETABLE", ft.getName());
            throw new DatastoreException(msg);
        }

        List<FeatureId> fids = determineAffectedAndModifiableFIDs(ft, filter, this.lockId);

        if (LOG.getLevel() == ILogger.LOG_DEBUG) {
            LOG.logDebug("Affected fids:");
            for (FeatureId fid : fids) {
                LOG.logDebug("" + fid);
            }
        }

        FeatureGraph featureGraph = new FeatureGraph(fids, this);
        TableGraph tableGraph = new TableGraph(featureGraph, this);

        if (LOG.getLevel() == ILogger.LOG_DEBUG) {
            LOG.logDebug("FeatureGraph: " + featureGraph);
            LOG.logDebug("TableGraph: " + tableGraph);
        }

        List<TableNode> sortedNodes = tableGraph.getNodesInTopologicalOrder();
        for (TableNode node : sortedNodes) {
            boolean delete = true;
            if (node.isDeleteVetoPossible()) {
                List<TableNode> referencingRows = getReferencingRows(node);
                if (referencingRows.size() > 0) {
                    delete = false;
                    LOG.logDebug("Skipping delete of " + node + ": " + referencingRows.size()
                            + " reference(s) exist.");
                    for (TableNode referencingNode : referencingRows) {
                        LOG.logDebug("Referenced by: " + referencingNode);
                    }
                }
            }
            if (delete) {
                performDelete(node);
            }
        }

        int deletedFeatures = tableGraph.getDeletableRootFeatureCount();

        if (deletedFeatures != fids.size()) {
            String msg = Messages.getMessage("DATASTORE_COULD_NOT_DELETE_ALL");
            LOG.logInfo(msg);
        }

        // return count of actually deleted (root) features
        return deletedFeatures;
    }

    /**
     * Deletes the table entry from the SQL database that is represented by the given
     * {@link TableNode}.
     *
     * @param node
     * @throws DatastoreException
     */
    private void performDelete(TableNode node)
            throws DatastoreException {

        StatementBuffer query = new StatementBuffer();
        query.append("DELETE FROM ");
        query.append(node.getTable());
        query.append(" WHERE ");
        boolean first = true;
        for (KeyColumn column : node.getKeyColumns()) {
            if (first) {
                first = false;
            } else {
                query.append(" AND ");
            }
            query.append(column.getName());
            query.append("=?");
            query.addArgument(column.getValue(), column.getTypeCode());
        }

        PreparedStatement stmt = null;
        try {
            stmt = this.datastore.prepareStatement(conn, query);
            LOG.logDebug("Deleting row: " + query);
            stmt.execute();
        } catch (SQLException e) {
            String msg = "Error performing delete '" + query + "': " + e.getMessage();
            LOG.logInfo(msg, e);
            throw new DatastoreException(msg);
        } finally {
            if (stmt != null) {
                try {
                    stmt.close();
                } catch (SQLException e) {
                    String msg = "Error closing statement: " + e.getMessage();
                    LOG.logError(msg, e);
                }
            }
        }
    }

    /**
     * Determines the {@link TableNode}s that represent the simple/geometry properties of the
     * given feature which are stored in the property table attached by the given
     * {@link TableRelation}.
     *
     * @param fid
     *            id of the feature that owns the properties
     * @param relation
     *            describes how the property table is joined to the feature table
     * @return nodes for all property table rows that belong to the given feature
     * @throws DatastoreException
     */
    List<TableNode> determinePropNodes(FeatureId fid, TableRelation relation)
            throws DatastoreException {

        List<TableNode> propEntries = new ArrayList<TableNode>();

        this.aliasGenerator.reset();
        String fromAlias = this.aliasGenerator.generateUniqueAlias();
        String toAlias = this.aliasGenerator.generateUniqueAlias();
        MappingField[] fromFields = relation.getFromFields();
        MappingField[] toFields = relation.getToFields();

        StatementBuffer query = new StatementBuffer();
        query.append("SELECT DISTINCT ");
        for (int i = 0; i < toFields.length; i++) {
            query.append(toAlias);
            query.append(".");
            query.append(toFields[i].getField());
            if (i != toFields.length - 1) {
                query.append(',');
            }
        }
        query.append(" FROM ");
        query.append(fid.getFeatureType().getTable());
        query.append(" ");
        query.append(fromAlias);
        query.append(" INNER JOIN ");
        query.append(relation.getToTable());
        query.append(" ");
        query.append(toAlias);
        query.append(" ON ");
        for (int j = 0; j < fromFields.length; j++) {
            query.append(fromAlias);
            query.append('.');
            query.append(fromFields[j].getField());
            query.append('=');
            query.append(toAlias);
            query.append('.');
            query.append(toFields[j].getField());
        }
        query.append(" WHERE ");
        appendFeatureIdConstraint(query, fid, fromAlias);

        PreparedStatement stmt = null;
        ResultSet rs = null;
        try {
            stmt = this.datastore.prepareStatement(conn, query);
            LOG.logDebug("Performing: " + query);
            rs = stmt.executeQuery();
            while (rs.next()) {
                Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>();
                for (int i = 0; i < toFields.length; i++) {
                    KeyColumn column = new KeyColumn(toFields[i].getField(), toFields[i].getType(),
                            rs.getObject(i + 1));
                    keyColumns.add(column);
                }
                TableNode propEntry = new TableNode(relation.getToTable(), keyColumns);
                propEntries.add(propEntry);
            }
        } catch (SQLException e) {
            LOG.logInfo(e.getMessage(), e);
            throw new DatastoreException("Error in determinePropNodes(): " + e.getMessage());
        } finally {
            try {
                if (rs != null) {
                    try {
                        rs.close();
                    } catch (SQLException e) {
                        LOG.logError("Error closing result set: '" + e.getMessage() + "'.", e);
                    }
                }
            } finally {
                if (stmt != null) {
                    try {
                        stmt.close();
                    } catch (SQLException e) {
                        LOG.logError("Error closing statement: '" + e.getMessage() + "'.", e);
                    }
                }
            }
        }
        return propEntries;
    }

    /**
     * Determines the row in the join table that connects a certain feature with a subfeature.
     *
     * @param fid
     *            id of the (super-) feature
     * @param subFid
     *            id of the subfeature
     * @param relation1
     *            describes how the join table is attached
     * @param relation2
     *            describes how the subfeature table is joined
     * @return join table row that connects the feature with the subfeature
     * @throws DatastoreException
     */
    TableNode determineJTNode(FeatureId fid, FeatureId subFid, TableRelation relation1,
            TableRelation relation2) throws DatastoreException {

        LOG.logDebug("Determining join table entry for feature " + fid + " and subfeature "
                + subFid);
        TableNode jtEntry = null;

        this.aliasGenerator.reset();

        String featureTableAlias = this.aliasGenerator.generateUniqueAlias();
        String joinTableAlias = this.aliasGenerator.generateUniqueAlias();
        String subFeatureTableAlias = this.aliasGenerator.generateUniqueAlias();

        MappingField[] fromFields = relation1.getFromFields();
        MappingField[] fromFields2 = relation2.getFromFields();
        MappingField[] toFields = relation1.getToFields();
        MappingField[] toFields2 = relation2.getToFields();

        // need to select 'from' fields of second relation element as well
        MappingField[] selectFields = new MappingField[toFields.length + fromFields2.length];
        for (int i = 0; i < toFields.length; i++) {
            selectFields[i] = toFields[i];
        }
        for (int i = 0; i < fromFields2.length; i++) {
            selectFields[i + toFields.length] = fromFields2[i];
        }

        StatementBuffer query = new StatementBuffer();
        query.append("SELECT DISTINCT ");
        for (int i = 0; i < selectFields.length; i++) {
            query.append(joinTableAlias);
            query.append(".");
            query.append(selectFields[i].getField());
            if (i != selectFields.length - 1) {
                query.append(',');
            }
        }
        query.append(" FROM ");
        query.append(fid.getFeatureType().getTable());
        query.append(" ");
        query.append(featureTableAlias);
        query.append(" INNER JOIN ");
        query.append(relation1.getToTable());
        query.append(" ");
        query.append(joinTableAlias);
        query.append(" ON ");
        for (int j = 0; j < fromFields.length; j++) {
            query.append(featureTableAlias);
            query.append('.');
            query.append(fromFields[j].getField());
            query.append('=');
            query.append(joinTableAlias);
            query.append('.');
            query.append(toFields[j].getField());
        }
        query.append(" INNER JOIN ");
        query.append(subFid.getFeatureType().getTable());
        query.append(" ");
        query.append(subFeatureTableAlias);
        query.append(" ON ");
        for (int j = 0; j < fromFields2.length; j++) {
            query.append(joinTableAlias);
            query.append('.');
            query.append(fromFields2[j].getField());
            query.append('=');
            query.append(subFeatureTableAlias);
            query.append('.');
            query.append(toFields2[j].getField());
        }

        query.append(" WHERE ");
        appendFeatureIdConstraint(query, fid, featureTableAlias);
        query.append(" AND ");
        appendFeatureIdConstraint(query, subFid, subFeatureTableAlias);

        PreparedStatement stmt = null;
        ResultSet rs = null;
        try {
            stmt = this.datastore.prepareStatement(conn, query);
            LOG.logDebug("Determining join table row: " + query);
            rs = stmt.executeQuery();
            if (rs.next()) {
                Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>(selectFields.length);
                for (int i = 0; i < selectFields.length; i++) {
                    KeyColumn column = new KeyColumn(selectFields[i].getField(),
                            selectFields[i].getType(), rs.getObject(i + 1));
                    keyColumns.add(column);
                }

                if (subFid.getFeatureType().hasSeveralImplementations()) {
                    String localSubFtName = subFid.getFeatureType().getName().getLocalName();
                    KeyColumn column = new KeyColumn(FT_COLUMN, Types.VARCHAR, localSubFtName);
                    keyColumns.add(column);
                }
                jtEntry = new TableNode(relation1.getToTable(), keyColumns);
            } else {
                String msg = "This is impossible: No join table row between feature and subfeature!?";
                throw new DatastoreException(msg);
            }
        } catch (SQLException e) {
            LOG.logInfo(e.getMessage(), e);
            throw new DatastoreException("Error in determineJTNode(): " + e.getMessage());
        } finally {
            try {
                if (rs != null) {
                    try {
                        rs.close();
                    } catch (SQLException e) {
                        LOG.logError("Error closing result set: '" + e.getMessage() + "'.", e);
                    }
                }
            } finally {
                if (stmt != null) {
                    try {
                        stmt.close();
                    } catch (SQLException e) {
                        LOG.logError("Error closing statement: '" + e.getMessage() + "'.", e);
                    }
                }
            }
        }
        return jtEntry;
    }

    /**
     * Delete orphaned rows in the specified property table (target table of the given
     * {@link TableRelation}).
     * <p>
     * Only used by the {@link UpdateHandler}.
     *
     * @param relation
     * @param keyValues
     * @throws DatastoreException
     */
    public void deleteOrphanedPropertyRows(TableRelation relation, Object[] keyValues)
            throws DatastoreException {
        Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>(keyValues.length);
        for (int i = 0; i < keyValues.length; i++) {
            KeyColumn keyColumn = new KeyColumn(relation.getToFields()[i].getField(),
                    relation.getToFields()[i].getType(), keyValues[i]);
            keyColumns.add(keyColumn);
        }
        TableNode node = new TableNode(relation.getToTable(), keyColumns);
        if (getReferencingRows(node).size() == 0) {
            performDelete(node);
        }
    }

    /**
     * Returns all table rows that reference the given table row ({@link TableNode}).
     *
     * @param node
     * @return all table rows that reference the given table row
     * @throws DatastoreException
     */
    private List<TableNode> getReferencingRows(TableNode node)
            throws DatastoreException {

        List<TableNode> rows = new ArrayList<TableNode>();
        for (TableReference tableReference : getReferencingTables(node.getTable())) {
            rows.addAll(getReferencingRows(node, tableReference));
        }
        return rows;
    }

    /**
     * Returns all stored rows (as {@link TableNode}s) that reference the given row
     * ({@link TableNode}) via the given reference relation.
     *
     * @param node
     * @param ref
     * @return all stored rows that reference the given row
     * @throws DatastoreException
     */
    private List<TableNode> getReferencingRows(TableNode node, TableReference ref)
            throws DatastoreException {

        List<TableNode> referencingRows = new ArrayList<TableNode>();
        this.aliasGenerator.reset();
        String fromAlias = this.aliasGenerator.generateUniqueAlias();
        String toAlias = this.aliasGenerator.generateUniqueAlias();
        MappingField[] fromFields = ref.getFkColumns();
        MappingField[] toFields = ref.getKeyColumns();

        StatementBuffer query = new StatementBuffer();
        query.append("SELECT DISTINCT ");
        for (int i = 0; i < fromFields.length; i++) {
            query.append(fromAlias);
            query.append(".");
            query.append(fromFields[i].getField());
            if (i != fromFields.length - 1) {
                query.append(',');
            }
        }
        query.append(" FROM ");
        query.append(ref.getFromTable());
        query.append(" ");
        query.append(fromAlias);
        query.append(" INNER JOIN ");
        query.append(ref.getToTable());
        query.append(" ");
        query.append(toAlias);
        query.append(" ON ");
        for (int j = 0; j < fromFields.length; j++) {
            query.append(fromAlias);
            query.append('.');
            query.append(fromFields[j].getField());
            query.append('=');
            query.append(toAlias);
            query.append('.');
            query.append(toFields[j].getField());
        }
        query.append(" WHERE ");
        int i = node.getKeyColumns().size();
        for (KeyColumn column : node.getKeyColumns()) {
            query.append(toAlias);
            query.append('.');
            query.append(column.getName());
            query.append("=?");
            query.addArgument(column.getValue(), column.getTypeCode());
            if (--i != 0) {
                query.append(" AND ");
            }
        }

        PreparedStatement stmt = null;
        ResultSet rs = null;
        try {
            stmt = this.datastore.prepareStatement(conn, query);
            LOG.logDebug("Performing: " + query);
            rs = stmt.executeQuery();
            while (rs.next()) {
                Collection<KeyColumn> keyColumns = new ArrayList<KeyColumn>(fromFields.length);
                for (i = 0; i < fromFields.length; i++) {
                    KeyColumn column = new KeyColumn(fromFields[i].getField(),
                            fromFields[i].getType(), rs.getObject(i + 1));
                    keyColumns.add(column);
                }
                TableNode referencingRow = new TableNode(ref.getFromTable(), keyColumns);
                referencingRows.add(referencingRow);
            }
        } catch (SQLException e) {
            throw new DatastoreException("Error in getReferencingRows(): " + e.getMessage());
        } finally {
            try {
                if (rs != null) {
                    try {
                        rs.close();
                    } catch (SQLException e) {
                        LOG.logError("Error closing result set: '" + e.getMessage() + "'.", e);
                    }
                }
            } finally {
                if (stmt != null) {
                    try {
                        stmt.close();
                    } catch (SQLException e) {
                        LOG.logError("Error closing statement: '" + e.getMessage() + "'.", e);
                    }
                }
            }
        }
        return referencingRows;
    }

    /**
     * Returns all tables that reference the given table.
     *
     * TODO cache search
     *
     * @param table
     * @return all tables that reference the given table
     */
    private List<TableReference> getReferencingTables(String table) {

        List<TableReference> tables = new ArrayList<TableReference>();
        MappedGMLSchema[] schemas = this.datastore.getSchemas();
        for (int i = 0; i < schemas.length; i++) {
            MappedGMLSchema schema = schemas[i];
            FeatureType[] fts = schema.getFeatureTypes();
            for (int j = 0; j < fts.length; j++) {
                MappedFeatureType ft = (MappedFeatureType) fts[j];
                if (!ft.isAbstract()) {
                    PropertyType[] props = ft.getProperties();
                    for (int k = 0; k < props.length; k++) {
                        tables.addAll(getReferencingTables((MappedPropertyType) props[k], table));
                    }
                }
            }
        }
        return tables;
    }

    /**
     * Returns all tables that reference the given table and that are defined in the mapping of the
     * given property type.
     *
     * @param property
     * @param table
     * @return all tables that reference the given table
     */
    private List<TableReference> getReferencingTables(MappedPropertyType property, String table) {

        List<TableReference> tables = new ArrayList<TableReference>();
        if (property instanceof MappedFeaturePropertyType
                && ((MappedFeaturePropertyType) property).getFeatureTypeReference()
                        .getFeatureType().isAbstract()) {
            TableRelation[] relations = property.getTableRelations();
            for (int j = 0; j < relations.length - 1; j++) {
                TableReference ref = new TableReference(relations[j]);
                if (ref.getToTable().equals(table)) {
                    tables.add(ref);
                }
            }
            MappedFeaturePropertyType pt = (MappedFeaturePropertyType) property;
            MappedFeatureType abstractFt = pt.getFeatureTypeReference().getFeatureType();
            MappedFeatureType[] substitutions = abstractFt.getConcreteSubstitutions();
            for (MappedFeatureType concreteType : substitutions) {
                TableRelation finalStep = relations[relations.length - 1];
                TableReference ref = new TableReference(getTableRelation(finalStep,
                        concreteType.getTable()));
                if (ref.getToTable().equals(table)) {
                    tables.add(ref);
                }
            }

        } else {
            TableRelation[] relations = property.getTableRelations();
            for (int j = 0; j < relations.length; j++) {
                TableReference ref = new TableReference(relations[j]);
                if (ref.getToTable().equals(table)) {
                    tables.add(ref);
                }
            }
        }
        return tables;
    }

    /**
     * Returns a variant of the given {@link TableRelation} whose target fields refer to the given
     * (concrete substitution) table instead of the abstract feature type's table.
     */
    private TableRelation getTableRelation(TableRelation toAbstractSubFt, String table) {
        MappingField[] toConcreteFields = new MappingField[toAbstractSubFt.getToFields().length];
        for (int i = 0; i < toConcreteFields.length; i++) {
            MappingField toAbstractField = toAbstractSubFt.getToFields()[i];
            toConcreteFields[i] = new MappingField(table, toAbstractField.getField(),
                    toAbstractField.getType());
        }
        TableRelation toConcreteSubFt = new TableRelation(toAbstractSubFt.getFromFields(),
                toConcreteFields, toAbstractSubFt.getFKInfo(), toAbstractSubFt.getIdGenerator());
        return toConcreteSubFt;
    }
}
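
For orientation, the sketch below shows how a caller (for example the datastore's SQLTransaction, from which this handler is constructed) might drive the public entry point performDelete(MappedFeatureType, Filter), which returns the number of deleted (root) features. It is only a sketch: the DeleteHandlerUsageSketch class and its parameters are hypothetical placeholders, and how the transaction, alias generator, connection, feature type and filter are obtained is assumed rather than taken from the deegree sources.

import java.sql.Connection;

import org.deegree.io.datastore.DatastoreException;
import org.deegree.io.datastore.schema.MappedFeatureType;
import org.deegree.io.datastore.sql.TableAliasGenerator;
import org.deegree.io.datastore.sql.transaction.SQLTransaction;
import org.deegree.io.datastore.sql.transaction.delete.DeleteHandler;
import org.deegree.model.filterencoding.Filter;

/** Hypothetical caller; all arguments are placeholders supplied by the surrounding datastore code. */
public class DeleteHandlerUsageSketch {

    public int deleteFeatures(SQLTransaction dsTa, TableAliasGenerator aliasGenerator,
            Connection conn, MappedFeatureType ft, Filter filter, String lockId)
            throws DatastoreException {
        // one handler per Delete operation; lockId may be null if no lock is associated
        DeleteHandler handler = new DeleteHandler(dsTa, aliasGenerator, conn, lockId);
        // determines the affected feature ids, builds the FeatureGraph and TableGraph, and
        // deletes the table rows in topological order; returns the number of deleted root features
        return handler.performDelete(ft, filter);
    }
}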