0001: // Spatial Index Library
0002: //
0003: // Copyright (C) 2002 Navel Ltd.
0004: //
0005: // This library is free software; you can redistribute it and/or
0006: // modify it under the terms of the GNU Lesser General Public
0007: // License as published by the Free Software Foundation; either
0008: // version 2.1 of the License, or (at your option) any later version.
0009: //
0010: // This library is distributed in the hope that it will be useful,
0011: // but WITHOUT ANY WARRANTY; without even the implied warranty of
0012: // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
0013: // Lesser General Public License for more details.
0014: //
0015: // You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
0017: // Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
0018: //
0019: // Contact information:
0020: // Mailing address:
0021: // Marios Hadjieleftheriou
0022: // University of California, Riverside
0023: // Department of Computer Science
0024: // Surge Building, Room 310
0025: // Riverside, CA 92521
0026: //
0027: // Email:
0028: // marioh@cs.ucr.edu
0029: package org.geotools.caching.spatialindex.rtree;
0030:
0031: import org.geotools.caching.spatialindex.spatialindex.*;
0032: import org.geotools.caching.spatialindex.storagemanager.*;
0033:
0034: import java.io.*;
0035:
0036: import java.util.*;
0037:
0038: public class RTree implements ISpatialIndex {
    // Guards the tree: queries take the read lock, updates the write lock.
    RWLock m_rwLock;

    // Backing page store for the header and all nodes.
    IStorageManager m_pStorageManager;

    // Page id of the root node.
    int m_rootID;

    // Page id of the serialized tree header.
    int m_headerID;

    // One of SpatialIndex.RtreeVariant{Linear, Quadratic, Rstar}.
    int m_treeVariant;

    // Minimum node utilization, in (0.0, 1.0).
    double m_fillFactor;

    // Maximum number of entries in an internal (index) node.
    int m_indexCapacity;

    // Maximum number of entries in a leaf node.
    int m_leafCapacity;

    // The R*-Tree 'p' constant, for calculating nearly minimum overlap cost.
    // [Beckmann, Kriegel, Schneider, Seeger, 'The R*-tree: An Efficient and
    // Robust Access Method for Points and Rectangles', Section 4.1]
    int m_nearMinimumOverlapFactor;

    // The R*-Tree 'm' constant, for calculating splitting distributions.
    // [ibid., Section 4.2]
    double m_splitDistributionFactor;

    // The R*-Tree 'p' constant, for removing entries at reinserts.
    // [ibid., Section 4.3]
    double m_reinsertFactor;

    // Dimensionality of the indexed shapes.
    int m_dimension;

    // Sentinel region with inverted bounds; initialized in initNew()/initOld().
    Region m_infiniteRegion;

    // Runtime counters (reads, writes, node/data counts, tree height, ...).
    Statistics m_stats;

    // Observer callbacks fired by writeNode()/readNode()/deleteNode().
    ArrayList m_writeNodeCommands = new ArrayList();
    ArrayList m_readNodeCommands = new ArrayList();
    ArrayList m_deleteNodeCommands = new ArrayList();
0068:
0069: public RTree(PropertySet ps, IStorageManager sm) {
0070: m_rwLock = new RWLock();
0071: m_pStorageManager = sm;
0072: m_rootID = IStorageManager.NewPage;
0073: m_headerID = IStorageManager.NewPage;
0074: m_treeVariant = SpatialIndex.RtreeVariantRstar;
0075: m_fillFactor = 0.7f;
0076: m_indexCapacity = 100;
0077: m_leafCapacity = 100;
0078: m_nearMinimumOverlapFactor = 32;
0079: m_splitDistributionFactor = 0.4f;
0080: m_reinsertFactor = 0.3f;
0081: m_dimension = 2;
0082:
0083: m_infiniteRegion = new Region();
0084: m_stats = new Statistics();
0085:
0086: Object var = ps.getProperty("IndexIdentifier");
0087:
0088: if (var != null) {
0089: if (!(var instanceof Integer)) {
0090: throw new IllegalArgumentException(
0091: "Property IndexIdentifier must an Integer");
0092: }
0093:
0094: m_headerID = ((Integer) var).intValue();
0095:
0096: try {
0097: initOld(ps);
0098: } catch (IOException e) {
0099: System.err.println(e);
0100: throw new IllegalStateException(
0101: "initOld failed with IOException");
0102: }
0103: } else {
0104: try {
0105: initNew(ps);
0106: } catch (IOException e) {
0107: System.err.println(e);
0108: throw new IllegalStateException(
0109: "initNew failed with IOException");
0110: }
0111:
0112: Integer i = new Integer(m_headerID);
0113: ps.setProperty("IndexIdentifier", i);
0114: }
0115: }
0116:
0117: // added by CR
0118: //
0119: public void deleteLeaf(Leaf l) {
0120: Stack pathBuffer = new Stack();
0121:
0122: for (int i = 0; i < l.getChildrenCount(); i++) {
0123: //l.deleteEntry(i) ;
0124: //this.deleteData(l.getChildShape(i), l.getChildIdentifier(i)) ;
0125: l.deleteData(l.getChildIdentifier(i), pathBuffer);
0126: m_stats.m_data--;
0127: }
0128: }
0129:
    /**
     * Drops the entry at position leafIndex from an index node.
     * NOTE(review): this presumably only detaches the child reference and
     * does not free the child's pages or update statistics — confirm against
     * Index.deleteEntry before relying on it.
     */
    public void deleteLeaf(Index node, int leafIndex) {
        node.deleteEntry(leafIndex);
    }
0133:
0134: public List readLeaf(Leaf leaf) {
0135: List ret = new ArrayList();
0136:
0137: for (int i = 0; i < leaf.getChildrenCount(); i++) {
0138: ret.add(new String(leaf.m_pData[i]));
0139: }
0140:
0141: return ret;
0142: }
0143:
0144: //
0145: // ISpatialIndex interface
0146: //
0147: public void insertData(final byte[] data, final IShape shape, int id) {
0148: if (shape.getDimension() != m_dimension) {
0149: throw new IllegalArgumentException(
0150: "insertData: Shape has the wrong number of dimensions.");
0151: }
0152:
0153: m_rwLock.write_lock();
0154:
0155: try {
0156: Region mbr = shape.getMBR();
0157:
0158: byte[] buffer = null;
0159:
0160: if ((data != null) && (data.length > 0)) {
0161: buffer = new byte[data.length];
0162: System.arraycopy(data, 0, buffer, 0, data.length);
0163: }
0164:
0165: insertData_impl(buffer, mbr, id);
0166:
0167: // the buffer is stored in the tree. Do not delete here.
0168: } finally {
0169: m_rwLock.write_unlock();
0170: }
0171: }
0172:
0173: public boolean deleteData(final IShape shape, int id) {
0174: if (shape.getDimension() != m_dimension) {
0175: throw new IllegalArgumentException(
0176: "deleteData: Shape has the wrong number of dimensions.");
0177: }
0178:
0179: m_rwLock.write_lock();
0180:
0181: try {
0182: Region mbr = shape.getMBR();
0183:
0184: return deleteData_impl(mbr, id);
0185: } finally {
0186: m_rwLock.write_unlock();
0187: }
0188: }
0189:
0190: public void containmentQuery(final IShape query, final IVisitor v) {
0191: if (query.getDimension() != m_dimension) {
0192: throw new IllegalArgumentException(
0193: "containmentQuery: Shape has the wrong number of dimensions.");
0194: }
0195:
0196: rangeQuery(SpatialIndex.ContainmentQuery, query, v);
0197: }
0198:
0199: public void intersectionQuery(final IShape query, final IVisitor v) {
0200: if (query.getDimension() != m_dimension) {
0201: throw new IllegalArgumentException(
0202: "intersectionQuery: Shape has the wrong number of dimensions.");
0203: }
0204:
0205: rangeQuery(SpatialIndex.IntersectionQuery, query, v);
0206: }
0207:
0208: public void pointLocationQuery(final IShape query, final IVisitor v) {
0209: if (query.getDimension() != m_dimension) {
0210: throw new IllegalArgumentException(
0211: "pointLocationQuery: Shape has the wrong number of dimensions.");
0212: }
0213:
0214: Region r = null;
0215:
0216: if (query instanceof Point) {
0217: r = new Region((Point) query, (Point) query);
0218: } else if (query instanceof Region) {
0219: r = (Region) query;
0220: } else {
0221: throw new IllegalArgumentException(
0222: "pointLocationQuery: IShape can be Point or Region only.");
0223: }
0224:
0225: rangeQuery(SpatialIndex.IntersectionQuery, r, v);
0226: }
0227:
    /**
     * Visits the k nearest neighbors of the query shape (plus any extra
     * entries tied with the k-th distance), expanding nodes best-first
     * ordered by the supplied distance comparator.
     *
     * @param k number of neighbors requested
     * @param query query shape; must match the tree's dimensionality
     * @param v visitor notified of traversed nodes and reported data entries
     * @param nnc distance measure between the query and a node/data entry
     */
    public void nearestNeighborQuery(int k, final IShape query,
        final IVisitor v, final INearestNeighborComparator nnc) {
        if (query.getDimension() != m_dimension) {
            throw new IllegalArgumentException(
                "nearestNeighborQuery: Shape has the wrong number of dimensions.");
        }

        m_rwLock.read_lock();

        try {
            // I need a priority queue here. It turns out that TreeSet sorts unique keys only and since I am
            // sorting according to distances, it is not assured that all distances will be unique. TreeMap
            // also sorts unique keys. Thus, I am simulating a priority queue using an ArrayList and binarySearch.
            ArrayList queue = new ArrayList();

            Node n = readNode(m_rootID);
            queue.add(new NNEntry(n, 0.0));

            int count = 0; // neighbors reported so far
            double knearest = 0.0; // distance of the furthest neighbor reported

            while (queue.size() != 0) {
                // Pop the closest pending entry (queue is kept sorted by distance).
                NNEntry first = (NNEntry) queue.remove(0);

                if (first.m_pEntry instanceof Node) {
                    n = (Node) first.m_pEntry;
                    v.visitNode((INode) n);

                    for (int cChild = 0; cChild < n.m_children; cChild++) {
                        IEntry e;

                        if (n.m_level == 0) {
                            // Leaf child: wrap it as a reportable data entry.
                            e = new Data(n.m_pData[cChild],
                                n.m_pMBR[cChild],
                                n.m_pIdentifier[cChild]);
                        } else {
                            e = (IEntry) readNode(n.m_pIdentifier[cChild]);
                        }

                        NNEntry e2 = new NNEntry(e, nnc
                                .getMinimumDistance(query, e));

                        // Why don't I use a TreeSet here? See comment above...
                        int loc = Collections.binarySearch(queue, e2,
                                new NNEntryComparator());

                        // binarySearch returns (-insertionPoint - 1) on a miss.
                        if (loc >= 0) {
                            queue.add(loc, e2);
                        } else {
                            queue.add((-loc - 1), e2);
                        }
                    }
                } else {
                    // report all nearest neighbors with equal furthest distances.
                    // (neighbors can be more than k, if many happen to have the same
                    // furthest distance).
                    if ((count >= k) && (first.m_minDist > knearest)) {
                        break;
                    }

                    v.visitData((IData) first.m_pEntry);
                    m_stats.m_queryResults++;
                    count++;
                    knearest = first.m_minDist;
                }
            }
        } finally {
            m_rwLock.read_unlock();
        }
    }
0298:
0299: public void nearestNeighborQuery(int k, final IShape query,
0300: final IVisitor v) {
0301: if (query.getDimension() != m_dimension) {
0302: throw new IllegalArgumentException(
0303: "nearestNeighborQuery: Shape has the wrong number of dimensions.");
0304: }
0305:
0306: NNComparator nnc = new NNComparator();
0307: nearestNeighborQuery(k, query, v, nnc);
0308: }
0309:
0310: public void queryStrategy(final IQueryStrategy qs) {
0311: m_rwLock.read_lock();
0312:
0313: int[] next = new int[] { m_rootID };
0314:
0315: try {
0316: while (true) {
0317: Node n = readNode(next[0]);
0318: boolean[] hasNext = new boolean[] { false };
0319: qs.getNextEntry(n, next, hasNext);
0320:
0321: if (hasNext[0] == false) {
0322: break;
0323: }
0324: }
0325: } finally {
0326: m_rwLock.read_unlock();
0327: }
0328: }
0329:
0330: public PropertySet getIndexProperties() {
0331: PropertySet pRet = new PropertySet();
0332:
0333: // dimension
0334: pRet.setProperty("Dimension", new Integer(m_dimension));
0335:
0336: // index capacity
0337: pRet.setProperty("IndexCapacity", new Integer(m_indexCapacity));
0338:
0339: // leaf capacity
0340: pRet.setProperty("LeafCapacity", new Integer(m_leafCapacity));
0341:
0342: // R-tree variant
0343: pRet.setProperty("TreeVariant", new Integer(m_treeVariant));
0344:
0345: // fill factor
0346: pRet.setProperty("FillFactor", new Double(m_fillFactor));
0347:
0348: // near minimum overlap factor
0349: pRet.setProperty("NearMinimumOverlapFactor", new Integer(
0350: m_nearMinimumOverlapFactor));
0351:
0352: // split distribution factor
0353: pRet.setProperty("SplitDistributionFactor", new Double(
0354: m_splitDistributionFactor));
0355:
0356: // reinsert factor
0357: pRet
0358: .setProperty("ReinsertFactor", new Double(
0359: m_reinsertFactor));
0360:
0361: return pRet;
0362: }
0363:
    /** Registers a callback invoked after every node write (see writeNode). */
    public void addWriteNodeCommand(INodeCommand nc) {
        m_writeNodeCommands.add(nc);
    }

    /** Registers a callback invoked after every node read (see readNode). */
    public void addReadNodeCommand(INodeCommand nc) {
        m_readNodeCommands.add(nc);
    }

    /** Registers a callback invoked after every node delete (see deleteNode). */
    public void addDeleteNodeCommand(INodeCommand nc) {
        m_deleteNodeCommands.add(nc);
    }
0375:
    /**
     * Exhaustively checks structural invariants: tree height vs root level,
     * every parent MBR exactly tight around its children, and the per-level
     * node counts against the recorded statistics.
     *
     * @return true when every check passes; violations are printed to stderr
     */
    public boolean isIndexValid() {
        boolean ret = true;
        Stack st = new Stack();
        Node root = readNode(m_rootID);

        if (root.m_level != (m_stats.m_treeHeight - 1)) {
            System.err.println("Invalid tree height");

            return false;
        }

        // Observed node count per level, built while traversing.
        HashMap nodesInLevel = new HashMap();
        nodesInLevel.put(new Integer(root.m_level), new Integer(1));

        ValidateEntry e = new ValidateEntry(root.m_nodeMBR, root);
        st.push(e);

        while (!st.empty()) {
            e = (ValidateEntry) st.pop();

            // Recompute the tight bounding box of this node's children...
            Region tmpRegion = (Region) m_infiniteRegion.clone();

            for (int cDim = 0; cDim < m_dimension; cDim++) {
                tmpRegion.m_pLow[cDim] = Double.POSITIVE_INFINITY;
                tmpRegion.m_pHigh[cDim] = Double.NEGATIVE_INFINITY;

                for (int cChild = 0; cChild < e.m_pNode.m_children; cChild++) {
                    tmpRegion.m_pLow[cDim] = Math.min(
                            tmpRegion.m_pLow[cDim],
                            e.m_pNode.m_pMBR[cChild].m_pLow[cDim]);
                    tmpRegion.m_pHigh[cDim] = Math.max(
                            tmpRegion.m_pHigh[cDim],
                            e.m_pNode.m_pMBR[cChild].m_pHigh[cDim]);
                }
            }

            // ...then compare it with the node's own MBR and with the MBR
            // its parent recorded for it.
            if (!(tmpRegion.equals(e.m_pNode.m_nodeMBR))) {
                System.err.println("Invalid parent information");
                ret = false;
            } else if (!(tmpRegion.equals(e.m_parentMBR))) {
                System.err.println("Error in parent");
                ret = false;
            }

            // Internal nodes: queue all children for validation and tally
            // them per level.
            if (e.m_pNode.m_level != 0) {
                for (int cChild = 0; cChild < e.m_pNode.m_children; cChild++) {
                    ValidateEntry tmpEntry = new ValidateEntry(
                            e.m_pNode.m_pMBR[cChild],
                            readNode(e.m_pNode.m_pIdentifier[cChild]));

                    if (!nodesInLevel.containsKey(new Integer(
                                    tmpEntry.m_pNode.m_level))) {
                        nodesInLevel.put(new Integer(
                                tmpEntry.m_pNode.m_level), new Integer(
                                1));
                    } else {
                        int i = ((Integer) nodesInLevel
                                .get(new Integer(
                                        tmpEntry.m_pNode.m_level)))
                                .intValue();
                        nodesInLevel.put(new Integer(
                                tmpEntry.m_pNode.m_level), new Integer(
                                i + 1));
                    }

                    st.push(tmpEntry);
                }
            }
        }

        int nodes = 0;

        for (int cLevel = 0; cLevel < m_stats.m_treeHeight; cLevel++) {
            // NOTE(review): throws a NullPointerException if the traversal
            // found no node at cLevel — confirm that is acceptable for a
            // validation helper (it would itself indicate corruption).
            int i1 = ((Integer) nodesInLevel.get(new Integer(cLevel)))
                    .intValue();
            int i2 = ((Integer) m_stats.m_nodesInLevel.get(cLevel))
                    .intValue();

            if (i1 != i2) {
                System.err.println("Invalid nodesInLevel information");
                ret = false;
            }

            nodes += i2;
        }

        if (nodes != m_stats.m_nodes) {
            System.err.println("Invalid number of nodes information");
            ret = false;
        }

        return ret;
    }
0469:
    /** Returns a copy of the tree's runtime statistics. */
    public IStatistics getStatistics() {
        return (IStatistics) m_stats.clone();
    }
0473:
    /**
     * Persists the header and flushes the storage manager so the stored
     * state is consistent with memory.
     *
     * @throws IllegalStateException when the underlying write fails
     */
    public void flush() throws IllegalStateException {
        try {
            storeHeader();
            m_pStorageManager.flush();
        } catch (IOException e) {
            System.err.println(e);
            throw new IllegalStateException(
                "flush failed with IOException");
        }
    }
0484:
0485: //
0486: // Internals
0487: //
0488: private void initNew(PropertySet ps) throws IOException {
0489: Object var;
0490:
0491: // tree variant.
0492: var = ps.getProperty("TreeVariant");
0493:
0494: if (var != null) {
0495: if (var instanceof Integer) {
0496: int i = ((Integer) var).intValue();
0497:
0498: if ((i != SpatialIndex.RtreeVariantLinear)
0499: && (i != SpatialIndex.RtreeVariantQuadratic)
0500: && (i != SpatialIndex.RtreeVariantRstar)) {
0501: throw new IllegalArgumentException(
0502: "Property TreeVariant not a valid variant");
0503: }
0504:
0505: m_treeVariant = i;
0506: } else {
0507: throw new IllegalArgumentException(
0508: "Property TreeVariant must be an Integer");
0509: }
0510: }
0511:
0512: // fill factor.
0513: var = ps.getProperty("FillFactor");
0514:
0515: if (var != null) {
0516: if (var instanceof Double) {
0517: double f = ((Double) var).doubleValue();
0518:
0519: if ((f <= 0.0f) || (f >= 1.0f)) {
0520: throw new IllegalArgumentException(
0521: "Property FillFactor must be in (0.0, 1.0)");
0522: }
0523:
0524: m_fillFactor = f;
0525: } else {
0526: throw new IllegalArgumentException(
0527: "Property FillFactor must be a Double");
0528: }
0529: }
0530:
0531: // index capacity.
0532: var = ps.getProperty("IndexCapacity");
0533:
0534: if (var != null) {
0535: if (var instanceof Integer) {
0536: int i = ((Integer) var).intValue();
0537:
0538: if (i < 3) {
0539: throw new IllegalArgumentException(
0540: "Property IndexCapacity must be >= 3");
0541: }
0542:
0543: m_indexCapacity = i;
0544: } else {
0545: throw new IllegalArgumentException(
0546: "Property IndexCapacity must be an Integer");
0547: }
0548: }
0549:
0550: // leaf capacity.
0551: var = ps.getProperty("LeafCapacity");
0552:
0553: if (var != null) {
0554: if (var instanceof Integer) {
0555: int i = ((Integer) var).intValue();
0556:
0557: if (i < 3) {
0558: throw new IllegalArgumentException(
0559: "Property LeafCapacity must be >= 3");
0560: }
0561:
0562: m_leafCapacity = i;
0563: } else {
0564: throw new IllegalArgumentException(
0565: "Property LeafCapacity must be an Integer");
0566: }
0567: }
0568:
0569: // near minimum overlap factor.
0570: var = ps.getProperty("NearMinimumOverlapFactor");
0571:
0572: if (var != null) {
0573: if (var instanceof Integer) {
0574: int i = ((Integer) var).intValue();
0575:
0576: if ((i < 1) || (i > m_indexCapacity)
0577: || (i > m_leafCapacity)) {
0578: throw new IllegalArgumentException(
0579: "Property NearMinimumOverlapFactor must be less than both index and leaf capacities");
0580: }
0581:
0582: m_nearMinimumOverlapFactor = i;
0583: } else {
0584: throw new IllegalArgumentException(
0585: "Property NearMinimumOverlapFactor must be an Integer");
0586: }
0587: }
0588:
0589: // split distribution factor.
0590: var = ps.getProperty("SplitDistributionFactor");
0591:
0592: if (var != null) {
0593: if (var instanceof Double) {
0594: double f = ((Double) var).doubleValue();
0595:
0596: if ((f <= 0.0f) || (f >= 1.0f)) {
0597: throw new IllegalArgumentException(
0598: "Property SplitDistributionFactor must be in (0.0, 1.0)");
0599: }
0600:
0601: m_splitDistributionFactor = f;
0602: } else {
0603: throw new IllegalArgumentException(
0604: "Property SplitDistriburionFactor must be a Double");
0605: }
0606: }
0607:
0608: // reinsert factor.
0609: var = ps.getProperty("ReinsertFactor");
0610:
0611: if (var != null) {
0612: if (var instanceof Double) {
0613: double f = ((Double) var).doubleValue();
0614:
0615: if ((f <= 0.0f) || (f >= 1.0f)) {
0616: throw new IllegalArgumentException(
0617: "Property ReinsertFactor must be in (0.0, 1.0)");
0618: }
0619:
0620: m_reinsertFactor = f;
0621: } else {
0622: throw new IllegalArgumentException(
0623: "Property ReinsertFactor must be a Double");
0624: }
0625: }
0626:
0627: // dimension
0628: var = ps.getProperty("Dimension");
0629:
0630: if (var != null) {
0631: if (var instanceof Integer) {
0632: int i = ((Integer) var).intValue();
0633:
0634: if (i <= 1) {
0635: throw new IllegalArgumentException(
0636: "Property Dimension must be >= 1");
0637: }
0638:
0639: m_dimension = i;
0640: } else {
0641: throw new IllegalArgumentException(
0642: "Property Dimension must be an Integer");
0643: }
0644: }
0645:
0646: m_infiniteRegion.m_pLow = new double[m_dimension];
0647: m_infiniteRegion.m_pHigh = new double[m_dimension];
0648:
0649: for (int cDim = 0; cDim < m_dimension; cDim++) {
0650: m_infiniteRegion.m_pLow[cDim] = Double.POSITIVE_INFINITY;
0651: m_infiniteRegion.m_pHigh[cDim] = Double.NEGATIVE_INFINITY;
0652: }
0653:
0654: m_stats.m_treeHeight = 1;
0655: m_stats.m_nodesInLevel.add(new Integer(0));
0656:
0657: Leaf root = new Leaf(this , -1);
0658: m_rootID = writeNode(root);
0659:
0660: storeHeader();
0661: }
0662:
0663: private void initOld(PropertySet ps) throws IOException {
0664: loadHeader();
0665:
0666: // only some of the properties may be changed.
0667: // the rest are just ignored.
0668: Object var;
0669:
0670: // tree variant.
0671: var = ps.getProperty("TreeVariant");
0672:
0673: if (var != null) {
0674: if (var instanceof Integer) {
0675: int i = ((Integer) var).intValue();
0676:
0677: if ((i != SpatialIndex.RtreeVariantLinear)
0678: && (i != SpatialIndex.RtreeVariantQuadratic)
0679: && (i != SpatialIndex.RtreeVariantRstar)) {
0680: throw new IllegalArgumentException(
0681: "Property TreeVariant not a valid variant");
0682: }
0683:
0684: m_treeVariant = i;
0685: } else {
0686: throw new IllegalArgumentException(
0687: "Property TreeVariant must be an Integer");
0688: }
0689: }
0690:
0691: // near minimum overlap factor.
0692: var = ps.getProperty("NearMinimumOverlapFactor");
0693:
0694: if (var != null) {
0695: if (var instanceof Integer) {
0696: int i = ((Integer) var).intValue();
0697:
0698: if ((i < 1) || (i > m_indexCapacity)
0699: || (i > m_leafCapacity)) {
0700: throw new IllegalArgumentException(
0701: "Property NearMinimumOverlapFactor must be less than both index and leaf capacities");
0702: }
0703:
0704: m_nearMinimumOverlapFactor = i;
0705: } else {
0706: throw new IllegalArgumentException(
0707: "Property NearMinimumOverlapFactor must be an Integer");
0708: }
0709: }
0710:
0711: // split distribution factor.
0712: var = ps.getProperty("SplitDistributionFactor");
0713:
0714: if (var != null) {
0715: if (var instanceof Double) {
0716: double f = ((Double) var).doubleValue();
0717:
0718: if ((f <= 0.0f) || (f >= 1.0f)) {
0719: throw new IllegalArgumentException(
0720: "Property SplitDistributionFactor must be in (0.0, 1.0)");
0721: }
0722:
0723: m_splitDistributionFactor = f;
0724: } else {
0725: throw new IllegalArgumentException(
0726: "Property SplitDistriburionFactor must be a Double");
0727: }
0728: }
0729:
0730: // reinsert factor.
0731: var = ps.getProperty("ReinsertFactor");
0732:
0733: if (var != null) {
0734: if (var instanceof Double) {
0735: double f = ((Double) var).doubleValue();
0736:
0737: if ((f <= 0.0f) || (f >= 1.0f)) {
0738: throw new IllegalArgumentException(
0739: "Property ReinsertFactor must be in (0.0, 1.0)");
0740: }
0741:
0742: m_reinsertFactor = f;
0743: } else {
0744: throw new IllegalArgumentException(
0745: "Property ReinsertFactor must be a Double");
0746: }
0747: }
0748:
0749: m_infiniteRegion.m_pLow = new double[m_dimension];
0750: m_infiniteRegion.m_pHigh = new double[m_dimension];
0751:
0752: for (int cDim = 0; cDim < m_dimension; cDim++) {
0753: m_infiniteRegion.m_pLow[cDim] = Double.POSITIVE_INFINITY;
0754: m_infiniteRegion.m_pHigh[cDim] = Double.NEGATIVE_INFINITY;
0755: }
0756: }
0757:
0758: private void storeHeader() throws IOException {
0759: ByteArrayOutputStream bs = new ByteArrayOutputStream();
0760: DataOutputStream ds = new DataOutputStream(bs);
0761:
0762: ds.writeInt(m_rootID);
0763: ds.writeInt(m_treeVariant);
0764: ds.writeDouble(m_fillFactor);
0765: ds.writeInt(m_indexCapacity);
0766: ds.writeInt(m_leafCapacity);
0767: ds.writeInt(m_nearMinimumOverlapFactor);
0768: ds.writeDouble(m_splitDistributionFactor);
0769: ds.writeDouble(m_reinsertFactor);
0770: ds.writeInt(m_dimension);
0771: ds.writeLong(m_stats.m_nodes);
0772: ds.writeLong(m_stats.m_data);
0773: ds.writeInt(m_stats.m_treeHeight);
0774:
0775: for (int cLevel = 0; cLevel < m_stats.m_treeHeight; cLevel++) {
0776: ds.writeInt(((Integer) m_stats.m_nodesInLevel.get(cLevel))
0777: .intValue());
0778: }
0779:
0780: ds.flush();
0781: m_headerID = m_pStorageManager.storeByteArray(m_headerID, bs
0782: .toByteArray());
0783: }
0784:
0785: private void loadHeader() throws IOException {
0786: byte[] data = m_pStorageManager.loadByteArray(m_headerID);
0787: DataInputStream ds = new DataInputStream(
0788: new ByteArrayInputStream(data));
0789:
0790: m_rootID = ds.readInt();
0791: m_treeVariant = ds.readInt();
0792: m_fillFactor = ds.readDouble();
0793: m_indexCapacity = ds.readInt();
0794: m_leafCapacity = ds.readInt();
0795: m_nearMinimumOverlapFactor = ds.readInt();
0796: m_splitDistributionFactor = ds.readDouble();
0797: m_reinsertFactor = ds.readDouble();
0798: m_dimension = ds.readInt();
0799: m_stats.m_nodes = ds.readLong();
0800: m_stats.m_data = ds.readLong();
0801: m_stats.m_treeHeight = ds.readInt();
0802:
0803: for (int cLevel = 0; cLevel < m_stats.m_treeHeight; cLevel++) {
0804: m_stats.m_nodesInLevel.add(new Integer(ds.readInt()));
0805: }
0806: }
0807:
0808: protected void insertData_impl(byte[] pData, Region mbr, int id) {
0809: assert mbr.getDimension() == m_dimension;
0810:
0811: boolean[] overflowTable;
0812:
0813: Stack pathBuffer = new Stack();
0814:
0815: Node root = readNode(m_rootID);
0816:
0817: overflowTable = new boolean[root.m_level];
0818:
0819: for (int cLevel = 0; cLevel < root.m_level; cLevel++)
0820: overflowTable[cLevel] = false;
0821:
0822: Node l = root.chooseSubtree(mbr, 0, pathBuffer);
0823: l.insertData(pData, mbr, id, pathBuffer, overflowTable);
0824:
0825: m_stats.m_data++;
0826: }
0827:
0828: protected void insertData_impl(byte[] pData, Region mbr, int id,
0829: int level, boolean[] overflowTable) {
0830: assert mbr.getDimension() == m_dimension;
0831:
0832: Stack pathBuffer = new Stack();
0833:
0834: Node root = readNode(m_rootID);
0835: Node n = root.chooseSubtree(mbr, level, pathBuffer);
0836: n.insertData(pData, mbr, id, pathBuffer, overflowTable);
0837: }
0838:
0839: protected boolean deleteData_impl(final Region mbr, int id) {
0840: assert mbr.getDimension() == m_dimension;
0841:
0842: boolean bRet = false;
0843:
0844: Stack pathBuffer = new Stack();
0845:
0846: Node root = readNode(m_rootID);
0847: Leaf l = root.findLeaf(mbr, id, pathBuffer);
0848:
0849: if (l != null) {
0850: l.deleteData(id, pathBuffer);
0851: m_stats.m_data--;
0852: bRet = true;
0853: }
0854:
0855: return bRet;
0856: }
0857:
0858: protected int writeNode(Node n) throws IllegalStateException {
0859: byte[] buffer = null;
0860:
0861: try {
0862: buffer = n.store();
0863: } catch (IOException e) {
0864: System.err.println(e);
0865: throw new IllegalStateException(
0866: "writeNode failed with IOException");
0867: }
0868:
0869: int page;
0870:
0871: if (n.m_identifier < 0) {
0872: page = IStorageManager.NewPage;
0873: } else {
0874: page = n.m_identifier;
0875: }
0876:
0877: try {
0878: page = m_pStorageManager.storeByteArray(page, buffer);
0879: } catch (InvalidPageException e) {
0880: System.err.println(e);
0881: throw new IllegalStateException(
0882: "writeNode failed with InvalidPageException");
0883: }
0884:
0885: if (n.m_identifier < 0) {
0886: n.m_identifier = page;
0887: m_stats.m_nodes++;
0888:
0889: int i = ((Integer) m_stats.m_nodesInLevel.get(n.m_level))
0890: .intValue();
0891: m_stats.m_nodesInLevel.set(n.m_level, new Integer(i + 1));
0892: }
0893:
0894: m_stats.m_writes++;
0895:
0896: for (int cIndex = 0; cIndex < m_writeNodeCommands.size(); cIndex++) {
0897: ((INodeCommand) m_writeNodeCommands.get(cIndex)).execute(n);
0898: }
0899:
0900: return page;
0901: }
0902:
    /**
     * Loads a node from storage, materializes it as an Index or Leaf
     * according to the persisted type tag, and notifies read observers.
     *
     * @param id page identifier of the node
     * @return the deserialized node
     * @throws IllegalStateException on storage or deserialization failure
     */
    protected Node readNode(int id) {
        byte[] buffer;
        DataInputStream ds = null;
        int nodeType = -1;
        Node n = null;

        try {
            buffer = m_pStorageManager.loadByteArray(id);
            ds = new DataInputStream(new ByteArrayInputStream(buffer));

            // First int of the page is the node-type tag written on store.
            nodeType = ds.readInt();

            if (nodeType == SpatialIndex.PersistentIndex) {
                n = new Index(this , -1, 0);
            } else if (nodeType == SpatialIndex.PersistentLeaf) {
                n = new Leaf(this , -1);
            } else {
                throw new IllegalStateException(
                    "readNode failed reading the correct node type information");
            }

            n.m_pTree = this ;
            n.m_identifier = id;
            n.load(buffer);

            m_stats.m_reads++;
        } catch (InvalidPageException e) {
            System.err.println(e);
            throw new IllegalStateException(
                "readNode failed with InvalidPageException");
        } catch (IOException e) {
            System.err.println(e);
            throw new IllegalStateException(
                "readNode failed with IOException");
        }

        // Notify observers registered via addReadNodeCommand().
        for (int cIndex = 0; cIndex < m_readNodeCommands.size(); cIndex++) {
            ((INodeCommand) m_readNodeCommands.get(cIndex)).execute(n);
        }

        return n;
    }
0944:
0945: protected void deleteNode(Node n) {
0946: try {
0947: m_pStorageManager.deleteByteArray(n.m_identifier);
0948: } catch (InvalidPageException e) {
0949: System.err.println(e);
0950: throw new IllegalStateException(
0951: "deleteNode failed with InvalidPageException");
0952: }
0953:
0954: m_stats.m_nodes--;
0955:
0956: int i = ((Integer) m_stats.m_nodesInLevel.get(n.m_level))
0957: .intValue();
0958: m_stats.m_nodesInLevel.set(n.m_level, new Integer(i - 1));
0959:
0960: for (int cIndex = 0; cIndex < m_deleteNodeCommands.size(); cIndex++) {
0961: ((INodeCommand) m_deleteNodeCommands.get(cIndex))
0962: .execute(n);
0963: }
0964: }
0965:
    /**
     * Depth-first range query shared by containment, intersection and point
     * queries: visits every node whose MBR intersects the query, and reports
     * each leaf entry satisfying the query predicate.
     *
     * @param type SpatialIndex.ContainmentQuery or IntersectionQuery
     * @param query query shape
     * @param v visitor notified of traversed nodes and matching entries
     */
    private void rangeQuery(int type, final IShape query,
        final IVisitor v) {
        m_rwLock.read_lock();

        try {
            Stack st = new Stack();
            Node root = readNode(m_rootID);

            if ((root.m_children > 0)
                    && query.intersects(root.m_nodeMBR)) {
                st.push(root);
            }

            while (!st.empty()) {
                Node n = (Node) st.pop();

                if (n.m_level == 0) {
                    // Leaf: test each entry against the query predicate.
                    v.visitNode((INode) n);

                    for (int cChild = 0; cChild < n.m_children; cChild++) {
                        boolean b;

                        if (type == SpatialIndex.ContainmentQuery) {
                            b = query.contains(n.m_pMBR[cChild]);
                        } else {
                            b = query.intersects(n.m_pMBR[cChild]);
                        }

                        if (b) {
                            Data data = new Data(n.m_pData[cChild],
                                    n.m_pMBR[cChild],
                                    n.m_pIdentifier[cChild]);
                            v.visitData(data);
                            m_stats.m_queryResults++;
                        }
                    }
                } else {
                    // Internal node: descend only into children whose MBR
                    // intersects the query.
                    v.visitNode((INode) n);

                    for (int cChild = 0; cChild < n.m_children; cChild++) {
                        if (query.intersects(n.m_pMBR[cChild])) {
                            st.push(readNode(n.m_pIdentifier[cChild]));
                        }
                    }
                }
            }
        } finally {
            m_rwLock.read_unlock();
        }
    }
1016:
1017: public String toString() {
1018: String s = "Dimension: " + m_dimension + "\n" + "Fill factor: "
1019: + m_fillFactor + "\n" + "Index capacity: "
1020: + m_indexCapacity + "\n" + "Leaf capacity: "
1021: + m_leafCapacity + "\n";
1022:
1023: if (m_treeVariant == SpatialIndex.RtreeVariantRstar) {
1024: s += ("Near minimum overlap factor: "
1025: + m_nearMinimumOverlapFactor + "\n"
1026: + "Reinsert factor: " + m_reinsertFactor + "\n"
1027: + "Split distribution factor: "
1028: + m_splitDistributionFactor + "\n");
1029: }
1030:
1031: s += ("Utilization: "
1032: + ((100 * m_stats.getNumberOfData()) / (m_stats
1033: .getNumberOfNodesInLevel(0) * m_leafCapacity))
1034: + "%" + "\n" + m_stats);
1035:
1036: return s;
1037: }
1038:
    /**
     * Queue element for nearestNeighborQuery: an entry (node or data) paired
     * with its minimum distance to the query shape.
     */
    class NNEntry {
        IEntry m_pEntry;
        double m_minDist;

        NNEntry(IEntry e, double f) {
            m_pEntry = e;
            m_minDist = f;
        }
    }
1048:
1049: class NNEntryComparator implements Comparator {
1050: public int compare(Object o1, Object o2) {
1051: NNEntry n1 = (NNEntry) o1;
1052: NNEntry n2 = (NNEntry) o2;
1053:
1054: if (n1.m_minDist < n2.m_minDist) {
1055: return -1;
1056: }
1057:
1058: if (n1.m_minDist > n2.m_minDist) {
1059: return 1;
1060: }
1061:
1062: return 0;
1063: }
1064: }
1065:
    /**
     * Default nearest-neighbor metric: the minimum distance between the
     * query shape and the entry's shape.
     */
    class NNComparator implements INearestNeighborComparator {
        public double getMinimumDistance(IShape query, IEntry e) {
            IShape s = e.getShape();

            return query.getMinimumDistance(s);
        }
    }
1073:
    /**
     * Work-stack element for isIndexValid(): a node plus the MBR its parent
     * recorded for it, so the two can be cross-checked.
     */
    class ValidateEntry {
        Region m_parentMBR;
        Node m_pNode;

        ValidateEntry(Region r, Node pNode) {
            m_parentMBR = r;
            m_pNode = pNode;
        }
    }
1083:
1084: class Data implements IData {
1085: int m_id;
1086: Region m_shape;
1087: byte[] m_pData;
1088:
1089: Data(byte[] pData, Region mbr, int id) {
1090: m_id = id;
1091: m_shape = mbr;
1092: m_pData = pData;
1093: }
1094:
1095: public int getIdentifier() {
1096: return m_id;
1097: }
1098:
1099: public IShape getShape() {
1100: return new Region(m_shape);
1101: }
1102:
1103: public byte[] getData() {
1104: byte[] data = new byte[m_pData.length];
1105: System.arraycopy(m_pData, 0, data, 0, m_pData.length);
1106:
1107: return data;
1108: }
1109: }
1110: }
|