Source Code Cross Referenced for HashDirectory.java (Database DBMS » JDBM » jdbm.htree)



/**
 * JDBM LICENSE v1.00
 *
 * Redistribution and use of this software and associated documentation
 * ("Software"), with or without modification, are permitted provided
 * that the following conditions are met:
 *
 * 1. Redistributions of source code must retain copyright
 *    statements and notices.  Redistributions must also contain a
 *    copy of this document.
 *
 * 2. Redistributions in binary form must reproduce the
 *    above copyright notice, this list of conditions and the
 *    following disclaimer in the documentation and/or other
 *    materials provided with the distribution.
 *
 * 3. The name "JDBM" must not be used to endorse or promote
 *    products derived from this Software without prior written
 *    permission of Cees de Groot.  For written permission,
 *    please contact cg@cdegroot.com.
 *
 * 4. Products derived from this Software may not be called "JDBM"
 *    nor may "JDBM" appear in their names without prior written
 *    permission of Cees de Groot.
 *
 * 5. Due credit should be given to the JDBM Project
 *    (http://jdbm.sourceforge.net/).
 *
 * THIS SOFTWARE IS PROVIDED BY THE JDBM PROJECT AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT
 * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL
 * CEES DE GROOT OR ANY CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Copyright 2000 (C) Cees de Groot. All Rights Reserved.
 * Contributions are Copyright (C) 2000 by their associated contributors.
 *
 */
package jdbm.htree;

import jdbm.RecordManager;

import jdbm.helper.FastIterator;
import jdbm.helper.IterationException;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;

import java.util.ArrayList;
import java.util.Iterator;

/**
 *  Hashtable directory page.
 *
 *  @author <a href="mailto:boisvert@exoffice.com">Alex Boisvert</a>
 *  @version $Id: HashDirectory.java,v 1.5 2005/06/25 23:12:32 doomdark Exp $
 */
final class HashDirectory extends HashNode implements Externalizable {

    static final long serialVersionUID = 1L;

    /**
     * Maximum number of children in a directory.
     *
     * (Must be a power of 2 -- if you update this value, you must also
     *  update BIT_SIZE and MAX_DEPTH.)
     */
    static final int MAX_CHILDREN = 256;

    /**
     * Number of significant bits per directory level.
     */
    static final int BIT_SIZE = 8; // log2(256) = 8

    /**
     * Maximum number of levels (zero-based)
     *
     * (4 * 8 bits = 32 bits, which is the size of an "int", and as
     *  you know, hashcodes in Java are "ints")
     */
    static final int MAX_DEPTH = 3; // 4 levels

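    // Illustrative note (not part of the original source): with BIT_SIZE = 8
    // and MAX_DEPTH = 3, each directory level consumes one byte of the 32-bit
    // hashcode.  For a hypothetical key whose hashCode() is 0x1A2B3C4D, the
    // child index would be 0x1A at depth 0, 0x2B at depth 1, 0x3C at depth 2,
    // and 0x4D at depth 3, so the tree never needs more than four levels.
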
    /**
     * Record ids of children pages.
     */
    private long[] _children;

    /**
     * Depth of this directory page, zero-based
     */
    private byte _depth;

    /**
     * PageManager used to persist changes in directory and buckets
     */
    private transient RecordManager _recman;

    /**
     * This directory's record ID in the PageManager.  (transient)
     */
    private transient long _recid;

    /**
     * Public constructor used by serialization
     */
    public HashDirectory() {
        // empty
    }

    /**
     * Construct a HashDirectory
     *
     * @param depth Depth of this directory page.
     */
    HashDirectory(byte depth) {
        _depth = depth;
        _children = new long[MAX_CHILDREN];
    }

    /**
     * Sets persistence context.  This method must be called before any
     * persistence-related operation.
     *
     * @param recman RecordManager which stores this directory
     * @param recid Record id of this directory.
     */
    void setPersistenceContext(RecordManager recman, long recid) {
        this._recman = recman;
        this._recid = recid;
    }

    /**
     * Get the record identifier used to load this hashtable.
     */
    long getRecid() {
        return _recid;
    }

    /**
     * Returns whether or not this directory is empty.  A directory
     * is empty when it no longer contains buckets or sub-directories.
     */
    boolean isEmpty() {
        for (int i = 0; i < _children.length; i++) {
            if (_children[i] != 0) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns the value which is associated with the given key. Returns
     * <code>null</code> if there is no association for this key.
     *
     * @param key key whose associated value is to be returned
     */
    Object get(Object key) throws IOException {
        int hash = hashCode(key);
        long child_recid = _children[hash];
        if (child_recid == 0) {
            // no bucket/page --> not found
            return null;
        } else {
            HashNode node = (HashNode) _recman.fetch(child_recid);
            // System.out.println("HashDirectory.get() child is : "+node);

            if (node instanceof HashDirectory) {
                // recurse into next directory level
                HashDirectory dir = (HashDirectory) node;
                dir.setPersistenceContext(_recman, child_recid);
                return dir.get(key);
            } else {
                // node is a bucket
                HashBucket bucket = (HashBucket) node;
                return bucket.getValue(key);
            }
        }
    }

    /**
     * Associates the specified value with the specified key.
     *
     * @param key key with which the specified value is to be associated.
     * @param value value to be associated with the specified key.
     * @return object which was previously associated with the given key,
     *          or <code>null</code> if no association existed.
     */
    Object put(Object key, Object value) throws IOException {
        if (value == null) {
            return remove(key);
        }
        int hash = hashCode(key);
        long child_recid = _children[hash];
        if (child_recid == 0) {
            // no bucket/page here yet, let's create a bucket
            HashBucket bucket = new HashBucket(_depth + 1);

            // insert (key,value) pair in bucket
            Object existing = bucket.addElement(key, value);

            long b_recid = _recman.insert(bucket);
            _children[hash] = b_recid;

            _recman.update(_recid, this);

            // System.out.println("Added: "+bucket);
            return existing;
        } else {
            HashNode node = (HashNode) _recman.fetch(child_recid);

            if (node instanceof HashDirectory) {
                // recursive insert in next directory level
                HashDirectory dir = (HashDirectory) node;
                dir.setPersistenceContext(_recman, child_recid);
                return dir.put(key, value);
            } else {
                // node is a bucket
                HashBucket bucket = (HashBucket) node;
                if (bucket.hasRoom()) {
                    Object existing = bucket.addElement(key, value);
                    _recman.update(child_recid, bucket);
                    // System.out.println("Added: "+bucket);
                    return existing;
                } else {
                    // overflow, so create a new directory
                    if (_depth == MAX_DEPTH) {
                        throw new RuntimeException(
                                "Cannot create deeper directory. "
                                        + "Depth=" + _depth);
                    }
                    HashDirectory dir = new HashDirectory(
                            (byte) (_depth + 1));
                    long dir_recid = _recman.insert(dir);
                    dir.setPersistenceContext(_recman, dir_recid);

                    _children[hash] = dir_recid;
                    _recman.update(_recid, this);

                    // discard overflowed bucket
                    _recman.delete(child_recid);

                    // migrate existing bucket elements
                    ArrayList keys = bucket.getKeys();
                    ArrayList values = bucket.getValues();
                    int entries = keys.size();
                    for (int i = 0; i < entries; i++) {
                        dir.put(keys.get(i), values.get(i));
                    }

                    // (finally!) insert new element
                    return dir.put(key, value);
                }
            }
        }
    }
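
    // Illustrative note (not part of the original source): when a full bucket
    // at depth d overflows, put() replaces it with a new HashDirectory of
    // depth d + 1, re-inserts the bucket's entries into that sub-directory,
    // and then inserts the new (key, value) pair, so sub-directories are only
    // created along hash prefixes that actually collide.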

    /**
     * Remove the value which is associated with the given key.  If the
     * key does not exist, this method simply ignores the operation.
     *
     * @param key key whose associated value is to be removed
     * @return object which was associated with the given key, or
     *          <code>null</code> if no association existed with the given key.
     */
    Object remove(Object key) throws IOException {
        int hash = hashCode(key);
        long child_recid = _children[hash];
        if (child_recid == 0) {
            // no bucket/page --> not found
            return null;
        } else {
            HashNode node = (HashNode) _recman.fetch(child_recid);
            // System.out.println("HashDirectory.remove() child is : "+node);

            if (node instanceof HashDirectory) {
                // recurse into next directory level
                HashDirectory dir = (HashDirectory) node;
                dir.setPersistenceContext(_recman, child_recid);
                Object existing = dir.remove(key);
                if (existing != null) {
                    if (dir.isEmpty()) {
                        // delete empty directory
                        _recman.delete(child_recid);
                        _children[hash] = 0;
                        _recman.update(_recid, this);
                    }
                }
                return existing;
            } else {
                // node is a bucket
                HashBucket bucket = (HashBucket) node;
                Object existing = bucket.removeElement(key);
                if (existing != null) {
                    if (bucket.getElementCount() >= 1) {
                        _recman.update(child_recid, bucket);
                    } else {
                        // delete bucket, it's empty
                        _recman.delete(child_recid);
                        _children[hash] = 0;
                        _recman.update(_recid, this);
                    }
                }
                return existing;
            }
        }
    }

    /**
     * Calculates the hashcode of a key, based on the current directory
     * depth.
     */
    private int hashCode(Object key) {
        int hashMask = hashMask();
        int hash = key.hashCode();
        hash = hash & hashMask;
        hash = hash >>> ((MAX_DEPTH - _depth) * BIT_SIZE);
        hash = hash % MAX_CHILDREN;
        /*
        System.out.println("HashDirectory.hashCode() is: 0x"
                           +Integer.toHexString(hash)
                           +" for object hashCode() 0x"
                           +Integer.toHexString(key.hashCode()));
         */
        return hash;
    }

    /**
     * Calculates the hashmask of this directory.  The hashmask is the
     * bit mask applied to a hashcode to retain only bits that are
     * relevant to this directory level.
     */
    int hashMask() {
        int bits = MAX_CHILDREN - 1;
        int hashMask = bits << ((MAX_DEPTH - _depth) * BIT_SIZE);
        /*
        System.out.println("HashDirectory.hashMask() is: 0x"
                           +Integer.toHexString(hashMask));
         */
        return hashMask;
    }
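
    // Worked example (illustrative, not part of the original source): for a
    // directory at depth 1, hashMask() returns 0xFF << 16 = 0x00FF0000, so a
    // hypothetical key with hashCode() == 0x1A2B3C4D yields the child index
    // (0x1A2B3C4D & 0x00FF0000) >>> ((3 - 1) * 8) = 0x2B.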

    /**
     * Returns an enumeration of the keys contained in this directory.
     */
    FastIterator keys() throws IOException {
        return new HDIterator(true);
    }

    /**
     * Returns an enumeration of the values contained in this directory.
     */
    FastIterator values() throws IOException {
        return new HDIterator(false);
    }

    /**
     * Implement Externalizable interface
     */
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeByte(_depth);
        out.writeObject(_children);
    }

    /**
     * Implement Externalizable interface
     */
    public synchronized void readExternal(ObjectInput in)
            throws IOException, ClassNotFoundException {
        _depth = in.readByte();
        _children = (long[]) in.readObject();
    }

    ////////////////////////////////////////////////////////////////////////
    // INNER CLASS
    ////////////////////////////////////////////////////////////////////////

    /**
     * Utility class to enumerate keys/values in a HTree
     */
    public class HDIterator extends FastIterator {

        /**
         * True if we're iterating on keys, False if enumerating on values.
         */
        private boolean _iterateKeys;

        /**
         * Stacks of directories & last enumerated child position
         */
        private ArrayList _dirStack;
        private ArrayList _childStack;

        /**
         * Current HashDirectory in the hierarchy
         */
        private HashDirectory _dir;

        /**
         * Current child position
         */
        private int _child;

        /**
         * Current bucket iterator
         */
        private Iterator _iter;

        /**
         * Construct an iterator on this directory.
         *
         * @param iterateKeys True if iteration supplies keys, False
         *                  if it supplies values.
         */
        HDIterator(boolean iterateKeys) throws IOException {
            _dirStack = new ArrayList();
            _childStack = new ArrayList();
            _dir = HashDirectory.this;
            _child = -1;
            _iterateKeys = iterateKeys;

            prepareNext();
        }

        /**
         * Returns the next object.
         */
        public Object next() {
            Object next = null;
            if (_iter != null && _iter.hasNext()) {
                next = _iter.next();
            } else {
                try {
                    prepareNext();
                } catch (IOException except) {
                    throw new IterationException(except);
                }
                if (_iter != null && _iter.hasNext()) {
                    return next();
                }
            }
            return next;
        }

        /**
         * Prepare internal state so we can answer <code>hasMoreElements</code>
         *
         * Actually, this code prepares an Enumeration on the next
         * Bucket to enumerate.   If no following bucket is found,
         * the next Enumeration is set to <code>null</code>.
         */
        private void prepareNext() throws IOException {
            long child_recid = 0;

            // find next bucket/directory to enumerate
            do {
                _child++;
                if (_child >= MAX_CHILDREN) {

                    if (_dirStack.isEmpty()) {
                        // no more directories in the stack, we're finished
                        return;
                    }

                    // try next page
                    _dir = (HashDirectory) _dirStack.remove(_dirStack
                            .size() - 1);
                    _child = ((Integer) _childStack.remove(_childStack
                            .size() - 1)).intValue();
                    continue;
                }
                child_recid = _dir._children[_child];
            } while (child_recid == 0);

            if (child_recid == 0) {
                throw new Error("child_recid cannot be 0");
            }

            HashNode node = (HashNode) _recman.fetch(child_recid);
            // System.out.println("HDEnumeration.get() child is : "+node);

            if (node instanceof HashDirectory) {
                // save current position
                _dirStack.add(_dir);
                _childStack.add(new Integer(_child));

                _dir = (HashDirectory) node;
                _child = -1;

                // recurse into
                _dir.setPersistenceContext(_recman, child_recid);
                prepareNext();
            } else {
                // node is a bucket
                HashBucket bucket = (HashBucket) node;
                if (_iterateKeys) {
                    _iter = bucket.getKeys().iterator();
                } else {
                    _iter = bucket.getValues().iterator();
                }
            }
        }
    }

}
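
For context, HashDirectory is the internal directory page behind JDBM's persistent hashtable; application code normally goes through jdbm.htree.HTree rather than using this package-private class directly. The following is a minimal usage sketch, assuming the jdbm 1.0 API (RecordManagerFactory, HTree.createInstance, FastIterator); the store name "testdb" and the object name "sample" are illustrative only.

import jdbm.RecordManager;
import jdbm.RecordManagerFactory;
import jdbm.helper.FastIterator;
import jdbm.htree.HTree;

public class HTreeUsageSketch {
    public static void main(String[] args) throws Exception {
        // Open (or create) a record store; "testdb" is an illustrative name.
        RecordManager recman = RecordManagerFactory.createRecordManager("testdb");

        // Create a persistent hashtable; internally this allocates a root
        // HashDirectory and records its recid.
        HTree htree = HTree.createInstance(recman);
        recman.setNamedObject("sample", htree.getRecid());

        // put()/get() delegate to HashDirectory.put()/get() on the root page.
        htree.put("greeting", "hello");
        System.out.println(htree.get("greeting"));   // prints "hello"

        // keys() is backed by HashDirectory.HDIterator; null marks the end.
        FastIterator iter = htree.keys();
        for (Object key = iter.next(); key != null; key = iter.next()) {
            System.out.println(key);
        }

        recman.commit();
        recman.close();
    }
}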