/*
 * Primitive Collections for Java.
 * Copyright (C) 2002, 2003 Søren Bak
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package bak.pcj.map;

import bak.pcj.DoubleIterator;
import bak.pcj.set.AbstractDoubleSet;
import bak.pcj.set.DoubleSet;
import bak.pcj.hash.DoubleHashFunction;
import bak.pcj.hash.DefaultDoubleHashFunction;
import bak.pcj.util.Exceptions;

import java.util.Collection;
import java.util.AbstractCollection;
import java.util.Iterator;

import java.io.Serializable;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
/**
 * This class represents open addressing hash table based maps from
 * double values to objects.
 *
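 * <p>A minimal usage sketch (the variable names are illustrative only,
 * not part of the library):
 * <pre>
 *     DoubleKeyOpenHashMap map = new DoubleKeyOpenHashMap();
 *     map.put(1.5, "one and a half");
 *     map.put(2.0, "two");
 *     Object v = map.get(2.0);            //  "two"
 *     boolean b = map.containsKey(3.0);   //  false
 *     map.remove(1.5);
 * </pre>
 *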
 * @see DoubleKeyChainedHashMap
 * @see java.util.Map
 *
 * @author Søren Bak
 * @version 1.3 21-08-2003 19:45
 * @since 1.0
 */
public class DoubleKeyOpenHashMap extends AbstractDoubleKeyMap
        implements DoubleKeyMap, Cloneable, Serializable {

    /** Constant indicating relative growth policy. */
    private static final int GROWTH_POLICY_RELATIVE = 0;

    /** Constant indicating absolute growth policy. */
    private static final int GROWTH_POLICY_ABSOLUTE = 1;

    /**
     * The default growth policy of this map.
     * @see #GROWTH_POLICY_RELATIVE
     * @see #GROWTH_POLICY_ABSOLUTE
     */
    private static final int DEFAULT_GROWTH_POLICY = GROWTH_POLICY_RELATIVE;

    /** The default factor with which to increase the capacity of this map. */
    public static final double DEFAULT_GROWTH_FACTOR = 1.0;

    /** The default chunk size with which to increase the capacity of this map. */
    public static final int DEFAULT_GROWTH_CHUNK = 10;

    /** The default capacity of this map. */
    public static final int DEFAULT_CAPACITY = 11;

    /** The default load factor of this map. */
    public static final double DEFAULT_LOAD_FACTOR = 0.75;

    /**
     * The hash function used to hash keys in this map.
     * @serial
     */
    private DoubleHashFunction keyhash;

    /**
     * The size of this map.
     * @serial
     */
    private int size;

    /**
     * The keys of this map. Contains key values directly.
     * Due to the use of a secondary hash function, the length of this
     * array must be a prime.
     */
    private transient double[] keys;

    /**
     * The values of this map. Contains values directly.
     * Due to the use of a secondary hash function, the length of this
     * array must be a prime.
     */
    private transient Object[] values;

    /** The state of each cell in keys[] and values[]. */
    private transient byte[] states;

    private static final byte EMPTY = 0;
    private static final byte OCCUPIED = 1;
    private static final byte REMOVED = 2;
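    // REMOVED marks a tombstone left behind by remove(): lookups must probe
    // past removed cells and stop only at an EMPTY cell, while put() may
    // reuse a removed cell for a new entry. Tombstones are discarded when
    // the table is rehashed in ensureCapacity().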

    /** The number of entries in use (removed or occupied). */
    private transient int used;

    /**
     * The growth policy of this map (0 is relative growth, 1 is absolute growth).
     * @serial
     */
    private int growthPolicy;

    /**
     * The growth factor of this map, if the growth policy is
     * relative.
     * @serial
     */
    private double growthFactor;

    /**
     * The growth chunk size of this map, if the growth policy is
     * absolute.
     * @serial
     */
    private int growthChunk;

    /**
     * The load factor of this map: the fraction of the capacity that may be
     * in use (occupied or removed) before the table is expanded.
     * @serial
     */
    private double loadFactor;

    /**
     * The number of used cells at which to expand the keys[], values[],
     * and states[] arrays.
     * @serial
     */
    private int expandAt;

    /** A set view of the keys of this map. */
    private transient DoubleSet ckeys;

    /** A collection view of the values of this map. */
    private transient Collection cvalues;

    private DoubleKeyOpenHashMap(DoubleHashFunction keyhash, int capacity,
            int growthPolicy, double growthFactor, int growthChunk,
            double loadFactor) {
        if (keyhash == null)
            Exceptions.nullArgument("hash function");
        if (capacity < 0)
            Exceptions.negativeArgument("capacity", String.valueOf(capacity));
        if (growthFactor <= 0.0)
            Exceptions.negativeOrZeroArgument("growthFactor",
                    String.valueOf(growthFactor));
        if (growthChunk <= 0)
            Exceptions.negativeOrZeroArgument("growthChunk",
                    String.valueOf(growthChunk));
        if (loadFactor <= 0.0)
            Exceptions.negativeOrZeroArgument("loadFactor",
                    String.valueOf(loadFactor));
        this.keyhash = keyhash;
        capacity = bak.pcj.hash.Primes.nextPrime(capacity);
        keys = new double[capacity];
        values = new Object[capacity];
        this.states = new byte[capacity];
        size = 0;
        expandAt = (int) Math.round(loadFactor * capacity);
        this.used = 0;
        this.growthPolicy = growthPolicy;
        this.growthFactor = growthFactor;
        this.growthChunk = growthChunk;
        this.loadFactor = loadFactor;
    }

    private DoubleKeyOpenHashMap(int capacity, int growthPolicy,
            double growthFactor, int growthChunk, double loadFactor) {
        this(DefaultDoubleHashFunction.INSTANCE, capacity, growthPolicy,
                growthFactor, growthChunk, loadFactor);
    }

    /**
     * Creates a new hash map with capacity 11, a relative
     * growth factor of 1.0, and a load factor of 75%.
     */
    public DoubleKeyOpenHashMap() {
        this(DEFAULT_CAPACITY);
    }

    /**
     * Creates a new hash map with the same mappings as a specified map.
     *
     * @param map
     *            the map whose mappings to put into the new map.
     *
     * @throws NullPointerException
     *             if <tt>map</tt> is <tt>null</tt>.
     */
    public DoubleKeyOpenHashMap(DoubleKeyMap map) {
        this();
        putAll(map);
    }

    /**
     * Creates a new hash map with a specified capacity, a relative
     * growth factor of 1.0, and a load factor of 75%.
     *
     * @param capacity
     *            the initial capacity of the map.
     *
     * @throws IllegalArgumentException
     *             if <tt>capacity</tt> is negative.
     */
    public DoubleKeyOpenHashMap(int capacity) {
        this(capacity, DEFAULT_GROWTH_POLICY, DEFAULT_GROWTH_FACTOR,
                DEFAULT_GROWTH_CHUNK, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a new hash map with a capacity of 11, a relative
     * growth factor of 1.0, and a specified load factor.
     *
     * @param loadFactor
     *            the load factor of the map.
     *
     * @throws IllegalArgumentException
     *             if <tt>loadFactor</tt> is not positive.
     */
    public DoubleKeyOpenHashMap(double loadFactor) {
        this(DEFAULT_CAPACITY, DEFAULT_GROWTH_POLICY,
                DEFAULT_GROWTH_FACTOR, DEFAULT_GROWTH_CHUNK, loadFactor);
    }

    /**
     * Creates a new hash map with a specified capacity and
     * load factor, and a relative growth factor of 1.0.
     *
     * @param capacity
     *            the initial capacity of the map.
     *
     * @param loadFactor
     *            the load factor of the map.
     *
     * @throws IllegalArgumentException
     *             if <tt>capacity</tt> is negative;
     *             if <tt>loadFactor</tt> is not positive.
     */
    public DoubleKeyOpenHashMap(int capacity, double loadFactor) {
        this(capacity, DEFAULT_GROWTH_POLICY, DEFAULT_GROWTH_FACTOR,
                DEFAULT_GROWTH_CHUNK, loadFactor);
    }

    /**
     * Creates a new hash map with a specified capacity,
     * load factor, and relative growth factor.
     *
     * <p>The map capacity increases to <tt>capacity()*(1+growthFactor)</tt>.
     * This strategy reduces the number of capacity increases, but the
     * amount of wasted memory is approximately the size of the map.
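     * For example, with the default growth factor of 1.0 the capacity
     * roughly doubles at each expansion; the new capacity is then adjusted
     * to a prime, as required by the double hashing scheme.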
     *
     * @param capacity
     *            the initial capacity of the map.
     *
     * @param loadFactor
     *            the load factor of the map.
     *
     * @param growthFactor
     *            the relative amount with which to increase the
     *            capacity when a capacity increase is needed.
     *
     * @throws IllegalArgumentException
     *             if <tt>capacity</tt> is negative;
     *             if <tt>loadFactor</tt> is not positive;
     *             if <tt>growthFactor</tt> is not positive.
     */
    public DoubleKeyOpenHashMap(int capacity, double loadFactor,
            double growthFactor) {
        this(capacity, GROWTH_POLICY_RELATIVE, growthFactor,
                DEFAULT_GROWTH_CHUNK, loadFactor);
    }

    /**
     * Creates a new hash map with a specified capacity,
     * load factor, and absolute growth factor.
     *
     * <p>The map capacity increases to <tt>capacity()+growthChunk</tt>.
     * This strategy conserves memory, but frequent capacity increases
     * can introduce a performance overhead.
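     * For example, with a growth chunk of 10 a full map grows by roughly
     * 10 cells at each expansion; the new capacity is then adjusted to a
     * prime, as required by the double hashing scheme.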
     *
     * @param capacity
     *            the initial capacity of the map.
     *
     * @param loadFactor
     *            the load factor of the map.
     *
     * @param growthChunk
     *            the absolute amount with which to increase the
     *            capacity when a capacity increase is needed.
     *
     * @throws IllegalArgumentException
     *             if <tt>capacity</tt> is negative;
     *             if <tt>loadFactor</tt> is not positive;
     *             if <tt>growthChunk</tt> is not positive.
     */
    public DoubleKeyOpenHashMap(int capacity, double loadFactor,
            int growthChunk) {
        this(capacity, GROWTH_POLICY_ABSOLUTE, DEFAULT_GROWTH_FACTOR,
                growthChunk, loadFactor);
    }

    // ---------------------------------------------------------------
    // Constructors with hash function argument
    // ---------------------------------------------------------------

    /**
     * Creates a new hash map with capacity 11, a relative
     * growth factor of 1.0, and a load factor of 75%.
     *
     * @param keyhash
     *            the hash function to use when hashing keys.
     *
     * @throws NullPointerException
     *             if <tt>keyhash</tt> is <tt>null</tt>.
     */
    public DoubleKeyOpenHashMap(DoubleHashFunction keyhash) {
        this(keyhash, DEFAULT_CAPACITY, DEFAULT_GROWTH_POLICY,
                DEFAULT_GROWTH_FACTOR, DEFAULT_GROWTH_CHUNK,
                DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a new hash map with a specified capacity, a relative
     * growth factor of 1.0, and a load factor of 75%.
     *
     * @param keyhash
     *            the hash function to use when hashing keys.
     *
     * @param capacity
     *            the initial capacity of the map.
     *
     * @throws IllegalArgumentException
     *             if <tt>capacity</tt> is negative.
     *
     * @throws NullPointerException
     *             if <tt>keyhash</tt> is <tt>null</tt>.
     */
    public DoubleKeyOpenHashMap(DoubleHashFunction keyhash, int capacity) {
        this(keyhash, capacity, DEFAULT_GROWTH_POLICY,
                DEFAULT_GROWTH_FACTOR, DEFAULT_GROWTH_CHUNK,
                DEFAULT_LOAD_FACTOR);
    }

    /**
     * Creates a new hash map with a capacity of 11, a relative
     * growth factor of 1.0, and a specified load factor.
     *
     * @param keyhash
     *            the hash function to use when hashing keys.
     *
     * @param loadFactor
     *            the load factor of the map.
     *
     * @throws IllegalArgumentException
     *             if <tt>loadFactor</tt> is not positive.
     *
     * @throws NullPointerException
     *             if <tt>keyhash</tt> is <tt>null</tt>.
     */
    public DoubleKeyOpenHashMap(DoubleHashFunction keyhash,
            double loadFactor) {
        this(keyhash, DEFAULT_CAPACITY, DEFAULT_GROWTH_POLICY,
                DEFAULT_GROWTH_FACTOR, DEFAULT_GROWTH_CHUNK, loadFactor);
    }

    /**
     * Creates a new hash map with a specified capacity and
     * load factor, and a relative growth factor of 1.0.
     *
     * @param keyhash
     *            the hash function to use when hashing keys.
     *
     * @param capacity
     *            the initial capacity of the map.
     *
     * @param loadFactor
     *            the load factor of the map.
     *
     * @throws IllegalArgumentException
     *             if <tt>capacity</tt> is negative;
     *             if <tt>loadFactor</tt> is not positive.
     *
     * @throws NullPointerException
     *             if <tt>keyhash</tt> is <tt>null</tt>.
     */
    public DoubleKeyOpenHashMap(DoubleHashFunction keyhash,
            int capacity, double loadFactor) {
        this(keyhash, capacity, DEFAULT_GROWTH_POLICY,
                DEFAULT_GROWTH_FACTOR, DEFAULT_GROWTH_CHUNK, loadFactor);
    }

    /**
     * Creates a new hash map with a specified capacity,
     * load factor, and relative growth factor.
     *
     * <p>The map capacity increases to <tt>capacity()*(1+growthFactor)</tt>.
     * This strategy reduces the number of capacity increases, but the
     * amount of wasted memory is approximately the size of the map.
     *
     * @param keyhash
     *            the hash function to use when hashing keys.
     *
     * @param capacity
     *            the initial capacity of the map.
     *
     * @param loadFactor
     *            the load factor of the map.
     *
     * @param growthFactor
     *            the relative amount with which to increase the
     *            capacity when a capacity increase is needed.
     *
     * @throws IllegalArgumentException
     *             if <tt>capacity</tt> is negative;
     *             if <tt>loadFactor</tt> is not positive;
     *             if <tt>growthFactor</tt> is not positive.
     *
     * @throws NullPointerException
     *             if <tt>keyhash</tt> is <tt>null</tt>.
     */
    public DoubleKeyOpenHashMap(DoubleHashFunction keyhash,
            int capacity, double loadFactor, double growthFactor) {
        this(keyhash, capacity, GROWTH_POLICY_RELATIVE, growthFactor,
                DEFAULT_GROWTH_CHUNK, loadFactor);
    }

    /**
     * Creates a new hash map with a specified capacity,
     * load factor, and absolute growth factor.
     *
     * <p>The map capacity increases to <tt>capacity()+growthChunk</tt>.
     * This strategy conserves memory, but frequent capacity increases
     * can introduce a performance overhead.
     *
     * @param keyhash
     *            the hash function to use when hashing keys.
     *
     * @param capacity
     *            the initial capacity of the map.
     *
     * @param loadFactor
     *            the load factor of the map.
     *
     * @param growthChunk
     *            the absolute amount with which to increase the
     *            capacity when a capacity increase is needed.
     *
     * @throws IllegalArgumentException
     *             if <tt>capacity</tt> is negative;
     *             if <tt>loadFactor</tt> is not positive;
     *             if <tt>growthChunk</tt> is not positive.
     *
     * @throws NullPointerException
     *             if <tt>keyhash</tt> is <tt>null</tt>.
     */
    public DoubleKeyOpenHashMap(DoubleHashFunction keyhash,
            int capacity, double loadFactor, int growthChunk) {
        this(keyhash, capacity, GROWTH_POLICY_ABSOLUTE,
                DEFAULT_GROWTH_FACTOR, growthChunk, loadFactor);
    }

    // ---------------------------------------------------------------
    // Hash table management
    // ---------------------------------------------------------------

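    /**
     * Expands and rehashes the table when the number of used cells
     * (occupied plus removed) reaches the expansion threshold. The new
     * capacity is chosen according to the growth policy, raised further if
     * needed to respect the load factor, and then adjusted to a prime so
     * that the secondary hash step <tt>1 + (hash % (capacity - 2))</tt>
     * visits every cell. Only occupied cells are re-inserted, so tombstones
     * left by removals are discarded here.
     */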
    private void ensureCapacity(int elements) {
        if (elements >= expandAt) {
            int newcapacity;
            if (growthPolicy == GROWTH_POLICY_RELATIVE)
                newcapacity = (int) (keys.length * (1.0 + growthFactor));
            else
                newcapacity = keys.length + growthChunk;
            if (newcapacity * loadFactor < elements)
                newcapacity = (int) Math.round((double) elements / loadFactor);
            newcapacity = bak.pcj.hash.Primes.nextPrime(newcapacity);
            expandAt = (int) Math.round(loadFactor * newcapacity);

            double[] newkeys = new double[newcapacity];
            Object[] newvalues = new Object[newcapacity];
            byte[] newstates = new byte[newcapacity];

            used = 0;
            // re-hash
            for (int i = 0; i < keys.length; i++) {
                if (states[i] == OCCUPIED) {
                    used++;
                    double k = keys[i];
                    Object v = values[i];
                    // first hash
                    int h = Math.abs(keyhash.hash(k));
                    int n = h % newcapacity;
                    if (newstates[n] == OCCUPIED) {
                        // second hash
                        int c = 1 + (h % (newcapacity - 2));
                        for (;;) {
                            n -= c;
                            if (n < 0)
                                n += newcapacity;
                            if (newstates[n] == EMPTY)
                                break;
                        }
                    }
                    newstates[n] = OCCUPIED;
                    newvalues[n] = v;
                    newkeys[n] = k;
                }
            }

            keys = newkeys;
            values = newvalues;
            states = newstates;
        }
    }

    // ---------------------------------------------------------------
    // Operations not supported by abstract implementation
    // ---------------------------------------------------------------

    public DoubleSet keySet() {
        if (ckeys == null)
            ckeys = new KeySet();
        return ckeys;
    }

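    // Insertion uses the same double hashing probe as the lookup methods
    // below: start at hash % capacity and step backwards by
    // 1 + (hash % (capacity - 2)), wrapping around, until the key or an
    // empty cell is found. The first removed cell seen along the way is
    // remembered and reused so that tombstones do not accumulate.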
    public Object put(double key, Object value) {
        // first hash
        int h = Math.abs(keyhash.hash(key));
        int i = h % keys.length;
        // First reusable (removed) cell seen while probing, if any
        int removedCell = -1;
        if (states[i] == REMOVED)
            removedCell = i;
        if (states[i] != EMPTY) {
            if (states[i] == OCCUPIED && keys[i] == key) {
                Object oldValue = values[i];
                values[i] = value;
                return oldValue;
            }
            // second hash
            int c = 1 + (h % (keys.length - 2));
            for (;;) {
                i -= c;
                if (i < 0)
                    i += keys.length;
                if (states[i] == EMPTY)
                    break;
                if (states[i] == OCCUPIED && keys[i] == key) {
                    Object oldValue = values[i];
                    values[i] = value;
                    return oldValue;
                }
                // Removed cells are re-used, but probing must continue past
                // them in case the key is stored further along the chain
                if (states[i] == REMOVED && removedCell == -1)
                    removedCell = i;
            }
        }

        if (removedCell != -1)
            i = removedCell;
        if (states[i] == EMPTY)
            used++;
        states[i] = OCCUPIED;
        keys[i] = key;
        values[i] = value;
        size++;
        ensureCapacity(used);
        return null;
    }

    public Collection values() {
        if (cvalues == null)
            cvalues = new ValueCollection();
        return cvalues;
    }

    /**
     * Returns a clone of this hash map.
     *
     * @return a clone of this hash map.
     *
     * @since 1.1
     */
    public Object clone() {
        try {
            DoubleKeyOpenHashMap c = (DoubleKeyOpenHashMap) super.clone();
            c.keys = new double[keys.length];
            System.arraycopy(keys, 0, c.keys, 0, keys.length);
            c.values = new Object[values.length];
            System.arraycopy(values, 0, c.values, 0, values.length);
            c.states = new byte[states.length];
            System.arraycopy(states, 0, c.states, 0, states.length);
            // The views should not refer to this map's views
            c.cvalues = null;
            c.ckeys = null;
            return c;
        } catch (CloneNotSupportedException e) {
            Exceptions.cloning();
            return null;
        }
    }

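    // The iterator returned below walks the backing arrays in index order and
    // skips cells that are not occupied. getKey() and getValue() are only
    // valid after next() has been called; remove() leaves a tombstone behind.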
    public DoubleKeyMapIterator entries() {
        return new DoubleKeyMapIterator() {
            int nextEntry = nextEntry(0);
            int lastEntry = -1;

            int nextEntry(int index) {
                while (index < keys.length && states[index] != OCCUPIED)
                    index++;
                return index;
            }

            public boolean hasNext() {
                return nextEntry < keys.length;
            }

            public void next() {
                if (!hasNext())
                    Exceptions.endOfIterator();
                lastEntry = nextEntry;
                nextEntry = nextEntry(nextEntry + 1);
            }

            public void remove() {
                if (lastEntry == -1)
                    Exceptions.noElementToRemove();
                states[lastEntry] = REMOVED;
                values[lastEntry] = null;  // GC
                size--;
                lastEntry = -1;
            }

            public double getKey() {
                if (lastEntry == -1)
                    Exceptions.noElementToGet();
                return keys[lastEntry];
            }

            public Object getValue() {
                if (lastEntry == -1)
                    Exceptions.noElementToGet();
                return values[lastEntry];
            }
        };
    }

    private class KeySet extends AbstractDoubleSet {

        public void clear() {
            DoubleKeyOpenHashMap.this.clear();
        }

        public boolean contains(double v) {
            return containsKey(v);
        }

        public DoubleIterator iterator() {
            return new DoubleIterator() {
                int nextEntry = nextEntry(0);
                int lastEntry = -1;

                int nextEntry(int index) {
                    while (index < keys.length && states[index] != OCCUPIED)
                        index++;
                    return index;
                }

                public boolean hasNext() {
                    return nextEntry < keys.length;
                }

                public double next() {
                    if (!hasNext())
                        Exceptions.endOfIterator();
                    lastEntry = nextEntry;
                    nextEntry = nextEntry(nextEntry + 1);
                    return keys[lastEntry];
                }

                public void remove() {
                    if (lastEntry == -1)
                        Exceptions.noElementToRemove();
                    states[lastEntry] = REMOVED;
                    values[lastEntry] = null;  // GC
                    size--;
                    lastEntry = -1;
                }
            };
        }

        public boolean remove(double v) {
            boolean result = containsKey(v);
            if (result)
                DoubleKeyOpenHashMap.this.remove(v);
            return result;
        }

        public int size() {
            return size;
        }

    }

    private class ValueCollection extends AbstractCollection {

        public void clear() {
            DoubleKeyOpenHashMap.this.clear();
        }

        public boolean contains(Object v) {
            return containsValue(v);
        }

        public Iterator iterator() {
            return new Iterator() {
                int nextEntry = nextEntry(0);
                int lastEntry = -1;

                int nextEntry(int index) {
                    while (index < keys.length && states[index] != OCCUPIED)
                        index++;
                    return index;
                }

                public boolean hasNext() {
                    return nextEntry < keys.length;
                }

                public Object next() {
                    if (!hasNext())
                        Exceptions.endOfIterator();
                    lastEntry = nextEntry;
                    nextEntry = nextEntry(nextEntry + 1);
                    return values[lastEntry];
                }

                public void remove() {
                    if (lastEntry == -1)
                        Exceptions.noElementToRemove();
                    states[lastEntry] = REMOVED;
                    values[lastEntry] = null;  // GC
                    size--;
                    lastEntry = -1;
                }
            };
        }

        public int size() {
            return size;
        }

    }

    // ---------------------------------------------------------------
    // Operations overridden for efficiency
    // ---------------------------------------------------------------

    public void clear() {
        java.util.Arrays.fill(states, EMPTY);
        java.util.Arrays.fill(values, null);  // GC
        size = 0;
        used = 0;
    }

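    // containsKey(), get() and remove() share the probe sequence used by
    // put(): start at hash % capacity, step backwards by
    // 1 + (hash % (capacity - 2)) with wrap-around, skip removed cells, and
    // stop at the first empty cell, which means the key is absent.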
    public boolean containsKey(double key) {
        int h = Math.abs(keyhash.hash(key));
        int i = h % keys.length;
        if (states[i] != EMPTY) {
            if (states[i] == OCCUPIED && keys[i] == key)
                return true;
            // second hash
            int c = 1 + (h % (keys.length - 2));
            for (;;) {
                i -= c;
                if (i < 0)
                    i += keys.length;
                if (states[i] == EMPTY)
                    return false;
                if (states[i] == OCCUPIED && keys[i] == key)
                    return true;
            }
        }
        return false;
    }

    public boolean containsValue(Object value) {
        if (value == null) {
            for (int i = 0; i < states.length; i++)
                if (states[i] == OCCUPIED && values[i] == null)
                    return true;
        } else {
            for (int i = 0; i < states.length; i++)
                if (states[i] == OCCUPIED && value.equals(values[i]))
                    return true;
        }
        return false;
    }

    public Object get(double key) {
        int h = Math.abs(keyhash.hash(key));
        int i = h % keys.length;
        if (states[i] != EMPTY) {
            if (states[i] == OCCUPIED && keys[i] == key)
                return values[i];
            // second hash
            int c = 1 + (h % (keys.length - 2));
            for (;;) {
                i -= c;
                if (i < 0)
                    i += keys.length;
                if (states[i] == EMPTY)
                    return null;
                if (states[i] == OCCUPIED && keys[i] == key)
                    return values[i];
            }
        }
        return null;
    }

    public boolean isEmpty() {
        return size == 0;
    }

    public Object remove(double key) {
        int h = Math.abs(keyhash.hash(key));
        int i = h % keys.length;
        if (states[i] != EMPTY) {
            if (states[i] == OCCUPIED && keys[i] == key) {
                Object oldValue = values[i];
                values[i] = null;  // GC
                states[i] = REMOVED;
                size--;
                return oldValue;
            }
            // second hash
            int c = 1 + (h % (keys.length - 2));
            for (;;) {
                i -= c;
                if (i < 0)
                    i += keys.length;
                if (states[i] == EMPTY)
                    return null;
                if (states[i] == OCCUPIED && keys[i] == key) {
                    Object oldValue = values[i];
                    values[i] = null;  // GC
                    states[i] = REMOVED;
                    size--;
                    return oldValue;
                }
            }
        }
        return null;
    }

    public int size() {
        return size;
    }

    // ---------------------------------------------------------------
    // Serialization
    // ---------------------------------------------------------------

    /**
     * @serialData Default fields; the capacity of the
     *             map (<tt>int</tt>); the map's entries
     *             (<tt>double</tt>, <tt>Object</tt>).
     *
     * @since 1.1
     */
    private void writeObject(ObjectOutputStream s) throws IOException {
        s.defaultWriteObject();
        s.writeInt(keys.length);
        DoubleKeyMapIterator i = entries();
        while (i.hasNext()) {
            i.next();
            s.writeDouble(i.getKey());
            s.writeObject(i.getValue());
        }
    }

    /**
     * @since 1.1
     */
    private void readObject(ObjectInputStream s) throws IOException,
            ClassNotFoundException {
        s.defaultReadObject();
        keys = new double[s.readInt()];
        states = new byte[keys.length];
        values = new Object[keys.length];
        used = size;

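        // Re-insert each entry with the same double hashing probe as put().
        // The keys read here are distinct, so the probe only has to find an
        // empty cell; no key comparisons or tombstone handling are needed.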
        for (int n = 0; n < size; n++) {
            double key = s.readDouble();
            Object value = s.readObject();

            // first hash
            int h = Math.abs(keyhash.hash(key));
            int i = h % keys.length;
            if (states[i] != EMPTY) {
                // second hash
                int c = 1 + (h % (keys.length - 2));
                for (;;) {
                    i -= c;
                    if (i < 0)
                        i += keys.length;
                    if (states[i] == EMPTY)
                        break;
                }
            }
            states[i] = OCCUPIED;
            keys[i] = key;
            values[i] = value;
        }
    }

}