Source Code Cross Referenced for DoubleChainedHashSet.java (PCJ, package bak.pcj.set)


001:        /*
002:         *  Primitive Collections for Java.
003:         *  Copyright (C) 2002  Søren Bak
004:         *
005:         *  This library is free software; you can redistribute it and/or
006:         *  modify it under the terms of the GNU Lesser General Public
007:         *  License as published by the Free Software Foundation; either
008:         *  version 2.1 of the License, or (at your option) any later version.
009:         *
010:         *  This library is distributed in the hope that it will be useful,
011:         *  but WITHOUT ANY WARRANTY; without even the implied warranty of
012:         *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
013:         *  Lesser General Public License for more details.
014:         *
015:         *  You should have received a copy of the GNU Lesser General Public
016:         *  License along with this library; if not, write to the Free Software
017:         *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
018:         */
019:        package bak.pcj.set;
020:
021:        import bak.pcj.DoubleCollection;
022:        import bak.pcj.DoubleIterator;
023:        import bak.pcj.hash.DoubleHashFunction;
024:        import bak.pcj.hash.DefaultDoubleHashFunction;
025:        import bak.pcj.util.Exceptions;
026:
027:        import java.io.Serializable;
028:        import java.io.IOException;
029:        import java.io.ObjectInputStream;
030:        import java.io.ObjectOutputStream;
031:
032:        /**
033:         *  This class represents chained hash table based sets of double values.
034:         *  Unlike the Java Collections <tt>HashSet</tt>, instances of this class
035:         *  are not backed by a map. The set is implemented as a simple chained
036:         *  hash table in which the keys are stored directly as entries.
037:         *
038:         *  @see        DoubleOpenHashSet
039:         *  @see        java.util.HashSet
040:         *
041:         *  @author     S&oslash;ren Bak
042:         *  @version    1.4     21-08-2003 20:05
043:         *  @since      1.0
044:         */
045:        public class DoubleChainedHashSet extends AbstractDoubleSet implements 
046:                DoubleSet, Cloneable, Serializable {
047:
048:            /** Constant indicating relative growth policy. */
049:            private static final int GROWTH_POLICY_RELATIVE = 0;
050:
051:            /** Constant indicating absolute growth policy. */
052:            private static final int GROWTH_POLICY_ABSOLUTE = 1;
053:
054:            /**
055:             *  The default growth policy of this set.
056:             *  @see    #GROWTH_POLICY_RELATIVE
057:             *  @see    #GROWTH_POLICY_ABSOLUTE
058:             */
059:            private static final int DEFAULT_GROWTH_POLICY = GROWTH_POLICY_RELATIVE;
060:
061:            /** The default factor with which to increase the capacity of this set. */
062:            public static final double DEFAULT_GROWTH_FACTOR = 1.0;
063:
064:            /** The default chunk size with which to increase the capacity of this set. */
065:            public static final int DEFAULT_GROWTH_CHUNK = 10;
066:
067:            /** The default capacity of this set. */
068:            public static final int DEFAULT_CAPACITY = 11;
069:
070:            /** The default load factor of this set. */
071:            public static final double DEFAULT_LOAD_FACTOR = 0.75;
072:
073:            /** 
074:             *  The hash function used to hash keys in this set.
075:             *  @serial
076:             */
077:            private DoubleHashFunction keyhash;
078:
079:            /** 
080:             *  The size of this set.
081:             *  @serial
082:             */
083:            private int size;
084:
085:            /** The hash table backing this set. Contains the set's values directly. */
086:            private transient double[][] data;
087:
088:            /**
089:             *  The growth policy of this set (0 is relative growth, 1 is absolute growth).
090:             *  @serial
091:             */
092:            private int growthPolicy;
093:
094:            /**
095:             *  The growth factor of this set, if the growth policy is
096:             *  relative.
097:             *  @serial
098:             */
099:            private double growthFactor;
100:
101:            /**
102:             *  The growth chunk size of this set, if the growth policy is
103:             *  absolute.
104:             *  @serial
105:             */
106:            private int growthChunk;
107:
108:            /**
109:             *  The load factor of this set. 
110:             *  @serial
111:             */
112:            private double loadFactor;
113:
114:            /**
115:             *  The next size at which to expand the data[].
116:             *  @serial
117:             */
118:            private int expandAt;
119:
120:            private DoubleChainedHashSet(DoubleHashFunction keyhash,
121:                    int capacity, int growthPolicy, double growthFactor,
122:                    int growthChunk, double loadFactor) {
123:                if (keyhash == null)
124:                    Exceptions.nullArgument("hash function");
125:                if (capacity < 0)
126:                    Exceptions.negativeArgument("capacity", String
127:                            .valueOf(capacity));
128:                if (growthFactor < 0.0)
129:                    Exceptions.negativeArgument("growthFactor", String
130:                            .valueOf(growthFactor));
131:                if (growthChunk < 0)
132:                    Exceptions.negativeArgument("growthChunk", String
133:                            .valueOf(growthChunk));
134:                if (loadFactor <= 0.0)
135:                    Exceptions.negativeOrZeroArgument("loadFactor", String
136:                            .valueOf(loadFactor));
137:                data = new double[capacity][];
138:                size = 0;
139:                expandAt = (int) Math.round(loadFactor * capacity);
140:                this.growthPolicy = growthPolicy;
141:                this.growthFactor = growthFactor;
142:                this.growthChunk = growthChunk;
143:                this.loadFactor = loadFactor;
144:                this.keyhash = keyhash;
145:            }
146:
147:            private DoubleChainedHashSet(int capacity, int growthPolicy,
148:                    double growthFactor, int growthChunk, double loadFactor) {
149:                this(DefaultDoubleHashFunction.INSTANCE, capacity,
150:                        growthPolicy, growthFactor, growthChunk, loadFactor);
151:            }
152:
153:            /**
154:             *  Creates a new hash set with capacity 11, a relative
155:             *  growth factor of 1.0, and a load factor of 75%.
156:             */
157:            public DoubleChainedHashSet() {
158:                this(DEFAULT_CAPACITY);
159:            }
160:
161:            /**
162:             *  Creates a new hash set with the same elements as a specified
163:             *  collection.
164:             *
165:             *  @param      c
166:             *              the collection whose elements to add to the new
167:             *              set.
168:             *
169:             *  @throws     NullPointerException
170:             *              if <tt>c</tt> is <tt>null</tt>.
171:             */
172:            public DoubleChainedHashSet(DoubleCollection c) {
173:                this();
174:                addAll(c);
175:            }
176:
177:            /**
178:             *  Creates a new hash set with the same elements as the specified
179:             *  array.
180:             *
181:             *  @param      a
182:             *              the array whose elements to add to the new
183:             *              set.
184:             *
185:             *  @throws     NullPointerException
186:             *              if <tt>a</tt> is <tt>null</tt>.
187:             *
188:             *  @since      1.1
189:             */
190:            public DoubleChainedHashSet(double[] a) {
191:                this();
192:                for (int i = 0; i < a.length; i++)
193:                    add(a[i]);
194:            }
195:
196:            /**
197:             *  Creates a new hash set with a specified capacity, a relative
198:             *  growth factor of 1.0, and a load factor of 75%.
199:             *
200:             *  @param      capacity
201:             *              the initial capacity of the set.
202:             *
203:             *  @throws     IllegalArgumentException
204:             *              if <tt>capacity</tt> is negative.
205:             */
206:            public DoubleChainedHashSet(int capacity) {
207:                this(capacity, DEFAULT_GROWTH_POLICY, DEFAULT_GROWTH_FACTOR,
208:                        DEFAULT_GROWTH_CHUNK, DEFAULT_LOAD_FACTOR);
209:            }
210:
211:            /**
212:             *  Creates a new hash set with a capacity of 11, a relative
213:             *  growth factor of 1.0, and a specified load factor.
214:             *
215:             *  @param      loadFactor
216:             *              the load factor of the set.
217:             *
218:             *  @throws     IllegalArgumentException
219:             *              if <tt>loadFactor</tt> is not positive.
220:             */
221:            public DoubleChainedHashSet(double loadFactor) {
222:                this(DEFAULT_CAPACITY, DEFAULT_GROWTH_POLICY,
223:                        DEFAULT_GROWTH_FACTOR, DEFAULT_GROWTH_CHUNK, loadFactor);
224:            }
225:
226:            /**
227:             *  Creates a new hash set with a specified capacity and
228:             *  load factor, and a relative growth factor of 1.0.
229:             *
230:             *  @param      capacity
231:             *              the initial capacity of the set.
232:             *
233:             *  @param      loadFactor
234:             *              the load factor of the set.
235:             *
236:             *  @throws     IllegalArgumentException
237:             *              if <tt>capacity</tt> is negative;
238:             *              if <tt>loadFactor</tt> is not positive.
239:             */
240:            public DoubleChainedHashSet(int capacity, double loadFactor) {
241:                this(capacity, DEFAULT_GROWTH_POLICY, DEFAULT_GROWTH_FACTOR,
242:                        DEFAULT_GROWTH_CHUNK, loadFactor);
243:            }
244:
245:            /**
246:             *  Creates a new hash set with a specified capacity,
247:             *  load factor, and relative growth factor.
248:             *
249:             *  <p>The set capacity increases to <tt>capacity()*(1+growthFactor)</tt>.
250:             *  This strategy is good for avoiding many capacity increases, but
251:             *  the amount of wasted memory is approximately the size of the set.
252:             *
253:             *  @param      capacity
254:             *              the initial capacity of the set.
255:             *
256:             *  @param      loadFactor
257:             *              the load factor of the set.
258:             *
259:             *  @param      growthFactor
260:             *              the relative amount with which to increase the
261:             *              capacity when a capacity increase is needed.
262:             *
263:             *  @throws     IllegalArgumentException
264:             *              if <tt>capacity</tt> is negative;
265:             *              if <tt>loadFactor</tt> is not positive;
266:             *              if <tt>growthFactor</tt> is not positive.
267:             */
268:            public DoubleChainedHashSet(int capacity, double loadFactor,
269:                    double growthFactor) {
270:                this(capacity, GROWTH_POLICY_RELATIVE, growthFactor,
271:                        DEFAULT_GROWTH_CHUNK, loadFactor);
272:            }
273:
274:            /**
275:             *  Creates a new hash set with a specified capacity,
276:             *  load factor, and absolute growth factor.
277:             *
278:             *  <p>The set capacity increases to <tt>capacity()+growthChunk</tt>.
279:             *  This strategy is good for avoiding wasting memory. However, an
280:             *  overhead is potentially introduced by frequent capacity increases.
281:             *
282:             *  @param      capacity
283:             *              the initial capacity of the set.
284:             *
285:             *  @param      loadFactor
286:             *              the load factor of the set.
287:             *
288:             *  @param      growthChunk
289:             *              the absolute amount with which to increase the
290:             *              capacity when a capacity increase is needed.
291:             *
292:             *  @throws     IllegalArgumentException
293:             *              if <tt>capacity</tt> is negative;
294:             *              if <tt>loadFactor</tt> is not positive;
295:             *              if <tt>growthChunk</tt> is not positive.
296:             */
297:            public DoubleChainedHashSet(int capacity, double loadFactor,
298:                    int growthChunk) {
299:                this(capacity, GROWTH_POLICY_ABSOLUTE, DEFAULT_GROWTH_FACTOR,
300:                        growthChunk, loadFactor);
301:            }
302:
303:            // ---------------------------------------------------------------
304:            //      Constructors with hash function argument
305:            // ---------------------------------------------------------------
306:
307:            /**
308:             *  Creates a new hash set with capacity 11, a relative
309:             *  growth factor of 1.0, and a load factor of 75%.
310:             *
311:             *  @param      keyhash
312:             *              the hash function to use when hashing keys.
313:             *
314:             *  @throws     NullPointerException
315:             *              if <tt>keyhash</tt> is <tt>null</tt>.
316:             */
317:            public DoubleChainedHashSet(DoubleHashFunction keyhash) {
318:                this(keyhash, DEFAULT_CAPACITY, DEFAULT_GROWTH_POLICY,
319:                        DEFAULT_GROWTH_FACTOR, DEFAULT_GROWTH_CHUNK,
320:                        DEFAULT_LOAD_FACTOR);
321:            }
322:
323:            /**
324:             *  Creates a new hash set with a specified capacity, a relative
325:             *  growth factor of 1.0, and a load factor of 75%.
326:             *
327:             *  @param      keyhash
328:             *              the hash function to use when hashing keys.
329:             *
330:             *  @param      capacity
331:             *              the initial capacity of the set.
332:             *
333:             *  @throws     IllegalArgumentException
334:             *              if <tt>capacity</tt> is negative.
335:             *
336:             *  @throws     NullPointerException
337:             *              if <tt>keyhash</tt> is <tt>null</tt>.
338:             */
339:            public DoubleChainedHashSet(DoubleHashFunction keyhash, int capacity) {
340:                this(keyhash, capacity, DEFAULT_GROWTH_POLICY,
341:                        DEFAULT_GROWTH_FACTOR, DEFAULT_GROWTH_CHUNK,
342:                        DEFAULT_LOAD_FACTOR);
343:            }
344:
345:            /**
346:             *  Creates a new hash set with a capacity of 11, a relative
347:             *  growth factor of 1.0, and a specified load factor.
348:             *
349:             *  @param      keyhash
350:             *              the hash function to use when hashing keys.
351:             *
352:             *  @param      loadFactor
353:             *              the load factor of the set.
354:             *
355:             *  @throws     IllegalArgumentException
356:             *              if <tt>loadFactor</tt> is not positive.
357:             *
358:             *  @throws     NullPointerException
359:             *              if <tt>keyhash</tt> is <tt>null</tt>.
360:             */
361:            public DoubleChainedHashSet(DoubleHashFunction keyhash,
362:                    double loadFactor) {
363:                this(keyhash, DEFAULT_CAPACITY, DEFAULT_GROWTH_POLICY,
364:                        DEFAULT_GROWTH_FACTOR, DEFAULT_GROWTH_CHUNK, loadFactor);
365:            }
366:
367:            /**
368:             *  Creates a new hash set with a specified capacity and
369:             *  load factor, and a relative growth factor of 1.0.
370:             *
371:             *  @param      keyhash
372:             *              the hash function to use when hashing keys.
373:             *
374:             *  @param      capacity
375:             *              the initial capacity of the set.
376:             *
377:             *  @param      loadFactor
378:             *              the load factor of the set.
379:             *
380:             *  @throws     IllegalArgumentException
381:             *              if <tt>capacity</tt> is negative;
382:             *              if <tt>loadFactor</tt> is not positive.
383:             *
384:             *  @throws     NullPointerException
385:             *              if <tt>keyhash</tt> is <tt>null</tt>.
386:             */
387:            public DoubleChainedHashSet(DoubleHashFunction keyhash,
388:                    int capacity, double loadFactor) {
389:                this(keyhash, capacity, DEFAULT_GROWTH_POLICY,
390:                        DEFAULT_GROWTH_FACTOR, DEFAULT_GROWTH_CHUNK, loadFactor);
391:            }
392:
393:            /**
394:             *  Creates a new hash set with a specified capacity,
395:             *  load factor, and relative growth factor.
396:             *
397:             *  <p>The set capacity increases to <tt>capacity()*(1+growthFactor)</tt>.
398:             *  This strategy is good for avoiding many capacity increases, but
399:             *  the amount of wasted memory is approximately the size of the set.
400:             *
401:             *  @param      keyhash
402:             *              the hash function to use when hashing keys.
403:             *
404:             *  @param      capacity
405:             *              the initial capacity of the set.
406:             *
407:             *  @param      loadFactor
408:             *              the load factor of the set.
409:             *
410:             *  @param      growthFactor
411:             *              the relative amount with which to increase the
412:             *              capacity when a capacity increase is needed.
413:             *
414:             *  @throws     IllegalArgumentException
415:             *              if <tt>capacity</tt> is negative;
416:             *              if <tt>loadFactor</tt> is not positive;
417:             *              if <tt>growthFactor</tt> is not positive.
418:             *
419:             *  @throws     NullPointerException
420:             *              if <tt>keyhash</tt> is <tt>null</tt>.
421:             */
422:            public DoubleChainedHashSet(DoubleHashFunction keyhash,
423:                    int capacity, double loadFactor, double growthFactor) {
424:                this(keyhash, capacity, GROWTH_POLICY_RELATIVE, growthFactor,
425:                        DEFAULT_GROWTH_CHUNK, loadFactor);
426:            }
427:
428:            /**
429:             *  Creates a new hash set with a specified capacity,
430:             *  load factor, and absolute growth factor.
431:             *
432:             *  <p>The set capacity increases to <tt>capacity()+growthChunk</tt>.
433:             *  This strategy is good for avoiding wasting memory. However, an
434:             *  overhead is potentially introduced by frequent capacity increases.
435:             *
436:             *  @param      keyhash
437:             *              the hash function to use when hashing keys.
438:             *
439:             *  @param      capacity
440:             *              the initial capacity of the set.
441:             *
442:             *  @param      loadFactor
443:             *              the load factor of the set.
444:             *
445:             *  @param      growthChunk
446:             *              the absolute amount with which to increase the
447:             *              capacity when a capacity increase is needed.
448:             *
449:             *  @throws     IllegalArgumentException
450:             *              if <tt>capacity</tt> is negative;
451:             *              if <tt>loadFactor</tt> is not positive;
452:             *              if <tt>growthChunk</tt> is not positive.
453:             *
454:             *  @throws     NullPointerException
455:             *              if <tt>keyhash</tt> is <tt>null</tt>.
456:             */
457:            public DoubleChainedHashSet(DoubleHashFunction keyhash,
458:                    int capacity, double loadFactor, int growthChunk) {
459:                this(keyhash, capacity, GROWTH_POLICY_ABSOLUTE,
460:                        DEFAULT_GROWTH_FACTOR, growthChunk, loadFactor);
461:            }
462:
463:            // ---------------------------------------------------------------
464:            //      Hash table management
465:            // ---------------------------------------------------------------
466:
467:            private void ensureCapacity(int elements) {
468:                if (elements >= expandAt) {
469:                    int newcapacity;
470:                    if (growthPolicy == GROWTH_POLICY_RELATIVE)
471:                        newcapacity = (int) (data.length * (1.0 + growthFactor));
472:                    else
473:                        newcapacity = data.length + growthChunk;
474:                    if (newcapacity * loadFactor < elements)
475:                        newcapacity = (int) Math
476:                                .round(((double) elements / loadFactor));
477:                    newcapacity = bak.pcj.hash.Primes.nextPrime(newcapacity);
478:                    expandAt = (int) Math.round(loadFactor * newcapacity);
479:
480:                    double[][] newdata = new double[newcapacity][];
481:
482:                    //  re-hash
483:                    for (int i = 0; i < data.length; i++) {
484:                        double[] list = data[i];
485:                        if (list != null) {
486:                            for (int n = 0; n < list.length; n++) {
487:                                double v = list[n];
488:                                int index = Math.abs(keyhash.hash(v))
489:                                        % newdata.length;
490:                                newdata[index] = addList(newdata[index], v);
491:                            }
492:                        }
493:                    }
494:
495:                    data = newdata;
496:                }
497:            }
498:
499:            private double[] addList(double[] list, double v) {
500:                if (list == null)
501:                    return new double[] { v };
502:                double[] newlist = new double[list.length + 1];
503:                for (int i = 0; i < list.length; i++)
504:                    newlist[i] = list[i];
505:                newlist[list.length] = v;
506:                return newlist;
507:            }
508:
509:            private double[] removeList(double[] list, int index) {
510:                if (list.length == 1)
511:                    return null;
512:                double[] newlist = new double[list.length - 1];
513:                int n = 0;
514:                for (int i = 0; i < index; i++)
515:                    newlist[n++] = list[i];
516:                for (int i = index + 1; i < list.length; i++)
517:                    newlist[n++] = list[i];
518:                return newlist;
519:            }
520:
521:            private int searchList(double[] list, double v) {
522:                for (int i = 0; i < list.length; i++)
523:                    if (list[i] == v)
524:                        return i;
525:                return -1;
526:            }
527:
528:            // ---------------------------------------------------------------
529:            //      Operations not supported by abstract implementation
530:            // ---------------------------------------------------------------
531:
532:            public boolean add(double v) {
533:                ensureCapacity(size + 1);
534:
535:                int index = Math.abs(keyhash.hash(v)) % data.length;
536:                double[] list = data[index];
537:                if (list == null) {
538:                    data[index] = new double[] { v };
539:                    size++;
540:                    return true;
541:                }
542:                for (int i = 0; i < list.length; i++)
543:                    if (list[i] == v)
544:                        return false;
545:                data[index] = addList(data[index], v);
546:                size++;
547:                return true;
548:            }
549:
550:            public DoubleIterator iterator() {
551:                return new DoubleIterator() {
552:                    int currList = nextList(0);
553:                    int currDouble = 0;
554:                    int lastList = -1;
555:                    int lastDouble;
556:                    double lastValue;
557:
558:                    int nextList(int index) {
559:                        while (index < data.length && data[index] == null)
560:                            index++;
561:                        return index < data.length ? index : -1;
562:                    }
563:
564:                    public boolean hasNext() {
565:                        return currList != -1;
566:                    }
567:
568:                    public double next() {
569:                        if (currList == -1)
570:                            Exceptions.endOfIterator();
571:                        lastList = currList;
572:                        lastDouble = currDouble;
573:                        lastValue = data[currList][currDouble];
574:                        if (currDouble == data[currList].length - 1) {
575:                            currList = nextList(currList + 1);
576:                            currDouble = 0;
577:                        } else {
578:                            currDouble++;
579:                        }
580:                        return lastValue;
581:                    }
582:
583:                    public void remove() {
584:                        if (lastList == -1)
585:                            Exceptions.noElementToRemove();
586:                        if (currList == lastList)
587:                            currDouble--;
588:                        data[lastList] = removeList(data[lastList], lastDouble);
589:                        size--;
590:                        lastList = -1;
591:                    }
592:                };
593:            }
594:
595:            public void trimToSize() {
596:            }
597:
598:            /**
599:             *  Returns a clone of this hash set.
600:             *
601:             *  @return     a clone of this hash set.
602:             *
603:             *  @since      1.1
604:             */
605:            public Object clone() {
606:                try {
607:                    DoubleChainedHashSet c = (DoubleChainedHashSet) super 
608:                            .clone();
609:                    c.data = new double[data.length][];
610:                    // Copying the chain arrays is not necessary: they are never modified in place (add/remove always replace them), so sharing them is safe.
611:                    System.arraycopy(data, 0, c.data, 0, data.length);
612:                    return c;
613:                } catch (CloneNotSupportedException e) {
614:                    Exceptions.cloning();
615:                    throw new RuntimeException();
616:                }
617:            }
618:
619:            // ---------------------------------------------------------------
620:            //      Operations overwritten for efficiency
621:            // ---------------------------------------------------------------
622:
623:            public int size() {
624:                return size;
625:            }
626:
627:            public void clear() {
628:                java.util.Arrays.fill(data, null); // also drop the chains; resetting only the size would leave the old elements visible to contains() and iterator()
629:                size = 0;
630:            }
631:            public boolean contains(double v) {
632:                double[] list = data[Math.abs(keyhash.hash(v)) % data.length];
633:                if (list == null)
634:                    return false;
635:                return searchList(list, v) != -1;
636:            }
637:
638:            public int hashCode() {
639:                int h = 0;
640:                for (int i = 0; i < data.length; i++) {
641:                    double[] list = data[i];
642:                    if (list != null) {
643:                        for (int n = 0; n < list.length; n++)
644:                            h += list[n];
645:                    }
646:                }
647:                return h;
648:            }
649:
650:            public boolean remove(double v) {
651:                int index = Math.abs(keyhash.hash(v)) % data.length;
652:                double[] list = data[index];
653:                if (list != null) {
654:                    int lindex = searchList(list, v);
655:                    if (lindex == -1)
656:                        return false;
657:                    data[index] = removeList(list, lindex);
658:                    size--;
659:                    return true;
660:                }
661:                return false;
662:            }
663:
664:            public double[] toArray(double[] a) {
665:                if (a == null || a.length < size)
666:                    a = new double[size];
667:
668:                int p = 0;
669:                for (int i = 0; i < data.length; i++) {
670:                    double[] list = data[i];
671:                    if (list != null) {
672:                        for (int n = 0; n < list.length; n++)
673:                            a[p++] = list[n];
674:                    }
675:                }
676:                return a;
677:            }
678:
679:            // ---------------------------------------------------------------
680:            //      Serialization
681:            // ---------------------------------------------------------------
682:
683:            /**
684:             *  @serialData     Default fields; the capacity of the
685:             *                  set (<tt>int</tt>); the set's elements
686:             *                  (<tt>double</tt>).
687:             *
688:             *  @since          1.1
689:             */
690:            private void writeObject(ObjectOutputStream s) throws IOException {
691:                s.defaultWriteObject();
692:                s.writeInt(data.length);
693:                DoubleIterator i = iterator();
694:                while (i.hasNext()) {
695:                    double x = i.next();
696:                    s.writeDouble(x);
697:                }
698:            }
699:
700:            /**
701:             *  @since          1.1
702:             */
703:            private void readObject(ObjectInputStream s) throws IOException,
704:                    ClassNotFoundException {
705:                s.defaultReadObject();
706:                data = new double[s.readInt()][];
707:                for (int i = 0; i < size; i++) {
708:                    double v = s.readDouble();
709:                    int index = Math.abs(keyhash.hash(v)) % data.length;
710:                    double[] list = data[index];
711:                    if (list == null)
712:                        data[index] = new double[] { v };
713:                    else
714:                        data[index] = addList(data[index], v);
715:                }
716:            }
717:
718:        }
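
The class javadoc near the top of the listing describes a chained hash table that stores double keys directly, with no boxing and no backing map. A minimal usage sketch of the public API shown in the listing (the values are arbitrary, and the demo class name is made up for illustration):

    import bak.pcj.DoubleIterator;
    import bak.pcj.set.DoubleChainedHashSet;

    public class DoubleChainedHashSetUsage {
        public static void main(String[] args) {
            DoubleChainedHashSet set = new DoubleChainedHashSet();

            set.add(1.5);
            set.add(2.5);
            boolean addedAgain = set.add(2.5);       // false: duplicates are rejected

            System.out.println(set.size());          // 2
            System.out.println(set.contains(1.5));   // true
            System.out.println(addedAgain);          // false

            // Iteration order is unspecified; it simply follows the hash chains.
            for (DoubleIterator i = set.iterator(); i.hasNext();)
                System.out.println(i.next());

            set.remove(1.5);
            System.out.println(set.contains(1.5));   // false
        }
    }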
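
The constructors that take a growthFactor select the relative growth policy (capacity grows to capacity()*(1+growthFactor)), while the ones that take a growthChunk select the absolute policy (capacity grows to capacity()+growthChunk). The sketch below mirrors the arithmetic in ensureCapacity() for the default values; it omits the rounding up to the next prime that the real code performs via bak.pcj.hash.Primes.nextPrime, and the demo class is illustrative only:

    import bak.pcj.set.DoubleChainedHashSet;

    public class GrowthPolicySketch {
        public static void main(String[] args) {
            double loadFactor = 0.75;
            int capacity = 11;

            // Relative policy: new capacity = old capacity * (1 + growthFactor).
            double growthFactor = 1.0;
            int relativeCapacity = (int) (capacity * (1.0 + growthFactor));          // 22
            int relativeExpandAt = (int) Math.round(loadFactor * relativeCapacity);  // 17 elements trigger the next expansion

            // Absolute policy: new capacity = old capacity + growthChunk.
            int growthChunk = 10;
            int absoluteCapacity = capacity + growthChunk;                           // 21
            int absoluteExpandAt = (int) Math.round(loadFactor * absoluteCapacity);  // 16

            System.out.println(relativeCapacity + " / " + relativeExpandAt);
            System.out.println(absoluteCapacity + " / " + absoluteExpandAt);

            // The listing's constructors choose the policy by the type of the last argument:
            DoubleChainedHashSet relative = new DoubleChainedHashSet(11, 0.75, 1.0); // double -> relative growth
            DoubleChainedHashSet absolute = new DoubleChainedHashSet(11, 0.75, 10);  // int -> absolute growth
            System.out.println(relative.size() + " " + absolute.size());             // 0 0
        }
    }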
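
The constructors in the "Constructors with hash function argument" section take a bak.pcj.hash.DoubleHashFunction. Judging from how it is used in this listing (keyhash.hash(v) yielding an int that is reduced modulo the table length), the interface exposes a single int hash(double) method; the sketch below assumes that signature, and the truncating strategy is a made-up example rather than anything the library recommends:

    import bak.pcj.hash.DoubleHashFunction;
    import bak.pcj.set.DoubleChainedHashSet;

    public class CustomHashFunctionSketch {
        public static void main(String[] args) {
            // Assumed interface: int hash(double v), as used by keyhash.hash(v) in the listing.
            DoubleHashFunction truncatingHash = new DoubleHashFunction() {
                public int hash(double v) {
                    // Hypothetical strategy: bucket by integer part, so values with the same
                    // whole-number magnitude land in the same chain.
                    return (int) v;
                }
            };

            DoubleChainedHashSet set = new DoubleChainedHashSet(truncatingHash);
            set.add(1.25);
            set.add(1.75);                            // collides with 1.25 under this hash; chaining keeps both
            System.out.println(set.contains(1.25));   // true
            System.out.println(set.size());           // 2
        }
    }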
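
The writeObject/readObject pair at the bottom of the listing gives the set a compact serialized form: the default (non-transient) fields, then the table capacity, then the elements, with the transient hash table rebuilt on deserialization. A round-trip sketch using only java.io (the demo class name is illustrative, and equality is assumed to be the usual element-wise set equality from the PCJ set hierarchy):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;

    import bak.pcj.set.DoubleChainedHashSet;

    public class SerializationRoundTrip {
        public static void main(String[] args) throws Exception {
            DoubleChainedHashSet original = new DoubleChainedHashSet(new double[] { 0.5, 1.5, 2.5 });

            // Serialize to an in-memory byte array.
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            ObjectOutputStream out = new ObjectOutputStream(bytes);
            out.writeObject(original);
            out.close();

            // Deserialize; readObject() re-hashes the stored elements into a fresh table.
            ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray()));
            DoubleChainedHashSet copy = (DoubleChainedHashSet) in.readObject();
            in.close();

            System.out.println(copy.size());           // 3
            System.out.println(copy.contains(1.5));    // true
            System.out.println(copy.equals(original)); // expected true (same elements)
        }
    }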