package org.apache.lucene.index;

/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.RAMDirectory;

import java.io.File;
import java.io.IOException;
import java.util.EmptyStackException;
import java.util.Random;
import java.util.Stack;

/**
 * Tests for the "IndexModifier" class, including concurrent access from two
 * threads.
 *
 * @author Daniel Naber
 * @deprecated
 */
public class TestIndexModifier extends LuceneTestCase {

  private int docCount = 0;

  private final Term allDocTerm = new Term("all", "x");

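  /**
   * Exercises add, flush, optimize and delete on a single IndexModifier,
   * checks the reported document counts, the Lucene defaults, setting
   * properties while either the writer or the reader is open internally, and
   * that calling docCount() after close() throws an IllegalStateException.
   */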
  public void testIndex() throws IOException {
    Directory ramDir = new RAMDirectory();
    IndexModifier i = new IndexModifier(ramDir, new StandardAnalyzer(), true);
    i.addDocument(getDoc());
    assertEquals(1, i.docCount());
    i.flush();
    i.addDocument(getDoc(), new SimpleAnalyzer());
    assertEquals(2, i.docCount());
    i.optimize();
    assertEquals(2, i.docCount());
    i.flush();
    i.deleteDocument(0);
    assertEquals(1, i.docCount());
    i.flush();
    assertEquals(1, i.docCount());
    i.addDocument(getDoc());
    i.addDocument(getDoc());
    i.flush();
    // the count here depends on the merge policy:
    // assertEquals(3, i.docCount());
    i.deleteDocuments(allDocTerm);
    assertEquals(0, i.docCount());
    i.optimize();
    assertEquals(0, i.docCount());

    // Lucene defaults:
    assertNull(i.getInfoStream());
    assertTrue(i.getUseCompoundFile());
    assertEquals(IndexWriter.DISABLE_AUTO_FLUSH, i.getMaxBufferedDocs());
    assertEquals(10000, i.getMaxFieldLength());
    assertEquals(10, i.getMergeFactor());

    // test setting properties:
    i.setMaxBufferedDocs(100);
    i.setMergeFactor(25);
    i.setMaxFieldLength(250000);
    i.addDocument(getDoc());
    i.setUseCompoundFile(false);
    i.flush();
    assertEquals(100, i.getMaxBufferedDocs());
    assertEquals(25, i.getMergeFactor());
    assertEquals(250000, i.getMaxFieldLength());
    assertFalse(i.getUseCompoundFile());

    // test setting properties while the reader is opened internally:
    i.deleteDocuments(allDocTerm);
    i.setMaxBufferedDocs(100);
    i.setMergeFactor(25);
    i.setMaxFieldLength(250000);
    i.addDocument(getDoc());
    i.setUseCompoundFile(false);
    i.optimize();
    assertEquals(100, i.getMaxBufferedDocs());
    assertEquals(25, i.getMergeFactor());
    assertEquals(250000, i.getMaxFieldLength());
    assertFalse(i.getUseCompoundFile());

    i.close();
    try {
      i.docCount();
      fail("docCount() should fail on a closed IndexModifier");
    } catch (IllegalStateException e) {
      // expected exception
    }
  }

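  /**
   * Verifies that IndexModifier can be subclassed (see PowerIndex below) to
   * get access to reader-level operations such as docFreq().
   */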
  public void testExtendedIndex() throws IOException {
    Directory ramDir = new RAMDirectory();
    PowerIndex powerIndex = new PowerIndex(ramDir, new StandardAnalyzer(), true);
    powerIndex.addDocument(getDoc());
    powerIndex.addDocument(getDoc());
    powerIndex.addDocument(getDoc());
    powerIndex.addDocument(getDoc());
    powerIndex.addDocument(getDoc());
    powerIndex.flush();
    assertEquals(5, powerIndex.docFreq(allDocTerm));
    powerIndex.close();
  }

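  /** Creates a document with a unique "body" field and the constant "all:x" field. */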
  private Document getDoc() {
    Document doc = new Document();
    doc.add(new Field("body", Integer.toString(docCount), Field.Store.YES,
        Field.Index.UN_TOKENIZED));
    doc.add(new Field("all", "x", Field.Store.YES, Field.Index.UN_TOKENIZED));
    docCount++;
    return doc;
  }

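  /**
   * Runs the two-thread test without delays and with random delays of up to
   * 10 ms and 50 ms between operations.
   */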
  public void testIndexWithThreads() throws IOException {
    testIndexInternal(0);
    testIndexInternal(10);
    testIndexInternal(50);
  }

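  /**
   * Lets two IndexThreads add, delete and optimize concurrently on a shared
   * IndexModifier backed by an FSDirectory, then checks that the final
   * document count equals the number of added minus the number of deleted
   * documents.
   */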
  private void testIndexInternal(int maxWait) throws IOException {
    final boolean create = true;
    // Directory rd = new RAMDirectory();
    // work on disk to make sure potential lock problems are tested:
    String tempDir = System.getProperty("java.io.tmpdir");
    if (tempDir == null)
      throw new IOException("java.io.tmpdir undefined, cannot run test");
    File indexDir = new File(tempDir, "lucenetestindex");
    Directory rd = FSDirectory.getDirectory(indexDir);
    IndexThread.id = 0;
    IndexThread.idStack.clear();
    IndexModifier index = new IndexModifier(rd, new StandardAnalyzer(), create);
    IndexThread thread1 = new IndexThread(index, maxWait, 1);
    thread1.start();
    IndexThread thread2 = new IndexThread(index, maxWait, 2);
    thread2.start();
    while (thread1.isAlive() || thread2.isAlive()) {
      try {
        Thread.sleep(100);
      } catch (InterruptedException e) {
        throw new RuntimeException(e);
      }
    }
    index.optimize();
    int added = thread1.added + thread2.added;
    int deleted = thread1.deleted + thread2.deleted;
    assertEquals(added - deleted, index.docCount());
    index.close();

    try {
      index.close();
      fail("closing an already closed IndexModifier should fail");
    } catch (IllegalStateException e) {
      // expected exception
    }
    rmDir(indexDir);
  }

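  /** Deletes the files in the given directory and then the directory itself. */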
  private void rmDir(File dir) {
    File[] files = dir.listFiles();
    if (files != null) {
      for (int i = 0; i < files.length; i++) {
        files[i].delete();
      }
    }
    dir.delete();
  }

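  /**
   * Subclass that uses the protected IndexModifier internals (directory,
   * assureOpen(), createIndexReader(), indexReader) to expose docFreq().
   */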
  private class PowerIndex extends IndexModifier {
    public PowerIndex(Directory dir, Analyzer analyzer, boolean create)
        throws IOException {
      super(dir, analyzer, create);
    }

    public int docFreq(Term term) throws IOException {
      synchronized (directory) {
        assureOpen();
        createIndexReader();
        return indexReader.docFreq(term);
      }
    }
  }

}

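/**
 * Worker thread that performs a random mix of operations on a shared
 * IndexModifier: roughly 5% optimize, 55% add and 40% delete, keeping track
 * of how many documents it added and deleted.
 */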
class IndexThread extends Thread {

  private final static int ITERATIONS = 500; // iterations of thread test

  static int id = 0;
  static Stack idStack = new Stack();

  int added = 0;
  int deleted = 0;

  private int maxWait = 10;
  private IndexModifier index;
  private int threadNumber;
  private Random random;

  IndexThread(IndexModifier index, int maxWait, int threadNumber) {
    this.index = index;
    this.maxWait = maxWait;
    this.threadNumber = threadNumber;
    // TODO: test case is not reproducible despite pseudo-random numbers:
    random = new Random(101 + threadNumber); // constant seed for better reproducibility
  }

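  /**
   * Performs ITERATIONS random operations; a delete removes the most recently
   * added id from the shared stack so that no id is deleted twice.
   */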
  public void run() {
    try {
      for (int i = 0; i < ITERATIONS; i++) {
        int rand = random.nextInt(101);
        if (rand < 5) {
          index.optimize();
        } else if (rand < 60) {
          Document doc = getDocument();
          index.addDocument(doc);
          idStack.push(doc.get("id"));
          added++;
        } else {
          // delete the last document added and remove its id from the
          // stack so that it won't be deleted twice:
          String delId = null;
          try {
            delId = (String) idStack.pop();
          } catch (EmptyStackException e) {
            continue;
          }
          Term delTerm = new Term("id", delId);
          int delCount = index.deleteDocuments(delTerm);
          if (delCount != 1) {
            throw new RuntimeException("Internal error: " + threadNumber
                + " deleted " + delCount + " documents, term=" + delTerm);
          }
          deleted++;
        }
        if (maxWait > 0) {
          try {
            rand = random.nextInt(maxWait);
            //System.out.println("waiting " + rand + "ms");
            Thread.sleep(rand);
          } catch (InterruptedException e) {
            throw new RuntimeException(e);
          }
        }
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
  }

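  /**
   * Creates a document with a globally unique "id" field (ids are handed out
   * under a lock on the IndexThread class), two random "content" fields and
   * the constant "all:x" field.
   */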
  private Document getDocument() {
    Document doc = new Document();
    synchronized (getClass()) {
      doc.add(new Field("id", Integer.toString(id), Field.Store.YES,
          Field.Index.UN_TOKENIZED));
      id++;
    }
    // add random stuff:
    doc.add(new Field("content", Integer.toString(random.nextInt(1000)),
        Field.Store.YES, Field.Index.TOKENIZED));
    doc.add(new Field("content", Integer.toString(random.nextInt(1000)),
        Field.Store.YES, Field.Index.TOKENIZED));
    doc.add(new Field("all", "x", Field.Store.YES, Field.Index.TOKENIZED));
    return doc;
  }

}