From ca79b461047d1a72d0609c1749ae64fcf0de8de6 Mon Sep 17 00:00:00 2001 From: shubharm Date: Mon, 6 Oct 2025 17:27:55 +0100 Subject: [PATCH 1/7] removed CFS ratio and moved CFS to CompoundFormat.java --- .../apache/lucene/codecs/CompoundFormat.java | 78 +++++++++++++ .../lucene/index/FilterMergePolicy.java | 27 ----- .../org/apache/lucene/index/IndexWriter.java | 16 ++- .../lucene/index/LiveIndexWriterConfig.java | 4 +- .../apache/lucene/index/LogMergePolicy.java | 14 +-- .../org/apache/lucene/index/MergePolicy.java | 104 +----------------- .../apache/lucene/index/NoMergePolicy.java | 26 ----- .../lucene/index/TieredMergePolicy.java | 14 +-- 8 files changed, 98 insertions(+), 185 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java index 6a7e75f267e7..9f80f106855f 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java @@ -17,6 +17,8 @@ package org.apache.lucene.codecs; import java.io.IOException; +import org.apache.lucene.index.LogDocMergePolicy; +import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; @@ -34,6 +36,82 @@ protected CompoundFormat() {} // TODO: this is very minimal. If we need more methods, // we can add 'producer' classes. + + static final long CFS_THRESHOLD_DOC_SIZE = 65536; + static final long CFS_THRESHOLD_BYTE_SIZE = 65; + + /** + * Default max segment size in order to use compound file system. Set to {@link Long#MAX_VALUE}. 
+ */ + static final long DEFAULT_MAX_CFS_SEGMENT_SIZE = Long.MAX_VALUE; + + private long CfsThresholdDocSize = CFS_THRESHOLD_DOC_SIZE; + private long CfsThresholdByteSize = CFS_THRESHOLD_BYTE_SIZE; + private boolean shouldUseCompoundFile = true; + + /** + * If the size of the merged segment exceeds this value then it will not use compound file format. + */ + private long maxCFSSegmentSize = DEFAULT_MAX_CFS_SEGMENT_SIZE; + + public void setCfsThresholdDocSize(long cfsThresholdDocSize) { + this.CfsThresholdDocSize = cfsThresholdDocSize; + } + + public void setCfsThresholdByteSize(long cfsThresholdByteSize) { + this.CfsThresholdByteSize = cfsThresholdByteSize; + } + + public long getCfsThresholdByteSize() { + return this.CfsThresholdByteSize; + } + + public long getCfsThresholdDocSize() { + return this.CfsThresholdDocSize; + } + + public void setShouldUseCompoundFile(boolean useCompoundFile) { + this.shouldUseCompoundFile = useCompoundFile; + } + + /** Returns the largest size allowed for a compound file segment */ + public double getMaxCFSSegmentSizeMB() { + return maxCFSSegmentSize / 1024. / 1024.; + } + + /** + * If a merged segment will be more than this value, leave the segment as non-compound file even + * if compound file is enabled. Set this to Double.POSITIVE_INFINITY (default) to + * always use CFS regardless of merge size. + */ + public void setMaxCFSSegmentSizeMB(double v) { + if (v < 0.0) { + throw new IllegalArgumentException("maxCFSSegmentSizeMB must be >=0 (got " + v + ")"); + } + v *= 1024 * 1024; + this.maxCFSSegmentSize = v > Long.MAX_VALUE ? Long.MAX_VALUE : (long) v; + } + + /** + * Returns true if a new segment (regardless of its origin) should use the compound file format. 
+ * The default implementation returns true iff the size of the given mergedInfo is + * less or equal to {@link #getMaxCFSSegmentSizeMB()} and the size is less or equal to the + * configured threshold ({@link #getCfsThresholdDocSize()} for {@link LogDocMergePolicy}, + * otherwise {@link #getCfsThresholdByteSize()}); otherwise false. + */ + public boolean useCompoundFile(long mergedInfoSize, MergePolicy mergePolicy) throws IOException { + if (this.shouldUseCompoundFile == false) { + return false; + } + if (mergedInfoSize > maxCFSSegmentSize) { + return false; + } + + if (mergePolicy instanceof LogDocMergePolicy) { + return mergedInfoSize <= this.CfsThresholdDocSize; + } + + return mergedInfoSize <= this.CfsThresholdByteSize; + } + /** Returns a Directory view (read-only) for the compound files in this segment */ public abstract CompoundDirectory getCompoundReader(Directory dir, SegmentInfo si) throws IOException; diff --git a/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java index 08c0196a4136..268c70db39ac 100644 --- a/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/FilterMergePolicy.java @@ -75,38 +75,11 @@ public MergeSpecification findFullFlushMerges( return in.findFullFlushMerges(mergeTrigger, segmentInfos, mergeContext); } - @Override - public boolean useCompoundFile( - SegmentInfos infos, SegmentCommitInfo mergedInfo, MergeContext mergeContext) - throws IOException { - return in.useCompoundFile(infos, mergedInfo, mergeContext); - } - @Override protected long size(SegmentCommitInfo info, MergeContext context) throws IOException { return in.size(info, context); } - @Override - public double getNoCFSRatio() { - return in.getNoCFSRatio(); - } - - @Override - public final void setNoCFSRatio(double noCFSRatio) { - in.setNoCFSRatio(noCFSRatio); - } - - @Override - public final void setMaxCFSSegmentSizeMB(double v) { - in.setMaxCFSSegmentSizeMB(v); - } - - @Override - public final double getMaxCFSSegmentSizeMB() { - return in.getMaxCFSSegmentSizeMB(); - } - @Override 
public String toString() { return getClass().getSimpleName() + "(" + in + ")"; diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java index aa2c92ffecca..92f49f3a4a74 100644 --- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java +++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java @@ -3480,7 +3480,13 @@ public void addIndexesReaderMerge(MergePolicy.OneMerge merge) throws IOException boolean useCompoundFile; synchronized (this) { merge.checkAborted(); - useCompoundFile = mergePolicy.useCompoundFile(segmentInfos, merge.getMergeInfo(), this); + useCompoundFile = + merge + .getMergeInfo() + .info + .getCodec() + .compoundFormat() + .useCompoundFile(mergePolicy.size(merge.getMergeInfo(), this), mergePolicy); } // Now create the compound file if needed @@ -5336,7 +5342,13 @@ public int length() { // this segment: boolean useCompoundFile; synchronized (this) { // Guard segmentInfos - useCompoundFile = mergePolicy.useCompoundFile(segmentInfos, merge.info, this); + useCompoundFile = + merge + .getMergeInfo() + .info + .getCodec() + .compoundFormat() + .useCompoundFile(mergePolicy.size(merge.info, this), mergePolicy); } if (useCompoundFile) { diff --git a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java index 81d3f52b8d79..c9a0453aad0f 100644 --- a/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java +++ b/lucene/core/src/java/org/apache/lucene/index/LiveIndexWriterConfig.java @@ -368,9 +368,7 @@ public InfoStream getInfoStream() { * *

Use false for batch indexing with very large ram buffer settings. * - *

Note: To control compound file usage during segment merges see {@link - * MergePolicy#setNoCFSRatio(double)} and {@link MergePolicy#setMaxCFSSegmentSizeMB(double)}. This - * setting only applies to newly created segments. + *

Note: To control compound file usage during segment merges, see the codec's {@link org.apache.lucene.codecs.CompoundFormat}. This setting only applies to newly created segments. */ public LiveIndexWriterConfig setUseCompoundFile(boolean useCompoundFile) { this.useCompoundFile = useCompoundFile; diff --git a/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java index b6dc9848c9df..f3a004f857fc 100644 --- a/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/LogMergePolicy.java @@ -56,14 +56,6 @@ public abstract class LogMergePolicy extends MergePolicy { */ public static final int DEFAULT_MAX_MERGE_DOCS = Integer.MAX_VALUE; - /** - * Default noCFSRatio. If a merge's size is {@code >= 10%} of the index, then we disable compound - * file for it. - * - * @see MergePolicy#setNoCFSRatio - */ - public static final double DEFAULT_NO_CFS_RATIO = 0.1; - /** How many segments to merge at a time. */ protected int mergeFactor = DEFAULT_MERGE_FACTOR; @@ -97,9 +89,7 @@ public abstract class LogMergePolicy extends MergePolicy { protected int targetSearchConcurrency = 1; /** Sole constructor. (For invocation by subclass constructors, typically implicit.) 
*/ - public LogMergePolicy() { - super(DEFAULT_NO_CFS_RATIO, MergePolicy.DEFAULT_MAX_CFS_SEGMENT_SIZE); - } + public LogMergePolicy() {} /** * Returns the number of segments that are merged at once and also controls the total number of @@ -730,8 +720,6 @@ public String toString() { sb.append("maxMergeSizeForForcedMerge=").append(maxMergeSizeForForcedMerge).append(", "); sb.append("calibrateSizeByDeletes=").append(calibrateSizeByDeletes).append(", "); sb.append("maxMergeDocs=").append(maxMergeDocs).append(", "); - sb.append("maxCFSSegmentSizeMB=").append(getMaxCFSSegmentSizeMB()).append(", "); - sb.append("noCFSRatio=").append(noCFSRatio); sb.append("]"); return sb.toString(); } diff --git a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java index 7c14a34c127a..2e0edd8ae20b 100644 --- a/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/MergePolicy.java @@ -577,41 +577,8 @@ public MergeAbortedException(String message) { } } - /** - * Default ratio for compound file system usage. Set to 1.0, always use compound file - * system. - */ - protected static final double DEFAULT_NO_CFS_RATIO = 1.0; - - /** - * Default max segment size in order to use compound file system. Set to {@link Long#MAX_VALUE}. - */ - protected static final long DEFAULT_MAX_CFS_SEGMENT_SIZE = Long.MAX_VALUE; - - /** - * If the size of the merge segment exceeds this ratio of the total index size then it will remain - * in non-compound format - */ - protected double noCFSRatio; - - /** - * If the size of the merged segment exceeds this value then it will not use compound file format. - */ - protected long maxCFSSegmentSize; - /** Creates a new merge policy instance. */ - protected MergePolicy() { - this(DEFAULT_NO_CFS_RATIO, DEFAULT_MAX_CFS_SEGMENT_SIZE); - } - - /** - * Creates a new merge policy instance with default settings for noCFSRatio and maxCFSSegmentSize. 
- * This ctor should be used by subclasses using different defaults than the {@link MergePolicy} - */ - protected MergePolicy(double defaultNoCFSRatio, long defaultMaxCFSSegmentSize) { - this.noCFSRatio = defaultNoCFSRatio; - this.maxCFSSegmentSize = defaultMaxCFSSegmentSize; - } + protected MergePolicy() {} /** * Determine what set of merge operations are now necessary on the index. {@link IndexWriter} @@ -727,32 +694,6 @@ public MergeSpecification findFullFlushMerges( return newMergeSpec; } - /** - * Returns true if a new segment (regardless of its origin) should use the compound file format. - * The default implementation returns true iff the size of the given mergedInfo is - * less or equal to {@link #getMaxCFSSegmentSizeMB()} and the size is less or equal to the - * TotalIndexSize * {@link #getNoCFSRatio()} otherwise false. - */ - public boolean useCompoundFile( - SegmentInfos infos, SegmentCommitInfo mergedInfo, MergeContext mergeContext) - throws IOException { - if (getNoCFSRatio() == 0.0) { - return false; - } - long mergedInfoSize = size(mergedInfo, mergeContext); - if (mergedInfoSize > maxCFSSegmentSize) { - return false; - } - if (getNoCFSRatio() >= 1.0) { - return true; - } - long totalSize = 0; - for (SegmentCommitInfo info : infos) { - totalSize += size(info, mergeContext); - } - return mergedInfoSize <= getNoCFSRatio() * totalSize; - } - /** * Return the byte size of the provided {@link SegmentCommitInfo}, prorated by percentage of * non-deleted documents. @@ -793,47 +734,8 @@ protected final boolean isMerged( int delCount = mergeContext.numDeletesToMerge(info); assert assertDelCount(delCount, info); return delCount == 0 - && useCompoundFile(infos, info, mergeContext) == info.info.getUseCompoundFile(); - } - - /** - * Returns current {@code noCFSRatio}. 
- * - * @see #setNoCFSRatio - */ - public double getNoCFSRatio() { - return noCFSRatio; - } - - /** - * If a merged segment will be more than this percentage of the total size of the index, leave the - * segment as non-compound file even if compound file is enabled. Set to 1.0 to always use CFS - * regardless of merge size. - */ - public void setNoCFSRatio(double noCFSRatio) { - if (noCFSRatio < 0.0 || noCFSRatio > 1.0) { - throw new IllegalArgumentException( - "noCFSRatio must be 0.0 to 1.0 inclusive; got " + noCFSRatio); - } - this.noCFSRatio = noCFSRatio; - } - - /** Returns the largest size allowed for a compound file segment */ - public double getMaxCFSSegmentSizeMB() { - return maxCFSSegmentSize / 1024. / 1024.; - } - - /** - * If a merged segment will be more than this value, leave the segment as non-compound file even - * if compound file is enabled. Set this to Double.POSITIVE_INFINITY (default) and noCFSRatio to - * 1.0 to always use CFS regardless of merge size. - */ - public void setMaxCFSSegmentSizeMB(double v) { - if (v < 0.0) { - throw new IllegalArgumentException("maxCFSSegmentSizeMB must be >=0 (got " + v + ")"); - } - v *= 1024 * 1024; - this.maxCFSSegmentSize = v > Long.MAX_VALUE ? 
Long.MAX_VALUE : (long) v; + && info.info.getCodec().compoundFormat().useCompoundFile(size(info, mergeContext), this) + == info.info.getUseCompoundFile(); } /** diff --git a/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java index ce7b1ce006b3..1aec5399095d 100644 --- a/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/NoMergePolicy.java @@ -70,37 +70,11 @@ public MergeSpecification findFullFlushMerges( return null; } - @Override - public boolean useCompoundFile( - SegmentInfos segments, SegmentCommitInfo newSegment, MergeContext mergeContext) { - return newSegment.info.getUseCompoundFile(); - } - @Override protected long size(SegmentCommitInfo info, MergeContext context) throws IOException { return Long.MAX_VALUE; } - @Override - public double getNoCFSRatio() { - return super.getNoCFSRatio(); - } - - @Override - public double getMaxCFSSegmentSizeMB() { - return super.getMaxCFSSegmentSizeMB(); - } - - @Override - public void setMaxCFSSegmentSizeMB(double v) { - super.setMaxCFSSegmentSizeMB(v); - } - - @Override - public void setNoCFSRatio(double noCFSRatio) { - super.setNoCFSRatio(noCFSRatio); - } - @Override public boolean keepFullyDeletedSegment(IOSupplier readerIOSupplier) throws IOException { diff --git a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java index b43535cdb68e..3e0e5e883a5a 100644 --- a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java @@ -74,14 +74,6 @@ // maybe CMS should do so) public class TieredMergePolicy extends MergePolicy { - /** - * Default noCFSRatio. If a merge's size is {@code >= 10%} of the index, then we disable compound - * file for it. 
- * - * @see MergePolicy#setNoCFSRatio - */ - public static final double DEFAULT_NO_CFS_RATIO = 0.1; - private long maxMergedSegmentBytes = 5 * 1024 * 1024 * 1024L; private long floorSegmentBytes = 16 * 1024 * 1024L; @@ -91,9 +83,7 @@ public class TieredMergePolicy extends MergePolicy { private int targetSearchConcurrency = 1; /** Sole constructor, setting all settings to their defaults. */ - public TieredMergePolicy() { - super(DEFAULT_NO_CFS_RATIO, MergePolicy.DEFAULT_MAX_CFS_SEGMENT_SIZE); - } + public TieredMergePolicy() {} private enum MERGE_TYPE { NATURAL, @@ -1002,8 +992,6 @@ public String toString() { sb.append("floorSegmentMB=").append(floorSegmentBytes / 1024. / 1024.).append(", "); sb.append("forceMergeDeletesPctAllowed=").append(forceMergeDeletesPctAllowed).append(", "); sb.append("segmentsPerTier=").append(segsPerTier).append(", "); - sb.append("maxCFSSegmentSizeMB=").append(getMaxCFSSegmentSizeMB()).append(", "); - sb.append("noCFSRatio=").append(noCFSRatio).append(", "); sb.append("deletesPctAllowed=").append(deletesPctAllowed).append(", "); sb.append("targetSearchConcurrency=").append(targetSearchConcurrency); return sb.toString(); From 127204b4831bed166dc14c0a0f44d1f4a14be566 Mon Sep 17 00:00:00 2001 From: shubharm Date: Tue, 7 Oct 2025 00:33:19 +0100 Subject: [PATCH 2/7] Fixed tests --- .../test/org/apache/lucene/TestSearch.java | 2 +- .../lucene/TestSearchForDuplicates.java | 2 +- .../lucene90/TestLucene90DocValuesFormat.java | 9 +++-- .../perfield/TestPerFieldPostingsFormat2.java | 4 +-- .../document/TestFeatureDoubleValues.java | 15 ++++++--- .../lucene/document/TestFeatureField.java | 15 ++++++--- .../lucene/document/TestFeatureSort.java | 15 ++++++--- .../TestLatLonPointDistanceFeatureQuery.java | 20 ++++++----- .../TestLongDistanceFeatureQuery.java | 16 +++++---- .../lucene/index/Test2BBinaryDocValues.java | 6 ++-- .../org/apache/lucene/index/Test2BDocs.java | 4 +-- .../lucene/index/Test2BNumericDocValues.java | 4 +-- 
.../org/apache/lucene/index/Test2BPoints.java | 7 ++-- .../apache/lucene/index/Test2BPositions.java | 4 +-- .../apache/lucene/index/Test2BPostings.java | 3 +- .../lucene/index/Test2BPostingsBytes.java | 3 +- .../Test2BSortedDocValuesFixedSorted.java | 3 +- .../index/Test2BSortedDocValuesOrds.java | 3 +- .../org/apache/lucene/index/Test2BTerms.java | 3 +- .../lucene/index/Test4GBStoredFields.java | 3 +- .../index/TestAllFilesCheckIndexHeader.java | 2 +- .../TestAllFilesDetectMismatchedChecksum.java | 2 +- .../index/TestAllFilesDetectTruncation.java | 2 +- .../index/TestDefaultCodecParallelizesIO.java | 3 +- .../lucene/index/TestDeletionPolicy.java | 27 +++++---------- .../index/TestDemoParallelLeafReader.java | 7 ---- .../apache/lucene/index/TestFieldsReader.java | 2 +- .../lucene/index/TestIndexFileDeleter.java | 11 +++---- .../apache/lucene/index/TestIndexWriter.java | 11 +++---- .../lucene/index/TestIndexWriterDelete.java | 1 - .../index/TestIndexWriterExceptions.java | 7 ++-- .../index/TestIndexWriterMergePolicy.java | 26 --------------- .../index/TestIndexWriterOnDiskFull.java | 6 ++-- .../index/TestNRTReaderWithThreads.java | 3 +- .../apache/lucene/index/TestOmitNorms.java | 2 +- .../lucene/index/TestOmitPositions.java | 2 +- .../org/apache/lucene/index/TestOmitTf.java | 2 +- .../lucene/index/TestPerSegmentDeletes.java | 6 ---- .../lucene/index/TestStressIndexing2.java | 4 +-- .../lucene/index/TestSwappedIndexFiles.java | 4 +-- .../lucene/index/TestTermVectorsReader.java | 3 +- .../lucene/index/TestTieredMergePolicy.java | 19 ----------- .../search/TestConstantScoreScorer.java | 4 +-- .../lucene/search/TestReqOptSumScorer.java | 3 +- .../lucene/store/TestFileSwitchDirectory.java | 3 +- .../index/BaseIndexFileFormatTestCase.java | 4 +-- .../tests/index/BaseMergePolicyTestCase.java | 2 +- .../tests/index/MockRandomMergePolicy.java | 8 ----- .../lucene/tests/util/LuceneTestCase.java | 33 ++++--------------- .../apache/lucene/tests/util/TestUtil.java | 14 +++++++- 
50 files changed, 160 insertions(+), 204 deletions(-) diff --git a/lucene/core/src/test/org/apache/lucene/TestSearch.java b/lucene/core/src/test/org/apache/lucene/TestSearch.java index 1ef629bd5db8..b4db24206c10 100644 --- a/lucene/core/src/test/org/apache/lucene/TestSearch.java +++ b/lucene/core/src/test/org/apache/lucene/TestSearch.java @@ -81,7 +81,7 @@ private void doTestSearch(Random random, PrintWriter out, boolean useCompoundFil Analyzer analyzer = new MockAnalyzer(random); IndexWriterConfig conf = newIndexWriterConfig(analyzer); MergePolicy mp = conf.getMergePolicy(); - mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(useCompoundFile); IndexWriter writer = new IndexWriter(directory, conf); String[] docs = { diff --git a/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java b/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java index e92401235310..51b255606f3b 100644 --- a/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java +++ b/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java @@ -85,7 +85,7 @@ private void doTest(Random random, PrintWriter out, boolean useCompoundFiles, in Analyzer analyzer = new MockAnalyzer(random); IndexWriterConfig conf = newIndexWriterConfig(analyzer); final MergePolicy mp = conf.getMergePolicy(); - mp.setNoCFSRatio(useCompoundFiles ? 
1.0 : 0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(useCompoundFiles); IndexWriter writer = new IndexWriter(directory, conf); if (VERBOSE) { System.out.println("TEST: now build index MAX_DOCS=" + MAX_DOCS); diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene90/TestLucene90DocValuesFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene90/TestLucene90DocValuesFormat.java index 16f9d06151da..92a6059003b6 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/lucene90/TestLucene90DocValuesFormat.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene90/TestLucene90DocValuesFormat.java @@ -610,7 +610,8 @@ private IndexWriter createFastIndexWriter(Directory dir, int maxBufferedDocs) th IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(maxBufferedDocs); conf.setRAMBufferSizeMB(-1); - conf.setMergePolicy(newLogMergePolicy(random().nextBoolean())); + conf.setMergePolicy(newLogMergePolicy()); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); return new IndexWriter(dir, conf); } @@ -642,7 +643,8 @@ private void doTestSortedNumericBlocksOfVariousBitsPerValue(LongSupplier counts) IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(atLeast(Lucene90DocValuesFormat.NUMERIC_BLOCK_SIZE)); conf.setRAMBufferSizeMB(-1); - conf.setMergePolicy(newLogMergePolicy(random().nextBoolean())); + conf.setMergePolicy(newLogMergePolicy()); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); IndexWriter writer = new IndexWriter(dir, conf); final int numDocs = atLeast(Lucene90DocValuesFormat.NUMERIC_BLOCK_SIZE * 3); @@ -712,7 +714,8 @@ private void doTestSparseNumericBlocksOfVariousBitsPerValue(double density) thro IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(atLeast(Lucene90DocValuesFormat.NUMERIC_BLOCK_SIZE)); 
conf.setRAMBufferSizeMB(-1); - conf.setMergePolicy(newLogMergePolicy(random().nextBoolean())); + conf.setMergePolicy(newLogMergePolicy()); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); Field storedField = newStringField("stored", "", Field.Store.YES); diff --git a/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java b/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java index ebae291f35e2..a782440c952b 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/perfield/TestPerFieldPostingsFormat2.java @@ -67,9 +67,9 @@ public class TestPerFieldPostingsFormat2 extends LuceneTestCase { private IndexWriter newWriter(Directory dir, IndexWriterConfig conf) throws IOException { LogDocMergePolicy logByteSizeMergePolicy = new LogDocMergePolicy(); - logByteSizeMergePolicy.setNoCFSRatio(0.0); // make sure we use plain - // files conf.setMergePolicy(logByteSizeMergePolicy); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(false); // make sure we use plain + // files final IndexWriter writer = new IndexWriter(dir, conf); return writer; diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFeatureDoubleValues.java b/lucene/core/src/test/org/apache/lucene/document/TestFeatureDoubleValues.java index 1f60acc7833e..08d1bfdcf91a 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestFeatureDoubleValues.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestFeatureDoubleValues.java @@ -32,7 +32,8 @@ public class TestFeatureDoubleValues extends LuceneTestCase { public void testFeature() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())); + 
newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); doc.add(new FeatureField("field", "name", 30F)); @@ -67,7 +68,8 @@ public void testFeature() throws IOException { public void testFeatureMissing() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); writer.addDocument(doc); @@ -100,7 +102,8 @@ public void testFeatureMissing() throws IOException { public void testFeatureMissingFieldInSegment() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); writer.addDocument(doc); @@ -123,7 +126,8 @@ public void testFeatureMissingFieldInSegment() throws IOException { public void testFeatureMissingFeatureNameInSegment() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); doc.add(new FeatureField("field", 
"different_name", 0.5F)); @@ -147,7 +151,8 @@ public void testFeatureMissingFeatureNameInSegment() throws IOException { public void testFeatureMultipleMissing() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); writer.addDocument(doc); diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFeatureField.java b/lucene/core/src/test/org/apache/lucene/document/TestFeatureField.java index 7b03487c7956..ed0002246d02 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestFeatureField.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestFeatureField.java @@ -64,7 +64,8 @@ public void testBasics() throws Exception { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); FeatureField pagerank = new FeatureField("features", "pagerank", 1); FeatureField urlLength = new FeatureField("features", "urlLen", 1); @@ -199,7 +200,8 @@ public void testExplanations() throws Exception { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); FeatureField pagerank = new FeatureField("features", "pagerank", 1); doc.add(pagerank); @@ -325,7 +327,8 @@ public void testDemo() throws 
IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); FeatureField pagerank = new FeatureField("features", "pagerank", 1); doc.add(pagerank); @@ -381,7 +384,8 @@ public void testBasicsNonScoringCase() throws IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())))) { + newIndexWriterConfig().setMergePolicy(newLogMergePolicy()))) { + writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); FeatureField pagerank = new FeatureField("features", "pagerank", 1); FeatureField urlLength = new FeatureField("features", "urlLen", 1); @@ -464,7 +468,8 @@ public void testStoreTermVectors() throws Exception { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); FeatureField pagerank = new FeatureField("features", "pagerank", 1, true); FeatureField urlLength = new FeatureField("features", "urlLen", 1, true); diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java b/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java index cd5bde9ba3aa..a1a23b92159f 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java @@ -51,7 +51,8 @@ public class TestFeatureSort extends LuceneTestCase { public void testFeature() throws IOException { Directory dir = newDirectory(); 
IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); doc.add(new FeatureField("field", "name", 30.1F)); @@ -86,7 +87,8 @@ public void testFeature() throws IOException { public void testFeatureMissing() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); writer.addDocument(doc); @@ -119,7 +121,8 @@ public void testFeatureMissing() throws IOException { public void testFeatureMissingFieldInSegment() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); writer.addDocument(doc); @@ -153,7 +156,8 @@ public void testFeatureMissingFieldInSegment() throws IOException { public void testFeatureMissingFeatureNameInSegment() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = 
new RandomIndexWriter(random(), dir, config); Document doc = new Document(); doc.add(new FeatureField("field", "different_name", 0.5F)); @@ -188,7 +192,8 @@ public void testFeatureMissingFeatureNameInSegment() throws IOException { public void testFeatureMultipleMissing() throws IOException { Directory dir = newDirectory(); IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean())); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); writer.addDocument(doc); diff --git a/lucene/core/src/test/org/apache/lucene/document/TestLatLonPointDistanceFeatureQuery.java b/lucene/core/src/test/org/apache/lucene/document/TestLatLonPointDistanceFeatureQuery.java index 210b92295329..d28d61af7bb7 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestLatLonPointDistanceFeatureQuery.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestLatLonPointDistanceFeatureQuery.java @@ -67,7 +67,8 @@ public void testBasics() throws IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LatLonPoint point = new LatLonPoint("foo", 0.0, 0.0); doc.add(point); @@ -163,7 +164,8 @@ public void testCrossesDateLine() throws IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LatLonPoint point = new 
LatLonPoint("foo", 0.0, 0.0); doc.add(point); @@ -241,7 +243,8 @@ public void testMissingValue() throws IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LatLonPoint point = new LatLonPoint("foo", 0, 0); doc.add(point); @@ -300,8 +303,8 @@ public void testMultiValued() throws IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); - + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); for (double[] point : new double[][] {{0, 0}, {30, 30}, {60, 60}}) { doc.add(new LatLonPoint("foo", point[0], point[1])); @@ -402,7 +405,8 @@ public void testRandom() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter( - dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LatLonPoint point = new LatLonPoint("foo", 0., 0.); doc.add(point); @@ -447,8 +451,8 @@ public void testCompareSorting() throws IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); - + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LatLonPoint point = new LatLonPoint("foo", 0., 0.); doc.add(point); diff --git 
a/lucene/core/src/test/org/apache/lucene/document/TestLongDistanceFeatureQuery.java b/lucene/core/src/test/org/apache/lucene/document/TestLongDistanceFeatureQuery.java index 036e0a5450e3..1abbf51b2977 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestLongDistanceFeatureQuery.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestLongDistanceFeatureQuery.java @@ -59,7 +59,8 @@ public void testBasics() throws IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LongField field = new LongField("foo", 0L, Store.NO); doc.add(field); @@ -120,7 +121,8 @@ public void testOverUnderFlow() throws IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LongField field = new LongField("foo", 0L, Store.NO); doc.add(field); @@ -206,7 +208,8 @@ public void testMissingValue() throws IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LongField field = new LongField("foo", 0L, Store.NO); doc.add(field); @@ -248,8 +251,8 @@ public void testMultiValued() throws IOException { new RandomIndexWriter( random(), dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); - + 
newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); for (long v : new long[] {3, 1000, Long.MAX_VALUE}) { doc.add(new LongField("foo", v, Store.NO)); @@ -319,7 +322,8 @@ public void testRandom() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter( - dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LongField field = new LongField("foo", 0L, Store.NO); doc.add(field); diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BBinaryDocValues.java b/lucene/core/src/test/org/apache/lucene/index/Test2BBinaryDocValues.java index 2faf281b3e8c..300316095acc 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BBinaryDocValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BBinaryDocValues.java @@ -54,9 +54,10 @@ public void testFixedBinary() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE) .setCodec(TestUtil.getDefaultCodec())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); Document doc = new Document(); byte[] bytes = new byte[4]; @@ -117,9 +118,10 @@ public void testVariableBinary() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE) 
.setCodec(TestUtil.getDefaultCodec())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); Document doc = new Document(); byte[] bytes = new byte[4]; diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BDocs.java b/lucene/core/src/test/org/apache/lucene/index/Test2BDocs.java index 8c30aaa6bbad..56cc4ccbae9f 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BDocs.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BDocs.java @@ -53,9 +53,9 @@ public void test2BDocs() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE) - .setCodec(TestUtil.getDefaultCodec())); + .setCodec(TestUtil.getDefaultCodec(false))); Document doc = new Document(); Field field = new Field("f1", "a", StringField.TYPE_NOT_STORED); diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BNumericDocValues.java b/lucene/core/src/test/org/apache/lucene/index/Test2BNumericDocValues.java index 0ee78973cf76..4b30bbda0718 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BNumericDocValues.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BNumericDocValues.java @@ -51,9 +51,9 @@ public void testNumerics() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE) - .setCodec(TestUtil.getDefaultCodec())); + .setCodec(TestUtil.getDefaultCodec(false))); Document doc = new Document(); NumericDocValuesField dvField = new NumericDocValuesField("dv", 0); diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java 
b/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java index ca0318c1a8a3..de92274a2d5e 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java @@ -48,8 +48,9 @@ public void test1D() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE); + iwc.getCodec().compoundFormat().setShouldUseCompoundFile(false); ((ConcurrentMergeScheduler) iwc.getMergeScheduler()).setMaxMergesAndThreads(6, 3); @@ -96,9 +97,9 @@ public void test2D() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE); - + iwc.getCodec().compoundFormat().setShouldUseCompoundFile(false); ((ConcurrentMergeScheduler) iwc.getMergeScheduler()).setMaxMergesAndThreads(6, 3); IndexWriter w = new IndexWriter(dir, iwc); diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BPositions.java b/lucene/core/src/test/org/apache/lucene/index/Test2BPositions.java index 76867c92afae..0ed0370aa0b2 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BPositions.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BPositions.java @@ -53,10 +53,10 @@ public void test() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE) .setCodec(TestUtil.getDefaultCodec())); - + 
w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); MergePolicy mp = w.getConfig().getMergePolicy(); if (mp instanceof LogByteSizeMergePolicy) { // 1 petabyte: diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BPostings.java b/lucene/core/src/test/org/apache/lucene/index/Test2BPostings.java index 6813d8f0f753..dfa1a75256d4 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BPostings.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BPostings.java @@ -51,8 +51,9 @@ public void test() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE); + iwc.getCodec().compoundFormat().setShouldUseCompoundFile(false); IndexWriter w = new IndexWriter(dir, iwc); diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java b/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java index 29abe55d780b..7fbebcaf11b0 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BPostingsBytes.java @@ -79,9 +79,10 @@ public void test() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE) .setCodec(TestUtil.getDefaultCodec())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); MergePolicy mp = w.getConfig().getMergePolicy(); if (mp instanceof LogByteSizeMergePolicy) { diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesFixedSorted.java 
b/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesFixedSorted.java index 826580d8c461..c0f0442b30b3 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesFixedSorted.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesFixedSorted.java @@ -52,10 +52,11 @@ public void testFixedSorted() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE) .setCodec(TestUtil.getDefaultCodec())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); Document doc = new Document(); byte[] bytes = new byte[2]; BytesRef data = new BytesRef(bytes); diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesOrds.java b/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesOrds.java index 52bcc7a109a1..a23f56543705 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesOrds.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test2BSortedDocValuesOrds.java @@ -52,9 +52,10 @@ public void test2BOrds() throws Exception { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE) .setCodec(TestUtil.getDefaultCodec())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); Document doc = new Document(); byte[] bytes = new byte[4]; diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BTerms.java b/lucene/core/src/test/org/apache/lucene/index/Test2BTerms.java index 90ed7902aaf2..3eea89b9764b 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test2BTerms.java +++ 
b/lucene/core/src/test/org/apache/lucene/index/Test2BTerms.java @@ -175,10 +175,11 @@ public void test2BTerms() throws IOException { .setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) .setRAMBufferSizeMB(256.0) .setMergeScheduler(new ConcurrentMergeScheduler()) - .setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setOpenMode(IndexWriterConfig.OpenMode.CREATE) .setCodec(TestUtil.getDefaultCodec())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); MergePolicy mp = w.getConfig().getMergePolicy(); if (mp instanceof LogByteSizeMergePolicy) { // 1 petabyte: diff --git a/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java b/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java index c75ed932876c..27f41de62694 100644 --- a/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java +++ b/lucene/core/src/test/org/apache/lucene/index/Test4GBStoredFields.java @@ -46,7 +46,8 @@ public void test() throws Exception { iwc.setMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); iwc.setRAMBufferSizeMB(256.0); iwc.setMergeScheduler(new ConcurrentMergeScheduler()); - iwc.setMergePolicy(newLogMergePolicy(false, 10)); + iwc.setMergePolicy(newLogMergePolicy(10)); + iwc.getCodec().compoundFormat().setShouldUseCompoundFile(false); iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE); // TODO: we disable "Compressing" since it likes to pick very extreme values which will be too diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java index f826ee45b66c..65c434c386e7 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesCheckIndexHeader.java @@ -47,7 +47,7 @@ public void test() throws Exception { // time we test truncation of .cfs/.cfe too: if 
(random().nextInt(5) != 1) { conf.setUseCompoundFile(false); - conf.getMergePolicy().setNoCFSRatio(0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(false); } RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectMismatchedChecksum.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectMismatchedChecksum.java index 0022a5e3caed..4bee0c635184 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectMismatchedChecksum.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectMismatchedChecksum.java @@ -53,7 +53,7 @@ public void test() throws Exception { conf.setCodec(TestUtil.getDefaultCodec()); // Disable CFS, which makes it harder to test due to its double checksumming conf.setUseCompoundFile(false); - conf.getMergePolicy().setNoCFSRatio(0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(false); RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf); Document doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java index f752356b4626..f8c9c42f6387 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestAllFilesDetectTruncation.java @@ -65,7 +65,7 @@ private void doTest(boolean cfs) throws Exception { // time we test truncation of .cfs/.cfe too: if (cfs == false) { conf.setUseCompoundFile(false); - conf.getMergePolicy().setNoCFSRatio(0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(false); } RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDefaultCodecParallelizesIO.java b/lucene/core/src/test/org/apache/lucene/index/TestDefaultCodecParallelizesIO.java index 
70788c1f64ea..de01dae1bc5d 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDefaultCodecParallelizesIO.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDefaultCodecParallelizesIO.java @@ -47,8 +47,9 @@ public static void beforeClass() throws Exception { bbDir, new IndexWriterConfig() .setUseCompoundFile(false) - .setMergePolicy(newLogMergePolicy(false)) + .setMergePolicy(newLogMergePolicy()) .setCodec(TestUtil.getDefaultCodec()))) { + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); final int numDocs = atLeast(10_000); for (int d = 0; d < numDocs; ++d) { Document doc = docs.nextDoc(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java index 08c6b3ebf6ed..9832b3d7dec4 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDeletionPolicy.java @@ -232,8 +232,7 @@ public void testExpirationTimeDeletionPolicy() throws IOException, InterruptedEx IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())) .setIndexDeletionPolicy(new ExpirationTimeDeletionPolicy(dir, SECONDS)); - MergePolicy mp = conf.getMergePolicy(); - mp.setNoCFSRatio(1.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(true); IndexWriter writer = new IndexWriter(dir, conf); ExpirationTimeDeletionPolicy policy = (ExpirationTimeDeletionPolicy) writer.getConfig().getIndexDeletionPolicy(); @@ -253,8 +252,7 @@ public void testExpirationTimeDeletionPolicy() throws IOException, InterruptedEx newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.APPEND) .setIndexDeletionPolicy(policy); - mp = conf.getMergePolicy(); - mp.setNoCFSRatio(1.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(true); writer = new IndexWriter(dir, conf); policy = (ExpirationTimeDeletionPolicy) writer.getConfig().getIndexDeletionPolicy(); for (int j = 
0; j < 17; j++) { @@ -332,8 +330,7 @@ public void testKeepAllDeletionPolicy() throws IOException { .setIndexDeletionPolicy(new KeepAllDeletionPolicy(dir)) .setMaxBufferedDocs(10) .setMergeScheduler(new SerialMergeScheduler()); - MergePolicy mp = conf.getMergePolicy(); - mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(useCompoundFile); IndexWriter writer = new IndexWriter(dir, conf); KeepAllDeletionPolicy policy = (KeepAllDeletionPolicy) writer.getConfig().getIndexDeletionPolicy(); @@ -353,8 +350,7 @@ public void testKeepAllDeletionPolicy() throws IOException { newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.APPEND) .setIndexDeletionPolicy(policy); - mp = conf.getMergePolicy(); - mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(useCompoundFile); if (VERBOSE) { System.out.println("TEST: open writer for forceMerge"); } @@ -549,8 +545,7 @@ public void testKeepNoneOnInitDeletionPolicy() throws IOException { .setOpenMode(OpenMode.CREATE) .setIndexDeletionPolicy(new KeepNoneOnInitDeletionPolicy()) .setMaxBufferedDocs(10); - MergePolicy mp = conf.getMergePolicy(); - mp.setNoCFSRatio(useCompoundFile ? 
1.0 : 0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(useCompoundFile); IndexWriter writer = new IndexWriter(dir, conf); KeepNoneOnInitDeletionPolicy policy = (KeepNoneOnInitDeletionPolicy) writer.getConfig().getIndexDeletionPolicy(); @@ -563,8 +558,7 @@ public void testKeepNoneOnInitDeletionPolicy() throws IOException { newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.APPEND) .setIndexDeletionPolicy(policy); - mp = conf.getMergePolicy(); - mp.setNoCFSRatio(1.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(true); writer = new IndexWriter(dir, conf); policy = (KeepNoneOnInitDeletionPolicy) writer.getConfig().getIndexDeletionPolicy(); writer.forceMerge(1); @@ -603,8 +597,7 @@ public void testKeepLastNDeletionPolicy() throws IOException { .setOpenMode(OpenMode.CREATE) .setIndexDeletionPolicy(policy) .setMaxBufferedDocs(10); - MergePolicy mp = conf.getMergePolicy(); - mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(useCompoundFile); IndexWriter writer = new IndexWriter(dir, conf); policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy(); for (int i = 0; i < 17; i++) { @@ -661,8 +654,7 @@ public void testKeepLastNDeletionPolicyWithCreates() throws IOException { .setOpenMode(OpenMode.CREATE) .setIndexDeletionPolicy(new KeepLastNDeletionPolicy(N)) .setMaxBufferedDocs(10); - MergePolicy mp = conf.getMergePolicy(); - mp.setNoCFSRatio(useCompoundFile ? 1.0 : 0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(useCompoundFile); IndexWriter writer = new IndexWriter(dir, conf); KeepLastNDeletionPolicy policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy(); @@ -677,8 +669,7 @@ public void testKeepLastNDeletionPolicyWithCreates() throws IOException { .setOpenMode(OpenMode.APPEND) .setIndexDeletionPolicy(policy) .setMaxBufferedDocs(10); - mp = conf.getMergePolicy(); - mp.setNoCFSRatio(useCompoundFile ? 
1.0 : 0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(useCompoundFile); writer = new IndexWriter(dir, conf); policy = (KeepLastNDeletionPolicy) writer.getConfig().getIndexDeletionPolicy(); for (int j = 0; j < 17; j++) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java index c08814ae750e..cc6cac1ebd69 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestDemoParallelLeafReader.java @@ -738,13 +738,6 @@ public MergeSpecification findFullFlushMerges( return wrap(in.findFullFlushMerges(mergeTrigger, segmentInfos, mergeContext)); } - @Override - public boolean useCompoundFile( - SegmentInfos segments, SegmentCommitInfo newSegment, MergeContext mergeContext) - throws IOException { - return in.useCompoundFile(segments, newSegment, mergeContext); - } - @Override public String toString() { return "ReindexingMergePolicy(" + in + ")"; diff --git a/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java b/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java index 55dc58a6129b..9dc6b9c13671 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestFieldsReader.java @@ -73,8 +73,8 @@ public static void beforeClass() throws Exception { dir = newDirectory(); IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()); - conf.getMergePolicy().setNoCFSRatio(0.0); IndexWriter writer = new IndexWriter(dir, conf); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); writer.addDocument(testDoc); writer.close(); } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java index 
e62ca8235a24..5dd19c4b8184 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexFileDeleter.java @@ -51,11 +51,7 @@ public class TestIndexFileDeleter extends LuceneTestCase { public void testDeleteLeftoverFiles() throws IOException { Directory dir = newDirectory(); - MergePolicy mergePolicy = newLogMergePolicy(true, 10); - - // This test expects all of its segments to be in CFS - mergePolicy.setNoCFSRatio(1.0); - mergePolicy.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); + MergePolicy mergePolicy = newLogMergePolicy(10); IndexWriter writer = new IndexWriter( @@ -64,12 +60,15 @@ public void testDeleteLeftoverFiles() throws IOException { .setMaxBufferedDocs(10) .setMergePolicy(mergePolicy) .setUseCompoundFile(true)); + // This test expects all of its segments to be in CFS + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(true); + writer.getConfig().getCodec().compoundFormat().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); int i; for (i = 0; i < 35; i++) { addDoc(writer, i); } - writer.getConfig().getMergePolicy().setNoCFSRatio(0.0); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); writer.getConfig().setUseCompoundFile(false); for (; i < 45; i++) { addDoc(writer, i); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java index dcfdc9d4b0c1..54b1ea866f07 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java @@ -1243,11 +1243,7 @@ public void testDeleteUnusedFiles() throws Exception { // handles open. 
FSDirectory dir = new NIOFSDirectory(indexPath); - MergePolicy mergePolicy = newLogMergePolicy(true); - - // This test expects all of its segments to be in CFS - mergePolicy.setNoCFSRatio(1.0); - mergePolicy.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); + MergePolicy mergePolicy = newLogMergePolicy(); IndexWriter w = new IndexWriter( @@ -1255,6 +1251,9 @@ public void testDeleteUnusedFiles() throws Exception { newIndexWriterConfig(new MockAnalyzer(random())) .setMergePolicy(mergePolicy) .setUseCompoundFile(true)); + // This test expects all of its segments to be in CFS + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(true); + w.getConfig().getCodec().compoundFormat().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); Document doc = new Document(); doc.add(newTextField("field", "go", Field.Store.NO)); w.addDocument(doc); @@ -1462,7 +1461,7 @@ public void testNoUnwantedTVFiles() throws Exception { newIndexWriterConfig(new MockAnalyzer(random())) .setRAMBufferSizeMB(0.01) .setMergePolicy(newLogMergePolicy())); - indexWriter.getConfig().getMergePolicy().setNoCFSRatio(0.0); + indexWriter.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); String BIG = "alskjhlaksjghlaksjfhalksvjepgjioefgjnsdfjgefgjhelkgjhqewlrkhgwlekgrhwelkgjhwelkgrhwlkejg"; diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java index 3be515c1c896..53478b34c2ce 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java @@ -863,7 +863,6 @@ public void eval(MockDirectoryWrapper dir) throws IOException { .setMergePolicy(newLogMergePolicy())); MergePolicy lmp = modifier.getConfig().getMergePolicy(); - lmp.setNoCFSRatio(1.0); dir.failOn(failure.reset()); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java 
b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java index 72f4e4f595eb..b6591517c6aa 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterExceptions.java @@ -1332,12 +1332,11 @@ public void testSimulatedCorruptIndex2() throws IOException { new IndexWriter( dir, newIndexWriterConfig(new MockAnalyzer(random())) - .setMergePolicy(newLogMergePolicy(true)) + .setMergePolicy(newLogMergePolicy()) .setUseCompoundFile(true)); - MergePolicy lmp = writer.getConfig().getMergePolicy(); // Force creation of CFS: - lmp.setNoCFSRatio(1.0); - lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(true); + writer.getConfig().getCodec().compoundFormat().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); // add 100 documents for (int i = 0; i < 100; i++) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java index d096f06bcce0..b240e107fde7 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java @@ -373,11 +373,6 @@ private void checkInvariants(IndexWriter writer) throws IOException { private static final double EPSILON = 1E-14; - public void testSetters() { - assertSetters(new LogByteSizeMergePolicy()); - assertSetters(new MockMergePolicy()); - } - // Test basic semantics of merge on commit public void testMergeOnCommit() throws IOException { Directory dir = newDirectory(); @@ -467,27 +462,6 @@ public void testMergeOnCommitWithEventListener() throws IOException { dir.close(); } - private void assertSetters(MergePolicy lmp) { - lmp.setMaxCFSSegmentSizeMB(2.0); - assertEquals(2.0, lmp.getMaxCFSSegmentSizeMB(), EPSILON); - - 
lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); - assertEquals( - Long.MAX_VALUE / 1024. / 1024., lmp.getMaxCFSSegmentSizeMB(), EPSILON * Long.MAX_VALUE); - - lmp.setMaxCFSSegmentSizeMB(Long.MAX_VALUE / 1024. / 1024.); - assertEquals( - Long.MAX_VALUE / 1024. / 1024., lmp.getMaxCFSSegmentSizeMB(), EPSILON * Long.MAX_VALUE); - - expectThrows( - IllegalArgumentException.class, - () -> { - lmp.setMaxCFSSegmentSizeMB(-2.0); - }); - - // TODO: Add more checks for other non-double setters! - } - public void testCarryOverNewDeletesOnCommit() throws IOException, InterruptedException { try (Directory directory = newDirectory()) { boolean useSoftDeletes = random().nextBoolean(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java index 745f51d5d096..6c7316ce082e 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterOnDiskFull.java @@ -266,7 +266,8 @@ public void testAddIndexOnDiskFull() throws IOException { IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.APPEND) - .setMergePolicy(newLogMergePolicy(false)); + .setMergePolicy(newLogMergePolicy()); + iwc.getCodec().compoundFormat().setShouldUseCompoundFile(false); writer = new IndexWriter(dir, iwc); Exception err = null; @@ -387,7 +388,8 @@ public void testAddIndexOnDiskFull() throws IOException { dir, newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.APPEND) - .setMergePolicy(newLogMergePolicy(false))); + .setMergePolicy(newLogMergePolicy())); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); } if (VERBOSE) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java b/lucene/core/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java index ad2ad8074c29..fa8320ec1f78 100644 
--- a/lucene/core/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestNRTReaderWithThreads.java @@ -39,7 +39,8 @@ public void testIndexing() throws Exception { ensureSaneIWCOnNightly( newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(10) - .setMergePolicy(newLogMergePolicy(false, 2)))); + .setMergePolicy(newLogMergePolicy(2)))); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); IndexReader reader = DirectoryReader.open(writer); // start pooling readers reader.close(); int numThreads = TEST_NIGHTLY ? 4 : 2; diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java index 1d088ca7dd36..408a9db31705 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitNorms.java @@ -149,7 +149,7 @@ public void testNoNrmFile() throws Throwable { .setMergePolicy(newLogMergePolicy())); LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy(); lmp.setMergeFactor(2); - lmp.setNoCFSRatio(0.0); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); Document d = new Document(); FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java index cb0a88077467..a20d1117676a 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitPositions.java @@ -129,7 +129,7 @@ public void testNoPrxFile() throws Throwable { .setMergePolicy(newLogMergePolicy())); LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy(); lmp.setMergeFactor(2); - lmp.setNoCFSRatio(0.0); + 
writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); Document d = new Document(); FieldType ft = new FieldType(TextField.TYPE_NOT_STORED); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java index b1bdd87d487b..2b59333c5eef 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestOmitTf.java @@ -141,8 +141,8 @@ public void testNoPrxFile() throws Throwable { .setMergePolicy(newLogMergePolicy())); LogMergePolicy lmp = (LogMergePolicy) writer.getConfig().getMergePolicy(); lmp.setMergeFactor(2); - lmp.setNoCFSRatio(0.0); Document d = new Document(); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); Field f1 = newField("f1", "This field has term freqs", omitType); d.add(f1); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java index c6f79f5ecaaf..9cd2c6a82ddb 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java @@ -271,11 +271,5 @@ public MergeSpecification findForcedDeletesMerges( SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException { return null; } - - @Override - public boolean useCompoundFile( - SegmentInfos segments, SegmentCommitInfo newSegment, MergeContext mergeContext) { - return useCompoundFile; - } } } diff --git a/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java b/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java index 8b99e9f8b0ab..6c55937b5148 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestStressIndexing2.java @@ -158,7 +158,7 @@ public DocsAndWriter indexRandomIWReader(int nThreads, 
int iterations, int range random()); w.commit(); LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy(); - lmp.setNoCFSRatio(0.0); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); lmp.setMergeFactor(mergeFactor); /* * w.setMaxMergeDocs(Integer.MAX_VALUE); @@ -216,7 +216,7 @@ public Map indexRandom( .setMergePolicy(newLogMergePolicy()), random()); LogMergePolicy lmp = (LogMergePolicy) w.getConfig().getMergePolicy(); - lmp.setNoCFSRatio(0.0); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); lmp.setMergeFactor(mergeFactor); threads = new IndexingThread[nThreads]; diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java b/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java index 8442a4a9ca76..29f67c211534 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestSwappedIndexFiles.java @@ -66,10 +66,10 @@ private void indexOneDoc(long seed, Directory dir, Document doc, boolean useCFS) if (useCFS == false) { conf.setUseCompoundFile(false); - conf.getMergePolicy().setNoCFSRatio(0.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(false); } else { conf.setUseCompoundFile(true); - conf.getMergePolicy().setNoCFSRatio(1.0); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(true); } RandomIndexWriter w = new RandomIndexWriter(random, dir, conf); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java b/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java index 8f625736b8fb..2ee0766fbf72 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTermVectorsReader.java @@ -98,9 +98,10 @@ public void setUp() throws Exception { dir, newIndexWriterConfig(new MyAnalyzer()) .setMaxBufferedDocs(-1) - 
.setMergePolicy(newLogMergePolicy(false, 10)) + .setMergePolicy(newLogMergePolicy(10)) .setUseCompoundFile(false)); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); Document doc = new Document(); for (int i = 0; i < testFields.length; i++) { FieldType customType = new FieldType(TextField.TYPE_NOT_STORED); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestTieredMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestTieredMergePolicy.java index 8d9570fa739b..0f6250e60e07 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestTieredMergePolicy.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestTieredMergePolicy.java @@ -792,25 +792,6 @@ public void testSetters() { () -> { tmp.setFloorSegmentMB(-2.0); }); - - tmp.setMaxCFSSegmentSizeMB(2.0); - assertEquals(2.0, tmp.getMaxCFSSegmentSizeMB(), EPSILON); - - tmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); - assertEquals( - Long.MAX_VALUE / 1024. / 1024., tmp.getMaxCFSSegmentSizeMB(), EPSILON * Long.MAX_VALUE); - - tmp.setMaxCFSSegmentSizeMB(Long.MAX_VALUE / 1024. / 1024.); - assertEquals( - Long.MAX_VALUE / 1024. / 1024., tmp.getMaxCFSSegmentSizeMB(), EPSILON * Long.MAX_VALUE); - - expectThrows( - IllegalArgumentException.class, - () -> { - tmp.setMaxCFSSegmentSizeMB(-2.0); - }); - - // TODO: Add more checks for other non-double setters! 
} // LUCENE-5668 diff --git a/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreScorer.java index 0882550144c2..6ddab402c67c 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreScorer.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreScorer.java @@ -180,8 +180,8 @@ static class TestConstantScoreScorerIndex implements AutoCloseable { new RandomIndexWriter( random(), directory, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); - + newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); for (String VALUE : VALUES) { Document doc = new Document(); doc.add(newTextField(FIELD, VALUE, Field.Store.YES)); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestReqOptSumScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestReqOptSumScorer.java index 79795958b4d1..8b325c4a4bfb 100644 --- a/lucene/core/src/test/org/apache/lucene/search/TestReqOptSumScorer.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestReqOptSumScorer.java @@ -58,7 +58,8 @@ private void doTestBasics(Occur reqOccur) throws IOException { newIndexWriterConfig() .setMergePolicy( // retain doc id order - newLogMergePolicy(random().nextBoolean()))); + newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); doc.add(new StringField("f", "foo", Store.NO)); w.addDocument(doc); diff --git a/lucene/core/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java b/lucene/core/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java index bc9ec12774e0..629e820dd5a9 100644 --- a/lucene/core/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java +++ 
b/lucene/core/src/test/org/apache/lucene/store/TestFileSwitchDirectory.java @@ -60,9 +60,10 @@ public void testBasic() throws IOException { new IndexWriter( fsd, new IndexWriterConfig(new MockAnalyzer(random())) - .setMergePolicy(newLogMergePolicy(false)) + .setMergePolicy(newLogMergePolicy()) .setCodec(TestUtil.getDefaultCodec()) .setUseCompoundFile(false)); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); TestIndexWriterReader.createIndexNoClose(true, "ram", writer); IndexReader reader = DirectoryReader.open(writer); assertEquals(100, reader.maxDoc()); diff --git a/lucene/test-framework/src/java/org/apache/lucene/tests/index/BaseIndexFileFormatTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/tests/index/BaseIndexFileFormatTestCase.java index db669cd59472..fef3f5be041f 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/tests/index/BaseIndexFileFormatTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/tests/index/BaseIndexFileFormatTestCase.java @@ -270,11 +270,11 @@ public void testMergeStability() throws Exception { // do not use newMergePolicy that might return a MockMergePolicy that ignores the no-CFS ratio // do not use RIW which will change things up! 
MergePolicy mp = newTieredMergePolicy(); - mp.setNoCFSRatio(0); IndexWriterConfig cfg = new IndexWriterConfig(new MockAnalyzer(random())) .setUseCompoundFile(false) .setMergePolicy(mp); + cfg.getCodec().compoundFormat().setShouldUseCompoundFile(false); if (VERBOSE) { cfg.setInfoStream(System.out); } @@ -292,11 +292,11 @@ public void testMergeStability() throws Exception { Directory dir2 = applyCreatedVersionMajor(newDirectory()); mp = newTieredMergePolicy(); - mp.setNoCFSRatio(0); cfg = new IndexWriterConfig(new MockAnalyzer(random())) .setUseCompoundFile(false) .setMergePolicy(mp); + cfg.getCodec().compoundFormat().setShouldUseCompoundFile(false); w = new IndexWriter(dir2, cfg); TestUtil.addIndexesSlowly(w, reader); diff --git a/lucene/test-framework/src/java/org/apache/lucene/tests/index/BaseMergePolicyTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/tests/index/BaseMergePolicyTestCase.java index cc4180a2a079..6bf1854e6567 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/tests/index/BaseMergePolicyTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/tests/index/BaseMergePolicyTestCase.java @@ -102,11 +102,11 @@ public synchronized void merge(MergeSource mergeSource, MergeTrigger trigger) assumeFalse( "this test cannot tolerate random forceMerges", mp.toString().contains("MockRandomMergePolicy")); - mp.setNoCFSRatio(random().nextBoolean() ? 
0 : 1); IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random())); iwc.setMergeScheduler(mergeScheduler); iwc.setMergePolicy(mp); + iwc.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); IndexWriter writer = new IndexWriter(dir, iwc); final int numSegments = TestUtil.nextInt(random(), 2, 20); diff --git a/lucene/test-framework/src/java/org/apache/lucene/tests/index/MockRandomMergePolicy.java b/lucene/test-framework/src/java/org/apache/lucene/tests/index/MockRandomMergePolicy.java index d3f202ad9dcb..f25e46ed77b2 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/tests/index/MockRandomMergePolicy.java +++ b/lucene/test-framework/src/java/org/apache/lucene/tests/index/MockRandomMergePolicy.java @@ -184,14 +184,6 @@ public MergeSpecification findFullFlushMerges( return null; } - @Override - public boolean useCompoundFile( - SegmentInfos infos, SegmentCommitInfo mergedInfo, MergeContext mergeContext) - throws IOException { - // 80% of the time we create CFS: - return random.nextInt(5) != 1; - } - static class MockRandomOneMerge extends OneMerge { final Random r; diff --git a/lucene/test-framework/src/java/org/apache/lucene/tests/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/tests/util/LuceneTestCase.java index 8ced7b7cf827..34049621d8fd 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/tests/util/LuceneTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/tests/util/LuceneTestCase.java @@ -94,6 +94,7 @@ import java.util.regex.Pattern; import junit.framework.AssertionFailedError; import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.codecs.CompoundFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.bitvectors.HnswBitVectorsFormat; import org.apache.lucene.codecs.hnsw.FlatVectorsFormat; @@ -920,6 +921,7 @@ public static IndexWriterConfig newIndexWriterConfig(Analyzer a) { /** create a new index writer config 
with random defaults using the specified random */ public static IndexWriterConfig newIndexWriterConfig(Random r, Analyzer a) { IndexWriterConfig c = new IndexWriterConfig(a); + configureRandom(r, c); c.setSimilarity(classEnvRule.similarity); if (VERBOSE) { // Even though TestRuleSetupAndRestoreClassEnv calls @@ -1056,21 +1058,17 @@ public static LogMergePolicy newLogMergePolicy(Random r) { } else { logmp.setMergeFactor(TestUtil.nextInt(r, 10, 50)); } - configureRandom(r, logmp); return logmp; } - private static void configureRandom(Random r, MergePolicy mergePolicy) { - if (r.nextBoolean()) { - mergePolicy.setNoCFSRatio(0.1 + r.nextDouble() * 0.8); - } else { - mergePolicy.setNoCFSRatio(r.nextBoolean() ? 1.0 : 0.0); - } + private static void configureRandom(Random r, IndexWriterConfig iwc) { + CompoundFormat compoundFormat = iwc.getCodec().compoundFormat(); + compoundFormat.setShouldUseCompoundFile(r.nextBoolean()); if (rarely(r)) { - mergePolicy.setMaxCFSSegmentSizeMB(0.2 + r.nextDouble() * 2.0); + compoundFormat.setMaxCFSSegmentSizeMB(0.2 + r.nextDouble() * 2.0); } else { - mergePolicy.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); + compoundFormat.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); } } @@ -1094,24 +1092,10 @@ public static TieredMergePolicy newTieredMergePolicy(Random r) { tmp.setTargetSearchConcurrency(TestUtil.nextInt(r, 2, 20)); } - configureRandom(r, tmp); tmp.setDeletesPctAllowed(20 + random().nextDouble() * 30); return tmp; } - public static MergePolicy newLogMergePolicy(boolean useCFS) { - MergePolicy logmp = newLogMergePolicy(); - logmp.setNoCFSRatio(useCFS ? 1.0 : 0.0); - return logmp; - } - - public static LogMergePolicy newLogMergePolicy(boolean useCFS, int mergeFactor) { - LogMergePolicy logmp = newLogMergePolicy(); - logmp.setNoCFSRatio(useCFS ? 
1.0 : 0.0); - logmp.setMergeFactor(mergeFactor); - return logmp; - } - public static LogMergePolicy newLogMergePolicy(int mergeFactor) { LogMergePolicy logmp = newLogMergePolicy(); logmp.setMergeFactor(mergeFactor); @@ -1198,7 +1182,6 @@ public static void maybeChangeLiveIndexWriterConfig(Random r, LiveIndexWriterCon if (rarely(r)) { MergePolicy mp = c.getMergePolicy(); - configureRandom(r, mp); if (mp instanceof LogMergePolicy logmp) { logmp.setCalibrateSizeByDeletes(r.nextBoolean()); if (rarely(r)) { @@ -1219,7 +1202,6 @@ public static void maybeChangeLiveIndexWriterConfig(Random r, LiveIndexWriterCon } else { tmp.setSegmentsPerTier(TestUtil.nextInt(r, 10, 50)); } - configureRandom(r, tmp); tmp.setDeletesPctAllowed(20 + random().nextDouble() * 30); } didChange = true; @@ -3149,7 +3131,6 @@ protected static IndexWriterConfig ensureSaneIWCOnNightly(IndexWriterConfig conf // and might use many per-field codecs. turn on CFS for IW flushes // and ensure CFS ratio is reasonable to keep it contained. conf.setUseCompoundFile(true); - mp.setNoCFSRatio(Math.max(0.25d, mp.getNoCFSRatio())); } return conf; } diff --git a/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java index 073da0d3a5bc..b6521f80bc84 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java +++ b/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java @@ -1330,6 +1330,18 @@ public static Codec getDefaultCodec() { return new Lucene104Codec(); } + /** + * Returns the actual default codec (e.g. LuceneMNCodec) for this version of Lucene. This may be + * different from {@link Codec#getDefault()} because that is randomized. 
+ * @param shouldUseCfs whether the returned codec's compound format should write compound files + * @return the default codec with its {@link CompoundFormat} configured accordingly + */ + public static Codec getDefaultCodec(boolean shouldUseCfs) { + Codec codec = getDefaultCodec(); + codec.compoundFormat().setShouldUseCompoundFile(shouldUseCfs); + return codec; + } + /** * Returns the actual default postings format (e.g. LuceneMNPostingsFormat) for this version of * Lucene. @@ -1440,7 +1452,7 @@ public static void addIndexesSlowly(IndexWriter writer, DirectoryReader... reade public static void reduceOpenFiles(IndexWriter w) { // keep number of open files lowish MergePolicy mp = w.getConfig().getMergePolicy(); - mp.setNoCFSRatio(1.0); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(true); if (mp instanceof LogMergePolicy lmp) { lmp.setMergeFactor(Math.min(5, lmp.getMergeFactor())); } else if (mp instanceof TieredMergePolicy tmp) { From 98ea06eab0c9e337f4bbd0c2656d9e41b6faa7e0 Mon Sep 17 00:00:00 2001 From: shubharm Date: Tue, 7 Oct 2025 14:04:43 +0100 Subject: [PATCH 3/7] Fixed tests --- .../lucene80/BaseLucene80DocValuesFormatTestCase.java | 9 ++++++--- .../TestBasicBackwardsCompatibility.java | 8 +++----- .../TestIndexSortBackwardsCompatibility.java | 4 ++-- .../TestMoreTermsBackwardsCompatibility.java | 4 ++-- .../benchmark/byTask/tasks/CreateIndexTask.java | 2 +- .../lucene/benchmark/byTask/TestPerfTasksLogic.java | 2 +- .../java/org/apache/lucene/codecs/CompoundFormat.java | 4 ++++ .../org/apache/lucene/codecs/TestMinimalCodec.java | 6 +++--- .../apache/lucene/index/TestIndexWriterConfig.java | 10 ++++------ .../lucene/search/join/TestBlockJoinBulkScorer.java | 11 ++++++----- .../lucene/search/join/TestBlockJoinScorer.java | 6 ++++-- .../lucene/misc/index/TestBPIndexReorderer.java | 9 ++++++--- .../apache/lucene/misc/index/TestIndexSplitter.java | 4 ++-- .../sandbox/index/TestMergeOnFlushMergePolicy.java | 2 -- 14 files changed, 44 insertions(+), 37 deletions(-) diff --git 
a/lucene/backward-codecs/src/test/org/apache/lucene/backward_codecs/lucene80/BaseLucene80DocValuesFormatTestCase.java b/lucene/backward-codecs/src/test/org/apache/lucene/backward_codecs/lucene80/BaseLucene80DocValuesFormatTestCase.java index 376b72d79a02..f77a950f0c1b 100644 --- a/lucene/backward-codecs/src/test/org/apache/lucene/backward_codecs/lucene80/BaseLucene80DocValuesFormatTestCase.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/backward_codecs/lucene80/BaseLucene80DocValuesFormatTestCase.java @@ -697,7 +697,8 @@ private IndexWriter createFastIndexWriter(Directory dir, int maxBufferedDocs) th IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(maxBufferedDocs); conf.setRAMBufferSizeMB(-1); - conf.setMergePolicy(newLogMergePolicy(random().nextBoolean())); + conf.setMergePolicy(newLogMergePolicy()); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); return new IndexWriter(dir, conf); } @@ -727,7 +728,8 @@ private void doTestSortedNumericBlocksOfVariousBitsPerValue(LongSupplier counts) conf.setMaxBufferedDocs(atLeast(Lucene80DocValuesFormat.NUMERIC_BLOCK_SIZE)); conf.setRAMBufferSizeMB(-1); // so Lucene docids are predictable / stay in order - conf.setMergePolicy(newLogMergePolicy(random().nextBoolean())); + conf.setMergePolicy(newLogMergePolicy()); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); IndexWriter writer = new IndexWriter(dir, conf); final int numDocs = atLeast(Lucene80DocValuesFormat.NUMERIC_BLOCK_SIZE * 3); @@ -797,7 +799,8 @@ private void doTestSparseNumericBlocksOfVariousBitsPerValue(double density) thro IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setMaxBufferedDocs(atLeast(Lucene80DocValuesFormat.NUMERIC_BLOCK_SIZE)); conf.setRAMBufferSizeMB(-1); - conf.setMergePolicy(newLogMergePolicy(random().nextBoolean())); + conf.setMergePolicy(newLogMergePolicy()); + 
conf.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); Field storedField = newStringField("stored", "", Field.Store.YES); diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestBasicBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestBasicBackwardsCompatibility.java index 77dc6d2412c0..e3818b3f5cbf 100644 --- a/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestBasicBackwardsCompatibility.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestBasicBackwardsCompatibility.java @@ -125,15 +125,14 @@ protected void createIndex(Directory directory) throws IOException { } static void createIndex(Directory dir, boolean doCFS, boolean fullyMerged) throws IOException { - LogByteSizeMergePolicy mp = new LogByteSizeMergePolicy(); - mp.setNoCFSRatio(doCFS ? 1.0 : 0.0); - mp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); // TODO: remove randomness IndexWriterConfig conf = new IndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(10) .setCodec(TestUtil.getDefaultCodec()) .setMergePolicy(NoMergePolicy.INSTANCE); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(doCFS); + conf.getCodec().compoundFormat().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); IndexWriter writer = new IndexWriter(dir, conf); for (int i = 0; i < DOCS_COUNT; i++) { @@ -147,14 +146,13 @@ static void createIndex(Directory dir, boolean doCFS, boolean fullyMerged) throw if (!fullyMerged) { // open fresh writer so we get no prx file in the added segment - mp = new LogByteSizeMergePolicy(); - mp.setNoCFSRatio(doCFS ? 
1.0 : 0.0); // TODO: remove randomness conf = new IndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(10) .setCodec(TestUtil.getDefaultCodec()) .setMergePolicy(NoMergePolicy.INSTANCE); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(doCFS); writer = new IndexWriter(dir, conf); addNoProxDoc(writer); writer.close(); diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestIndexSortBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestIndexSortBackwardsCompatibility.java index 573018f82318..4947a021bec7 100644 --- a/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestIndexSortBackwardsCompatibility.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestIndexSortBackwardsCompatibility.java @@ -153,8 +153,6 @@ public void testSortedIndex() throws Exception { @Override protected void createIndex(Directory directory) throws IOException { LogByteSizeMergePolicy mp = new LogByteSizeMergePolicy(); - mp.setNoCFSRatio(1.0); - mp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); MockAnalyzer analyzer = new MockAnalyzer(random()); // Don't filter out tokens that are too short because we use those tokens in assertions (#14344) @@ -162,6 +160,8 @@ protected void createIndex(Directory directory) throws IOException { // TODO: remove randomness IndexWriterConfig conf = new IndexWriterConfig(analyzer); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(true); + conf.getCodec().compoundFormat().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); conf.setMergePolicy(mp); conf.setUseCompoundFile(false); conf.setCodec(TestUtil.getDefaultCodec()); diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestMoreTermsBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestMoreTermsBackwardsCompatibility.java index 6b33eeb5add9..d56f6c6e23a8 100644 --- 
a/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestMoreTermsBackwardsCompatibility.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestMoreTermsBackwardsCompatibility.java @@ -57,8 +57,6 @@ public static Iterable testVersionsFactory() { @Override protected void createIndex(Directory directory) throws IOException { LogByteSizeMergePolicy mp = new LogByteSizeMergePolicy(); - mp.setNoCFSRatio(1.0); - mp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); MockAnalyzer analyzer = new MockAnalyzer(random()); analyzer.setMaxTokenLength(TestUtil.nextInt(random(), 1, IndexWriter.MAX_TERM_LENGTH)); @@ -67,6 +65,8 @@ protected void createIndex(Directory directory) throws IOException { .setMergePolicy(mp) .setCodec(TestUtil.getDefaultCodec()) .setUseCompoundFile(false); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(true); + conf.getCodec().compoundFormat().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); IndexWriter writer = new IndexWriter(directory, conf); LineFileDocs docs = new LineFileDocs(new Random(0)); for (int i = 0; i < 50; i++) { diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java index defa00856d5e..3159a7db4416 100644 --- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java +++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java @@ -180,7 +180,7 @@ public PostingsFormat postingsFormat() { throw new RuntimeException( "unable to instantiate class '" + mergePolicy + "' as merge policy", e); } - iwConf.getMergePolicy().setNoCFSRatio(isCompound ? 
1.0 : 0.0); + iwConf.getCodec().compoundFormat().setShouldUseCompoundFile(isCompound); if (iwConf.getMergePolicy() instanceof LogMergePolicy) { LogMergePolicy logMergePolicy = (LogMergePolicy) iwConf.getMergePolicy(); logMergePolicy.setMergeFactor( diff --git a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java index 35ee9e3b2a14..7a19f1559b12 100644 --- a/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java +++ b/lucene/benchmark/src/test/org/apache/lucene/benchmark/byTask/TestPerfTasksLogic.java @@ -642,7 +642,7 @@ public void testIndexWriterSettings() throws Exception { assertEquals( IndexWriterConfig.DISABLE_AUTO_FLUSH, (int) writer.getConfig().getRAMBufferSizeMB()); assertEquals(3, ((LogMergePolicy) writer.getConfig().getMergePolicy()).getMergeFactor()); - assertEquals(0.0d, writer.getConfig().getMergePolicy().getNoCFSRatio(), 0.0); + assertFalse(writer.getConfig().getCodec().compoundFormat().getShouldUseCompoundFile()); writer.close(); Directory dir = benchmark.getRunData().getDirectory(); IndexReader reader = DirectoryReader.open(dir); diff --git a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java index 9f80f106855f..95e9af54bd41 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java @@ -74,6 +74,10 @@ public void setShouldUseCompoundFile(boolean useCompoundFile) { this.shouldUseCompoundFile = useCompoundFile; } + public boolean getShouldUseCompoundFile() { + return this.shouldUseCompoundFile; + } + /** Returns the largest size allowed for a compound file segment */ public double getMaxCFSSegmentSizeMB() { return maxCFSSegmentSize / 1024. 
/ 1024.; diff --git a/lucene/core/src/test/org/apache/lucene/codecs/TestMinimalCodec.java b/lucene/core/src/test/org/apache/lucene/codecs/TestMinimalCodec.java index adeb5bd7cc79..54b712cb1744 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/TestMinimalCodec.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/TestMinimalCodec.java @@ -51,8 +51,8 @@ private void runMinimalCodecTest(boolean useCompoundFile) throws IOException { if (!useCompoundFile) { // Avoid using MockMP as it randomly enables compound file creation writerConfig.setMergePolicy(newMergePolicy(random(), false)); - writerConfig.getMergePolicy().setNoCFSRatio(0.0); - writerConfig.getMergePolicy().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); + writerConfig.getCodec().compoundFormat().setShouldUseCompoundFile(false); + writerConfig.getCodec().compoundFormat().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); } try (IndexWriter writer = new IndexWriter(dir, writerConfig)) { @@ -102,7 +102,7 @@ public SegmentInfoFormat segmentInfoFormat() { @Override public CompoundFormat compoundFormat() { - throw new UnsupportedOperationException(); + return wrappedCodec.compoundFormat(); } @Override diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java index 7f4258d18fa4..9119aaff865f 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterConfig.java @@ -284,10 +284,10 @@ public void testInvalidValues() throws Exception { public void testLiveChangeToCFS() throws Exception { Directory dir = newDirectory(); IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random())); - iwc.setMergePolicy(newLogMergePolicy(true)); + iwc.setMergePolicy(newLogMergePolicy()); // Start false: iwc.setUseCompoundFile(false); - iwc.getMergePolicy().setNoCFSRatio(0.0d); + 
iwc.getCodec().compoundFormat().setShouldUseCompoundFile(false); IndexWriter w = new IndexWriter(dir, iwc); // Change to true: w.getConfig().setUseCompoundFile(true); @@ -307,10 +307,8 @@ public void testLiveChangeToCFS() throws Exception { // no compound files after merge assertFalse("Expected Non-CFS after merge", w.newestSegment().info.getUseCompoundFile()); - MergePolicy lmp = w.getConfig().getMergePolicy(); - lmp.setNoCFSRatio(1.0); - lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); - + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(true); + w.getConfig().getCodec().compoundFormat().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); w.addDocument(doc); w.forceMerge(1); w.commit(); diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinBulkScorer.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinBulkScorer.java index b9580331347f..e526fbcd91eb 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinBulkScorer.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinBulkScorer.java @@ -326,7 +326,8 @@ public void testScoreRandomIndices() throws IOException { newIndexWriterConfig() .setMergePolicy( // retain doc id order - newLogMergePolicy(random().nextBoolean())))) { + newLogMergePolicy()))) { + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); expectedMatches = populateRandomIndex( @@ -371,8 +372,8 @@ public void testSetMinCompetitiveScoreWithScoreModeMax() throws IOException { newIndexWriterConfig() .setMergePolicy( // retain doc id order - newLogMergePolicy(random().nextBoolean())))) { - + newLogMergePolicy()))) { + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); populateStaticIndex(w); w.forceMerge(1); } @@ -441,8 +442,8 @@ public void testSetMinCompetitiveScoreWithScoreModeNone() throws IOException { newIndexWriterConfig() .setMergePolicy( // retain doc id order - 
newLogMergePolicy(random().nextBoolean())))) { - + newLogMergePolicy()))) { + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); populateStaticIndex(w); w.forceMerge(1); } diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinScorer.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinScorer.java index 4f7b13e8a6a7..748b22bce61d 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinScorer.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinScorer.java @@ -55,7 +55,8 @@ public void testScoreNone() throws IOException { newIndexWriterConfig() .setMergePolicy( // retain doc id order - newLogMergePolicy(random().nextBoolean()))); + newLogMergePolicy())); + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); List docs = new ArrayList<>(); for (int i = 0; i < 10; i++) { docs.clear(); @@ -134,8 +135,9 @@ public void testScoreMax() throws IOException { newIndexWriterConfig() .setMergePolicy( // retain doc id order - newLogMergePolicy(random().nextBoolean())))) { + newLogMergePolicy()))) { + w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); for (String[][] values : Arrays.asList( new String[][] {{"A", "B"}, {"A", "B", "C"}}, diff --git a/lucene/misc/src/test/org/apache/lucene/misc/index/TestBPIndexReorderer.java b/lucene/misc/src/test/org/apache/lucene/misc/index/TestBPIndexReorderer.java index b7da3088df2c..bd56a0b5f1dd 100644 --- a/lucene/misc/src/test/org/apache/lucene/misc/index/TestBPIndexReorderer.java +++ b/lucene/misc/src/test/org/apache/lucene/misc/index/TestBPIndexReorderer.java @@ -70,7 +70,8 @@ public void doTestSingleTerm(ForkJoinPool pool) throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter( - dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + dir, 
newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); StoredField idField = new StoredField("id", ""); doc.add(idField); @@ -161,7 +162,8 @@ private void doTestSingleTermWithBlocks(ForkJoinPool pool) throws IOException { dir, newIndexWriterConfig() .setParentField("parent") - .setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + .setMergePolicy(newLogMergePolicy())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); w.addDocuments(createBlock("1", "lucene", "search", "lucene")); // 0-2 w.addDocuments(createBlock("2", "lucene")); // 3 @@ -256,7 +258,8 @@ public void testMultiTerm() throws IOException { Directory dir = newDirectory(); IndexWriter w = new IndexWriter( - dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy(random().nextBoolean()))); + dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); StoredField idField = new StoredField("id", ""); doc.add(idField); diff --git a/lucene/misc/src/test/org/apache/lucene/misc/index/TestIndexSplitter.java b/lucene/misc/src/test/org/apache/lucene/misc/index/TestIndexSplitter.java index 4f4674a0797d..ddb975582b4f 100644 --- a/lucene/misc/src/test/org/apache/lucene/misc/index/TestIndexSplitter.java +++ b/lucene/misc/src/test/org/apache/lucene/misc/index/TestIndexSplitter.java @@ -43,14 +43,14 @@ public void test() throws Exception { } MergePolicy mergePolicy = new LogByteSizeMergePolicy(); - mergePolicy.setNoCFSRatio(1.0); - mergePolicy.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); IndexWriter iw = new IndexWriter( fsDir, new IndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.CREATE) .setMergePolicy(mergePolicy)); + 
iw.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(true); + iw.getConfig().getCodec().compoundFormat().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); for (int x = 0; x < 100; x++) { Document doc = DocHelper.createDocument(x, "index", 5); iw.addDocument(doc); diff --git a/lucene/sandbox/src/test/org/apache/lucene/sandbox/index/TestMergeOnFlushMergePolicy.java b/lucene/sandbox/src/test/org/apache/lucene/sandbox/index/TestMergeOnFlushMergePolicy.java index b3a42a355534..014e307b7eea 100644 --- a/lucene/sandbox/src/test/org/apache/lucene/sandbox/index/TestMergeOnFlushMergePolicy.java +++ b/lucene/sandbox/src/test/org/apache/lucene/sandbox/index/TestMergeOnFlushMergePolicy.java @@ -39,8 +39,6 @@ protected MergePolicy mergePolicy() { Random r = random(); MergePolicy mergePolicy = newMergePolicy(); MergeOnFlushMergePolicy mergeOnFlushPolicy = new MergeOnFlushMergePolicy(mergePolicy); - mergeOnFlushPolicy.setMaxCFSSegmentSizeMB(mergePolicy.getMaxCFSSegmentSizeMB()); - mergeOnFlushPolicy.setNoCFSRatio(mergePolicy.getNoCFSRatio()); mergeOnFlushPolicy.setSmallSegmentThresholdMB(TestUtil.nextInt(r, 1, 100)); if (mergePolicy instanceof TieredMergePolicy) { ((TieredMergePolicy) mergePolicy) From f14f19c1861af36502aea4de50798d44970692a6 Mon Sep 17 00:00:00 2001 From: shubharm Date: Tue, 7 Oct 2025 14:30:49 +0100 Subject: [PATCH 4/7] Added tests --- .../apache/lucene/codecs/CompoundFormat.java | 52 +++++----- .../lucene/codecs/TestCompoundFormat.java | 98 +++++++++++++++++++ .../apache/lucene/index/TestAddIndexes.java | 31 +++--- 3 files changed, 139 insertions(+), 42 deletions(-) create mode 100644 lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java diff --git a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java index 95e9af54bd41..dd179b0ef898 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java +++ 
b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java @@ -36,38 +36,32 @@ protected CompoundFormat() {} // TODO: this is very minimal. If we need more methods, // we can add 'producer' classes. + // Default thresholds + static final int DEFAULT_CFS_THRESHOLD_DOC_SIZE = 65536; // docs + static final long DEFAULT_CFS_THRESHOLD_BYTE_SIZE = 64L * 1024 * 1024; // 64MB - static final long CFS_THRESHOLD_DOC_SIZE = 65536; - static final long CFS_THRESHOLD_BYTE_SIZE = 65; - - /** - * Default max segment size in order to use compound file system. Set to {@link Long#MAX_VALUE}. - */ + /** Default max segment size allowed for CFS (bytes) */ static final long DEFAULT_MAX_CFS_SEGMENT_SIZE = Long.MAX_VALUE; - private long CfsThresholdDocSize = CFS_THRESHOLD_DOC_SIZE; - private long CfsThresholdByteSize = CFS_THRESHOLD_BYTE_SIZE; + private int cfsThresholdDocSize = DEFAULT_CFS_THRESHOLD_DOC_SIZE; + private long cfsThresholdByteSize = DEFAULT_CFS_THRESHOLD_BYTE_SIZE; private boolean shouldUseCompoundFile = true; - - /** - * If the size of the merged segment exceeds this value then it will not use compound file format. 
- */ private long maxCFSSegmentSize = DEFAULT_MAX_CFS_SEGMENT_SIZE; - public void setCfsThresholdDocSize(long cfsThresholdDocSize) { - this.CfsThresholdDocSize = cfsThresholdDocSize; + public void setCfsThresholdDocSize(int threshold) { + this.cfsThresholdDocSize = threshold; } - public void setCfsThresholdByteSize(long cfsThresholdByteSize) { - this.CfsThresholdByteSize = cfsThresholdByteSize; + public void setCfsThresholdByteSize(long thresholdBytes) { + this.cfsThresholdByteSize = thresholdBytes; } - public long getCfsThresholdByteSize() { - return this.CfsThresholdByteSize; + public int getCfsThresholdDocSize() { + return this.cfsThresholdDocSize; } - public long getCfsThresholdDocSize() { - return this.CfsThresholdDocSize; + public long getCfsThresholdByteSize() { + return this.cfsThresholdByteSize; } public void setShouldUseCompoundFile(boolean useCompoundFile) { @@ -78,15 +72,14 @@ public boolean getShouldUseCompoundFile() { return this.shouldUseCompoundFile; } - /** Returns the largest size allowed for a compound file segment */ + /** Returns the largest size allowed for a compound file segment (in MB) */ public double getMaxCFSSegmentSizeMB() { return maxCFSSegmentSize / 1024. / 1024.; } /** * If a merged segment will be more than this value, leave the segment as non-compound file even - * if compound file is enabled. Set this to Double.POSITIVE_INFINITY (default) and noCFSRatio to - * 1.0 to always use CFS regardless of merge size. + * if compound file is enabled. Set this to Double.POSITIVE_INFINITY (default) to always use CFS. */ public void setMaxCFSSegmentSizeMB(double v) { if (v < 0.0) { @@ -98,8 +91,10 @@ public void setMaxCFSSegmentSizeMB(double v) { /** * Returns true if a new segment (regardless of its origin) should use the compound file format. 
- * The default implementation returns true iff the size of the given mergedInfo is - * less or equal to {@link #getMaxCFSSegmentSizeMB()} and the size is less or equal to the + * The default implementation uses fixed thresholds: + * - for LogDocMergePolicy, all segments with less than equals to {@link #getCfsThresholdDocSize()} docs; + * - for byte-size-based merge policies, all segments with less than equals to {@link #getCfsThresholdByteSize()} bytes. + * Additionally, segments larger than {@link #maxCFSSegmentSize} will not use CFS. */ public boolean useCompoundFile(long mergedInfoSize, MergePolicy mergePolicy) throws IOException { if (this.shouldUseCompoundFile == false) { @@ -108,12 +103,11 @@ public boolean useCompoundFile(long mergedInfoSize, MergePolicy mergePolicy) thr if (mergedInfoSize > maxCFSSegmentSize) { return false; } - if (mergePolicy instanceof LogDocMergePolicy) { - return mergedInfoSize <= this.CfsThresholdDocSize; + return mergedInfoSize <= this.cfsThresholdDocSize; + } else { + return mergedInfoSize <= this.cfsThresholdByteSize; } - - return mergedInfoSize <= this.CfsThresholdByteSize; } /** Returns a Directory view (read-only) for the compound files in this segment */ diff --git a/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java new file mode 100644 index 000000000000..e48ac4f60964 --- /dev/null +++ b/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.codecs; + +import org.apache.lucene.index.LogDocMergePolicy; +import org.apache.lucene.index.MergePolicy; +import org.apache.lucene.index.TieredMergePolicy; +import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.junit.Before; + +import java.io.IOException; + +public class TestCompoundFormat extends LuceneTestCase { + + private CompoundFormat format; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + format = new CompoundFormat() { + @Override + public CompoundDirectory getCompoundReader(Directory dir, SegmentInfo segmentInfo) { + return null; + } + @Override + public void write(Directory dir, SegmentInfo segmentInfo, IOContext context) { + // No-op + } + }; + } + + public void testDefaultThresholds() throws IOException { + format.setShouldUseCompoundFile(true); + format.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); // Always allow + + MergePolicy docPolicy = new LogDocMergePolicy(); + MergePolicy bytePolicy = new TieredMergePolicy(); + + // Should use compound file for doc policy under threshold + assertTrue(format.useCompoundFile(65536, docPolicy)); + assertFalse(format.useCompoundFile(65537, docPolicy)); + + // Should use compound file for byte policy under threshold (64MB) + assertTrue(format.useCompoundFile(64L * 1024 * 1024, bytePolicy)); + assertFalse(format.useCompoundFile((64L * 1024 * 1024) + 1, bytePolicy)); + } + + public void 
testDisabledCompoundFile() throws IOException { + format.setShouldUseCompoundFile(false); + MergePolicy docPolicy = new LogDocMergePolicy(); + + // Should never use compound file if disabled + assertFalse(format.useCompoundFile(1, docPolicy)); + assertFalse(format.useCompoundFile(65536, docPolicy)); + } + + public void testMaxCFSSegmentSize() throws IOException { + format.setShouldUseCompoundFile(true); + format.setMaxCFSSegmentSizeMB(10); // 10MB + MergePolicy bytePolicy = new TieredMergePolicy(); + + // Should skip CFS if over maxCFSSegmentSize + assertTrue(format.useCompoundFile(9L * 1024 * 1024, bytePolicy)); + assertFalse(format.useCompoundFile(11L * 1024 * 1024, bytePolicy)); + } + + public void testCustomThresholds() throws IOException { + format.setCfsThresholdDocSize(1000); + format.setCfsThresholdByteSize(10 * 1024 * 1024); // 10MB + + MergePolicy docPolicy = new LogDocMergePolicy(); + MergePolicy bytePolicy = new TieredMergePolicy(); + + assertTrue(format.useCompoundFile(1000, docPolicy)); + assertFalse(format.useCompoundFile(1001, docPolicy)); + + assertTrue(format.useCompoundFile(10 * 1024 * 1024, bytePolicy)); + assertFalse(format.useCompoundFile((10 * 1024 * 1024) + 1, bytePolicy)); + } +} diff --git a/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java b/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java index 0943c08ba68b..091168c3b554 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestAddIndexes.java @@ -80,7 +80,8 @@ public void testSimpleCase() throws IOException { aux, newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.CREATE) - .setMergePolicy(newLogMergePolicy(false))); + .setMergePolicy(newLogMergePolicy())); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); // add 40 documents in separate files addDocs(writer, 40); assertEquals(40, writer.getDocStats().maxDoc); @@ -314,7 +315,8 @@ 
public void testAddSelf() throws IOException { newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.CREATE) .setMaxBufferedDocs(1000) - .setMergePolicy(newLogMergePolicy(false))); + .setMergePolicy(newLogMergePolicy())); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); // add 140 documents in separate files addDocs(writer, 40); writer.close(); @@ -324,7 +326,8 @@ public void testAddSelf() throws IOException { newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.CREATE) .setMaxBufferedDocs(1000) - .setMergePolicy(newLogMergePolicy(false))); + .setMergePolicy(newLogMergePolicy())); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); addDocs(writer, 100); writer.close(); @@ -608,7 +611,8 @@ private void setUpDirs(Directory dir, Directory aux, boolean withID) throws IOEx newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.CREATE) .setMaxBufferedDocs(1000) - .setMergePolicy(newLogMergePolicy(false, 10))); + .setMergePolicy(newLogMergePolicy(10))); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); // add 30 documents in 3 segments for (int i = 0; i < 3; i++) { if (withID) { @@ -623,7 +627,8 @@ private void setUpDirs(Directory dir, Directory aux, boolean withID) throws IOEx newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.APPEND) .setMaxBufferedDocs(1000) - .setMergePolicy(newLogMergePolicy(false, 10))); + .setMergePolicy(newLogMergePolicy(10))); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); } assertEquals(30, writer.getDocStats().maxDoc); assertEquals(3, writer.getSegmentCount()); @@ -635,7 +640,6 @@ public void testHangOnClose() throws IOException { Directory dir = newDirectory(); LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy(); - lmp.setNoCFSRatio(0.0); lmp.setMergeFactor(100); IndexWriter writer = new IndexWriter( @@ -643,7 +647,7 @@ public void 
testHangOnClose() throws IOException { newIndexWriterConfig(new MockAnalyzer(random())) .setMaxBufferedDocs(5) .setMergePolicy(lmp)); - + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); Document doc = new Document(); FieldType customType = new FieldType(TextField.TYPE_STORED); customType.setStoreTermVectors(true); @@ -665,7 +669,6 @@ public void testHangOnClose() throws IOException { Directory dir2 = newDirectory(); lmp = new LogByteSizeMergePolicy(); lmp.setMinMergeMB(0.0001); - lmp.setNoCFSRatio(0.0); lmp.setMergeFactor(4); writer = new IndexWriter( @@ -673,6 +676,7 @@ public void testHangOnClose() throws IOException { newIndexWriterConfig(new MockAnalyzer(random())) .setMergeScheduler(new SerialMergeScheduler()) .setMergePolicy(lmp)); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); writer.addIndexes(dir); writer.close(); dir.close(); @@ -1381,7 +1385,8 @@ public void testSimpleCaseCustomCodec() throws IOException { .setOpenMode(OpenMode.CREATE) .setCodec(codec) .setMaxBufferedDocs(10) - .setMergePolicy(newLogMergePolicy(false))); + .setMergePolicy(newLogMergePolicy())); + writer.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(false); // add 40 documents in separate files addDocs(writer, 40); assertEquals(40, writer.getDocStats().maxDoc); @@ -1450,11 +1455,11 @@ public void testNonCFSLeftovers() throws Exception { MockDirectoryWrapper dir = new MockDirectoryWrapper(random(), new ByteBuffersDirectory()); IndexWriterConfig conf = - new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy(true)); - MergePolicy lmp = conf.getMergePolicy(); + new IndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy()); // Force creation of CFS: - lmp.setNoCFSRatio(1.0); - lmp.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); + conf.getCodec().compoundFormat().setShouldUseCompoundFile(true); + 
conf.getCodec().compoundFormat().setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); + conf.setUseCompoundFile(true); IndexWriter w3 = new IndexWriter(dir, conf); TestUtil.addIndexesSlowly(w3, readers); w3.close(); From b26495e414a51e077cc86901480607161745273e Mon Sep 17 00:00:00 2001 From: shubharm Date: Tue, 7 Oct 2025 14:41:34 +0100 Subject: [PATCH 5/7] AI generated comments and java docs --- .../apache/lucene/codecs/CompoundFormat.java | 90 ++++++++++++++-- .../lucene/codecs/TestCompoundFormat.java | 100 ++++++++++++++---- 2 files changed, 157 insertions(+), 33 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java index dd179b0ef898..a61d0804e2de 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java @@ -36,76 +36,144 @@ protected CompoundFormat() {} // TODO: this is very minimal. If we need more methods, // we can add 'producer' classes. 
- // Default thresholds + /** Default document count threshold for using compound files with LogDocMergePolicy */ static final int DEFAULT_CFS_THRESHOLD_DOC_SIZE = 65536; // docs + + /** Default byte size threshold for using compound files with other merge policies (64MB) */ static final long DEFAULT_CFS_THRESHOLD_BYTE_SIZE = 64L * 1024 * 1024; // 64MB - /** Default max segment size allowed for CFS (bytes) */ + /** Default maximum segment size allowed for compound files (no limit) */ static final long DEFAULT_MAX_CFS_SEGMENT_SIZE = Long.MAX_VALUE; + /** Document count threshold for LogDocMergePolicy */ private int cfsThresholdDocSize = DEFAULT_CFS_THRESHOLD_DOC_SIZE; + + /** Byte size threshold for other merge policies */ private long cfsThresholdByteSize = DEFAULT_CFS_THRESHOLD_BYTE_SIZE; + + /** Whether compound files should be used at all */ private boolean shouldUseCompoundFile = true; + + /** Maximum segment size that can be stored as compound file */ private long maxCFSSegmentSize = DEFAULT_MAX_CFS_SEGMENT_SIZE; + /** + * Sets the document count threshold for using compound files with LogDocMergePolicy. + * Segments with document count less than or equal to this threshold will use compound files. + * + * @param threshold the document count threshold + */ public void setCfsThresholdDocSize(int threshold) { this.cfsThresholdDocSize = threshold; } + /** + * Sets the byte size threshold for using compound files with merge policies other than LogDocMergePolicy. + * Segments with size less than or equal to this threshold will use compound files. + * + * @param thresholdBytes the byte size threshold in bytes + */ public void setCfsThresholdByteSize(long thresholdBytes) { this.cfsThresholdByteSize = thresholdBytes; } + /** + * Returns the current document count threshold for compound files. 
+ * + * @return the document count threshold + */ public int getCfsThresholdDocSize() { return this.cfsThresholdDocSize; } + /** + * Returns the current byte size threshold for compound files. + * + * @return the byte size threshold in bytes + */ public long getCfsThresholdByteSize() { return this.cfsThresholdByteSize; } + /** + * Enables or disables the use of compound files entirely. + * When disabled, no segments will use compound files regardless of other settings. + * + * @param useCompoundFile true to enable compound files, false to disable + */ public void setShouldUseCompoundFile(boolean useCompoundFile) { this.shouldUseCompoundFile = useCompoundFile; } + /** + * Returns whether compound files are enabled. + * + * @return true if compound files are enabled, false otherwise + */ public boolean getShouldUseCompoundFile() { return this.shouldUseCompoundFile; } - /** Returns the largest size allowed for a compound file segment (in MB) */ + /** + * Returns the largest size allowed for a compound file segment in megabytes. + * Segments larger than this size will not use compound files even if otherwise eligible. + * + * @return the maximum compound file segment size in MB + */ public double getMaxCFSSegmentSizeMB() { return maxCFSSegmentSize / 1024. / 1024.; } /** - * If a merged segment will be more than this value, leave the segment as non-compound file even - * if compound file is enabled. Set this to Double.POSITIVE_INFINITY (default) to always use CFS. + * Sets the maximum size limit for compound file segments in megabytes. + * If a merged segment will be larger than this value, it will be left as a non-compound file + * even if compound files are enabled. Set this to Double.POSITIVE_INFINITY (default) to always use CFS + * when other conditions are met. 
+ * + * @param v the maximum segment size in MB (must be >= 0) + * @throws IllegalArgumentException if v is negative */ public void setMaxCFSSegmentSizeMB(double v) { if (v < 0.0) { throw new IllegalArgumentException("maxCFSSegmentSizeMB must be >=0 (got " + v + ")"); } - v *= 1024 * 1024; + v *= 1024 * 1024; // Convert MB to bytes this.maxCFSSegmentSize = v > Long.MAX_VALUE ? Long.MAX_VALUE : (long) v; } /** - * Returns true if a new segment (regardless of its origin) should use the compound file format. - * The default implementation uses fixed thresholds: - * - for LogDocMergePolicy, all segments with less than equals to {@link #getCfsThresholdDocSize()} docs; - * - for byte-size-based merge policies, all segments with less than equals to {@link #getCfsThresholdByteSize()} bytes. - * Additionally, segments larger than {@link #maxCFSSegmentSize} will not use CFS. + * Determines whether a segment should use the compound file format based on its size and merge policy. + * + *

The decision logic is as follows:
+   *
+   * <ol>
+   *   <li>If compound files are disabled globally, return false</li>
+   *   <li>If segment size exceeds the maximum CFS segment size, return false</li>
+   *   <li>For LogDocMergePolicy: use CFS if the document count is &le; the document threshold</li>
+   *   <li>For other merge policies: use CFS if the byte size is &le; the byte threshold</li>
+   * </ol>
+   *
+ * + * @param mergedInfoSize the size of the segment (document count for LogDocMergePolicy, bytes for others) + * @param mergePolicy the merge policy being used + * @return true if the segment should use compound file format, false otherwise + * @throws IOException if an I/O error occurs */ public boolean useCompoundFile(long mergedInfoSize, MergePolicy mergePolicy) throws IOException { + // Check if compound files are globally disabled if (this.shouldUseCompoundFile == false) { return false; } + + // Check if segment exceeds maximum allowed size for CFS if (mergedInfoSize > maxCFSSegmentSize) { return false; } + + // Apply appropriate threshold based on merge policy type if (mergePolicy instanceof LogDocMergePolicy) { + // For LogDocMergePolicy, mergedInfoSize represents document count return mergedInfoSize <= this.cfsThresholdDocSize; } else { + // For other policies, mergedInfoSize represents byte size return mergedInfoSize <= this.cfsThresholdByteSize; } } diff --git a/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java index e48ac4f60964..5909a38c39b1 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java @@ -27,72 +27,128 @@ import java.io.IOException; +/** + * Unit tests for {@link CompoundFormat} functionality. + * + *

This test class verifies the compound file decision logic, including:
+ *
+ * <ul>
+ *   <li>default and custom doc-count / byte-size thresholds</li>
+ *   <li>the global enable/disable flag</li>
+ *   <li>the maximum CFS segment size limit</li>
+ * </ul>
+ */ public class TestCompoundFormat extends LuceneTestCase { + /** Test instance of CompoundFormat with minimal implementation */ private CompoundFormat format; + /** + * Sets up a test CompoundFormat instance with minimal abstract method implementations. + * The test format focuses on testing the threshold logic rather than actual I/O operations. + */ @Override @Before public void setUp() throws Exception { super.setUp(); + // Create a minimal CompoundFormat implementation for testing threshold logic format = new CompoundFormat() { @Override public CompoundDirectory getCompoundReader(Directory dir, SegmentInfo segmentInfo) { - return null; + return null; // Not needed for threshold testing } @Override public void write(Directory dir, SegmentInfo segmentInfo, IOContext context) { - // No-op + // No-op implementation for testing } }; } + /** + * Tests that the default thresholds work correctly for different merge policies. + * + *

Verifies:
+   *
+   * <ul>
+   *   <li>LogDocMergePolicy is gated by the default doc-count threshold (65536 docs)</li>
+   *   <li>other merge policies are gated by the default byte-size threshold (64MB)</li>
+   * </ul>
+ */ public void testDefaultThresholds() throws IOException { + // Enable compound files with no size limit format.setShouldUseCompoundFile(true); - format.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); // Always allow + format.setMaxCFSSegmentSizeMB(Double.POSITIVE_INFINITY); // Remove size constraints MergePolicy docPolicy = new LogDocMergePolicy(); MergePolicy bytePolicy = new TieredMergePolicy(); - // Should use compound file for doc policy under threshold - assertTrue(format.useCompoundFile(65536, docPolicy)); - assertFalse(format.useCompoundFile(65537, docPolicy)); + // Verify default threshold values are as expected + assertEquals(65536, format.getCfsThresholdDocSize()); + assertEquals(64L * 1024 * 1024, format.getCfsThresholdByteSize()); + + // Test LogDocMergePolicy uses document count threshold + assertTrue("Should use CFS at doc threshold", format.useCompoundFile(65536, docPolicy)); + assertFalse("Should not use CFS above doc threshold", format.useCompoundFile(65537, docPolicy)); - // Should use compound file for byte policy under threshold (64MB) - assertTrue(format.useCompoundFile(64L * 1024 * 1024, bytePolicy)); - assertFalse(format.useCompoundFile((64L * 1024 * 1024) + 1, bytePolicy)); + // Test other merge policies use byte size threshold (64MB) + assertTrue("Should use CFS at byte threshold", format.useCompoundFile(64L * 1024 * 1024, bytePolicy)); + assertFalse("Should not use CFS above byte threshold", format.useCompoundFile((64L * 1024 * 1024) + 1, bytePolicy)); } + /** + * Tests that compound files can be globally disabled. + * + *

When compound files are disabled, no segments should use compound files + * regardless of their size or the configured thresholds. + */ public void testDisabledCompoundFile() throws IOException { + // Globally disable compound files format.setShouldUseCompoundFile(false); MergePolicy docPolicy = new LogDocMergePolicy(); - // Should never use compound file if disabled - assertFalse(format.useCompoundFile(1, docPolicy)); - assertFalse(format.useCompoundFile(65536, docPolicy)); + // Verify that CFS is never used when globally disabled + assertFalse("Should not use CFS when disabled (small segment)", format.useCompoundFile(1, docPolicy)); + assertFalse("Should not use CFS when disabled (at threshold)", format.useCompoundFile(65536, docPolicy)); } + /** + * Tests the maximum compound file segment size limit. + * + *

Segments larger than the configured maximum size should not use compound files, + * even if they would otherwise be eligible based on the threshold settings. + */ public void testMaxCFSSegmentSize() throws IOException { format.setShouldUseCompoundFile(true); - format.setMaxCFSSegmentSizeMB(10); // 10MB + format.setMaxCFSSegmentSizeMB(10); // Set 10MB limit MergePolicy bytePolicy = new TieredMergePolicy(); - // Should skip CFS if over maxCFSSegmentSize - assertTrue(format.useCompoundFile(9L * 1024 * 1024, bytePolicy)); - assertFalse(format.useCompoundFile(11L * 1024 * 1024, bytePolicy)); + // Test segments below the maximum size limit + assertTrue("Should use CFS below max size limit", format.useCompoundFile(9L * 1024 * 1024, bytePolicy)); + + // Test segments above the maximum size limit + assertFalse("Should not use CFS above max size limit", format.useCompoundFile(11L * 1024 * 1024, bytePolicy)); } + /** + * Tests that custom threshold values can be configured and work correctly. + * + *
<p>
Verifies that both document count and byte size thresholds can be customized + * and that the boundary conditions work properly with the new values. + */ public void testCustomThresholds() throws IOException { - format.setCfsThresholdDocSize(1000); - format.setCfsThresholdByteSize(10 * 1024 * 1024); // 10MB + // Configure custom thresholds + format.setCfsThresholdDocSize(1000); // Custom doc count threshold + format.setCfsThresholdByteSize(10 * 1024 * 1024); // Custom 10MB byte threshold MergePolicy docPolicy = new LogDocMergePolicy(); MergePolicy bytePolicy = new TieredMergePolicy(); - assertTrue(format.useCompoundFile(1000, docPolicy)); - assertFalse(format.useCompoundFile(1001, docPolicy)); + // Test custom document count threshold + assertTrue("Should use CFS at custom doc threshold", format.useCompoundFile(1000, docPolicy)); + assertFalse("Should not use CFS above custom doc threshold", format.useCompoundFile(1001, docPolicy)); - assertTrue(format.useCompoundFile(10 * 1024 * 1024, bytePolicy)); - assertFalse(format.useCompoundFile((10 * 1024 * 1024) + 1, bytePolicy)); + // Test custom byte size threshold + assertTrue("Should use CFS at custom byte threshold", format.useCompoundFile(10 * 1024 * 1024, bytePolicy)); + assertFalse("Should not use CFS above custom byte threshold", format.useCompoundFile((10 * 1024 * 1024) + 1, bytePolicy)); } } From 9fae54af96e47e11f60d86ef30057a3af9fc6ac4 Mon Sep 17 00:00:00 2001 From: shubharm Date: Tue, 7 Oct 2025 14:44:04 +0100 Subject: [PATCH 6/7] gradle tidy --- .../TestBasicBackwardsCompatibility.java | 1 - .../apache/lucene/codecs/CompoundFormat.java | 40 ++++--- .../lucene/codecs/TestCompoundFormat.java | 106 ++++++++++-------- .../document/TestFeatureDoubleValues.java | 15 +-- .../lucene/document/TestFeatureField.java | 55 +++++---- .../lucene/document/TestFeatureSort.java | 15 +-- .../TestLatLonPointDistanceFeatureQuery.java | 23 +--- .../TestLongDistanceFeatureQuery.java | 19 +--- .../search/TestConstantScoreScorer.java 
| 11 +- .../search/join/TestBlockJoinBulkScorer.java | 18 ++- .../search/join/TestBlockJoinScorer.java | 6 +- .../misc/index/TestBPIndexReorderer.java | 10 +- .../apache/lucene/tests/util/TestUtil.java | 1 + 13 files changed, 171 insertions(+), 149 deletions(-) diff --git a/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestBasicBackwardsCompatibility.java b/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestBasicBackwardsCompatibility.java index e3818b3f5cbf..d9140c0aa043 100644 --- a/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestBasicBackwardsCompatibility.java +++ b/lucene/backward-codecs/src/test/org/apache/lucene/backward_index/TestBasicBackwardsCompatibility.java @@ -53,7 +53,6 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.LogByteSizeMergePolicy; import org.apache.lucene.index.MultiBits; import org.apache.lucene.index.MultiDocValues; import org.apache.lucene.index.MultiTerms; diff --git a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java index a61d0804e2de..d7ea21473274 100644 --- a/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java +++ b/lucene/core/src/java/org/apache/lucene/codecs/CompoundFormat.java @@ -58,8 +58,8 @@ protected CompoundFormat() {} private long maxCFSSegmentSize = DEFAULT_MAX_CFS_SEGMENT_SIZE; /** - * Sets the document count threshold for using compound files with LogDocMergePolicy. - * Segments with document count less than or equal to this threshold will use compound files. + * Sets the document count threshold for using compound files with LogDocMergePolicy. Segments + * with document count less than or equal to this threshold will use compound files. 
* * @param threshold the document count threshold */ @@ -68,8 +68,9 @@ public void setCfsThresholdDocSize(int threshold) { } /** - * Sets the byte size threshold for using compound files with merge policies other than LogDocMergePolicy. - * Segments with size less than or equal to this threshold will use compound files. + * Sets the byte size threshold for using compound files with merge policies other than + * LogDocMergePolicy. Segments with size less than or equal to this threshold will use compound + * files. * * @param thresholdBytes the byte size threshold in bytes */ @@ -96,8 +97,8 @@ public long getCfsThresholdByteSize() { } /** - * Enables or disables the use of compound files entirely. - * When disabled, no segments will use compound files regardless of other settings. + * Enables or disables the use of compound files entirely. When disabled, no segments will use + * compound files regardless of other settings. * * @param useCompoundFile true to enable compound files, false to disable */ @@ -115,8 +116,8 @@ public boolean getShouldUseCompoundFile() { } /** - * Returns the largest size allowed for a compound file segment in megabytes. - * Segments larger than this size will not use compound files even if otherwise eligible. + * Returns the largest size allowed for a compound file segment in megabytes. Segments larger than + * this size will not use compound files even if otherwise eligible. * * @return the maximum compound file segment size in MB */ @@ -125,10 +126,10 @@ public double getMaxCFSSegmentSizeMB() { } /** - * Sets the maximum size limit for compound file segments in megabytes. - * If a merged segment will be larger than this value, it will be left as a non-compound file - * even if compound files are enabled. Set this to Double.POSITIVE_INFINITY (default) to always use CFS - * when other conditions are met. + * Sets the maximum size limit for compound file segments in megabytes. 
If a merged segment will + * be larger than this value, it will be left as a non-compound file even if compound files are + * enabled. Set this to Double.POSITIVE_INFINITY (default) to always use CFS when other conditions + * are met. * * @param v the maximum segment size in MB (must be >= 0) * @throws IllegalArgumentException if v is negative @@ -142,17 +143,20 @@ public void setMaxCFSSegmentSizeMB(double v) { } /** - * Determines whether a segment should use the compound file format based on its size and merge policy. + * Determines whether a segment should use the compound file format based on its size and merge + * policy. * *
<p>
The decision logic is as follows: + * *
 * <ol>
-   *   <li>If compound files are disabled globally, return false</li>
-   *   <li>If segment size exceeds the maximum CFS segment size, return false</li>
-   *   <li>For LogDocMergePolicy: use CFS if document count ≤ document threshold</li>
-   *   <li>For other merge policies: use CFS if byte size ≤ byte threshold</li>
+   *   <li>If compound files are disabled globally, return false
+   *   <li>If segment size exceeds the maximum CFS segment size, return false
+   *   <li>For LogDocMergePolicy: use CFS if document count ≤ document threshold
+   *   <li>For other merge policies: use CFS if byte size ≤ byte threshold
* - * @param mergedInfoSize the size of the segment (document count for LogDocMergePolicy, bytes for others) + * @param mergedInfoSize the size of the segment (document count for LogDocMergePolicy, bytes for + * others) * @param mergePolicy the merge policy being used * @return true if the segment should use compound file format, false otherwise * @throws IOException if an I/O error occurs diff --git a/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java index 5909a38c39b1..1f969107f7e2 100644 --- a/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java +++ b/lucene/core/src/test/org/apache/lucene/codecs/TestCompoundFormat.java @@ -16,26 +16,26 @@ */ package org.apache.lucene.codecs; +import java.io.IOException; import org.apache.lucene.index.LogDocMergePolicy; import org.apache.lucene.index.MergePolicy; -import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.index.SegmentInfo; +import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.tests.util.LuceneTestCase; import org.junit.Before; -import java.io.IOException; - /** * Unit tests for {@link CompoundFormat} functionality. - * + * *
<p>
This test class verifies the compound file decision logic, including: + * *

*/ public class TestCompoundFormat extends LuceneTestCase { @@ -44,34 +44,37 @@ public class TestCompoundFormat extends LuceneTestCase { private CompoundFormat format; /** - * Sets up a test CompoundFormat instance with minimal abstract method implementations. - * The test format focuses on testing the threshold logic rather than actual I/O operations. + * Sets up a test CompoundFormat instance with minimal abstract method implementations. The test + * format focuses on testing the threshold logic rather than actual I/O operations. */ @Override @Before public void setUp() throws Exception { super.setUp(); // Create a minimal CompoundFormat implementation for testing threshold logic - format = new CompoundFormat() { - @Override - public CompoundDirectory getCompoundReader(Directory dir, SegmentInfo segmentInfo) { - return null; // Not needed for threshold testing - } - @Override - public void write(Directory dir, SegmentInfo segmentInfo, IOContext context) { - // No-op implementation for testing - } - }; + format = + new CompoundFormat() { + @Override + public CompoundDirectory getCompoundReader(Directory dir, SegmentInfo segmentInfo) { + return null; // Not needed for threshold testing + } + + @Override + public void write(Directory dir, SegmentInfo segmentInfo, IOContext context) { + // No-op implementation for testing + } + }; } /** * Tests that the default thresholds work correctly for different merge policies. - * + * *
<p>
Verifies: + * *

*/ public void testDefaultThresholds() throws IOException { @@ -91,15 +94,18 @@ public void testDefaultThresholds() throws IOException { assertFalse("Should not use CFS above doc threshold", format.useCompoundFile(65537, docPolicy)); // Test other merge policies use byte size threshold (64MB) - assertTrue("Should use CFS at byte threshold", format.useCompoundFile(64L * 1024 * 1024, bytePolicy)); - assertFalse("Should not use CFS above byte threshold", format.useCompoundFile((64L * 1024 * 1024) + 1, bytePolicy)); + assertTrue( + "Should use CFS at byte threshold", format.useCompoundFile(64L * 1024 * 1024, bytePolicy)); + assertFalse( + "Should not use CFS above byte threshold", + format.useCompoundFile((64L * 1024 * 1024) + 1, bytePolicy)); } /** * Tests that compound files can be globally disabled. - * - *
<p>
When compound files are disabled, no segments should use compound files - * regardless of their size or the configured thresholds. + * + *
<p>
When compound files are disabled, no segments should use compound files regardless of their + * size or the configured thresholds. */ public void testDisabledCompoundFile() throws IOException { // Globally disable compound files @@ -107,15 +113,18 @@ public void testDisabledCompoundFile() throws IOException { MergePolicy docPolicy = new LogDocMergePolicy(); // Verify that CFS is never used when globally disabled - assertFalse("Should not use CFS when disabled (small segment)", format.useCompoundFile(1, docPolicy)); - assertFalse("Should not use CFS when disabled (at threshold)", format.useCompoundFile(65536, docPolicy)); + assertFalse( + "Should not use CFS when disabled (small segment)", format.useCompoundFile(1, docPolicy)); + assertFalse( + "Should not use CFS when disabled (at threshold)", + format.useCompoundFile(65536, docPolicy)); } /** * Tests the maximum compound file segment size limit. - * - *
<p>
Segments larger than the configured maximum size should not use compound files, - * even if they would otherwise be eligible based on the threshold settings. + * + *
<p>
Segments larger than the configured maximum size should not use compound files, even if they + * would otherwise be eligible based on the threshold settings. */ public void testMaxCFSSegmentSize() throws IOException { format.setShouldUseCompoundFile(true); @@ -123,21 +132,25 @@ public void testMaxCFSSegmentSize() throws IOException { MergePolicy bytePolicy = new TieredMergePolicy(); // Test segments below the maximum size limit - assertTrue("Should use CFS below max size limit", format.useCompoundFile(9L * 1024 * 1024, bytePolicy)); - + assertTrue( + "Should use CFS below max size limit", + format.useCompoundFile(9L * 1024 * 1024, bytePolicy)); + // Test segments above the maximum size limit - assertFalse("Should not use CFS above max size limit", format.useCompoundFile(11L * 1024 * 1024, bytePolicy)); + assertFalse( + "Should not use CFS above max size limit", + format.useCompoundFile(11L * 1024 * 1024, bytePolicy)); } /** * Tests that custom threshold values can be configured and work correctly. - * - *
<p>
Verifies that both document count and byte size thresholds can be customized - * and that the boundary conditions work properly with the new values. + * + *
<p>
Verifies that both document count and byte size thresholds can be customized and that the + * boundary conditions work properly with the new values. */ public void testCustomThresholds() throws IOException { // Configure custom thresholds - format.setCfsThresholdDocSize(1000); // Custom doc count threshold + format.setCfsThresholdDocSize(1000); // Custom doc count threshold format.setCfsThresholdByteSize(10 * 1024 * 1024); // Custom 10MB byte threshold MergePolicy docPolicy = new LogDocMergePolicy(); @@ -145,10 +158,15 @@ public void testCustomThresholds() throws IOException { // Test custom document count threshold assertTrue("Should use CFS at custom doc threshold", format.useCompoundFile(1000, docPolicy)); - assertFalse("Should not use CFS above custom doc threshold", format.useCompoundFile(1001, docPolicy)); + assertFalse( + "Should not use CFS above custom doc threshold", format.useCompoundFile(1001, docPolicy)); // Test custom byte size threshold - assertTrue("Should use CFS at custom byte threshold", format.useCompoundFile(10 * 1024 * 1024, bytePolicy)); - assertFalse("Should not use CFS above custom byte threshold", format.useCompoundFile((10 * 1024 * 1024) + 1, bytePolicy)); + assertTrue( + "Should use CFS at custom byte threshold", + format.useCompoundFile(10 * 1024 * 1024, bytePolicy)); + assertFalse( + "Should not use CFS above custom byte threshold", + format.useCompoundFile((10 * 1024 * 1024) + 1, bytePolicy)); } } diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFeatureDoubleValues.java b/lucene/core/src/test/org/apache/lucene/document/TestFeatureDoubleValues.java index 08d1bfdcf91a..6a57489f2ec6 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestFeatureDoubleValues.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestFeatureDoubleValues.java @@ -31,8 +31,7 @@ public class TestFeatureDoubleValues extends LuceneTestCase { public void testFeature() throws IOException { Directory dir = newDirectory(); - 
IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); @@ -67,8 +66,7 @@ public void testFeature() throws IOException { public void testFeatureMissing() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); @@ -101,8 +99,7 @@ public void testFeatureMissing() throws IOException { public void testFeatureMissingFieldInSegment() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); @@ -125,8 +122,7 @@ public void testFeatureMissingFieldInSegment() throws IOException { public void testFeatureMissingFeatureNameInSegment() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); @@ 
-150,8 +146,7 @@ public void testFeatureMissingFeatureNameInSegment() throws IOException { public void testFeatureMultipleMissing() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFeatureField.java b/lucene/core/src/test/org/apache/lucene/document/TestFeatureField.java index ed0002246d02..c20d5d6897f3 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestFeatureField.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestFeatureField.java @@ -62,10 +62,13 @@ public void testBasics() throws Exception { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); - writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + writer + .w + .getConfig() + .getCodec() + .compoundFormat() + .setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); FeatureField pagerank = new FeatureField("features", "pagerank", 1); FeatureField urlLength = new FeatureField("features", "urlLen", 1); @@ -198,10 +201,13 @@ public void testExplanations() throws Exception { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); - writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); + random(), dir, 
newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + writer + .w + .getConfig() + .getCodec() + .compoundFormat() + .setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); FeatureField pagerank = new FeatureField("features", "pagerank", 1); doc.add(pagerank); @@ -325,10 +331,13 @@ public void testDemo() throws IOException { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); - writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + writer + .w + .getConfig() + .getCodec() + .compoundFormat() + .setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); FeatureField pagerank = new FeatureField("features", "pagerank", 1); doc.add(pagerank); @@ -382,10 +391,13 @@ public void testBasicsNonScoringCase() throws IOException { DirectoryReader reader; try (RandomIndexWriter writer = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy()))) { - writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy()))) { + writer + .w + .getConfig() + .getCodec() + .compoundFormat() + .setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); FeatureField pagerank = new FeatureField("features", "pagerank", 1); FeatureField urlLength = new FeatureField("features", "urlLen", 1); @@ -466,10 +478,13 @@ public void testStoreTermVectors() throws Exception { Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); - writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); + 
random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + writer + .w + .getConfig() + .getCodec() + .compoundFormat() + .setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); FeatureField pagerank = new FeatureField("features", "pagerank", 1, true); FeatureField urlLength = new FeatureField("features", "urlLen", 1, true); diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java b/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java index a1a23b92159f..10c186a87106 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java @@ -50,8 +50,7 @@ public class TestFeatureSort extends LuceneTestCase { public void testFeature() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); @@ -86,8 +85,7 @@ public void testFeature() throws IOException { public void testFeatureMissing() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); @@ -120,8 +118,7 @@ public void testFeatureMissing() throws IOException { public void testFeatureMissingFieldInSegment() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig config = - 
newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); @@ -155,8 +152,7 @@ public void testFeatureMissingFieldInSegment() throws IOException { public void testFeatureMissingFeatureNameInSegment() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); @@ -191,8 +187,7 @@ public void testFeatureMissingFeatureNameInSegment() throws IOException { public void testFeatureMultipleMissing() throws IOException { Directory dir = newDirectory(); - IndexWriterConfig config = - newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); + IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(newLogMergePolicy()); config.getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config); Document doc = new Document(); diff --git a/lucene/core/src/test/org/apache/lucene/document/TestLatLonPointDistanceFeatureQuery.java b/lucene/core/src/test/org/apache/lucene/document/TestLatLonPointDistanceFeatureQuery.java index d28d61af7bb7..15042524647c 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestLatLonPointDistanceFeatureQuery.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestLatLonPointDistanceFeatureQuery.java @@ -65,9 +65,7 @@ public void testBasics() throws IOException { Directory dir = newDirectory(); RandomIndexWriter w = 
new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LatLonPoint point = new LatLonPoint("foo", 0.0, 0.0); @@ -162,9 +160,7 @@ public void testCrossesDateLine() throws IOException { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LatLonPoint point = new LatLonPoint("foo", 0.0, 0.0); @@ -241,9 +237,7 @@ public void testMissingValue() throws IOException { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LatLonPoint point = new LatLonPoint("foo", 0, 0); @@ -301,9 +295,7 @@ public void testMultiValued() throws IOException { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); for (double[] point : new double[][] {{0, 0}, {30, 30}, {60, 60}}) { @@ -404,8 +396,7 @@ public void testMultiValued() throws IOException { public void testRandom() throws IOException { Directory dir = newDirectory(); IndexWriter 
w = - new IndexWriter( - dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LatLonPoint point = new LatLonPoint("foo", 0., 0.); @@ -449,9 +440,7 @@ public void testCompareSorting() throws IOException { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LatLonPoint point = new LatLonPoint("foo", 0., 0.); diff --git a/lucene/core/src/test/org/apache/lucene/document/TestLongDistanceFeatureQuery.java b/lucene/core/src/test/org/apache/lucene/document/TestLongDistanceFeatureQuery.java index 1abbf51b2977..75ae854e3fc4 100644 --- a/lucene/core/src/test/org/apache/lucene/document/TestLongDistanceFeatureQuery.java +++ b/lucene/core/src/test/org/apache/lucene/document/TestLongDistanceFeatureQuery.java @@ -57,9 +57,7 @@ public void testBasics() throws IOException { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LongField field = new LongField("foo", 0L, Store.NO); @@ -119,9 +117,7 @@ public void testOverUnderFlow() throws IOException { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + random(), dir, 
newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LongField field = new LongField("foo", 0L, Store.NO); @@ -206,9 +202,7 @@ public void testMissingValue() throws IOException { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LongField field = new LongField("foo", 0L, Store.NO); @@ -249,9 +243,7 @@ public void testMultiValued() throws IOException { Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter( - random(), - dir, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + random(), dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); for (long v : new long[] {3, 1000, Long.MAX_VALUE}) { @@ -321,8 +313,7 @@ public void testMultiValued() throws IOException { public void testRandom() throws IOException { Directory dir = newDirectory(); IndexWriter w = - new IndexWriter( - dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); LongField field = new LongField("foo", 0L, Store.NO); diff --git a/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreScorer.java b/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreScorer.java index 6ddab402c67c..c18cd88a2f77 100644 --- 
a/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreScorer.java +++ b/lucene/core/src/test/org/apache/lucene/search/TestConstantScoreScorer.java @@ -178,10 +178,13 @@ static class TestConstantScoreScorerIndex implements AutoCloseable { writer = new RandomIndexWriter( - random(), - directory, - newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); - writer.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); + random(), directory, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + writer + .w + .getConfig() + .getCodec() + .compoundFormat() + .setShouldUseCompoundFile(random().nextBoolean()); for (String VALUE : VALUES) { Document doc = new Document(); doc.add(newTextField(FIELD, VALUE, Field.Store.YES)); diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinBulkScorer.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinBulkScorer.java index e526fbcd91eb..7be43fe0c57a 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinBulkScorer.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinBulkScorer.java @@ -327,7 +327,11 @@ public void testScoreRandomIndices() throws IOException { .setMergePolicy( // retain doc id order newLogMergePolicy()))) { - w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); + w.w + .getConfig() + .getCodec() + .compoundFormat() + .setShouldUseCompoundFile(random().nextBoolean()); expectedMatches = populateRandomIndex( @@ -373,7 +377,11 @@ public void testSetMinCompetitiveScoreWithScoreModeMax() throws IOException { .setMergePolicy( // retain doc id order newLogMergePolicy()))) { - w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); + w.w + .getConfig() + .getCodec() + .compoundFormat() + .setShouldUseCompoundFile(random().nextBoolean()); populateStaticIndex(w); w.forceMerge(1); } @@ -443,7 +451,11 @@ public 
void testSetMinCompetitiveScoreWithScoreModeNone() throws IOException { .setMergePolicy( // retain doc id order newLogMergePolicy()))) { - w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); + w.w + .getConfig() + .getCodec() + .compoundFormat() + .setShouldUseCompoundFile(random().nextBoolean()); populateStaticIndex(w); w.forceMerge(1); } diff --git a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinScorer.java b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinScorer.java index 748b22bce61d..60d7afb713e6 100644 --- a/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinScorer.java +++ b/lucene/join/src/test/org/apache/lucene/search/join/TestBlockJoinScorer.java @@ -137,7 +137,11 @@ public void testScoreMax() throws IOException { // retain doc id order newLogMergePolicy()))) { - w.w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); + w.w + .getConfig() + .getCodec() + .compoundFormat() + .setShouldUseCompoundFile(random().nextBoolean()); for (String[][] values : Arrays.asList( new String[][] {{"A", "B"}, {"A", "B", "C"}}, diff --git a/lucene/misc/src/test/org/apache/lucene/misc/index/TestBPIndexReorderer.java b/lucene/misc/src/test/org/apache/lucene/misc/index/TestBPIndexReorderer.java index bd56a0b5f1dd..254e154aac83 100644 --- a/lucene/misc/src/test/org/apache/lucene/misc/index/TestBPIndexReorderer.java +++ b/lucene/misc/src/test/org/apache/lucene/misc/index/TestBPIndexReorderer.java @@ -69,8 +69,7 @@ public void testSingleTermWithForkJoinPool() throws IOException { public void doTestSingleTerm(ForkJoinPool pool) throws IOException { Directory dir = newDirectory(); IndexWriter w = - new IndexWriter( - dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); 
Document doc = new Document(); StoredField idField = new StoredField("id", ""); @@ -160,9 +159,7 @@ private void doTestSingleTermWithBlocks(ForkJoinPool pool) throws IOException { IndexWriter w = new IndexWriter( dir, - newIndexWriterConfig() - .setParentField("parent") - .setMergePolicy(newLogMergePolicy())); + newIndexWriterConfig().setParentField("parent").setMergePolicy(newLogMergePolicy())); w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); w.addDocuments(createBlock("1", "lucene", "search", "lucene")); // 0-2 @@ -257,8 +254,7 @@ private List createBlock(String parentID, String... values) { public void testMultiTerm() throws IOException { Directory dir = newDirectory(); IndexWriter w = - new IndexWriter( - dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); + new IndexWriter(dir, newIndexWriterConfig().setMergePolicy(newLogMergePolicy())); w.getConfig().getCodec().compoundFormat().setShouldUseCompoundFile(random().nextBoolean()); Document doc = new Document(); StoredField idField = new StoredField("id", ""); diff --git a/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java index b6521f80bc84..b230e750a4d8 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java +++ b/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java @@ -1333,6 +1333,7 @@ public static Codec getDefaultCodec() { /** * Returns the actual default codec (e.g. LuceneMNCodec) for this version of Lucene. This may be * different from {@link Codec#getDefault()} because that is randomized. 
+ * * @param shouldUseCfs * @return */ From 18d2582bbec4bdc3c582f1620ee9d001e0eafc6e Mon Sep 17 00:00:00 2001 From: shubharm Date: Tue, 7 Oct 2025 15:27:24 +0100 Subject: [PATCH 7/7] eclint checks --- lucene/core/src/test/org/apache/lucene/TestSearch.java | 2 -- .../test/org/apache/lucene/TestSearchForDuplicates.java | 2 -- .../org/apache/lucene/index/TestIndexWriterDelete.java | 2 -- .../apache/lucene/index/TestIndexWriterMergePolicy.java | 2 -- .../org/apache/lucene/index/TestPerSegmentDeletes.java | 8 ++------ .../java/org/apache/lucene/tests/util/LuceneTestCase.java | 7 ++++--- .../src/java/org/apache/lucene/tests/util/TestUtil.java | 3 --- 7 files changed, 6 insertions(+), 20 deletions(-) diff --git a/lucene/core/src/test/org/apache/lucene/TestSearch.java b/lucene/core/src/test/org/apache/lucene/TestSearch.java index b4db24206c10..716477da405d 100644 --- a/lucene/core/src/test/org/apache/lucene/TestSearch.java +++ b/lucene/core/src/test/org/apache/lucene/TestSearch.java @@ -29,7 +29,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; @@ -80,7 +79,6 @@ private void doTestSearch(Random random, PrintWriter out, boolean useCompoundFil Directory directory = newDirectory(); Analyzer analyzer = new MockAnalyzer(random); IndexWriterConfig conf = newIndexWriterConfig(analyzer); - MergePolicy mp = conf.getMergePolicy(); conf.getCodec().compoundFormat().setShouldUseCompoundFile(useCompoundFile); IndexWriter writer = new IndexWriter(directory, conf); diff --git a/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java b/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java index 51b255606f3b..6dd889ab5893 100644 --- a/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java 
+++ b/lucene/core/src/test/org/apache/lucene/TestSearchForDuplicates.java @@ -29,7 +29,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; @@ -84,7 +83,6 @@ private void doTest(Random random, PrintWriter out, boolean useCompoundFiles, in Directory directory = newDirectory(); Analyzer analyzer = new MockAnalyzer(random); IndexWriterConfig conf = newIndexWriterConfig(analyzer); - final MergePolicy mp = conf.getMergePolicy(); conf.getCodec().compoundFormat().setShouldUseCompoundFile(useCompoundFiles); IndexWriter writer = new IndexWriter(directory, conf); if (VERBOSE) { diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java index 53478b34c2ce..d5f394adfdee 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterDelete.java @@ -862,8 +862,6 @@ public void eval(MockDirectoryWrapper dir) throws IOException { .setReaderPooling(false) .setMergePolicy(newLogMergePolicy())); - MergePolicy lmp = modifier.getConfig().getMergePolicy(); - dir.failOn(failure.reset()); FieldType custom1 = new FieldType(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java index b240e107fde7..5c0f1d7fbd7e 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java @@ -371,8 +371,6 @@ private void checkInvariants(IndexWriter writer) throws IOException { } } - private static final double EPSILON = 1E-14; - // Test 
basic semantics of merge on commit public void testMergeOnCommit() throws IOException { Directory dir = newDirectory(); diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java index 9cd2c6a82ddb..0975cd0aae65 100644 --- a/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java +++ b/lucene/core/src/test/org/apache/lucene/index/TestPerSegmentDeletes.java @@ -40,7 +40,7 @@ public void testDeletes1() throws Exception { iwc.setMergeScheduler(new SerialMergeScheduler()); iwc.setMaxBufferedDocs(5000); iwc.setRAMBufferSizeMB(100); - RangeMergePolicy fsmp = new RangeMergePolicy(false); + RangeMergePolicy fsmp = new RangeMergePolicy(); iwc.setMergePolicy(fsmp); IndexWriter writer = new IndexWriter(dir, iwc); for (int x = 0; x < 5; x++) { @@ -236,11 +236,7 @@ public static class RangeMergePolicy extends MergePolicy { int start; int length; - private final boolean useCompoundFile; - - private RangeMergePolicy(boolean useCompoundFile) { - this.useCompoundFile = useCompoundFile; - } + private RangeMergePolicy() {} @Override public MergeSpecification findMerges( diff --git a/lucene/test-framework/src/java/org/apache/lucene/tests/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/tests/util/LuceneTestCase.java index 34049621d8fd..c128b5516668 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/tests/util/LuceneTestCase.java +++ b/lucene/test-framework/src/java/org/apache/lucene/tests/util/LuceneTestCase.java @@ -921,7 +921,7 @@ public static IndexWriterConfig newIndexWriterConfig(Analyzer a) { /** create a new index writer config with random defaults using the specified random */ public static IndexWriterConfig newIndexWriterConfig(Random r, Analyzer a) { IndexWriterConfig c = new IndexWriterConfig(a); - configureRandom(r, c); + configureRandom(r, c.getCodec().compoundFormat()); c.setSimilarity(classEnvRule.similarity); if 
(VERBOSE) { // Even though TestRuleSetupAndRestoreClassEnv calls @@ -1061,8 +1061,7 @@ public static LogMergePolicy newLogMergePolicy(Random r) { return logmp; } - private static void configureRandom(Random r, IndexWriterConfig iwc) { - CompoundFormat compoundFormat = iwc.getCodec().compoundFormat(); + private static void configureRandom(Random r, CompoundFormat compoundFormat) { compoundFormat.setShouldUseCompoundFile(random().nextBoolean()); if (rarely(r)) { @@ -1182,6 +1181,7 @@ public static void maybeChangeLiveIndexWriterConfig(Random r, LiveIndexWriterCon if (rarely(r)) { MergePolicy mp = c.getMergePolicy(); + configureRandom(r, c.getCodec().compoundFormat()); if (mp instanceof LogMergePolicy logmp) { logmp.setCalibrateSizeByDeletes(r.nextBoolean()); if (rarely(r)) { @@ -1202,6 +1202,7 @@ public static void maybeChangeLiveIndexWriterConfig(Random r, LiveIndexWriterCon } else { tmp.setSegmentsPerTier(TestUtil.nextInt(r, 10, 50)); } + configureRandom(r, c.getCodec().compoundFormat()); tmp.setDeletesPctAllowed(20 + random().nextDouble() * 30); } didChange = true; diff --git a/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java index b230e750a4d8..95fe9177ebb6 100644 --- a/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java +++ b/lucene/test-framework/src/java/org/apache/lucene/tests/util/TestUtil.java @@ -1333,9 +1333,6 @@ public static Codec getDefaultCodec() { /** * Returns the actual default codec (e.g. LuceneMNCodec) for this version of Lucene. This may be * different from {@link Codec#getDefault()} because that is randomized. - * - * @param shouldUseCfs - * @return */ public static Codec getDefaultCodec(boolean shouldUseCfs) { Codec codec = getDefaultCodec();