diff --git a/Lucene.Net.sln.DotSettings b/Lucene.Net.sln.DotSettings index 464613e85d..747aae4ad6 100644 --- a/Lucene.Net.sln.DotSettings +++ b/Lucene.Net.sln.DotSettings @@ -1,2 +1,3 @@  - True \ No newline at end of file + True + True \ No newline at end of file diff --git a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs index 03fc938ac8..ed20cb2a22 100644 --- a/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs +++ b/src/Lucene.Net.TestFramework/Index/MockRandomMergePolicy.cs @@ -116,8 +116,8 @@ public override MergeSpecification FindForcedMerges(SegmentInfos segmentInfos, i public override MergeSpecification FindForcedDeletesMerges(SegmentInfos segmentInfos) { - // LUCENENET specific - just use int.MinValue to indicate "null" - return FindMerges((MergeTrigger)int.MinValue, segmentInfos); + // LUCENENET specific - use NONE instead of null + return FindMerges(MergeTrigger.NONE, segmentInfos); } protected override void Dispose(bool disposing) @@ -130,4 +130,4 @@ public override bool UseCompoundFile(SegmentInfos infos, SegmentCommitInfo merge return random.Next(5) != 1; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexCommit.cs b/src/Lucene.Net.Tests/Index/TestIndexCommit.cs index 07c971a07c..8f2c82247d 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexCommit.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexCommit.cs @@ -33,9 +33,9 @@ public virtual void TestEqualsHashCode() // LUCENE-2417: equals and hashCode() impl was inconsistent Directory dir = NewDirectory(); - IndexCommit ic1 = new IndexCommitAnonymousClass(this, dir); + IndexCommit ic1 = new IndexCommitAnonymousClass(dir); - IndexCommit ic2 = new IndexCommitAnonymousClass2(this, dir); + IndexCommit ic2 = new IndexCommitAnonymousClass2(dir); Assert.AreEqual(ic1, ic2); Assert.AreEqual(ic1.GetHashCode(), ic2.GetHashCode(), "hash codes are not equals"); @@ -44,13 +44,10 @@ public virtual void TestEqualsHashCode() private sealed class IndexCommitAnonymousClass : IndexCommit { - private readonly TestIndexCommit outerInstance; + private readonly Directory dir; - private Directory dir; - - public IndexCommitAnonymousClass(TestIndexCommit outerInstance, Directory dir) + public IndexCommitAnonymousClass(Directory dir) { - this.outerInstance = outerInstance; this.dir = dir; } @@ -75,13 +72,10 @@ public override void Delete() private sealed class IndexCommitAnonymousClass2 : IndexCommit { - private readonly TestIndexCommit outerInstance; - - private Directory dir; + private readonly Directory dir; - public IndexCommitAnonymousClass2(TestIndexCommit outerInstance, Directory dir) + public IndexCommitAnonymousClass2(Directory dir) { - this.outerInstance = outerInstance; this.dir = dir; } @@ -104,4 +98,4 @@ public override void Delete() public override int SegmentCount => 2; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexFileDeleter.cs b/src/Lucene.Net.Tests/Index/TestIndexFileDeleter.cs index 2febff8b3e..4544b363f7 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexFileDeleter.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexFileDeleter.cs @@ -59,7 +59,12 @@ public virtual void TestDeleteLeftoverFiles() mergePolicy.NoCFSRatio = 1.0; mergePolicy.MaxCFSSegmentSizeMB = double.PositiveInfinity; - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(mergePolicy).SetUseCompoundFile(true)); + IndexWriter writer 
= new IndexWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)). + SetMaxBufferedDocs(10). + SetMergePolicy(mergePolicy).SetUseCompoundFile(true) + ); int i; for (i = 0; i < 35; i++) @@ -75,7 +80,11 @@ public virtual void TestDeleteLeftoverFiles() writer.Dispose(); // Delete one doc so we get a .del file: - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).SetUseCompoundFile(true)); + writer = new IndexWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)). + SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).SetUseCompoundFile(true) + ); Term searchTerm = new Term("id", "7"); writer.DeleteDocuments(searchTerm); writer.Dispose(); @@ -157,7 +166,7 @@ private static ISet<string> DifFiles(string[] files1, string[] files2) { set2.Add(files2[x]); } - IEnumerator<string> i1 = set1.GetEnumerator(); + using IEnumerator<string> i1 = set1.GetEnumerator(); while (i1.MoveNext()) { string o = i1.Current; @@ -166,7 +175,7 @@ private static ISet<string> DifFiles(string[] files1, string[] files2) extra.Add(o); } } - IEnumerator<string> i2 = set2.GetEnumerator(); + using IEnumerator<string> i2 = set2.GetEnumerator(); while (i2.MoveNext()) { string o = i2.Current; @@ -217,4 +226,4 @@ private void AddDoc(IndexWriter writer, int id) writer.AddDocument(doc); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs b/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs index b6f195089a..0963d8c98c 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexReaderClose.cs @@ -35,7 +35,7 @@ public class TestIndexReaderClose : LuceneTestCase [Test] public virtual void TestCloseUnderException() { - int iters = 1000 + 1 + Random.nextInt(20); + int iters = 1000 + 1 + Random.Next(20); for (int j = 0; j < iters; j++) { Directory dir = NewDirectory(); @@ -46,7 +46,7 @@ public virtual void TestCloseUnderException() DirectoryReader open = DirectoryReader.Open(dir); bool throwOnClose = !Rarely(); AtomicReader wrap = SlowCompositeReaderWrapper.Wrap(open); - FilterAtomicReader reader = new FilterAtomicReaderAnonymousClass(this, wrap, throwOnClose); + FilterAtomicReader reader = new FilterAtomicReaderAnonymousClass(wrap, throwOnClose); //IList<IReaderDisposedListener> listeners = new JCG.List<IReaderDisposedListener>(); // LUCENENET: This list is unused (and was unused in Java) int listenerCount = Random.Next(20); AtomicInt32 count = new AtomicInt32(); @@ -87,7 +87,7 @@ public virtual void TestCloseUnderException() try { - var aaa = reader.Fields; + _ = reader.Fields; Assert.Fail("we are closed"); } catch (Exception ex) when (ex.IsAlreadyClosedException()) @@ -106,14 +106,11 @@ public virtual void TestCloseUnderException() private sealed class FilterAtomicReaderAnonymousClass : FilterAtomicReader { - private readonly TestIndexReaderClose outerInstance; + private readonly bool throwOnClose; - private bool throwOnClose; - - public FilterAtomicReaderAnonymousClass(TestIndexReaderClose outerInstance, AtomicReader wrap, bool throwOnClose) + public FilterAtomicReaderAnonymousClass(AtomicReader wrap, bool throwOnClose) : base(wrap) { - this.outerInstance = outerInstance; this.throwOnClose = throwOnClose; } @@ -129,7 +126,7 @@ protected internal override void DoClose() private sealed class CountListener : IReaderDisposedListener { - internal readonly AtomicInt32 count; + private readonly AtomicInt32 count; public CountListener(AtomicInt32 count) { @@ -150,4 +147,4 @@ public void
OnDispose(IndexReader reader) } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs index 67e9f9f578..8cf738904c 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriter.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriter.cs @@ -85,9 +85,8 @@ namespace Lucene.Net.Index [TestFixture] public class TestIndexWriter : LuceneTestCase { - private static readonly FieldType storedTextType = new FieldType(TextField.TYPE_NOT_STORED); - #if FEATURE_INDEXWRITER_TESTS + private static readonly FieldType storedTextType = new FieldType(TextField.TYPE_NOT_STORED); [Test] public virtual void TestDocCount() @@ -155,11 +154,11 @@ public virtual void TestDocCount() /// /// LUCENENET specific - /// Changed from internal static method to private to remove + /// Changed from internal method to private to remove /// inter-dependencies between TestIndexWriter*.cs, TestAddIndexes.cs /// and TestDeletionPolicy.cs tests /// - private void AddDoc(IndexWriter writer) + private static void AddDoc(IndexWriter writer) { Document doc = new Document(); doc.Add(NewTextField("content", "aaa", Field.Store.NO)); @@ -168,11 +167,11 @@ private void AddDoc(IndexWriter writer) /// /// LUCENENET specific - /// Changed from internal static method to private to remove + /// Changed from internal method to private to remove /// inter-dependencies between TestIndexWriter*.cs, TestAddIndexes.cs /// and TestDeletionPolicy.cs tests /// - private void AddDocWithIndex(IndexWriter writer, int index) + private static void AddDocWithIndex(IndexWriter writer, int index) { Document doc = new Document(); doc.Add(NewField("content", "aaa " + index, storedTextType)); @@ -2935,4 +2934,4 @@ public virtual void TestClosingNRTReaderDoesNotCorruptYourIndex() } #endif } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs index 230d6ecf6d..a3776e7648 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterCommit.cs @@ -111,7 +111,7 @@ public virtual void TestCommitOnClose() public virtual void TestCommitOnCloseAbort() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10)); for (int i = 0; i < 14; i++) { AddDoc(writer); @@ -125,7 +125,9 @@ public virtual void TestCommitOnCloseAbort() Assert.AreEqual(14, hits.Length, "first number of hits"); reader.Dispose(); - writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10)); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(10)); for (int j = 0; j < 17; j++) { AddDoc(writer); @@ -152,7 +154,9 @@ public virtual void TestCommitOnCloseAbort() // Now make sure we can re-open the index, add docs, // and all is good: - writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10)); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + 
.SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(10)); // On abort, writer in fact may write to the same // segments_N file: @@ -225,7 +229,13 @@ public virtual void TestCommitOnCloseDiskUsage() }); } - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10).SetReaderPooling(false).SetMergePolicy(NewLogMergePolicy(10))); + IndexWriter writer = new IndexWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer). + SetMaxBufferedDocs(10). + SetReaderPooling(false). + SetMergePolicy(NewLogMergePolicy(10)) + ); for (int j = 0; j < 30; j++) { AddDocWithIndex(writer, j); @@ -235,7 +245,15 @@ public virtual void TestCommitOnCloseDiskUsage() dir.TrackDiskUsage = true; long startDiskUsage = dir.MaxUsedSizeInBytes; - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergeScheduler(new SerialMergeScheduler()).SetReaderPooling(false).SetMergePolicy(NewLogMergePolicy(10))); + writer = new IndexWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(10) + .SetMergeScheduler(new SerialMergeScheduler()) + .SetReaderPooling(false) + .SetMergePolicy(NewLogMergePolicy(10)) + ); for (int j = 0; j < 1470; j++) { AddDocWithIndex(writer, j); @@ -277,7 +295,12 @@ public virtual void TestCommitOnCloseForceMerge() { ((MockDirectoryWrapper)dir).PreventDoubleWrite = false; } - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(10))); + IndexWriter writer = new IndexWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(10) + .SetMergePolicy(NewLogMergePolicy(10)) + ); for (int j = 0; j < 17; j++) { AddDocWithIndex(writer, j); @@ -338,7 +361,8 @@ public virtual void TestCommitThreadSafety() const int NUM_THREADS = 5; const double RUN_SEC = 0.5; var dir = NewDirectory(); - var w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy())); + var w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy())); TestUtil.ReduceOpenFiles(w.IndexWriter); w.Commit(); var failed = new AtomicBoolean(); @@ -347,7 +371,7 @@ public virtual void TestCommitThreadSafety() for (int i = 0; i < NUM_THREADS; i++) { int finalI = i; - threads[i] = new ThreadAnonymousClass(dir, w, failed, endTime, finalI, NewStringField); + threads[i] = new ThreadAnonymousClass(dir, w, failed, endTime, finalI); threads[i].Start(); } for (int i = 0; i < NUM_THREADS; i++) @@ -361,21 +385,14 @@ public virtual void TestCommitThreadSafety() private sealed class ThreadAnonymousClass : ThreadJob { - private readonly Func<string, string, Field.Store, Field> newStringField; - private Directory dir; - private RandomIndexWriter w; - private AtomicBoolean failed; - private long endTime; - private int finalI; - - /// - /// LUCENENET specific - /// This is passed in because - /// is no longer static.
- /// - public ThreadAnonymousClass(Directory dir, RandomIndexWriter w, AtomicBoolean failed, long endTime, int finalI, Func<string, string, Field.Store, Field> newStringField) + private readonly Directory dir; + private readonly RandomIndexWriter w; + private readonly AtomicBoolean failed; + private readonly long endTime; + private readonly int finalI; + + public ThreadAnonymousClass(Directory dir, RandomIndexWriter w, AtomicBoolean failed, long endTime, int finalI) { - this.newStringField = newStringField; this.dir = dir; this.w = w; this.failed = failed; @@ -389,7 +406,7 @@ public override void Run() { Document doc = new Document(); DirectoryReader r = DirectoryReader.Open(dir); - Field f = newStringField("f", "", Field.Store.NO); + Field f = NewStringField("f", "", Field.Store.NO); doc.Add(f); int count = 0; do @@ -416,7 +433,7 @@ public override void Run() } catch (Exception t) when (t.IsThrowable()) { - failed.Value = (true); + failed.Value = true; throw RuntimeException.Create(t); } } @@ -428,7 +445,12 @@ public virtual void TestForceCommit() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(5))); + IndexWriter writer = new IndexWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + .SetMergePolicy(NewLogMergePolicy(5)) + ); writer.Commit(); for (int i = 0; i < 23; i++) @@ -468,7 +490,8 @@ public virtual void TestFutureCommit() { Directory dir = NewDirectory(); - IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE)); + IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE)); Document doc = new Document(); w.AddDocument(doc); @@ -497,7 +520,9 @@ public virtual void TestFutureCommit() Assert.IsNotNull(commit); - w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE).SetIndexCommit(commit)); + w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetIndexDeletionPolicy(NoDeletionPolicy.INSTANCE) + .SetIndexCommit(commit)); Assert.AreEqual(1, w.NumDocs); @@ -536,9 +561,7 @@ public virtual void TestZeroCommits() DirectoryReader.ListCommits(dir); Assert.Fail("listCommits should have thrown an exception over empty index"); } -#pragma warning disable 168 - catch (IndexNotFoundException e) -#pragma warning restore 168 + catch (IndexNotFoundException) { // that's expected !
} @@ -554,7 +577,12 @@ public virtual void TestPrepareCommit() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(5))); + IndexWriter writer = new IndexWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + .SetMergePolicy(NewLogMergePolicy(5)) + ); writer.Commit(); for (int i = 0; i < 23; i++) @@ -615,7 +643,12 @@ public virtual void TestPrepareCommitRollback() ((MockDirectoryWrapper)dir).PreventDoubleWrite = false; } - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(5))); + IndexWriter writer = new IndexWriter( + dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + .SetMergePolicy(NewLogMergePolicy(5)) + ); writer.Commit(); for (int i = 0; i < 23; i++) @@ -686,7 +719,7 @@ public virtual void TestPrepareCommitNoChanges() public virtual void TestCommitUserData() { Directory dir = NewDirectory(); - IndexWriter w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); + IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); for (int j = 0; j < 17; j++) { AddDoc(w); @@ -698,7 +731,7 @@ public virtual void TestCommitUserData() Assert.AreEqual(0, r.IndexCommit.UserData.Count); r.Dispose(); - w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); + w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); for (int j = 0; j < 17; j++) { AddDoc(w); @@ -724,7 +757,7 @@ public virtual void TestCommitUserData() /// Copied from /// to remove inter-class dependency on /// - private void AddDoc(IndexWriter writer) + private static void AddDoc(IndexWriter writer) { Document doc = new Document(); doc.Add(NewTextField("content", "aaa", Field.Store.NO)); @@ -733,16 +766,15 @@ private void AddDoc(IndexWriter writer) /// /// LUCENENET specific - /// Copied from + /// Copied from /// to remove inter-class dependency on . /// - private void AddDocWithIndex(IndexWriter writer, int index) + private static void AddDocWithIndex(IndexWriter writer, int index) { Document doc = new Document(); doc.Add(NewField("content", "aaa " + index, storedTextType)); doc.Add(NewField("id", "" + index, storedTextType)); writer.AddDocument(doc); } - } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs index 97fb748ee2..9c5a9f6d4e 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterConfig.cs @@ -135,8 +135,8 @@ public virtual void TestSettersChaining() // IndexWriterConfig return type and second with LiveIndexWriterConfig. The ones // from LiveIndexWriterConfig are marked 'synthetic', so just collect them and // assert in the end that we also received them from IWC. - // In C# we do not have them marked synthetic so we look at the declaring type instead. - if (m.DeclaringType.Name == "LiveIndexWriterConfig") + // LUCENENET: In C# we do not have them marked synthetic so we look at the declaring type instead. 
+ if (m.DeclaringType?.Name == "LiveIndexWriterConfig") { liveSetters.Add(m.Name); } @@ -158,7 +158,7 @@ public virtual void TestReuse() Directory dir = NewDirectory(); // test that IWC cannot be reused across two IWs IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); - (new RandomIndexWriter(Random, dir, conf)).Dispose(); + new RandomIndexWriter(Random, dir, conf).Dispose(); // this should fail try @@ -188,8 +188,8 @@ public virtual void TestReuse() // if it's cloned in advance, it should be ok conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); - (new RandomIndexWriter(Random, dir, (IndexWriterConfig)conf.Clone())).Dispose(); - (new RandomIndexWriter(Random, dir, (IndexWriterConfig)conf.Clone())).Dispose(); + new RandomIndexWriter(Random, dir, (IndexWriterConfig)conf.Clone()).Dispose(); + new RandomIndexWriter(Random, dir, (IndexWriterConfig)conf.Clone()).Dispose(); dir.Dispose(); } @@ -237,13 +237,13 @@ public virtual void TestConstants() [Test] public virtual void TestToString() { - string str = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).ToString(); - foreach (System.Reflection.FieldInfo f in (typeof(IndexWriterConfig).GetFields( + string str = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).ToString(); + foreach (System.Reflection.FieldInfo f in typeof(IndexWriterConfig).GetFields( BindingFlags.Instance | BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.DeclaredOnly | - BindingFlags.Static))) + BindingFlags.Static)) { if (f.IsStatic) { @@ -500,4 +500,4 @@ public virtual void TestLiveChangeToCFS() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs index b60622718d..a64e9af0dc 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterDelete.cs @@ -15,8 +15,9 @@ using System.Text; using System.Threading; using JCG = J2N.Collections.Generic; -using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; +// ReSharper disable once RedundantUsingDirective - keep until we have an analyzer to look out for accidental NUnit asserts +using Assert = Lucene.Net.TestFramework.Assert; namespace Lucene.Net.Index { @@ -62,13 +63,14 @@ public class TestIndexWriterDelete : LuceneTestCase [Test] public virtual void TestSimpleCase() { - string[] keywords = new string[] { "1", "2" }; - string[] unindexed = new string[] { "Netherlands", "Italy" }; - string[] unstored = new string[] { "Amsterdam has lots of bridges", "Venice has lots of canals" }; - string[] text = new string[] { "Amsterdam", "Venice" }; + string[] keywords = { "1", "2" }; + string[] unindexed = { "Netherlands", "Italy" }; + string[] unstored = { "Amsterdam has lots of bridges", "Venice has lots of canals" }; + string[] text = { "Amsterdam", "Venice" }; Directory dir = NewDirectory(); - IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDeleteTerms(1)); + IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDeleteTerms(1)); FieldType custom1 = new FieldType(); custom1.IsStored = true; @@ -110,9 +112,11 @@ public virtual void TestSimpleCase() public virtual void TestNonRAMDelete() { Directory dir = NewDirectory(); - IndexWriter 
modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2)); + IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)) + .SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2)); int id = 0; - int value = 100; + const int value = 100; for (int i = 0; i < 7; i++) { @@ -144,7 +148,8 @@ public virtual void TestNonRAMDelete() public virtual void TestMaxBufferedDeletes() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDeleteTerms(1)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDeleteTerms(1)); writer.AddDocument(new Document()); writer.DeleteDocuments(new Term("foobar", "1")); @@ -166,9 +171,11 @@ public virtual void TestRAMDeletes() Console.WriteLine("TEST: t=" + t); } Directory dir = NewDirectory(); - IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(4).SetMaxBufferedDeleteTerms(4)); + IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(4) + .SetMaxBufferedDeleteTerms(4)); int id = 0; - int value = 100; + const int value = 100; AddDoc(modifier, ++id, value); if (0 == t) @@ -211,7 +218,9 @@ public virtual void TestRAMDeletes() public virtual void TestBothDeletes() { Directory dir = NewDirectory(); - IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(100).SetMaxBufferedDeleteTerms(100)); + IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(100) + .SetMaxBufferedDeleteTerms(100)); int id = 0; int value = 100; @@ -248,10 +257,12 @@ public virtual void TestBothDeletes() public virtual void TestBatchDeletes() { Directory dir = NewDirectory(); - IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2)); + IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2) + .SetMaxBufferedDeleteTerms(2)); int id = 0; - int value = 100; + const int value = 100; for (int i = 0; i < 7; i++) { @@ -293,10 +304,12 @@ public virtual void TestBatchDeletes() public virtual void TestDeleteAll() { Directory dir = NewDirectory(); - IndexWriter modifier = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2)); + IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2) + .SetMaxBufferedDeleteTerms(2)); int id = 0; - int value = 100; + const int value = 100; for (int i = 0; i < 7; i++) { @@ -347,12 
+360,12 @@ public virtual void TestDeleteAllNoDeadLock() for (int i = 0; i < numThreads; i++) { int offset = i; - threads[i] = new ThreadAnonymousClass(this, modifier, latch, doneLatch, offset); + threads[i] = new ThreadAnonymousClass(modifier, latch, doneLatch, offset); threads[i].Start(); } latch.Signal(); //Wait for 1 millisecond - while (!doneLatch.Wait(new TimeSpan(0, 0, 0, 0, 1))) + while (!doneLatch.Wait(TimeSpan.FromMilliseconds(1))) { modifier.DeleteAll(); if (Verbose) @@ -379,16 +392,13 @@ public virtual void TestDeleteAllNoDeadLock() private sealed class ThreadAnonymousClass : ThreadJob { - private readonly TestIndexWriterDelete outerInstance; - private readonly RandomIndexWriter modifier; private readonly CountdownEvent latch; private readonly CountdownEvent doneLatch; private readonly int offset; - public ThreadAnonymousClass(TestIndexWriterDelete outerInstance, RandomIndexWriter modifier, CountdownEvent latch, CountdownEvent doneLatch, int offset) + public ThreadAnonymousClass(RandomIndexWriter modifier, CountdownEvent latch, CountdownEvent doneLatch, int offset) { - this.outerInstance = outerInstance; this.modifier = modifier; this.latch = latch; this.doneLatch = doneLatch; @@ -398,7 +408,7 @@ public ThreadAnonymousClass(TestIndexWriterDelete outerInstance, RandomIndexWrit public override void Run() { int id = offset * 1000; - int value = 100; + const int value = 100; try { latch.Wait(); @@ -439,10 +449,12 @@ public override void Run() public virtual void TestDeleteAllRollback() { Directory dir = NewDirectory(); - IndexWriter modifier = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2)); + IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2) + .SetMaxBufferedDeleteTerms(2)); int id = 0; - int value = 100; + const int value = 100; for (int i = 0; i < 7; i++) { @@ -476,10 +488,12 @@ public virtual void TestDeleteAllRollback() public virtual void TestDeleteAllNRT() { Directory dir = NewDirectory(); - IndexWriter modifier = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2).SetMaxBufferedDeleteTerms(2)); + IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDocs(2) + .SetMaxBufferedDeleteTerms(2)); int id = 0; - int value = 100; + const int value = 100; for (int i = 0; i < 7; i++) { @@ -567,8 +581,8 @@ public virtual void TestUpdatesOnDiskFull() private void DoTestOperationsOnDiskFull(bool updates) { Term searchTerm = new Term("content", "aaa"); - int START_COUNT = 157; - int END_COUNT = 144; + const int START_COUNT = 157; + const int END_COUNT = 144; // First build up a starting index: MockDirectoryWrapper startDir = NewMockDirectory(); @@ -611,11 +625,8 @@ private void DoTestOperationsOnDiskFull(bool updates) .SetMaxBufferedDeleteTerms(1000) .SetMergeScheduler(new ConcurrentMergeScheduler()); - IConcurrentMergeScheduler scheduler = config.MergeScheduler as IConcurrentMergeScheduler; - if (scheduler != null) - { - scheduler.SetSuppressExceptions(); - } + // LUCENENET note: Original Java code does an unsafe cast here, so we should as well. Do not change to a safe type check. 
+ ((IConcurrentMergeScheduler)config.MergeScheduler).SetSuppressExceptions(); IndexWriter modifier = new IndexWriter(dir, config); @@ -826,17 +837,20 @@ private void DoTestOperationsOnDiskFull(bool updates) [Test] public virtual void TestErrorAfterApplyDeletes() { - Failure failure = new FailureAnonymousClass(this); + Failure failure = new FailureAnonymousClass(); // create a couple of files - string[] keywords = new string[] { "1", "2" }; - string[] unindexed = new string[] { "Netherlands", "Italy" }; - string[] unstored = new string[] { "Amsterdam has lots of bridges", "Venice has lots of canals" }; - string[] text = new string[] { "Amsterdam", "Venice" }; + string[] keywords = { "1", "2" }; + string[] unindexed = { "Netherlands", "Italy" }; + string[] unstored = { "Amsterdam has lots of bridges", "Venice has lots of canals" }; + string[] text = { "Amsterdam", "Venice" }; MockDirectoryWrapper dir = NewMockDirectory(); - IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetMaxBufferedDeleteTerms(2).SetReaderPooling(false).SetMergePolicy(NewLogMergePolicy())); + IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)) + .SetMaxBufferedDeleteTerms(2) + .SetReaderPooling(false) + .SetMergePolicy(NewLogMergePolicy())); MergePolicy lmp = modifier.Config.MergePolicy; lmp.NoCFSRatio = 1.0; @@ -893,7 +907,7 @@ public virtual void TestErrorAfterApplyDeletes() { Console.WriteLine("TEST: add empty doc"); } - Document doc_ = new Document(); + Document doc_ = new Document(); // LUCENENET: renamed to doc_ to avoid conflict with the local variable doc above modifier.AddDocument(doc_); // commit the changes, the buffered deletes, and the new doc @@ -941,18 +955,9 @@ public virtual void TestErrorAfterApplyDeletes() private sealed class FailureAnonymousClass : Failure { - private readonly TestIndexWriterDelete outerInstance; - - public FailureAnonymousClass(TestIndexWriterDelete outerInstance) - { - this.outerInstance = outerInstance; - sawMaybe = false; - failed = false; - } - - internal bool sawMaybe; - internal bool failed; - internal Thread thread; + private bool sawMaybe = false; + private bool failed = false; + private Thread thread; public override Failure Reset() { @@ -973,9 +978,9 @@ public override void Eval(MockDirectoryWrapper dir) { // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)] // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods. 
- bool seen = + bool seen = StackTraceHelper.DoesStackTraceContainMethod("ApplyDeletesAndUpdates") || - StackTraceHelper.DoesStackTraceContainMethod("SlowFileExists"); + StackTraceHelper.DoesStackTraceContainMethod("SlowFileExists"); if (!seen) { @@ -1001,7 +1006,7 @@ public override void Eval(MockDirectoryWrapper dir) Console.WriteLine(Environment.StackTrace); } sawMaybe = true; - } + } } } } @@ -1011,14 +1016,14 @@ public override void Eval(MockDirectoryWrapper dir) [Test] public virtual void TestErrorInDocsWriterAdd() { - Failure failure = new FailureAnonymousClass2(this); + Failure failure = new FailureAnonymousClass2(); // create a couple of files - string[] keywords = new string[] { "1", "2" }; - string[] unindexed = new string[] { "Netherlands", "Italy" }; - string[] unstored = new string[] { "Amsterdam has lots of bridges", "Venice has lots of canals" }; - string[] text = new string[] { "Amsterdam", "Venice" }; + string[] keywords = { "1", "2" }; + string[] unindexed = { "Netherlands", "Italy" }; + string[] unstored = { "Amsterdam has lots of bridges", "Venice has lots of canals" }; + string[] text = { "Amsterdam", "Venice" }; MockDirectoryWrapper dir = NewMockDirectory(); IndexWriter modifier = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false))); @@ -1056,15 +1061,7 @@ public virtual void TestErrorInDocsWriterAdd() private sealed class FailureAnonymousClass2 : Failure { - private readonly TestIndexWriterDelete outerInstance; - - public FailureAnonymousClass2(TestIndexWriterDelete outerInstance) - { - this.outerInstance = outerInstance; - failed = false; - } - - internal bool failed; + private bool failed = false; public override Failure Reset() { @@ -1150,11 +1147,14 @@ public virtual void TestIndexingThenDeleting() Random r = Random; Directory dir = NewDirectory(); // note this test explicitly disables payloads - Analyzer analyzer = Analyzer.NewAnonymous(createComponents: (fieldName, reader) => + Analyzer analyzer = Analyzer.NewAnonymous(createComponents: (_, reader) => { return new TokenStreamComponents(new MockTokenizer(reader, MockTokenizer.WHITESPACE, true)); }); - IndexWriter w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetRAMBufferSizeMB(1.0).SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH)); + IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetRAMBufferSizeMB(1.0) + .SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetMaxBufferedDeleteTerms(IndexWriterConfig.DISABLE_AUTO_FLUSH)); Document doc = new Document(); doc.Add(NewTextField("field", "go 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20", Field.Store.NO)); int num = AtLeast(3); @@ -1203,7 +1203,11 @@ public virtual void TestFlushPushedDeletesByRAM() // Cannot use RandomIndexWriter because we don't want to // ever call commit() for this test: // note: tiny rambuffer used, as with a 1MB buffer the test is too slow (flush @ 128,999) - IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetRAMBufferSizeMB(0.1f).SetMaxBufferedDocs(1000).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).SetReaderPooling(false)); + IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetRAMBufferSizeMB(0.1f) + .SetMaxBufferedDocs(1000) + .SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES) 
+ .SetReaderPooling(false)); int count = 0; while (true) { @@ -1256,7 +1260,12 @@ public virtual void TestFlushPushedDeletesByCount() // Cannot use RandomIndexWriter because we don't want to // ever call commit() for this test: int flushAtDelCount = AtLeast(1020); - IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDeleteTerms(flushAtDelCount).SetMaxBufferedDocs(1000).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).SetReaderPooling(false)); + IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDeleteTerms(flushAtDelCount) + .SetMaxBufferedDocs(1000) + .SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES) + .SetReaderPooling(false)); int count = 0; while (true) { @@ -1304,7 +1313,11 @@ public virtual void TestApplyDeletesOnFlush() AtomicInt32 docsInSegment = new AtomicInt32(); AtomicBoolean closing = new AtomicBoolean(); AtomicBoolean sawAfterFlush = new AtomicBoolean(); - IndexWriter w = new IndexWriterAnonymousClass(this, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetRAMBufferSizeMB(0.5).SetMaxBufferedDocs(-1).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES).SetReaderPooling(false), docsInSegment, closing, sawAfterFlush); + IndexWriter w = new IndexWriterAnonymousClass(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetRAMBufferSizeMB(0.5) + .SetMaxBufferedDocs(-1) + .SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES) + .SetReaderPooling(false), docsInSegment, closing, sawAfterFlush); int id = 0; while (true) { @@ -1333,7 +1346,7 @@ public virtual void TestApplyDeletesOnFlush() } id++; } - closing.Value = (true); + closing.Value = true; Assert.IsTrue(sawAfterFlush); w.Dispose(); dir.Dispose(); @@ -1341,16 +1354,13 @@ public virtual void TestApplyDeletesOnFlush() private sealed class IndexWriterAnonymousClass : IndexWriter { - private readonly TestIndexWriterDelete outerInstance; - private readonly AtomicInt32 docsInSegment; private readonly AtomicBoolean closing; private readonly AtomicBoolean sawAfterFlush; - public IndexWriterAnonymousClass(TestIndexWriterDelete outerInstance, Directory dir, IndexWriterConfig setReaderPooling, AtomicInt32 docsInSegment, AtomicBoolean closing, AtomicBoolean sawAfterFlush) + public IndexWriterAnonymousClass(Directory dir, IndexWriterConfig setReaderPooling, AtomicInt32 docsInSegment, AtomicBoolean closing, AtomicBoolean sawAfterFlush) : base(dir, setReaderPooling) { - this.outerInstance = outerInstance; this.docsInSegment = docsInSegment; this.closing = closing; this.sawAfterFlush = sawAfterFlush; @@ -1360,7 +1370,7 @@ protected override void DoAfterFlush() { Assert.IsTrue(closing || docsInSegment >= 7, "only " + docsInSegment + " in segment"); docsInSegment.Value = 0; - sawAfterFlush.Value = (true); + sawAfterFlush.Value = true; } } @@ -1441,4 +1451,4 @@ public virtual void TestTryDeleteDocument() d.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs index 9cd853ae35..e54ed5c0b6 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterExceptions.cs @@ -12,6 +12,7 @@ using RandomizedTesting.Generators; using System; using System.Collections.Generic; +using 
System.Globalization; using System.IO; using System.Threading; using Assert = Lucene.Net.TestFramework.Assert; @@ -74,8 +75,8 @@ public class TestIndexWriterExceptions : LuceneTestCase { private class DocCopyIterator : IEnumerable<Document> { - internal readonly Document doc; - internal readonly int count; + private readonly Document doc; + private readonly int count; /* private field types */ /* private field types */ @@ -213,8 +214,8 @@ public override void Run() { Console.WriteLine(Thread.CurrentThread.Name + ": TEST: IndexerThread: cycle"); } - outerInstance.doFail.Value = (this.Instance); - string id = "" + r.Next(50); + outerInstance.doFail.Value = this.Instance; + string id = r.Next(50).ToString(CultureInfo.InvariantCulture); // LUCENENET: using InvariantCulture ToString overload instead of implicit `"" + r.Next(50)` idField.SetStringValue(id); Term idTerm = new Term("id", id); try @@ -256,7 +257,7 @@ public override void Run() break; } - outerInstance.doFail.Value = (null); + outerInstance.doFail.Value = null; // After a possible exception (above) I should be able // to add a new document without hitting an @@ -303,13 +304,14 @@ public void Apply(string name) if (Verbose) { Console.WriteLine(Thread.CurrentThread.Name + ": NOW FAIL: " + name); - Console.WriteLine((new Exception()).StackTrace); + Console.WriteLine(new Exception().StackTrace); } throw new TestPoint1Exception(Thread.CurrentThread.Name + ": intentionally failing at " + name); // LUCENENET TODO: Need to change this to RuntimeException once we add a custom (or flagged) exception that is created by RuntimeException.Create } } } + // LUCENENET specific exception type private class TestPoint1Exception : Exception, IRuntimeException { public TestPoint1Exception(string message) : base(message) { @@ -507,7 +509,7 @@ public virtual void TestExceptionDocumentsWriterInit() public virtual void TestExceptionJustBeforeFlush() { Directory dir = NewDirectory(); - IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2), new TestPoint1(this)); + IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2), new TestPoint1(this)); Document doc = new Document(); doc.Add(NewTextField("field", "a field", Field.Store.YES)); w.AddDocument(doc); @@ -519,7 +521,7 @@ public virtual void TestExceptionJustBeforeFlush() return new TokenStreamComponents(tokenizer, new CrashingFilter(fieldName, tokenizer)); }, reuseStrategy: Analyzer.PER_FIELD_REUSE_STRATEGY); - Document crashDoc = new Document(); + Document crashDoc = new Document(); crashDoc.Add(NewTextField("crash", "do it on token 4", Field.Store.YES)); try { @@ -657,7 +659,7 @@ public TokenFilterAnonymousClass(MockTokenizer tokenizer) private int count; - public sealed override bool IncrementToken() + public override bool IncrementToken() { if (count++ == 5) { @@ -694,7 +696,7 @@ public override void Eval(MockDirectoryWrapper dir) { // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)] // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods.
- bool sawAppend = StackTraceHelper.DoesStackTraceContainMethod(typeof(FreqProxTermsWriterPerField).Name, "Flush"); + bool sawAppend = StackTraceHelper.DoesStackTraceContainMethod(nameof(FreqProxTermsWriterPerField), "Flush"); bool sawFlush = StackTraceHelper.DoesStackTraceContainMethod("Flush"); if (sawAppend && sawFlush && count++ >= 30) @@ -716,9 +718,10 @@ public virtual void TestDocumentsWriterAbort() failure.SetDoFail(); dir.FailOn(failure); - IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2)); Document doc = new Document(); - string contents = "aa bb cc dd ee ff gg hh ii jj kk"; + const string contents = "aa bb cc dd ee ff gg hh ii jj kk"; doc.Add(NewTextField("content", contents, Field.Store.NO)); bool hitError = false; for (int i = 0; i < 200; i++) @@ -759,7 +762,8 @@ public virtual void TestDocumentsWriterExceptions() Console.WriteLine("TEST: cycle i=" + i); } Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMergePolicy(NewLogMergePolicy())); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMergePolicy(NewLogMergePolicy())); // don't allow a sudden merge to clean up the deleted // doc below: @@ -824,7 +828,8 @@ public virtual void TestDocumentsWriterExceptions() } reader.Dispose(); - writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10)); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(10)); doc = new Document(); doc.Add(NewField("contents", "here are some contents", DocCopyIterator.custom5)); for (int j = 0; j < 17; j++) @@ -870,7 +875,9 @@ public virtual void TestDocumentsWriterExceptionThreads() Directory dir = NewDirectory(); { - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(-1).SetMergePolicy(Random.NextBoolean() ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(-1) + .SetMergePolicy(Random.NextBoolean() ? NoMergePolicy.COMPOUND_FILES : NoMergePolicy.NO_COMPOUND_FILES)); // don't use a merge policy here they depend on the DWPThreadPool and its max thread states etc. int finalI = i; @@ -912,7 +919,8 @@ public virtual void TestDocumentsWriterExceptionThreads() Assert.AreEqual(NUM_THREAD * NUM_ITER, numDel); - IndexWriter indWriter = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10)); + IndexWriter indWriter = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(10)); Document doc = new Document(); doc.Add(NewField("contents", "here are some contents", DocCopyIterator.custom5)); for (int j = 0; j < 17; j++) @@ -1009,8 +1017,7 @@ public override void Eval(MockDirectoryWrapper dir) { // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)] // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods. 
- bool foundMethod = - StackTraceHelper.DoesStackTraceContainMethod(typeof(MockDirectoryWrapper).Name, "Sync"); + bool foundMethod = StackTraceHelper.DoesStackTraceContainMethod(nameof(MockDirectoryWrapper), "Sync"); if (m_doFail && foundMethod) { @@ -1095,9 +1102,9 @@ public override void Eval(MockDirectoryWrapper dir) { // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)] // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods. - bool isCommit = StackTraceHelper.DoesStackTraceContainMethod(typeof(SegmentInfos).Name, stage); - bool isDelete = StackTraceHelper.DoesStackTraceContainMethod(typeof(MockDirectoryWrapper).Name, "DeleteFile"); - bool isInGlobalFieldMap = StackTraceHelper.DoesStackTraceContainMethod(typeof(SegmentInfos).Name, "WriteGlobalFieldMap"); + bool isCommit = StackTraceHelper.DoesStackTraceContainMethod(nameof(SegmentInfos), stage); + bool isDelete = StackTraceHelper.DoesStackTraceContainMethod(nameof(MockDirectoryWrapper), "DeleteFile"); + bool isInGlobalFieldMap = StackTraceHelper.DoesStackTraceContainMethod(nameof(SegmentInfos), "WriteGlobalFieldMap"); if (isInGlobalFieldMap && dontFailDuringGlobalFieldMap) { @@ -1122,7 +1129,11 @@ public override void Eval(MockDirectoryWrapper dir) [Test] public virtual void TestExceptionsDuringCommit() { - FailOnlyInCommit[] failures = new FailOnlyInCommit[] { new FailOnlyInCommit(false, FailOnlyInCommit.PREPARE_STAGE), new FailOnlyInCommit(true, FailOnlyInCommit.PREPARE_STAGE), new FailOnlyInCommit(false, FailOnlyInCommit.FINISH_STAGE) }; + FailOnlyInCommit[] failures = new FailOnlyInCommit[] { + new FailOnlyInCommit(false, FailOnlyInCommit.PREPARE_STAGE), + new FailOnlyInCommit(true, FailOnlyInCommit.PREPARE_STAGE), + new FailOnlyInCommit(false, FailOnlyInCommit.FINISH_STAGE) + }; foreach (FailOnlyInCommit failure in failures) { @@ -1176,11 +1187,7 @@ public virtual void TestForceMergeExceptions() } MockDirectoryWrapper dir = new MockDirectoryWrapper(Random, new RAMDirectory(startDir, NewIOContext(Random))); conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new ConcurrentMergeScheduler()); - var scheduler = conf.MergeScheduler as IConcurrentMergeScheduler; - if (scheduler != null) - { - scheduler.SetSuppressExceptions(); - } + ((IConcurrentMergeScheduler)conf.MergeScheduler).SetSuppressExceptions(); w = new IndexWriter(dir, conf); dir.RandomIOExceptionRate = 0.5; try @@ -1207,7 +1214,8 @@ public virtual void TestOutOfMemoryErrorCausesCloseToFail() { AtomicBoolean thrown = new AtomicBoolean(false); Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetInfoStream(new TOOMInfoStreamAnonymousClass(thrown))); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetInfoStream(new TOOMInfoStreamAnonymousClass(thrown))); try { @@ -1332,10 +1340,10 @@ public virtual void TestSegmentsChecksumError() Console.WriteLine(e.StackTrace); Assert.Fail("segmentInfos failed to retry fallback to correct segments_N file"); } - reader.Dispose(); + reader!.Dispose(); // LUCENENET [!]: using null suppression to match Java behavior // should remove the corrumpted segments_N - (new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null))).Dispose(); + new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, null)).Dispose(); 
dir.Dispose(); } @@ -1403,7 +1411,9 @@ public virtual void TestSimulatedCorruptIndex2() dir.CheckIndexOnDispose = false; // we are corrupting it! IndexWriter writer = null; - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy(true)).SetUseCompoundFile(true)); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(NewLogMergePolicy(true)) + .SetUseCompoundFile(true)); MergePolicy lmp = writer.Config.MergePolicy; // Force creation of CFS: lmp.NoCFSRatio = 1.0; @@ -1503,7 +1513,7 @@ public virtual void TestSimulatedCrashedWriter() { Assert.Fail("reader failed to open on a crashed index"); } - reader.Dispose(); + reader!.Dispose(); // LUCENENET [!]: using null suppression to match Java behavior try { @@ -1529,7 +1539,10 @@ public virtual void TestSimulatedCrashedWriter() [Test] public virtual void TestTermVectorExceptions() { - FailOnTermVectors[] failures = new FailOnTermVectors[] { new FailOnTermVectors(FailOnTermVectors.AFTER_INIT_STAGE), new FailOnTermVectors(FailOnTermVectors.INIT_STAGE) }; + FailOnTermVectors[] failures = new FailOnTermVectors[] { + new FailOnTermVectors(FailOnTermVectors.AFTER_INIT_STAGE), + new FailOnTermVectors(FailOnTermVectors.INIT_STAGE) + }; int num = AtLeast(1); for (int j = 0; j < num; j++) { @@ -1619,7 +1632,7 @@ public override void Eval(MockDirectoryWrapper dir) { // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)] // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods. - bool fail = StackTraceHelper.DoesStackTraceContainMethod(typeof(TermVectorsConsumer).Name, stage); + bool fail = StackTraceHelper.DoesStackTraceContainMethod(nameof(TermVectorsConsumer), stage); if (fail) { @@ -1814,7 +1827,7 @@ public virtual void TestExceptionOnCtor() uoe.doFail = true; try { - new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, null)); + _ = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, null)); // LUCENENET: discard result Assert.Fail("should have gotten a UOE"); } catch (Exception expected) when (expected.IsUnsupportedOperationException()) @@ -2030,9 +2043,7 @@ public virtual void TestTooManyFileException() { iw = new IndexWriter(dir, iwc); } -#pragma warning disable 168 - catch (CorruptIndexException ex) -#pragma warning restore 168 + catch (CorruptIndexException) { // Exceptions are fine - we are running out of file handlers here continue; @@ -2059,7 +2070,6 @@ public virtual void TestTooManyFileException() private sealed class FailureAnonymousClass : Failure { - public override Failure Reset() { m_doFail = false; @@ -2125,7 +2135,7 @@ public virtual void TestNoLostDeletesOrUpdates() for (int i = 0; i < numDocs; i++) { Document doc = new Document(); - doc.Add(new StringField("id", (docBase + i).ToString(), Field.Store.NO)); + doc.Add(new StringField("id", (docBase + i).ToString(CultureInfo.InvariantCulture), Field.Store.NO)); if (DefaultCodecSupportsDocValues) { doc.Add(new NumericDocValuesField("f", 1L)); @@ -2143,7 +2153,7 @@ public virtual void TestNoLostDeletesOrUpdates() // TODO: we could also install an infoStream and try // to fail in "more evil" places inside BDS - shouldFail.Value = (true); + shouldFail.Value = true; bool doClose = false; try @@ -2163,20 +2173,20 @@ public virtual void TestNoLostDeletesOrUpdates() } if (Random.NextBoolean()) // update only numeric field { - 
w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString()), "f", value); - w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString()), "cf", value * 2); + w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString(CultureInfo.InvariantCulture)), "f", value); + w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString(CultureInfo.InvariantCulture)), "cf", value * 2); } else if (Random.NextBoolean()) { - w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString()), "bf", TestBinaryDocValuesUpdates.ToBytes(value)); - w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString()), "bcf", TestBinaryDocValuesUpdates.ToBytes(value * 2)); + w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString(CultureInfo.InvariantCulture)), "bf", TestBinaryDocValuesUpdates.ToBytes(value)); + w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString(CultureInfo.InvariantCulture)), "bcf", TestBinaryDocValuesUpdates.ToBytes(value * 2)); } else { - w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString()), "f", value); - w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString()), "cf", value * 2); - w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString()), "bf", TestBinaryDocValuesUpdates.ToBytes(value)); - w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString()), "bcf", TestBinaryDocValuesUpdates.ToBytes(value * 2)); + w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString(CultureInfo.InvariantCulture)), "f", value); + w.UpdateNumericDocValue(new Term("id", (docBase + i).ToString(CultureInfo.InvariantCulture)), "cf", value * 2); + w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString(CultureInfo.InvariantCulture)), "bf", TestBinaryDocValuesUpdates.ToBytes(value)); + w.UpdateBinaryDocValue(new Term("id", (docBase + i).ToString(CultureInfo.InvariantCulture)), "bcf", TestBinaryDocValuesUpdates.ToBytes(value * 2)); } } @@ -2185,10 +2195,10 @@ public virtual void TestNoLostDeletesOrUpdates() { if (Verbose) { - Console.WriteLine(" delete id=" + (docBase + i).ToString()); + Console.WriteLine(" delete id=" + (docBase + i).ToString(CultureInfo.InvariantCulture)); } deleteCount++; - w.DeleteDocuments(new Term("id", "" + (docBase + i).ToString())); + w.DeleteDocuments(new Term("id", (docBase + i).ToString(CultureInfo.InvariantCulture))); // LUCENENET: using InvariantCulture ToString overload instead of implicit `"" +` conversion } } } @@ -2283,8 +2293,8 @@ public virtual void TestNoLostDeletesOrUpdates() { if (liveDocs is null || liveDocs.Get(i)) { - Assert.AreEqual(cf.Get(i), f.Get(i) * 2, "doc=" + (docBase + i).ToString()); - Assert.AreEqual(TestBinaryDocValuesUpdates.GetValue(bcf, i, scratch), TestBinaryDocValuesUpdates.GetValue(bf, i, scratch) * 2, "doc=" + (docBase + i).ToString()); + Assert.AreEqual(cf.Get(i), f.Get(i) * 2, "doc=" + (docBase + i)); + Assert.AreEqual(TestBinaryDocValuesUpdates.GetValue(bcf, i, scratch), TestBinaryDocValuesUpdates.GetValue(bf, i, scratch) * 2, "doc=" + (docBase + i)); } } } @@ -2351,9 +2361,9 @@ public override void Eval(MockDirectoryWrapper dir) if (Verbose) { Console.WriteLine("TEST: now fail; thread=" + Thread.CurrentThread.Name + " exc:"); - Console.WriteLine((new Exception()).StackTrace); + Console.WriteLine(new Exception().StackTrace); } - shouldFail.Value = (false); + shouldFail.Value = false; throw new FakeIOException(); } } @@ -2364,7 +2374,7 @@ private sealed class ConcurrentMergeSchedulerAnonymousClass : ConcurrentMergeSch protected override void HandleMergeException(Exception exc) { // suppress only 
FakeIOException: - if (!(exc is FakeIOException)) + if (exc is not FakeIOException) { base.HandleMergeException(exc); } @@ -2490,9 +2500,7 @@ public virtual void TestRandomExceptionDuringRollback() { iw.Rollback(); } -#pragma warning disable 168 - catch (FakeIOException expected) -#pragma warning restore 168 + catch (FakeIOException) { } @@ -2524,11 +2532,11 @@ public override void Eval(MockDirectoryWrapper dir) if (Verbose) { Console.WriteLine("TEST: now fail; thread=" + Thread.CurrentThread.Name + " exc:"); - Console.WriteLine((new Exception()).StackTrace); + Console.WriteLine(new Exception().StackTrace); } throw new FakeIOException(); } } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs index 880268e598..c7999f3932 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterForceMerge.cs @@ -1,7 +1,6 @@ using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using NUnit.Framework; -using System; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -52,7 +51,10 @@ public virtual void TestPartialMerge() LogDocMergePolicy ldmp = new LogDocMergePolicy(); ldmp.MinMergeDocs = 1; ldmp.MergeFactor = 5; - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2).SetMergePolicy(ldmp)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetMaxBufferedDocs(2) + .SetMergePolicy(ldmp)); for (int j = 0; j < numDocs; j++) { writer.AddDocument(doc); @@ -65,7 +67,8 @@ public virtual void TestPartialMerge() ldmp = new LogDocMergePolicy(); ldmp.MergeFactor = 5; - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(ldmp)); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(ldmp)); writer.ForceMerge(3); writer.Dispose(); @@ -147,7 +150,9 @@ public virtual void TestMaxNumSegments2() public virtual void TestForceMergeTempSpaceUsage() { MockDirectoryWrapper dir = NewMockDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy())); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(10) + .SetMergePolicy(NewLogMergePolicy())); if (Verbose) { Console.WriteLine("TEST: config1=" + writer.Config); @@ -185,7 +190,10 @@ public virtual void TestForceMergeTempSpaceUsage() // Import to use same term index interval else a // smaller one here could increase the disk usage and // cause a false failure: - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetTermIndexInterval(termIndexInterval).SetMergePolicy(NewLogMergePolicy())); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetTermIndexInterval(termIndexInterval) + .SetMergePolicy(NewLogMergePolicy())); writer.ForceMerge(1); writer.Dispose(); long maxDiskUsage = dir.MaxUsedSizeInBytes; @@ -202,7 +210,10 @@ public virtual void TestBackgroundForceMerge() 
Directory dir = NewDirectory(); for (int pass = 0; pass < 2; pass++) { - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(51))); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetMaxBufferedDocs(2) + .SetMergePolicy(NewLogMergePolicy(51))); Document doc = new Document(); doc.Add(NewStringField("field", "aaa", Field.Store.NO)); for (int i = 0; i < 100; i++) @@ -242,23 +253,15 @@ public virtual void TestBackgroundForceMerge() /// /// LUCENENET specific /// - /// Copied from + /// Copied from /// to remove inter-class dependency on TestIndexWriter. /// - private void AddDoc(IndexWriter writer) - { - Document doc = new Document(); - doc.Add(NewTextField("content", "aaa", Field.Store.NO)); - writer.AddDocument(doc); - } - - private void AddDocWithIndex(IndexWriter writer, int index) + private static void AddDocWithIndex(IndexWriter writer, int index) { Document doc = new Document(); doc.Add(NewField("content", "aaa " + index, storedTextType)); doc.Add(NewField("id", "" + index, storedTextType)); writer.AddDocument(doc); } - } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterLockRelease.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterLockRelease.cs index 4c22c52ea0..daa1e04245 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterLockRelease.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterLockRelease.cs @@ -1,7 +1,6 @@ using Lucene.Net.Index.Extensions; using NUnit.Framework; using System; -using System.IO; namespace Lucene.Net.Index { @@ -40,13 +39,15 @@ public virtual void TestIndexWriterLockRelease_Mem() Directory dir = NewFSDirectory(CreateTempDir("testLockRelease")); try { - new IndexWriter(dir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetOpenMode(OpenMode.APPEND)); + // LUCENENET: discard the result + _ = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND)); } catch (Exception e) when (e.IsNoSuchFileExceptionOrFileNotFoundException()) { try { - new IndexWriter(dir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetOpenMode(OpenMode.APPEND)); + // LUCENENET: discard the result + _ = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND)); } catch (Exception e1) when (e1.IsNoSuchFileExceptionOrFileNotFoundException()) { @@ -58,4 +59,4 @@ public virtual void TestIndexWriterLockRelease_Mem() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs index 6440363205..b43b3d54ea 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterMergePolicy.cs @@ -38,7 +38,9 @@ public virtual void TestNormalCase() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(new LogDocMergePolicy())); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(10) + .SetMergePolicy(new LogDocMergePolicy())); for (int i = 0; i < 100; i++) { @@ -56,7 +58,9 @@ public virtual 
void TestNoOverMerge() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(new LogDocMergePolicy())); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(10) + .SetMergePolicy(new LogDocMergePolicy())); bool noOverMerge = false; for (int i = 0; i < 100; i++) @@ -83,7 +87,9 @@ public virtual void TestForceFlush() LogDocMergePolicy mp = new LogDocMergePolicy(); mp.MinMergeDocs = 100; mp.MergeFactor = 10; - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(mp)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(10) + .SetMergePolicy(mp)); for (int i = 0; i < 100; i++) { @@ -92,7 +98,10 @@ public virtual void TestForceFlush() mp = new LogDocMergePolicy(); mp.MergeFactor = 10; - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(mp)); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(10) + .SetMergePolicy(mp)); mp.MinMergeDocs = 100; CheckInvariants(writer); } @@ -107,7 +116,10 @@ public virtual void TestMergeFactorChange() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy()).SetMergeScheduler(new SerialMergeScheduler())); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(10) + .SetMergePolicy(NewLogMergePolicy()) + .SetMergeScheduler(new SerialMergeScheduler())); for (int i = 0; i < 250; i++) { @@ -135,7 +147,10 @@ public virtual void TestMaxBufferedDocsChange() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(101).SetMergePolicy(new LogDocMergePolicy()).SetMergeScheduler(new SerialMergeScheduler())); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(101) + .SetMergePolicy(new LogDocMergePolicy()) + .SetMergeScheduler(new SerialMergeScheduler())); // leftmost* segment has 1 doc // rightmost* segment has 100 docs @@ -148,13 +163,21 @@ public virtual void TestMaxBufferedDocsChange() } writer.Dispose(); - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(101).SetMergePolicy(new LogDocMergePolicy()).SetMergeScheduler(new SerialMergeScheduler())); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(101) + .SetMergePolicy(new LogDocMergePolicy()) + .SetMergeScheduler(new SerialMergeScheduler())); } writer.Dispose(); LogDocMergePolicy ldmp = new LogDocMergePolicy(); ldmp.MergeFactor = 10; - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new 
MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(10).SetMergePolicy(ldmp).SetMergeScheduler(new SerialMergeScheduler())); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(10) + .SetMergePolicy(ldmp) + .SetMergeScheduler(new SerialMergeScheduler())); // merge policy only fixes segments on levels where merges // have been triggered, so check invariants after all adds @@ -185,7 +208,9 @@ public virtual void TestMergeDocCount0() LogDocMergePolicy ldmp = new LogDocMergePolicy(); ldmp.MergeFactor = 100; - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(ldmp)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(10) + .SetMergePolicy(ldmp)); for (int i = 0; i < 250; i++) { @@ -195,7 +220,8 @@ public virtual void TestMergeDocCount0() writer.Dispose(); // delete some docs without merging - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)); writer.DeleteDocuments(new Term("content", "aaa")); writer.Dispose(); @@ -308,4 +334,4 @@ private void AssertSetters(MergePolicy lmp) // TODO: Add more checks for other non-double setters! } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs index 576939dd1f..b8e5a6658b 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterMerging.cs @@ -49,7 +49,7 @@ public class TestIndexWriterMerging : LuceneTestCase [Test] public virtual void TestLucene() { - int num = 100; + const int num = 100; Directory indexA = NewDirectory(); Directory indexB = NewDirectory(); @@ -70,7 +70,8 @@ public virtual void TestLucene() Directory merged = NewDirectory(); - IndexWriter writer = new IndexWriter(merged, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy(2))); + IndexWriter writer = new IndexWriter(merged, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(NewLogMergePolicy(2))); writer.AddIndexes(indexA, indexB); writer.ForceMerge(1); writer.Dispose(); @@ -106,7 +107,10 @@ private bool VerifyIndex(Directory directory, int startAt) private void FillIndex(Random random, Directory dir, int start, int numDocs) { - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy(2))); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) + .SetOpenMode(OpenMode.CREATE) + .SetMaxBufferedDocs(2) + .SetMergePolicy(NewLogMergePolicy(2))); for (int i = start; i < (start + numDocs); i++) { @@ -124,7 +128,9 @@ private void FillIndex(Random random, Directory dir, int start, int numDocs) public virtual void TestForceMergeDeletes() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new 
MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + .SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)); Document document = new Document(); FieldType customType = new FieldType(); @@ -154,7 +160,8 @@ public virtual void TestForceMergeDeletes() Assert.AreEqual(10, ir.NumDocs); ir.Dispose(); - IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES); + IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(NoMergePolicy.COMPOUND_FILES); writer = new IndexWriter(dir, dontMergeConfig); writer.DeleteDocuments(new Term("id", "0")); writer.DeleteDocuments(new Term("id", "7")); @@ -182,7 +189,10 @@ public virtual void TestForceMergeDeletes() public virtual void TestForceMergeDeletes2() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy(50))); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + .SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetMergePolicy(NewLogMergePolicy(50))); Document document = new Document(); @@ -213,7 +223,8 @@ public virtual void TestForceMergeDeletes2() Assert.AreEqual(98, ir.NumDocs); ir.Dispose(); - IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES); + IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(NoMergePolicy.COMPOUND_FILES); writer = new IndexWriter(dir, dontMergeConfig); for (int i = 0; i < 98; i += 2) { @@ -225,7 +236,8 @@ public virtual void TestForceMergeDeletes2() Assert.AreEqual(49, ir.NumDocs); ir.Dispose(); - writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy(3))); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(NewLogMergePolicy(3))); Assert.AreEqual(49, writer.NumDocs); writer.ForceMergeDeletes(); writer.Dispose(); @@ -243,7 +255,10 @@ public virtual void TestForceMergeDeletes2() public virtual void TestForceMergeDeletes3() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH).SetMergePolicy(NewLogMergePolicy(50))); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + .SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetMergePolicy(NewLogMergePolicy(50))); FieldType customType = new FieldType(); customType.IsStored = true; @@ -273,7 +288,8 @@ public virtual void TestForceMergeDeletes3() Assert.AreEqual(98, ir.NumDocs); ir.Dispose(); - IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES); + IndexWriterConfig 
dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMergePolicy(NoMergePolicy.COMPOUND_FILES); writer = new IndexWriter(dir, dontMergeConfig); for (int i = 0; i < 98; i += 2) { @@ -298,13 +314,6 @@ public virtual void TestForceMergeDeletes3() // merging a segment with >= 20 (maxMergeDocs) docs private class MyMergeScheduler : MergeScheduler { - private readonly TestIndexWriterMerging outerInstance; - - public MyMergeScheduler(TestIndexWriterMerging outerInstance) - { - this.outerInstance = outerInstance; - } - public override void Merge(IndexWriter writer, MergeTrigger trigger, bool newMergesFound) { UninterruptableMonitor.Enter(this); @@ -340,7 +349,7 @@ protected override void Dispose(bool disposing) public virtual void TestSetMaxMergeDocs() { Directory dir = NewDirectory(); - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new MyMergeScheduler(this)).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy()); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergeScheduler(new MyMergeScheduler()).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy()); LogMergePolicy lmp = (LogMergePolicy)conf.MergePolicy; lmp.MaxMergeDocs = 20; lmp.MergeFactor = 2; @@ -379,7 +388,10 @@ public virtual void TestNoWaitClose() Console.WriteLine("TEST: pass=" + pass); } - IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2).SetMergePolicy(NewLogMergePolicy()); + IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetMaxBufferedDocs(2) + .SetMergePolicy(NewLogMergePolicy()); if (pass == 2) { conf.SetMergeScheduler(new SerialMergeScheduler()); @@ -413,7 +425,7 @@ public virtual void TestNoWaitClose() IndexWriter finalWriter = writer; IList failure = new JCG.List(); - ThreadJob t1 = new ThreadAnonymousClass(this, doc, finalWriter, failure); + ThreadJob t1 = new ThreadAnonymousClass(doc, finalWriter, failure); if (failure.Count > 0) { @@ -430,7 +442,9 @@ public virtual void TestNoWaitClose() reader.Dispose(); // Reopen - writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.APPEND).SetMergePolicy(NewLogMergePolicy())); + writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.APPEND) + .SetMergePolicy(NewLogMergePolicy())); } writer.Dispose(); } @@ -440,15 +454,12 @@ public virtual void TestNoWaitClose() private sealed class ThreadAnonymousClass : ThreadJob { - private readonly TestIndexWriterMerging outerInstance; - - private Document doc; - private IndexWriter finalWriter; - private IList failure; + private readonly Document doc; + private readonly IndexWriter finalWriter; + private readonly IList failure; - public ThreadAnonymousClass(TestIndexWriterMerging outerInstance, Document doc, IndexWriter finalWriter, IList failure) + public ThreadAnonymousClass(Document doc, IndexWriter finalWriter, IList failure) { - this.outerInstance = outerInstance; this.doc = doc; this.finalWriter = finalWriter; this.failure = failure; @@ -470,9 +481,7 @@ public override void Run() done = true; break; } -#pragma warning disable 168 - catch (NullReferenceException e) // LUCENENET TODO: We should fix the components so this cannot occur (assuming it can). 
-#pragma warning restore 168 + catch (NullReferenceException) // LUCENENET TODO: We should fix the components so this cannot occur (assuming it can). { done = true; break; @@ -490,4 +499,4 @@ public override void Run() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs index 063f42f8cc..e8dcb6c64c 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterOnDiskFull.cs @@ -573,7 +573,7 @@ public override void Eval(MockDirectoryWrapper dir) // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)] // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods. - if (StackTraceHelper.DoesStackTraceContainMethod(typeof(SegmentMerger).Name, "MergeTerms") && !didFail1) + if (StackTraceHelper.DoesStackTraceContainMethod(nameof(SegmentMerger), "MergeTerms") && !didFail1) { didFail1 = true; throw new IOException("fake disk full during mergeTerms"); @@ -581,7 +581,7 @@ public override void Eval(MockDirectoryWrapper dir) // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)] // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods. - if (StackTraceHelper.DoesStackTraceContainMethod(typeof(LiveDocsFormat).Name, "WriteLiveDocs") && !didFail2) + if (StackTraceHelper.DoesStackTraceContainMethod(nameof(LiveDocsFormat), "WriteLiveDocs") && !didFail2) { didFail2 = true; throw new IOException("fake disk full while writing LiveDocs"); @@ -704,4 +704,4 @@ private void AddDocWithIndex(IndexWriter writer, int index) writer.AddDocument(doc); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs index 0b4a47c994..32e1f4c765 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterOnJRECrash.cs @@ -192,6 +192,7 @@ public Process ForkTest(string tempDir, string tempProcessToKillFile) return p; } +#region LUCENENET-specific methods for ForkTest private static string TestRunParameter(string name, string value) { // See: https://github.com/microsoft/vstest/issues/862#issuecomment-621737720 @@ -204,7 +205,7 @@ private static string Escape(string value) private const string BackSlash = "\\"; private const string Space = " "; - private TextWriter BeginOutput(Process p, out ThreadJob stdOutPumper, out ThreadJob stdErrPumper) + private static TextWriter BeginOutput(Process p, out ThreadJob stdOutPumper, out ThreadJob stdErrPumper) { // We pump everything to stderr. 
TextWriter childOut = Console.Error; @@ -214,7 +215,7 @@ private TextWriter BeginOutput(Process p, out ThreadJob stdOutPumper, out Thread return childOut; } - private void EndOutput(Process p, TextWriter childOut, ThreadJob stdOutPumper, ThreadJob stdErrPumper) + private static void EndOutput(Process p, TextWriter childOut, ThreadJob stdOutPumper, ThreadJob stdErrPumper) { p.WaitForExit(10000); stdOutPumper.Join(); @@ -224,16 +225,17 @@ private void EndOutput(Process p, TextWriter childOut, ThreadJob stdOutPumper, T private string GetTargetFramework() { - var targetFrameworkAttribute = GetType().Assembly.GetAttributes(inherit: false).Where(a => a.Key == "TargetFramework").FirstOrDefault(); + var targetFrameworkAttribute = GetType().Assembly.GetAttributes(inherit: false).FirstOrDefault(a => a.Key == "TargetFramework"); if (targetFrameworkAttribute is null) Assert.Fail("TargetFramework metadata not found in this assembly."); return targetFrameworkAttribute.Value; } - private string GetTargetPlatform() + private static string GetTargetPlatform() { return Environment.Is64BitProcess ? "x64" : "x86"; } +#endregion /// /// A pipe thread. It'd be nice to reuse guava's implementation for this... @@ -248,8 +250,8 @@ public static ThreadJob Start(TextReader from, TextWriter to) private sealed class ThreadPumperAnonymousClass : ThreadJob { - private TextReader from; - private TextWriter to; + private readonly TextReader from; + private readonly TextWriter to; public ThreadPumperAnonymousClass(TextReader from, TextWriter to) { @@ -334,7 +336,7 @@ public virtual bool CheckIndexes(FileSystemInfo file) } // LUCENENET: Wait for our test to spin up and log its PID so we can kill it. - private int WaitForProcessToKillLogFile(string processToKillFile) + private static int WaitForProcessToKillLogFile(string processToKillFile) { bool exists = false; Thread.Sleep(500); diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterOutOfFileDescriptors.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterOutOfFileDescriptors.cs index 1af20cb86f..b516d2165d 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterOutOfFileDescriptors.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterOutOfFileDescriptors.cs @@ -1,6 +1,5 @@ using System; using System.Collections.Generic; -using System.IO; using NUnit.Framework; using JCG = J2N.Collections.Generic; using Assert = Lucene.Net.TestFramework.Assert; @@ -195,4 +194,4 @@ public virtual void Test() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs index c969c0f057..a0e8334f78 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterReader.cs @@ -1,11 +1,14 @@ -using J2N.Threading; +using Lucene.Net.Search; +using Lucene.Net.Util; +#if FEATURE_INDEXWRITER_TESTS +using J2N.Threading; using J2N.Threading.Atomic; -using Lucene.Net.Attributes; +using Lucene.Net.Analysis; +using Lucene.Net.Codecs; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Store; using Lucene.Net.Support.Threading; -using Lucene.Net.Util; using NUnit.Framework; using RandomizedTesting.Generators; using System; @@ -14,6 +17,7 @@ using JCG = J2N.Collections.Generic; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; +#endif namespace Lucene.Net.Index { @@ -34,29 +38,12 @@ namespace Lucene.Net.Index * limitations under the License. 
*/ - using BytesRef = Lucene.Net.Util.BytesRef; - using Codec = Lucene.Net.Codecs.Codec; - using Directory = Lucene.Net.Store.Directory; - using DocIdSetIterator = Lucene.Net.Search.DocIdSetIterator; - using Document = Documents.Document; - using FakeIOException = Lucene.Net.Store.FakeIOException; - using Field = Field; - using IndexSearcher = Lucene.Net.Search.IndexSearcher; - using InfoStream = Lucene.Net.Util.InfoStream; - using LuceneTestCase = Lucene.Net.Util.LuceneTestCase; - using MockAnalyzer = Lucene.Net.Analysis.MockAnalyzer; - using MockDirectoryWrapper = Lucene.Net.Store.MockDirectoryWrapper; - using Query = Lucene.Net.Search.Query; - using RAMDirectory = Lucene.Net.Store.RAMDirectory; - using TermQuery = Lucene.Net.Search.TermQuery; - using TestUtil = Lucene.Net.Util.TestUtil; - using TextField = TextField; - using TopDocs = Lucene.Net.Search.TopDocs; - [TestFixture] public class TestIndexWriterReader : LuceneTestCase { +#if FEATURE_INDEXWRITER_TESTS private readonly int numThreads = TestNightly ? 5 : 3; +#endif public static int Count(Term t, IndexReader r) { @@ -67,7 +54,7 @@ public static int Count(Term t, IndexReader r) { while (td.NextDoc() != DocIdSetIterator.NO_MORE_DOCS) { - var _ = td.DocID; + _ = td.DocID; count++; } } @@ -700,8 +687,6 @@ public virtual void DoTestIndexWriterReopenSegment(bool doFullMerge) dir1.Dispose(); } -#endif - /* * Delete a document by term and return the doc id * @@ -724,7 +709,9 @@ public void CreateIndex(Random random, Directory dir1, string indexName, bool mu } w.Dispose(); } +#endif + // ReSharper disable once UnusedMember.Global - used in J-S test project, not in I-J public static void CreateIndexNoClose(bool multiSegment, string indexName, IndexWriter w) { for (int i = 0; i < 100; i++) @@ -738,7 +725,6 @@ public static void CreateIndexNoClose(bool multiSegment, string indexName, Index } #if FEATURE_INDEXWRITER_TESTS - private class MyWarmer : IndexWriter.IndexReaderWarmer { internal int warmCount; @@ -873,7 +859,7 @@ public virtual void TestDuringAddIndexes() Directory dir1 = GetAssertNoDeletesDirectory(NewDirectory()); IndexWriter writer = new IndexWriter( - dir1, + dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) .SetMergePolicy(NewLogMergePolicy(2))); @@ -1265,14 +1251,14 @@ public virtual void TestNoTermsIndex() // Don't proceed if picked Codec is in the list of illegal ones. 
string format = TestUtil.GetPostingsFormat("f"); - AssumeFalse("Format: " + format + " does not support ReaderTermsIndexDivisor!", - (format.Equals("FSTPulsing41", StringComparison.Ordinal) || - format.Equals("FSTOrdPulsing41", StringComparison.Ordinal) || - format.Equals("FST41", StringComparison.Ordinal) || - format.Equals("FSTOrd41", StringComparison.Ordinal) || - format.Equals("SimpleText", StringComparison.Ordinal) || - format.Equals("Memory", StringComparison.Ordinal) || - format.Equals("MockRandom", StringComparison.Ordinal) || + AssumeFalse("Format: " + format + " does not support ReaderTermsIndexDivisor!", + (format.Equals("FSTPulsing41", StringComparison.Ordinal) || + format.Equals("FSTOrdPulsing41", StringComparison.Ordinal) || + format.Equals("FST41", StringComparison.Ordinal) || + format.Equals("FSTOrd41", StringComparison.Ordinal) || + format.Equals("SimpleText", StringComparison.Ordinal) || + format.Equals("Memory", StringComparison.Ordinal) || + format.Equals("MockRandom", StringComparison.Ordinal) || format.Equals("Direct", StringComparison.Ordinal))); Directory dir = NewDirectory(); @@ -1435,4 +1421,4 @@ public virtual void TestTooManySegments() } #endif } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterUnicode.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterUnicode.cs index 63f4416ce3..96d8350f4e 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterUnicode.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterUnicode.cs @@ -40,7 +40,32 @@ namespace Lucene.Net.Index [TestFixture] public class TestIndexWriterUnicode : LuceneTestCase { - internal readonly string[] utf8Data = new string[] { "ab\udc17cd", "ab\ufffdcd", "\udc17abcd", "\ufffdabcd", "\udc17", "\ufffd", "ab\udc17\udc17cd", "ab\ufffd\ufffdcd", "\udc17\udc17abcd", "\ufffd\ufffdabcd", "\udc17\udc17", "\ufffd\ufffd", "ab\ud917cd", "ab\ufffdcd", "\ud917abcd", "\ufffdabcd", "\ud917", "\ufffd", "ab\ud917\ud917cd", "ab\ufffd\ufffdcd", "\ud917\ud917abcd", "\ufffd\ufffdabcd", "\ud917\ud917", "\ufffd\ufffd", "ab\udc17\ud917cd", "ab\ufffd\ufffdcd", "\udc17\ud917abcd", "\ufffd\ufffdabcd", "\udc17\ud917", "\ufffd\ufffd", "ab\udc17\ud917\udc17\ud917cd", "ab\ufffd\ud917\udc17\ufffdcd", "\udc17\ud917\udc17\ud917abcd", "\ufffd\ud917\udc17\ufffdabcd", "\udc17\ud917\udc17\ud917", "\ufffd\ud917\udc17\ufffd" }; + internal readonly string[] utf8Data = new string[] + { + // unpaired low surrogate + "ab\udc17cd", "ab\ufffdcd", + "\udc17abcd", "\ufffdabcd", + "\udc17", "\ufffd", + "ab\udc17\udc17cd", "ab\ufffd\ufffdcd", + "\udc17\udc17abcd", "\ufffd\ufffdabcd", + "\udc17\udc17", "\ufffd\ufffd", + + // unpaired high surrogate + "ab\ud917cd", "ab\ufffdcd", + "\ud917abcd", "\ufffdabcd", + "\ud917", "\ufffd", + "ab\ud917\ud917cd", "ab\ufffd\ufffdcd", + "\ud917\ud917abcd", "\ufffd\ufffdabcd", + "\ud917\ud917", "\ufffd\ufffd", + + // backwards surrogates + "ab\udc17\ud917cd", "ab\ufffd\ufffdcd", + "\udc17\ud917abcd", "\ufffd\ufffdabcd", + "\udc17\ud917", "\ufffd\ufffd", + "ab\udc17\ud917\udc17\ud917cd", "ab\ufffd\ud917\udc17\ufffdcd", + "\udc17\ud917\udc17\ud917abcd", "\ufffd\ud917\udc17\ufffdabcd", + "\udc17\ud917\udc17\ud917", "\ufffd\ud917\udc17\ufffd" + }; private int NextInt(int lim) { @@ -58,8 +83,8 @@ private bool FillUnicode(char[] buffer, char[] expected, int offset, int count) bool hasIllegal = false; if (offset > 0 && buffer[offset] >= 0xdc00 && buffer[offset] < 0xe000) - // Don't start in the middle of a valid surrogate pair { + // Don't start in the middle of a valid surrogate pair offset--; } 
@@ -173,7 +198,7 @@ private void CheckTermsOrder(IndexReader r, ISet allTerms, bool isTop) } // Test seeking: - IEnumerator it = seenTerms.GetEnumerator(); + using IEnumerator it = seenTerms.GetEnumerator(); while (it.MoveNext()) { BytesRef tr = new BytesRef(it.Current); @@ -199,9 +224,7 @@ public virtual void TestRandomUnicodeStrings() UnicodeUtil.UTF16toUTF8(buffer, 0, 20, utf8); if (!hasIllegal) { -#pragma warning disable 612, 618 - var b = (new string(buffer, 0, 20)).GetBytes(IOUtils.CHARSET_UTF_8); -#pragma warning restore 612, 618 + var b = new string(buffer, 0, 20).GetBytes(Encoding.UTF8); Assert.AreEqual(b.Length, utf8.Length); for (int i = 0; i < b.Length; i++) { @@ -228,8 +251,8 @@ public virtual void TestAllUnicodeChars() for (int ch = 0; ch < 0x0010FFFF; ch++) { if (ch == 0xd800) - // Skip invalid code points { + // Skip invalid code points ch = 0xe000; } @@ -383,4 +406,4 @@ public virtual void TestTermUTF16SortOrder() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs b/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs index bae2bd50dd..66a23f8355 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexWriterWithThreads.cs @@ -1,6 +1,5 @@ using J2N.Threading; using J2N.Threading.Atomic; -using Lucene.Net.Attributes; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using Lucene.Net.Store; @@ -61,8 +60,6 @@ public class TestIndexWriterWithThreads : LuceneTestCase // Used by test cases below private class IndexerThread : ThreadJob { - private readonly Func newField; - internal bool diskFull; internal Exception error; //internal ObjectDisposedException ace; // LUCENENET: Not used @@ -71,16 +68,10 @@ private class IndexerThread : ThreadJob internal volatile int addCount; internal int timeToRunInMilliseconds = 200; - /// - /// LUCENENET specific - /// Passed in because - /// is no longer static. - /// - public IndexerThread(IndexWriter writer, bool noErrors, Func newField) + public IndexerThread(IndexWriter writer, bool noErrors) { this.writer = writer; this.noErrors = noErrors; - this.newField = newField; } public override void Run() @@ -91,7 +82,7 @@ public override void Run() customType.StoreTermVectorPositions = true; customType.StoreTermVectorOffsets = true; - doc.Add(newField("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", customType)); + doc.Add(NewField("field", "aaa bbb ccc ddd eee fff ggg hhh iii jjj", customType)); doc.Add(new NumericDocValuesField("dv", 5)); int idUpto = 0; @@ -165,7 +156,7 @@ public override void Run() [Test] public virtual void TestImmediateDiskFullWithThreads() { - int NUM_THREADS = 3; + const int NUM_THREADS = 3; int numIterations = TestNightly ? 10 : 3; for (int iter = 0; iter < numIterations; iter++) { @@ -188,7 +179,7 @@ public virtual void TestImmediateDiskFullWithThreads() for (int i = 0; i < NUM_THREADS; i++) { - threads[i] = new IndexerThread(writer, true, NewField); + threads[i] = new IndexerThread(writer, true); } for (int i = 0; i < NUM_THREADS; i++) @@ -219,7 +210,7 @@ public virtual void TestImmediateDiskFullWithThreads() [Test] public virtual void TestCloseWithThreads() { - int NUM_THREADS = 3; + const int NUM_THREADS = 3; int numIterations = TestNightly ? 
7 : 3; for (int iter = 0; iter < numIterations; iter++) { @@ -240,11 +231,11 @@ public virtual void TestCloseWithThreads() for (int i = 0; i < NUM_THREADS; i++) { - threads[i] = new IndexerThread(writer, false, NewField) + threads[i] = new IndexerThread(writer, false) - // LUCENENET NOTE - ConcurrentMergeScheduler + // LUCENENET NOTE - ConcurrentMergeScheduler // used to take too long for this test to index a single document - // so, increased the time from 200 to 300 ms. + // so, increased the time from 200 to 300 ms. // But it has now been restored to 200 ms like Lucene. { timeToRunInMilliseconds = 200 }; } @@ -310,7 +301,7 @@ public virtual void TestCloseWithThreads() // failure to trigger an IOException public virtual void TestMultipleThreadsFailure(Failure failure) { - int NUM_THREADS = 3; + const int NUM_THREADS = 3; for (int iter = 0; iter < 2; iter++) { @@ -331,7 +322,7 @@ public virtual void TestMultipleThreadsFailure(Failure failure) for (int i = 0; i < NUM_THREADS; i++) { - threads[i] = new IndexerThread(writer, true, NewField); + threads[i] = new IndexerThread(writer, true); } for (int i = 0; i < NUM_THREADS; i++) @@ -425,7 +416,7 @@ public virtual void TestSingleThreadFailure(Failure failure) // Throws IOException during FieldsWriter.flushDocument and during DocumentsWriter.abort private class FailOnlyOnAbortOrFlush : Failure { - internal bool onlyOnce; + private bool onlyOnce; public FailOnlyOnAbortOrFlush(bool onlyOnce) { @@ -566,7 +557,6 @@ public virtual void TestOpenTwoIndexWritersOnDifferentThreads() DelayedIndexAndCloseRunnable thread1 = new DelayedIndexAndCloseRunnable(dir, oneIWConstructed); DelayedIndexAndCloseRunnable thread2 = new DelayedIndexAndCloseRunnable(dir, oneIWConstructed); - thread1.Start(); thread2.Start(); oneIWConstructed.Wait(); @@ -600,11 +590,11 @@ public virtual void TestOpenTwoIndexWritersOnDifferentThreads() internal class DelayedIndexAndCloseRunnable : ThreadJob { - internal readonly Directory dir; + private readonly Directory dir; internal bool failed = false; internal Exception failure = null; - internal readonly CountdownEvent startIndexing = new CountdownEvent(1); - internal CountdownEvent iwConstructed; + private readonly CountdownEvent startIndexing = new CountdownEvent(1); + private CountdownEvent iwConstructed; public DelayedIndexAndCloseRunnable(Directory dir, CountdownEvent iwConstructed) { @@ -637,7 +627,7 @@ public override void Run() failed = true; failure = e; Console.WriteLine(e.ToString()); - return; + // return; // LUCENENET: redundant return } } } @@ -676,7 +666,7 @@ public virtual void TestRollbackAndCommitWithThreads() try { threads[threadID].Join(); - } + } catch (Exception e) { Console.WriteLine("EXCEPTION in ThreadAnonymousClass: " + Environment.NewLine + e); @@ -720,7 +710,7 @@ public override void Run() switch (x) { case 0: - rollbackLock.@Lock(); + rollbackLock.Lock(); if (Verbose) { Console.WriteLine("\nTEST: " + Thread.CurrentThread.Name + ": now rollback"); @@ -732,7 +722,7 @@ public override void Run() { Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": rollback done; now open new writer"); } - writerRef.Value = + writerRef.Value = new IndexWriter(d, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); } finally @@ -742,7 +732,7 @@ public override void Run() break; case 1: - commitLock.@Lock(); + commitLock.Lock(); if (Verbose) { Console.WriteLine("\nTEST: " + Thread.CurrentThread.Name + ": now commit"); @@ -795,11 +785,11 @@ public override void Run() } catch (Exception t) when 
(t.IsThrowable()) { - failed.Value = (true); + failed.Value = true; throw RuntimeException.Create(t); } } } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIndexableField.cs b/src/Lucene.Net.Tests/Index/TestIndexableField.cs index 158006c0b6..9f9bb63a27 100644 --- a/src/Lucene.Net.Tests/Index/TestIndexableField.cs +++ b/src/Lucene.Net.Tests/Index/TestIndexableField.cs @@ -7,6 +7,10 @@ using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; +#if !FEATURE_RANDOM_NEXTINT64_NEXTSINGLE +using RandomizedTesting.Generators; // for Random.NextSingle extension method +#endif + namespace Lucene.Net.Index { /* @@ -48,12 +52,11 @@ public class TestIndexableField : LuceneTestCase { private class MyField : IIndexableField { - private readonly TestIndexableField outerInstance; - - internal readonly int counter; - internal readonly IIndexableFieldType fieldType; + private readonly int counter; + private readonly IIndexableFieldType fieldType; - public MyField() + // LUCENENET specific: only used to create an instance of the anonymous class + private MyField() { fieldType = new IndexableFieldTypeAnonymousClass(this); } @@ -98,21 +101,20 @@ public bool StoreTermVectorPayloads public bool OmitNorms => false; - public IndexOptions IndexOptions => Index.IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; + public IndexOptions IndexOptions => IndexOptions.DOCS_AND_FREQS_AND_POSITIONS; public DocValuesType DocValueType => DocValuesType.NONE; } - public MyField(TestIndexableField outerInstance, int counter) + public MyField(int counter) : this() { - this.outerInstance = outerInstance; this.counter = counter; } public string Name => "f" + counter; - public float Boost => 1.0f + (float)Random.NextDouble(); + public float Boost => 1.0f + Random.NextSingle(); public BytesRef GetBinaryValue() { @@ -223,7 +225,8 @@ public object GetNumericValue() public TokenStream GetTokenStream(Analyzer analyzer) { - return GetReaderValue() != null ? analyzer.GetTokenStream(Name, GetReaderValue()) : analyzer.GetTokenStream(Name, new StringReader(GetStringValue())); + return GetReaderValue() != null ? 
analyzer.GetTokenStream(Name, GetReaderValue()) : + analyzer.GetTokenStream(Name, new StringReader(GetStringValue())); } } @@ -257,7 +260,7 @@ public virtual void TestArbitraryFields() int finalBaseCount = baseCount; baseCount += fieldCount - 1; - w.AddDocument(new EnumerableAnonymousClass(this, fieldCount, finalDocCount, finalBaseCount)); + w.AddDocument(new EnumerableAnonymousClass(fieldCount, finalDocCount, finalBaseCount)); } IndexReader r = w.GetReader(); @@ -377,15 +380,12 @@ public virtual void TestArbitraryFields() private sealed class EnumerableAnonymousClass : IEnumerable { - private readonly TestIndexableField outerInstance; - - private int fieldCount; - private int finalDocCount; - private int finalBaseCount; + private readonly int fieldCount; + private readonly int finalDocCount; + private readonly int finalBaseCount; - public EnumerableAnonymousClass(TestIndexableField outerInstance, int fieldCount, int finalDocCount, int finalBaseCount) + public EnumerableAnonymousClass(int fieldCount, int finalDocCount, int finalBaseCount) { - this.outerInstance = outerInstance; this.fieldCount = fieldCount; this.finalDocCount = finalDocCount; this.finalBaseCount = finalBaseCount; @@ -393,7 +393,7 @@ public EnumerableAnonymousClass(TestIndexableField outerInstance, int fieldCount public IEnumerator GetEnumerator() { - return new EnumeratorAnonymousClass(this, outerInstance); + return new EnumeratorAnonymousClass(this); } System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() @@ -404,15 +404,13 @@ System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() private sealed class EnumeratorAnonymousClass : IEnumerator { private readonly EnumerableAnonymousClass outerInstance; - private readonly TestIndexableField outerTextIndexableField; - public EnumeratorAnonymousClass(EnumerableAnonymousClass outerInstance, TestIndexableField outerTextIndexableField) + public EnumeratorAnonymousClass(EnumerableAnonymousClass outerInstance) { this.outerInstance = outerInstance; - this.outerTextIndexableField = outerTextIndexableField; } - internal int fieldUpto; + private int fieldUpto; private IIndexableField current; public bool MoveNext() @@ -430,7 +428,7 @@ public bool MoveNext() } else { - current = new MyField(outerTextIndexableField, outerInstance.finalBaseCount + (fieldUpto++ - 1)); + current = new MyField(outerInstance.finalBaseCount + (fieldUpto++ - 1)); } return true; @@ -451,4 +449,4 @@ public void Reset() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs b/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs index 6e327e640a..21c4d861a3 100644 --- a/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs +++ b/src/Lucene.Net.Tests/Index/TestIntBlockPool.cs @@ -37,7 +37,7 @@ public class TestIntBlockPool : LuceneTestCase [Test] public virtual void TestSingleWriterReader() { - Counter bytesUsed = Util.Counter.NewCounter(); + Counter bytesUsed = Counter.NewCounter(); Int32BlockPool pool = new Int32BlockPool(new ByteTrackingAllocator(bytesUsed)); for (int j = 0; j < 2; j++) @@ -74,7 +74,7 @@ public virtual void TestSingleWriterReader() [Test] public virtual void TestMultipleWriterReader() { - Counter bytesUsed = Util.Counter.NewCounter(); + Counter bytesUsed = Counter.NewCounter(); Int32BlockPool pool = new Int32BlockPool(new ByteTrackingAllocator(bytesUsed)); for (int j = 0; j < 2; j++) { @@ -130,7 +130,7 @@ public virtual void TestMultipleWriterReader() private class ByteTrackingAllocator : Int32BlockPool.Allocator { - 
internal readonly Counter bytesUsed; + private readonly Counter bytesUsed; public ByteTrackingAllocator(Counter bytesUsed) : this(Int32BlockPool.INT32_BLOCK_SIZE, bytesUsed) @@ -183,4 +183,4 @@ public virtual int NextValue() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestLazyProxSkipping.cs b/src/Lucene.Net.Tests/Index/TestLazyProxSkipping.cs index 289caf5837..d86b96400f 100644 --- a/src/Lucene.Net.Tests/Index/TestLazyProxSkipping.cs +++ b/src/Lucene.Net.Tests/Index/TestLazyProxSkipping.cs @@ -2,7 +2,6 @@ using Lucene.Net.Index.Extensions; using NUnit.Framework; using System; -using System.IO; using Assert = Lucene.Net.TestFramework.Assert; namespace Lucene.Net.Index @@ -76,7 +75,7 @@ public override IndexInput OpenInput(string name, IOContext context) private void CreateIndex(int numHits) { - int numDocs = 500; + const int numDocs = 500; Analyzer analyzer = Analyzer.NewAnonymous(createComponents: (fieldName, reader2) => { @@ -198,7 +197,7 @@ internal class SeeksCountingStream : IndexInput { private readonly TestLazyProxSkipping outerInstance; - internal IndexInput input; + private IndexInput input; internal SeeksCountingStream(TestLazyProxSkipping outerInstance, IndexInput input) : base("SeekCountingStream(" + input + ")") @@ -241,4 +240,4 @@ public override object Clone() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestLogMergePolicy.cs b/src/Lucene.Net.Tests/Index/TestLogMergePolicy.cs index b8a01ccec9..c928b2cf30 100644 --- a/src/Lucene.Net.Tests/Index/TestLogMergePolicy.cs +++ b/src/Lucene.Net.Tests/Index/TestLogMergePolicy.cs @@ -1,5 +1,3 @@ -using NUnit.Framework; - namespace Lucene.Net.Index { /* @@ -19,6 +17,7 @@ namespace Lucene.Net.Index * limitations under the License. 
*/ + // ReSharper disable once UnusedType.Global - tests are in base class public class TestLogMergePolicy : BaseMergePolicyTestCase { protected override MergePolicy NewMergePolicy() @@ -26,4 +25,4 @@ protected override MergePolicy NewMergePolicy() return NewLogMergePolicy(Random); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestLongPostings.cs b/src/Lucene.Net.Tests/Index/TestLongPostings.cs index 6171c8e28a..28b8355122 100644 --- a/src/Lucene.Net.Tests/Index/TestLongPostings.cs +++ b/src/Lucene.Net.Tests/Index/TestLongPostings.cs @@ -140,7 +140,9 @@ public virtual void TestLongPostings_Mem() } IndexReader r; - IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy()); + IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetMergePolicy(NewLogMergePolicy()); iwc.SetRAMBufferSizeMB(16.0 + 16.0 * Random.NextDouble()); iwc.SetMaxBufferedDocs(-1); RandomIndexWriter riw = new RandomIndexWriter(Random, dir, iwc); @@ -253,10 +255,10 @@ public virtual void TestLongPostings_Mem() Assert.AreEqual(pos, postings.NextPosition()); if (Random.NextBoolean()) { - var dummy = postings.GetPayload(); + _ = postings.GetPayload(); if (Random.NextBoolean()) { - dummy = postings.GetPayload(); // get it again + _ = postings.GetPayload(); // get it again } } } @@ -316,10 +318,10 @@ public virtual void TestLongPostings_Mem() Assert.AreEqual(pos, postings.NextPosition()); if (Random.NextBoolean()) { - var dummy = postings.GetPayload(); + _ = postings.GetPayload(); if (Random.NextBoolean()) { - dummy = postings.GetPayload(); // get it again + _ = postings.GetPayload(); // get it again } } } @@ -569,4 +571,4 @@ public virtual void DoTestLongPostingsNoPositions(IndexOptions options) dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs b/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs index 80acf04d5a..97172328d9 100644 --- a/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs +++ b/src/Lucene.Net.Tests/Index/TestMaxTermFrequency.cs @@ -54,7 +54,7 @@ public override void SetUp() base.SetUp(); dir = NewDirectory(); IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.SIMPLE, true)).SetMergePolicy(NewLogMergePolicy()); - config.SetSimilarity(new TestSimilarity(this)); + config.SetSimilarity(new TestSimilarity()); RandomIndexWriter writer = new RandomIndexWriter(Random, dir, config); Document doc = new Document(); Field foo = NewTextField("foo", "", Field.Store.NO); @@ -117,13 +117,6 @@ private string AddValue() /// internal class TestSimilarity : TFIDFSimilarity { - private readonly TestMaxTermFrequency outerInstance; - - public TestSimilarity(TestMaxTermFrequency outerInstance) - { - this.outerInstance = outerInstance; - } - public override float LengthNorm(FieldInvertState state) { return state.MaxTermFrequency; @@ -170,4 +163,4 @@ public override float ScorePayload(int doc, int start, int end, BytesRef payload } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs b/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs index 7a65c34296..d91116e8b4 100644 --- a/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs +++ b/src/Lucene.Net.Tests/Index/TestMixedCodecs.cs @@ -47,7 +47,7 @@ public virtual void Test() Directory dir = NewDirectory(); RandomIndexWriter w = 
null; - int docsLeftInthisSegment = 0; + int docsLeftInThisSegment = 0; int docUpto = 0; while (docUpto < NUM_DOCS) @@ -56,7 +56,7 @@ public virtual void Test() { Console.WriteLine("TEST: " + docUpto + " of " + NUM_DOCS); } - if (docsLeftInthisSegment == 0) + if (docsLeftInThisSegment == 0) { IndexWriterConfig iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); if (Random.NextBoolean()) @@ -71,13 +71,13 @@ public virtual void Test() w.Dispose(); } w = new RandomIndexWriter(Random, dir, iwc); - docsLeftInthisSegment = TestUtil.NextInt32(Random, 10, 100); + docsLeftInThisSegment = TestUtil.NextInt32(Random, 10, 100); } Document doc = new Document(); doc.Add(NewStringField("id", Convert.ToString(docUpto), Field.Store.YES)); w.AddDocument(doc); docUpto++; - docsLeftInthisSegment--; + docsLeftInThisSegment--; } if (Verbose) @@ -107,4 +107,4 @@ public virtual void Test() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs index 648afe4a0b..94da5114b7 100644 --- a/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs +++ b/src/Lucene.Net.Tests/Index/TestMixedDocValuesUpdates.cs @@ -7,7 +7,6 @@ using RandomizedTesting.Generators; using System; using System.Collections.Generic; -using System.IO; using System.Threading; using Assert = Lucene.Net.TestFramework.Assert; using JCG = J2N.Collections.Generic; @@ -278,7 +277,7 @@ public virtual void TestStressMultiThreading() { string f = "f" + i; string cf = "cf" + i; - threads[i] = new ThreadAnonymousClass(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf); + threads[i] = new ThreadAnonymousClass("UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf); } foreach (ThreadJob t in threads) @@ -325,8 +324,6 @@ public virtual void TestStressMultiThreading() private sealed class ThreadAnonymousClass : ThreadJob { - private readonly TestMixedDocValuesUpdates outerInstance; - private readonly IndexWriter writer; private readonly int numDocs; private readonly CountdownEvent done; @@ -334,10 +331,9 @@ private sealed class ThreadAnonymousClass : ThreadJob private readonly string f; private readonly string cf; - public ThreadAnonymousClass(TestMixedDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf) + public ThreadAnonymousClass(string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf) : base(str) { - this.outerInstance = outerInstance; this.writer = writer; this.numDocs = numDocs; this.done = done; @@ -574,4 +570,4 @@ public virtual void TestTonsOfUpdates() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs b/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs index cdf3ff73d9..963211fa86 100644 --- a/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs +++ b/src/Lucene.Net.Tests/Index/TestMultiDocValues.cs @@ -1,11 +1,14 @@ using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using NUnit.Framework; -using RandomizedTesting.Generators; using System.Collections.Generic; using JCG = J2N.Collections.Generic; using Assert = Lucene.Net.TestFramework.Assert; +#if !FEATURE_RANDOM_NEXTINT64_NEXTSINGLE +using RandomizedTesting.Generators; +#endif + namespace Lucene.Net.Index { /* @@ -438,4 +441,4 @@ public virtual void TestDocsWithField() dir.Dispose(); } } -} \ No newline at end of file +} 
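Several files in this patch (TestIndexableField above, TestMultiDocValues here, and TestNRTReaderWithThreads below) wrap `using RandomizedTesting.Generators;` in `#if !FEATURE_RANDOM_NEXTINT64_NEXTSINGLE`, so `Random.NextSingle()` binds to the library's extension method only on target frameworks whose `System.Random` lacks it. A minimal sketch of the pattern (hypothetical class name; assumes the feature symbol is defined for frameworks that ship `Random.NextInt64`/`NextSingle`, as the diffs suggest):

```csharp
#if !FEATURE_RANDOM_NEXTINT64_NEXTSINGLE
using RandomizedTesting.Generators; // supplies Random.NextSingle() as an extension
#endif
using System;

class BoostSketch
{
    static void Main()
    {
        var random = new Random(42);
        // On newer frameworks this is the built-in instance method; on older
        // target frameworks it resolves to the extension method imported above.
        float boost = 1.0f + random.NextSingle(); // uniform in [1.0f, 2.0f)
        Console.WriteLine(boost);
    }
}
```

This keeps call sites like `Boost => 1.0f + Random.NextSingle()` identical across all target frameworks, with only the `using` directive varying.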
diff --git a/src/Lucene.Net.Tests/Index/TestMultiFields.cs b/src/Lucene.Net.Tests/Index/TestMultiFields.cs index be65897675..b69cf5226c 100644 --- a/src/Lucene.Net.Tests/Index/TestMultiFields.cs +++ b/src/Lucene.Net.Tests/Index/TestMultiFields.cs @@ -4,7 +4,6 @@ using Lucene.Net.Store; using Lucene.Net.Util; using NUnit.Framework; -using System; using System.Collections.Generic; using JCG = J2N.Collections.Generic; using Assert = Lucene.Net.TestFramework.Assert; @@ -57,7 +56,7 @@ public virtual void TestRandom() IList terms = new JCG.List(); int numDocs = TestUtil.NextInt32(Random, 1, 100 * RandomMultiplier); - Documents.Document doc = new Documents.Document(); + Document doc = new Document(); Field f = NewStringField("field", "", Field.Store.NO); doc.Add(f); Field id = NewStringField("id", "", Field.Store.NO); @@ -192,7 +191,7 @@ public virtual void TestSeparateEnums() { Directory dir = NewDirectory(); IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); - Documents.Document d = new Documents.Document(); + Document d = new Document(); d.Add(NewStringField("f", "j", Field.Store.NO)); w.AddDocument(d); w.Commit(); @@ -212,7 +211,7 @@ public virtual void TestTermDocsEnum() { Directory dir = NewDirectory(); IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); - Documents.Document d = new Documents.Document(); + Document d = new Document(); d.Add(NewStringField("f", "j", Field.Store.NO)); w.AddDocument(d); w.Commit(); @@ -227,4 +226,4 @@ public virtual void TestTermDocsEnum() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestMultiLevelSkipList.cs b/src/Lucene.Net.Tests/Index/TestMultiLevelSkipList.cs index 73fb1de38c..8e1f433f50 100644 --- a/src/Lucene.Net.Tests/Index/TestMultiLevelSkipList.cs +++ b/src/Lucene.Net.Tests/Index/TestMultiLevelSkipList.cs @@ -128,7 +128,7 @@ public virtual void CheckSkipTo(DocsAndPositionsEnum tp, int target, int maxCoun private class PayloadAnalyzer : Analyzer { - internal readonly AtomicInt32 payloadCount = new AtomicInt32(-1); + private readonly AtomicInt32 payloadCount = new AtomicInt32(-1); protected internal override TokenStreamComponents CreateComponents(string fieldName, TextReader reader) { @@ -140,7 +140,7 @@ protected internal override TokenStreamComponents CreateComponents(string fieldN private class PayloadFilter : TokenFilter { internal IPayloadAttribute payloadAtt; - internal AtomicInt32 payloadCount; + private AtomicInt32 payloadCount; protected internal PayloadFilter(AtomicInt32 payloadCount, TokenStream input) : base(input) @@ -168,7 +168,7 @@ internal class CountingStream : IndexInput { private readonly TestMultiLevelSkipList outerInstance; - internal IndexInput input; + private IndexInput input; internal CountingStream(TestMultiLevelSkipList outerInstance, IndexInput input) : base("CountingStream(" + input + ")") @@ -212,4 +212,4 @@ public override object Clone() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestNRTReaderWithThreads.cs b/src/Lucene.Net.Tests/Index/TestNRTReaderWithThreads.cs index fb6f81c5ab..c0d93b747a 100644 --- a/src/Lucene.Net.Tests/Index/TestNRTReaderWithThreads.cs +++ b/src/Lucene.Net.Tests/Index/TestNRTReaderWithThreads.cs @@ -1,14 +1,15 @@ using J2N.Threading; using J2N.Threading.Atomic; -using Lucene.Net.Attributes; using Lucene.Net.Index.Extensions; -using Lucene.Net.Support.Threading; using NUnit.Framework; -using 
RandomizedTesting.Generators; using System; using System.Threading; using Console = Lucene.Net.Util.SystemConsole; +#if !FEATURE_RANDOM_NEXTINT64_NEXTSINGLE +using RandomizedTesting.Generators; +#endif + namespace Lucene.Net.Index { /* @@ -49,7 +50,9 @@ public virtual void TestIndexing() { wrapper.AssertNoDeleteOpenFile = true; } - var writer = new IndexWriter(mainDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(false, 2))); + var writer = new IndexWriter(mainDir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(10) + .SetMergePolicy(NewLogMergePolicy(false, 2))); IndexReader reader = writer.GetReader(); // start pooling readers reader.Dispose(); var indexThreads = new RunThread[4]; @@ -60,7 +63,7 @@ public virtual void TestIndexing() indexThreads[x].Start(); } long startTime = J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond; // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results - long duration = 1000; + const long duration = 1000; while (((J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) - startTime) < duration) // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results { Thread.Sleep(100); @@ -144,4 +147,4 @@ public override void Run() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestNRTThreads.cs b/src/Lucene.Net.Tests/Index/TestNRTThreads.cs index 6124374734..c842e542d9 100644 --- a/src/Lucene.Net.Tests/Index/TestNRTThreads.cs +++ b/src/Lucene.Net.Tests/Index/TestNRTThreads.cs @@ -1,7 +1,6 @@ using Lucene.Net.Diagnostics; using NUnit.Framework; using RandomizedTesting.Generators; -using System; using System.Collections.Generic; using System.Threading.Tasks; using Assert = Lucene.Net.TestFramework.Assert; @@ -178,4 +177,4 @@ public virtual void TestNRTThreads_Mem() RunTest("TestNRTThreads"); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestNeverDelete.cs b/src/Lucene.Net.Tests/Index/TestNeverDelete.cs index 1b0e57373d..db4287f8e8 100644 --- a/src/Lucene.Net.Tests/Index/TestNeverDelete.cs +++ b/src/Lucene.Net.Tests/Index/TestNeverDelete.cs @@ -9,7 +9,6 @@ using JCG = J2N.Collections.Generic; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; -using Lucene.Net.Support.Threading; namespace Lucene.Net.Index { @@ -65,7 +64,7 @@ public virtual void TestIndexing() long stopTime = (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) + AtLeast(1000); // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results for (int x = 0; x < indexThreads.Length; x++) { - indexThreads[x] = new ThreadAnonymousClass(w, stopTime, NewStringField, NewTextField); + indexThreads[x] = new ThreadAnonymousClass(w, stopTime); indexThreads[x].Name = "Thread " + x; indexThreads[x].Start(); } @@ -103,34 +102,18 @@ public virtual void TestIndexing() w.Dispose(); d.Dispose(); - System.IO.Directory.Delete(tmpDir.FullName, true); + Directory.Delete(tmpDir.FullName, true); } private sealed class ThreadAnonymousClass : ThreadJob { - private readonly Func<string, string, Field.Store, Field> newStringField; - private readonly Func<string, string, Field.Store, Field> newTextField; + private readonly RandomIndexWriter w; + private readonly long stopTime; - private RandomIndexWriter w; - private long stopTime; - - /// <param name="newStringField"> - /// LUCENENET specific - /// Passed in because <see cref="LuceneTestCase.NewStringField(string, string, Field.Store)"/> - /// is no longer static - /// </param> - /// <param name="newTextField"> - /// LUCENENET specific - /// Passed in because <see cref="LuceneTestCase.NewTextField(string, string, Field.Store)"/> - /// is no longer static - /// </param> - public ThreadAnonymousClass(RandomIndexWriter w, long stopTime, - Func<string, string, Field.Store, Field> newStringField, Func<string, string, Field.Store, Field> newTextField) + public ThreadAnonymousClass(RandomIndexWriter w, long stopTime) { this.w = w; this.stopTime = stopTime; - this.newStringField = newStringField; - this.newTextField = newTextField; } public override void Run() @@ -141,8 +124,8 @@ public override void Run() while (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond < stopTime) // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results { Document doc = new Document(); - doc.Add(newStringField("dc", "" + docCount, Field.Store.YES)); - doc.Add(newTextField("field", "here is some text", Field.Store.YES)); + doc.Add(NewStringField("dc", "" + docCount, Field.Store.YES)); + doc.Add(NewTextField("field", "here is some text", Field.Store.YES)); w.AddDocument(doc); if (docCount % 13 == 0) @@ -159,4 +142,4 @@ public override void Run() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestNoMergePolicy.cs b/src/Lucene.Net.Tests/Index/TestNoMergePolicy.cs index 49b1b55b1d..e91e144148 100644 --- a/src/Lucene.Net.Tests/Index/TestNoMergePolicy.cs +++ b/src/Lucene.Net.Tests/Index/TestNoMergePolicy.cs @@ -32,7 +32,7 @@ public class TestNoMergePolicy : LuceneTestCase public virtual void TestNoMergePolicy_Mem() { MergePolicy mp = NoMergePolicy.NO_COMPOUND_FILES; - Assert.IsNull(mp.FindMerges(/*null*/ (MergeTrigger)int.MinValue, (SegmentInfos)null)); + Assert.IsNull(mp.FindMerges(/*null*/ MergeTrigger.NONE, (SegmentInfos)null)); // LUCENENET specific: using MergeTrigger.NONE instead of null Assert.IsNull(mp.FindForcedMerges(null, 0, null)); Assert.IsNull(mp.FindForcedDeletesMerges(null)); Assert.IsFalse(mp.UseCompoundFile(null, null)); @@ -82,4 +82,4 @@ public virtual void TestMethodsOverridden() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestNoMergeScheduler.cs b/src/Lucene.Net.Tests/Index/TestNoMergeScheduler.cs index 403ccb077a..5f22aee545 100644 --- a/src/Lucene.Net.Tests/Index/TestNoMergeScheduler.cs +++ b/src/Lucene.Net.Tests/Index/TestNoMergeScheduler.cs @@ -2,6 +2,7 @@ using NUnit.Framework; using RandomizedTesting.Generators; using System; +using System.Linq; using System.Reflection; using Assert = Lucene.Net.TestFramework.Assert; @@ -34,7 +35,7 @@ public virtual void TestNoMergeScheduler_Mem() { MergeScheduler ms = NoMergeScheduler.INSTANCE; ms.Dispose(); - ms.Merge(null, RandomPicks.RandomFrom(Random, (MergeTrigger[])Enum.GetValues(typeof(MergeTrigger))), Random.NextBoolean()); + ms.Merge(null, RandomPicks.RandomFrom(Random, ((MergeTrigger[])Enum.GetValues(typeof(MergeTrigger))).Where(i => i != MergeTrigger.NONE).ToArray()), Random.NextBoolean()); } [Test] @@ -69,4 +70,4 @@ public virtual void TestMethodsOverridden() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestNorms.cs b/src/Lucene.Net.Tests/Index/TestNorms.cs index 634cefdf46..6f62155b12 100644 --- a/src/Lucene.Net.Tests/Index/TestNorms.cs +++ b/src/Lucene.Net.Tests/Index/TestNorms.cs @@ -48,17 +48,10 @@ namespace Lucene.Net.Index [TestFixture] public class TestNorms : LuceneTestCase { - private readonly string byteTestField = "normsTestByte"; + private const string byteTestField = "normsTestByte"; internal class CustomNormEncodingSimilarity : TFIDFSimilarity { - private readonly TestNorms outerInstance; - - public CustomNormEncodingSimilarity(TestNorms outerInstance) { - 
this.outerInstance = outerInstance; - } - public override long EncodeNormValue(float f) { return (long)f; @@ -113,7 +106,7 @@ public virtual void TestCustomEncoder() MockAnalyzer analyzer = new MockAnalyzer(Random); IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - config.SetSimilarity(new CustomNormEncodingSimilarity(this)); + config.SetSimilarity(new CustomNormEncodingSimilarity()); RandomIndexWriter writer = new RandomIndexWriter(Random, dir, config); Document doc = new Document(); Field foo = NewTextField("foo", "", Field.Store.NO); @@ -169,10 +162,10 @@ public virtual void TestMaxByteNorms() public virtual void BuildIndex(Directory dir) { Random random = Random; - MockAnalyzer analyzer = new MockAnalyzer(LuceneTestCase.Random); - analyzer.MaxTokenLength = TestUtil.NextInt32(LuceneTestCase.Random, 1, IndexWriter.MAX_TERM_LENGTH); + MockAnalyzer analyzer = new MockAnalyzer(Random); + analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH); IndexWriterConfig config = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); - Similarity provider = new MySimProvider(this); + Similarity provider = new MySimProvider(); config.SetSimilarity(provider); RandomIndexWriter writer = new RandomIndexWriter(random, dir, config); LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues); @@ -180,7 +173,7 @@ public virtual void BuildIndex(Directory dir) for (int i = 0; i < num; i++) { Document doc = docs.NextDoc(); - int boost = LuceneTestCase.Random.Next(255); + int boost = Random.Next(255); Field f = new TextField(byteTestField, "" + boost, Field.Store.YES); f.Boost = boost; doc.Add(f); @@ -198,13 +191,6 @@ public virtual void BuildIndex(Directory dir) public class MySimProvider : PerFieldSimilarityWrapper { - private readonly TestNorms outerInstance; - - public MySimProvider(TestNorms outerInstance) - { - this.outerInstance = outerInstance; - } - internal Similarity @delegate = new DefaultSimilarity(); public override float QueryNorm(float sumOfSquaredWeights) @@ -214,7 +200,7 @@ public override float QueryNorm(float sumOfSquaredWeights) public override Similarity Get(string field) { - if (outerInstance.byteTestField.Equals(field, StringComparison.Ordinal)) + if (byteTestField.Equals(field, StringComparison.Ordinal)) { return new ByteEncodingBoostSimilarity(); } @@ -249,4 +235,4 @@ public override SimScorer GetSimScorer(SimWeight weight, AtomicReaderContext con } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs b/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs index 5827838ecc..9ff044885b 100644 --- a/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs +++ b/src/Lucene.Net.Tests/Index/TestNumericDocValuesUpdates.cs @@ -7,7 +7,6 @@ using RandomizedTesting.Generators; using System; using System.Collections.Generic; -using System.IO; using System.Threading; using Assert = Lucene.Net.TestFramework.Assert; using JCG = J2N.Collections.Generic; @@ -73,7 +72,8 @@ private Document Doc(int id) public virtual void TestUpdatesAreFlushed() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)).SetRAMBufferSizeMB(0.00000001)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.WHITESPACE, false)) + .SetRAMBufferSizeMB(0.00000001)); 
writer.AddDocument(Doc(0)); // val=1 writer.AddDocument(Doc(1)); // val=2 writer.AddDocument(Doc(3)); // val=2 @@ -139,7 +139,7 @@ public virtual void TestUpdateFewSegments() conf.SetMaxBufferedDocs(2); // generate few segments conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // prevent merges for this test IndexWriter writer = new IndexWriter(dir, conf); - int numDocs = 10; + const int numDocs = 10; long[] expectedValues = new long[numDocs]; for (int i = 0; i < numDocs; i++) { @@ -619,7 +619,7 @@ public virtual void TestDifferentDVFormatPerField() { Directory dir = NewDirectory(); IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); - conf.SetCodec(new Lucene46CodecAnonymousClass(this)); + conf.SetCodec(new Lucene46CodecAnonymousClass()); IndexWriter writer = new IndexWriter(dir, conf); Document doc = new Document(); @@ -652,13 +652,6 @@ public virtual void TestDifferentDVFormatPerField() private sealed class Lucene46CodecAnonymousClass : Lucene46Codec { - private readonly TestNumericDocValuesUpdates outerInstance; - - public Lucene46CodecAnonymousClass(TestNumericDocValuesUpdates outerInstance) - { - this.outerInstance = outerInstance; - } - public override DocValuesFormat GetDocValuesFormatForField(string field) { return new Lucene45DocValuesFormat(); @@ -1155,7 +1148,7 @@ public virtual void TestStressMultiThreading() { string f = "f" + i; string cf = "cf" + i; - threads[i] = new ThreadAnonymousClass(this, "UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf); + threads[i] = new ThreadAnonymousClass("UpdateThread-" + i, writer, numDocs, done, numUpdates, f, cf); } foreach (ThreadJob t in threads) @@ -1196,8 +1189,6 @@ public virtual void TestStressMultiThreading() private sealed class ThreadAnonymousClass : ThreadJob { - private readonly TestNumericDocValuesUpdates outerInstance; - private readonly IndexWriter writer; private readonly int numDocs; private readonly CountdownEvent done; @@ -1205,10 +1196,9 @@ private sealed class ThreadAnonymousClass : ThreadJob private readonly string f; private readonly string cf; - public ThreadAnonymousClass(TestNumericDocValuesUpdates outerInstance, string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf) + public ThreadAnonymousClass(string str, IndexWriter writer, int numDocs, CountdownEvent done, AtomicInt32 numUpdates, string f, string cf) : base(str) { - this.outerInstance = outerInstance; this.writer = writer; this.numDocs = numDocs; this.done = done; @@ -1368,7 +1358,7 @@ public virtual void TestChangeCodec() Directory dir = NewDirectory(); IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); conf.SetMergePolicy(NoMergePolicy.COMPOUND_FILES); // disable merges to simplify test assertions. 
- conf.SetCodec(new Lucene46CodecAnonymousClass2(this)); + conf.SetCodec(new Lucene46CodecAnonymousClass2()); IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone()); Document doc = new Document(); doc.Add(new StringField("id", "d0", Store.NO)); @@ -1378,7 +1368,7 @@ public virtual void TestChangeCodec() writer.Dispose(); // change format - conf.SetCodec(new Lucene46CodecAnonymousClass3(this)); + conf.SetCodec(new Lucene46CodecAnonymousClass3()); writer = new IndexWriter(dir, (IndexWriterConfig)conf.Clone()); doc = new Document(); doc.Add(new StringField("id", "d1", Store.NO)); @@ -1402,13 +1392,6 @@ public virtual void TestChangeCodec() private sealed class Lucene46CodecAnonymousClass2 : Lucene46Codec { - private readonly TestNumericDocValuesUpdates outerInstance; - - public Lucene46CodecAnonymousClass2(TestNumericDocValuesUpdates outerInstance) - { - this.outerInstance = outerInstance; - } - public override DocValuesFormat GetDocValuesFormatForField(string field) { return new Lucene45DocValuesFormat(); @@ -1417,13 +1400,6 @@ public override DocValuesFormat GetDocValuesFormatForField(string field) private sealed class Lucene46CodecAnonymousClass3 : Lucene46Codec { - private readonly TestNumericDocValuesUpdates outerInstance; - - public Lucene46CodecAnonymousClass3(TestNumericDocValuesUpdates outerInstance) - { - this.outerInstance = outerInstance; - } - public override DocValuesFormat GetDocValuesFormatForField(string field) { return new AssertingDocValuesFormat(); @@ -1689,4 +1665,4 @@ public virtual void TestUpdateTwoNonexistingTerms() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestOmitNorms.cs b/src/Lucene.Net.Tests/Index/TestOmitNorms.cs index 82730e68f9..1783738229 100644 --- a/src/Lucene.Net.Tests/Index/TestOmitNorms.cs +++ b/src/Lucene.Net.Tests/Index/TestOmitNorms.cs @@ -91,7 +91,9 @@ public virtual void TestMixedMerge() { Directory ram = NewDirectory(); Analyzer analyzer = new MockAnalyzer(Random); - IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(3).SetMergePolicy(NewLogMergePolicy(2))); + IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(3) + .SetMergePolicy(NewLogMergePolicy(2))); Document d = new Document(); // this field will have norms @@ -145,7 +147,9 @@ public virtual void TestMixedRAM() { Directory ram = NewDirectory(); Analyzer analyzer = new MockAnalyzer(Random); - IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(2))); + IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(10) + .SetMergePolicy(NewLogMergePolicy(2))); Document d = new Document(); // this field will have norms @@ -200,7 +204,9 @@ public virtual void TestNoNrmFile() { Directory ram = NewDirectory(); Analyzer analyzer = new MockAnalyzer(Random); - IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(3).SetMergePolicy(NewLogMergePolicy())); + IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(3) + .SetMergePolicy(NewLogMergePolicy())); LogMergePolicy lmp = (LogMergePolicy)writer.Config.MergePolicy; lmp.MergeFactor = 2; lmp.NoCFSRatio = 0.0; @@ -330,4 +336,4 @@ internal virtual NumericDocValues GetNorms(string field, 
Field f1, Field f2) return norms1; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestOmitPositions.cs b/src/Lucene.Net.Tests/Index/TestOmitPositions.cs index 5e897fd2d3..27d00ebd20 100644 --- a/src/Lucene.Net.Tests/Index/TestOmitPositions.cs +++ b/src/Lucene.Net.Tests/Index/TestOmitPositions.cs @@ -207,7 +207,9 @@ public virtual void TestNoPrxFile() { Directory ram = NewDirectory(); Analyzer analyzer = new MockAnalyzer(Random); - IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(3).SetMergePolicy(NewLogMergePolicy())); + IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(3) + .SetMergePolicy(NewLogMergePolicy())); LogMergePolicy lmp = (LogMergePolicy)writer.Config.MergePolicy; lmp.MergeFactor = 2; lmp.NoCFSRatio = 0.0; @@ -293,4 +295,4 @@ public virtual void TestMixing() dir.Dispose(); // checkindex } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestOmitTf.cs b/src/Lucene.Net.Tests/Index/TestOmitTf.cs index 981d202fc8..546a8579c1 100644 --- a/src/Lucene.Net.Tests/Index/TestOmitTf.cs +++ b/src/Lucene.Net.Tests/Index/TestOmitTf.cs @@ -163,7 +163,9 @@ public virtual void TestMixedMerge() { Directory ram = NewDirectory(); Analyzer analyzer = new MockAnalyzer(Random); - IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(3).SetMergePolicy(NewLogMergePolicy(2))); + IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(3) + .SetMergePolicy(NewLogMergePolicy(2))); Document d = new Document(); // this field will have Tf @@ -217,7 +219,9 @@ public virtual void TestMixedRAM() { Directory ram = NewDirectory(); Analyzer analyzer = new MockAnalyzer(Random); - IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(10).SetMergePolicy(NewLogMergePolicy(2))); + IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(10) + .SetMergePolicy(NewLogMergePolicy(2))); Document d = new Document(); // this field will have Tf @@ -269,7 +273,9 @@ public virtual void TestNoPrxFile() { Directory ram = NewDirectory(); Analyzer analyzer = new MockAnalyzer(Random); - IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(3).SetMergePolicy(NewLogMergePolicy())); + IndexWriter writer = new IndexWriter(ram, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(3) + .SetMergePolicy(NewLogMergePolicy())); LogMergePolicy lmp = (LogMergePolicy)writer.Config.MergePolicy; lmp.MergeFactor = 2; lmp.NoCFSRatio = 0.0; @@ -313,10 +319,13 @@ public virtual void TestBasic() { Directory dir = NewDirectory(); Analyzer analyzer = new MockAnalyzer(Random); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetMaxBufferedDocs(2).SetSimilarity(new SimpleSimilarity()).SetMergePolicy(NewLogMergePolicy(2))); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetMaxBufferedDocs(2) + .SetSimilarity(new SimpleSimilarity()) + .SetMergePolicy(NewLogMergePolicy(2))); StringBuilder sb = new StringBuilder(265); - string term = "term"; + const string term = "term"; for (int i = 0; i < 30; i++) { Document doc = new Document(); @@ -374,23 +383,23 @@ public 
virtual void TestBasic() } // else OK because positions are not indexed } - searcher.Search(q1, new CountingHitCollectorAnonymousClass(this)); + searcher.Search(q1, new CountingHitCollectorAnonymousClass()); //System.out.println(CountingHitCollector.getCount()); - searcher.Search(q2, new CountingHitCollectorAnonymousClass2(this)); + searcher.Search(q2, new CountingHitCollectorAnonymousClass2()); //System.out.println(CountingHitCollector.getCount()); - searcher.Search(q3, new CountingHitCollectorAnonymousClass3(this)); + searcher.Search(q3, new CountingHitCollectorAnonymousClass3()); //System.out.println(CountingHitCollector.getCount()); - searcher.Search(q4, new CountingHitCollectorAnonymousClass4(this)); + searcher.Search(q4, new CountingHitCollectorAnonymousClass4()); //System.out.println(CountingHitCollector.getCount()); BooleanQuery bq = new BooleanQuery(); bq.Add(q1, Occur.MUST); bq.Add(q4, Occur.MUST); - searcher.Search(bq, new CountingHitCollectorAnonymousClass5(this)); + searcher.Search(bq, new CountingHitCollectorAnonymousClass5()); Assert.AreEqual(15, CountingHitCollector.Count); reader.Dispose(); @@ -399,21 +408,14 @@ public virtual void TestBasic() private sealed class CountingHitCollectorAnonymousClass : CountingHitCollector { - private readonly TestOmitTf outerInstance; - - public CountingHitCollectorAnonymousClass(TestOmitTf outerInstance) - { - this.outerInstance = outerInstance; - } - private Scorer scorer; - public override sealed void SetScorer(Scorer scorer) + public override void SetScorer(Scorer scorer) { this.scorer = scorer; } - public override sealed void Collect(int doc) + public override void Collect(int doc) { //System.out.println("Q1: Doc=" + doc + " score=" + score); float score = scorer.GetScore(); @@ -424,21 +426,14 @@ public override sealed void Collect(int doc) private sealed class CountingHitCollectorAnonymousClass2 : CountingHitCollector { - private readonly TestOmitTf outerInstance; - - public CountingHitCollectorAnonymousClass2(TestOmitTf outerInstance) - { - this.outerInstance = outerInstance; - } - private Scorer scorer; - public override sealed void SetScorer(Scorer scorer) + public override void SetScorer(Scorer scorer) { this.scorer = scorer; } - public override sealed void Collect(int doc) + public override void Collect(int doc) { //System.out.println("Q2: Doc=" + doc + " score=" + score); float score = scorer.GetScore(); @@ -449,21 +444,14 @@ public override sealed void Collect(int doc) private sealed class CountingHitCollectorAnonymousClass3 : CountingHitCollector { - private readonly TestOmitTf outerInstance; - - public CountingHitCollectorAnonymousClass3(TestOmitTf outerInstance) - { - this.outerInstance = outerInstance; - } - private Scorer scorer; - public override sealed void SetScorer(Scorer scorer) + public override void SetScorer(Scorer scorer) { this.scorer = scorer; } - public override sealed void Collect(int doc) + public override void Collect(int doc) { //System.out.println("Q1: Doc=" + doc + " score=" + score); float score = scorer.GetScore(); @@ -475,21 +463,14 @@ public override sealed void Collect(int doc) private sealed class CountingHitCollectorAnonymousClass4 : CountingHitCollector { - private readonly TestOmitTf outerInstance; - - public CountingHitCollectorAnonymousClass4(TestOmitTf outerInstance) - { - this.outerInstance = outerInstance; - } - private Scorer scorer; - public override sealed void SetScorer(Scorer scorer) + public override void SetScorer(Scorer scorer) { this.scorer = scorer; } - public override sealed void 
Collect(int doc) + public override void Collect(int doc) { float score = scorer.GetScore(); //System.out.println("Q1: Doc=" + doc + " score=" + score); @@ -501,14 +482,8 @@ public override sealed void Collect(int doc) private sealed class CountingHitCollectorAnonymousClass5 : CountingHitCollector { - private readonly TestOmitTf outerInstance; - - public CountingHitCollectorAnonymousClass5(TestOmitTf outerInstance) - { - this.outerInstance = outerInstance; - } - - public override sealed void Collect(int doc) + // ReSharper disable once RedundantOverriddenMember - matches Java code, and adds comment + public override void Collect(int doc) { //System.out.println("BQ: Doc=" + doc + " score=" + score); base.Collect(doc); @@ -519,7 +494,7 @@ public class CountingHitCollector : ICollector { internal static int count = 0; internal static int sum = 0; - internal int docBase = -1; + private int docBase = -1; internal CountingHitCollector() { @@ -571,4 +546,4 @@ public virtual void TestStats() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestParallelAtomicReader.cs b/src/Lucene.Net.Tests/Index/TestParallelAtomicReader.cs index 3d2483d468..370baf1794 100644 --- a/src/Lucene.Net.Tests/Index/TestParallelAtomicReader.cs +++ b/src/Lucene.Net.Tests/Index/TestParallelAtomicReader.cs @@ -94,7 +94,9 @@ public virtual void TestRefCounts1() Directory dir2 = GetDir2(Random); AtomicReader ir1, ir2; // close subreaders, ParallelReader will not change refCounts, but close on its own close - ParallelAtomicReader pr = new ParallelAtomicReader(ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)), ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2))); + ParallelAtomicReader pr = new ParallelAtomicReader( + ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)), + ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2))); // check RefCounts Assert.AreEqual(1, ir1.RefCount); @@ -136,7 +138,9 @@ public virtual void TestCloseInnerReader() AtomicReader ir1 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)); // with overlapping - ParallelAtomicReader pr = new ParallelAtomicReader(true, new AtomicReader[] { ir1 }, new AtomicReader[] { ir1 }); + ParallelAtomicReader pr = new ParallelAtomicReader(true, + new AtomicReader[] { ir1 }, + new AtomicReader[] { ir1 }); ir1.Dispose(); @@ -175,7 +179,7 @@ public virtual void TestIncompatibleIndexes() try { - new ParallelAtomicReader(ir1, ir2); + _ = new ParallelAtomicReader(ir1, ir2); Assert.Fail("didn't get exptected exception: indexes don't have same number of documents"); } catch (Exception e) when (e.IsIllegalArgumentException()) @@ -185,7 +189,9 @@ public virtual void TestIncompatibleIndexes() try { - new ParallelAtomicReader(Random.NextBoolean(), new AtomicReader[] { ir1, ir2 }, new AtomicReader[] { ir1, ir2 }); + _ = new ParallelAtomicReader(Random.NextBoolean(), + new AtomicReader[] { ir1, ir2 }, + new AtomicReader[] { ir1, ir2 }); Assert.Fail("didn't get expected exception: indexes don't have same number of documents"); } catch (Exception e) when (e.IsIllegalArgumentException()) @@ -210,7 +216,9 @@ public virtual void TestIgnoreStoredFields() AtomicReader ir2 = SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2)); // with overlapping - ParallelAtomicReader pr = new ParallelAtomicReader(false, new AtomicReader[] { ir1, ir2 }, new AtomicReader[] { ir1 }); + ParallelAtomicReader pr = new ParallelAtomicReader(false, + new AtomicReader[] { ir1, ir2 }, + new AtomicReader[] 
{ ir1 }); Assert.AreEqual("v1", pr.Document(0).Get("f1")); Assert.AreEqual("v1", pr.Document(0).Get("f2")); Assert.IsNull(pr.Document(0).Get("f3")); @@ -223,7 +231,9 @@ public virtual void TestIgnoreStoredFields() pr.Dispose(); // no stored fields at all - pr = new ParallelAtomicReader(false, new AtomicReader[] { ir2 }, new AtomicReader[0]); + pr = new ParallelAtomicReader(false, + new AtomicReader[] { ir2 }, + new AtomicReader[0]); Assert.IsNull(pr.Document(0).Get("f1")); Assert.IsNull(pr.Document(0).Get("f2")); Assert.IsNull(pr.Document(0).Get("f3")); @@ -236,7 +246,9 @@ public virtual void TestIgnoreStoredFields() pr.Dispose(); // without overlapping - pr = new ParallelAtomicReader(true, new AtomicReader[] { ir2 }, new AtomicReader[] { ir1 }); + pr = new ParallelAtomicReader(true, + new AtomicReader[] { ir2 }, + new AtomicReader[] { ir1 }); Assert.AreEqual("v1", pr.Document(0).Get("f1")); Assert.AreEqual("v1", pr.Document(0).Get("f2")); Assert.IsNull(pr.Document(0).Get("f3")); @@ -251,7 +263,9 @@ public virtual void TestIgnoreStoredFields() // no main readers try { - new ParallelAtomicReader(true, new AtomicReader[0], new AtomicReader[] { ir1 }); + _ = new ParallelAtomicReader(true, + new AtomicReader[0], + new AtomicReader[] { ir1 }); Assert.Fail("didn't get expected exception: need a non-empty main-reader array"); } catch (Exception iae) when (iae.IsIllegalArgumentException()) @@ -308,7 +322,9 @@ private IndexSearcher Parallel(Random random) { dir1 = GetDir1(random); dir2 = GetDir2(random); - ParallelAtomicReader pr = new ParallelAtomicReader(SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2))); + ParallelAtomicReader pr = new ParallelAtomicReader( + SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir1)), + SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(dir2))); TestUtil.CheckReader(pr); return NewSearcher(pr); } @@ -345,4 +361,4 @@ private Directory GetDir2(Random random) return dir2; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs b/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs index e444039636..7f95fa7f03 100644 --- a/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs +++ b/src/Lucene.Net.Tests/Index/TestParallelCompositeReader.cs @@ -102,7 +102,9 @@ public virtual void TestRefCounts1() Directory dir2 = GetDir2(Random); DirectoryReader ir1, ir2; // close subreaders, ParallelReader will not change refCounts, but close on its own close - ParallelCompositeReader pr = new ParallelCompositeReader(ir1 = DirectoryReader.Open(dir1), ir2 = DirectoryReader.Open(dir2)); + ParallelCompositeReader pr = new ParallelCompositeReader( + ir1 = DirectoryReader.Open(dir1), + ir2 = DirectoryReader.Open(dir2)); IndexReader psub1 = pr.GetSequentialSubReaders()[0]; // check RefCounts Assert.AreEqual(1, ir1.RefCount); @@ -152,7 +154,9 @@ public virtual void TestReaderClosedListener1() CompositeReader ir1 = DirectoryReader.Open(dir1); // with overlapping - ParallelCompositeReader pr = new ParallelCompositeReader(false, new CompositeReader[] { ir1 }, new CompositeReader[] { ir1 }); + ParallelCompositeReader pr = new ParallelCompositeReader(false, + new CompositeReader[] { ir1 }, + new CompositeReader[] { ir1 }); int[] listenerClosedCount = new int[1]; @@ -160,7 +164,7 @@ public virtual void TestReaderClosedListener1() foreach (AtomicReaderContext cxt in pr.Leaves) { - cxt.Reader.AddReaderDisposedListener(new ReaderClosedListenerAnonymousClass(this, 
listenerClosedCount)); + cxt.Reader.AddReaderDisposedListener(new CountingReaderClosedListenerAnonymousClass(listenerClosedCount)); } pr.Dispose(); ir1.Dispose(); @@ -168,15 +172,12 @@ public virtual void TestReaderClosedListener1() dir1.Dispose(); } - private sealed class ReaderClosedListenerAnonymousClass : IReaderDisposedListener + private sealed class CountingReaderClosedListenerAnonymousClass : IReaderDisposedListener { - private readonly TestParallelCompositeReader outerInstance; - private readonly int[] listenerClosedCount; - public ReaderClosedListenerAnonymousClass(TestParallelCompositeReader outerInstance, int[] listenerClosedCount) + public CountingReaderClosedListenerAnonymousClass(int[] listenerClosedCount) { - this.outerInstance = outerInstance; this.listenerClosedCount = listenerClosedCount; } @@ -194,7 +195,9 @@ public virtual void TestReaderClosedListener2() CompositeReader ir1 = DirectoryReader.Open(dir1); // with overlapping - ParallelCompositeReader pr = new ParallelCompositeReader(true, new CompositeReader[] { ir1 }, new CompositeReader[] { ir1 }); + ParallelCompositeReader pr = new ParallelCompositeReader(true, + new CompositeReader[] { ir1 }, + new CompositeReader[] { ir1 }); int[] listenerClosedCount = new int[1]; @@ -202,31 +205,13 @@ public virtual void TestReaderClosedListener2() foreach (AtomicReaderContext cxt in pr.Leaves) { - cxt.Reader.AddReaderDisposedListener(new ReaderClosedListenerAnonymousClass2(this, listenerClosedCount)); + cxt.Reader.AddReaderDisposedListener(new CountingReaderClosedListenerAnonymousClass(listenerClosedCount)); } pr.Dispose(); Assert.AreEqual(3, listenerClosedCount[0]); dir1.Dispose(); } - private sealed class ReaderClosedListenerAnonymousClass2 : IReaderDisposedListener - { - private readonly TestParallelCompositeReader outerInstance; - - private readonly int[] listenerClosedCount; - - public ReaderClosedListenerAnonymousClass2(TestParallelCompositeReader outerInstance, int[] listenerClosedCount) - { - this.outerInstance = outerInstance; - this.listenerClosedCount = listenerClosedCount; - } - - public void OnDispose(IndexReader reader) - { - listenerClosedCount[0]++; - } - } - [Test] public virtual void TestCloseInnerReader() { @@ -235,7 +220,9 @@ public virtual void TestCloseInnerReader() Assert.AreEqual(1, ir1.GetSequentialSubReaders()[0].RefCount); // with overlapping - ParallelCompositeReader pr = new ParallelCompositeReader(true, new CompositeReader[] { ir1 }, new CompositeReader[] { ir1 }); + ParallelCompositeReader pr = new ParallelCompositeReader(true, + new CompositeReader[] { ir1 }, + new CompositeReader[] { ir1 }); IndexReader psub = pr.GetSequentialSubReaders()[0]; Assert.AreEqual(1, psub.RefCount); @@ -287,7 +274,7 @@ public virtual void TestIncompatibleIndexes1() DirectoryReader ir1 = DirectoryReader.Open(dir1), ir2 = DirectoryReader.Open(dir2); try { - new ParallelCompositeReader(ir1, ir2); + _ = new ParallelCompositeReader(ir1, ir2); Assert.Fail("didn't get expected exception: indexes don't have same number of documents"); } catch (Exception e) when (e.IsIllegalArgumentException()) @@ -296,7 +283,7 @@ public virtual void TestIncompatibleIndexes1() } try { - new ParallelCompositeReader(Random.NextBoolean(), ir1, ir2); + _ = new ParallelCompositeReader(Random.NextBoolean(), ir1, ir2); Assert.Fail("didn't get expected exception: indexes don't have same number of documents"); } catch (Exception e) when (e.IsIllegalArgumentException()) @@ -323,7 +310,7 @@ public virtual void TestIncompatibleIndexes2() CompositeReader[] 
readers = new CompositeReader[] { ir1, ir2 }; try { - new ParallelCompositeReader(readers); + _ = new ParallelCompositeReader(readers); Assert.Fail("didn't get expected exception: indexes don't have same subreader structure"); } catch (Exception e) when (e.IsIllegalArgumentException()) @@ -332,7 +319,7 @@ public virtual void TestIncompatibleIndexes2() } try { - new ParallelCompositeReader(Random.NextBoolean(), readers, readers); + _ = new ParallelCompositeReader(Random.NextBoolean(), readers, readers); Assert.Fail("didn't get expected exception: indexes don't have same subreader structure"); } catch (Exception e) when (e.IsIllegalArgumentException()) @@ -359,7 +346,7 @@ public virtual void TestIncompatibleIndexes3() CompositeReader[] readers = new CompositeReader[] { ir1, ir2 }; try { - new ParallelCompositeReader(readers); + _ = new ParallelCompositeReader(readers); Assert.Fail("didn't get expected exception: indexes don't have same subreader structure"); } catch (Exception e) when (e.IsIllegalArgumentException()) @@ -368,7 +355,7 @@ public virtual void TestIncompatibleIndexes3() } try { - new ParallelCompositeReader(Random.NextBoolean(), readers, readers); + _ = new ParallelCompositeReader(Random.NextBoolean(), readers, readers); Assert.Fail("didn't get expected exception: indexes don't have same subreader structure"); } catch (Exception e) when (e.IsIllegalArgumentException()) @@ -394,7 +381,9 @@ public virtual void TestIgnoreStoredFields() CompositeReader ir2 = DirectoryReader.Open(dir2); // with overlapping - ParallelCompositeReader pr = new ParallelCompositeReader(false, new CompositeReader[] { ir1, ir2 }, new CompositeReader[] { ir1 }); + ParallelCompositeReader pr = new ParallelCompositeReader(false, + new CompositeReader[] { ir1, ir2 }, + new CompositeReader[] { ir1 }); Assert.AreEqual("v1", pr.Document(0).Get("f1")); Assert.AreEqual("v1", pr.Document(0).Get("f2")); Assert.IsNull(pr.Document(0).Get("f3")); @@ -408,7 +397,9 @@ public virtual void TestIgnoreStoredFields() pr.Dispose(); // no stored fields at all - pr = new ParallelCompositeReader(false, new CompositeReader[] { ir2 }, new CompositeReader[0]); + pr = new ParallelCompositeReader(false, + new CompositeReader[] { ir2 }, + new CompositeReader[0]); Assert.IsNull(pr.Document(0).Get("f1")); Assert.IsNull(pr.Document(0).Get("f2")); Assert.IsNull(pr.Document(0).Get("f3")); @@ -422,7 +413,9 @@ public virtual void TestIgnoreStoredFields() pr.Dispose(); // without overlapping - pr = new ParallelCompositeReader(true, new CompositeReader[] { ir2 }, new CompositeReader[] { ir1 }); + pr = new ParallelCompositeReader(true, + new CompositeReader[] { ir2 }, + new CompositeReader[] { ir1 }); Assert.AreEqual("v1", pr.Document(0).Get("f1")); Assert.AreEqual("v1", pr.Document(0).Get("f2")); Assert.IsNull(pr.Document(0).Get("f3")); @@ -438,7 +431,9 @@ public virtual void TestIgnoreStoredFields() // no main readers try { - new ParallelCompositeReader(true, new CompositeReader[0], new CompositeReader[] { ir1 }); + _ = new ParallelCompositeReader(true, + new CompositeReader[0], + new CompositeReader[] { ir1 }); Assert.Fail("didn't get expected exception: need a non-empty main-reader array"); } catch (Exception iae) when (iae.IsIllegalArgumentException()) @@ -567,7 +562,8 @@ private IndexSearcher Parallel(Random random, bool compositeComposite) private Directory GetDir1(Random random) { Directory dir1 = NewDirectory(); - IndexWriter w1 = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, new 
MockAnalyzer(random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)); + IndexWriter w1 = new IndexWriter(dir1, NewIndexWriterConfig(TEST_VERSION_CURRENT, + new MockAnalyzer(random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)); Document d1 = new Document(); d1.Add(NewTextField("f1", "v1", Field.Store.YES)); d1.Add(NewTextField("f2", "v1", Field.Store.YES)); @@ -594,7 +590,8 @@ private Directory GetDir1(Random random) private Directory GetDir2(Random random) { Directory dir2 = NewDirectory(); - IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)); + IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, + new MockAnalyzer(random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)); Document d1 = new Document(); d1.Add(NewTextField("f3", "v1", Field.Store.YES)); d1.Add(NewTextField("f4", "v1", Field.Store.YES)); @@ -621,7 +618,8 @@ private Directory GetDir2(Random random) private Directory GetInvalidStructuredDir2(Random random) { Directory dir2 = NewDirectory(); - IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)); + IndexWriter w2 = new IndexWriter(dir2, NewIndexWriterConfig(TEST_VERSION_CURRENT, + new MockAnalyzer(random)).SetMergePolicy(NoMergePolicy.NO_COMPOUND_FILES)); Document d1 = new Document(); d1.Add(NewTextField("f3", "v1", Field.Store.YES)); d1.Add(NewTextField("f4", "v1", Field.Store.YES)); @@ -644,4 +642,4 @@ private Directory GetInvalidStructuredDir2(Random random) return dir2; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestParallelReaderEmptyIndex.cs b/src/Lucene.Net.Tests/Index/TestParallelReaderEmptyIndex.cs index 9f10064b82..891b1aa691 100644 --- a/src/Lucene.Net.Tests/Index/TestParallelReaderEmptyIndex.cs +++ b/src/Lucene.Net.Tests/Index/TestParallelReaderEmptyIndex.cs @@ -54,7 +54,9 @@ public virtual void TestEmptyIndex() IndexWriter iwOut = new IndexWriter(rdOut, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))); - ParallelAtomicReader apr = new ParallelAtomicReader(SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rd1)), SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rd2))); + ParallelAtomicReader apr = new ParallelAtomicReader( + SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rd1)), + SlowCompositeReaderWrapper.Wrap(DirectoryReader.Open(rd2))); // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum) iwOut.AddIndexes(apr); @@ -64,7 +66,9 @@ public virtual void TestEmptyIndex() iwOut.AddIndexes(new ParallelAtomicReader()); iwOut.ForceMerge(1); - ParallelCompositeReader cpr = new ParallelCompositeReader(DirectoryReader.Open(rd1), DirectoryReader.Open(rd2)); + ParallelCompositeReader cpr = new ParallelCompositeReader( + DirectoryReader.Open(rd1), + DirectoryReader.Open(rd2)); // When unpatched, Lucene crashes here with a NoSuchElementException (caused by ParallelTermEnum) iwOut.AddIndexes(cpr); @@ -108,7 +112,7 @@ public virtual void TestEmptyIndexWithVectors() iw.AddDocument(doc); iw.Dispose(); - IndexWriterConfig dontMergeConfig = (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetMergePolicy(NoMergePolicy.COMPOUND_FILES); + IndexWriterConfig dontMergeConfig = new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NoMergePolicy.COMPOUND_FILES); if (Verbose) { 
Console.WriteLine("\nTEST: make 2nd writer"); @@ -160,4 +164,4 @@ public virtual void TestEmptyIndexWithVectors() rdOut.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestPayloads.cs b/src/Lucene.Net.Tests/Index/TestPayloads.cs index edec401fbd..e35cc6aa5f 100644 --- a/src/Lucene.Net.Tests/Index/TestPayloads.cs +++ b/src/Lucene.Net.Tests/Index/TestPayloads.cs @@ -15,7 +15,6 @@ using System.Text; using JCG = J2N.Collections.Generic; using Assert = Lucene.Net.TestFramework.Assert; -using Console = Lucene.Net.Util.SystemConsole; namespace Lucene.Net.Index { @@ -85,9 +84,7 @@ public virtual void TestPayloadFieldBit() // enabled in only some documents d.Add(NewTextField("f3", "this field has payloads in some docs", Field.Store.NO)); // only add payload data for field f2 -#pragma warning disable 612, 618 analyzer.SetPayloadData("f2", "somedata".GetBytes(IOUtils.CHARSET_UTF_8), 0, 1); -#pragma warning restore 612, 618 writer.AddDocument(d); // flush writer.Dispose(); @@ -109,10 +106,8 @@ public virtual void TestPayloadFieldBit() d.Add(NewTextField("f2", "this field has payloads in all docs", Field.Store.NO)); d.Add(NewTextField("f3", "this field has payloads in some docs", Field.Store.NO)); // add payload data for field f2 and f3 -#pragma warning disable 612, 618 analyzer.SetPayloadData("f2", "somedata".GetBytes(IOUtils.CHARSET_UTF_8), 0, 1); analyzer.SetPayloadData("f3", "somedata".GetBytes(IOUtils.CHARSET_UTF_8), 0, 3); -#pragma warning restore 612, 618 writer.AddDocument(d); // force merge @@ -143,7 +138,9 @@ public virtual void TestPayloadsEncoding() private void PerformTest(Directory dir) { PayloadAnalyzer analyzer = new PayloadAnalyzer(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.CREATE).SetMergePolicy(NewLogMergePolicy())); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetOpenMode(OpenMode.CREATE) + .SetMergePolicy(NewLogMergePolicy())); // should be in sync with value in TermInfosWriter const int skipInterval = 16; @@ -203,7 +200,10 @@ private void PerformTest(Directory dir) var tps = new DocsAndPositionsEnum[numTerms]; for (int i = 0; i < numTerms; i++) { - tps[i] = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), terms[i].Field, new BytesRef(terms[i].Text)); + tps[i] = MultiFields.GetTermPositionsEnum(reader, + MultiFields.GetLiveDocs(reader), + terms[i].Field, + new BytesRef(terms[i].Text)); } while (tps[0].NextDoc() != DocIdSetIterator.NO_MORE_DOCS) @@ -234,7 +234,10 @@ private void PerformTest(Directory dir) /* * test lazy skipping */ - DocsAndPositionsEnum tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), terms[0].Field, new BytesRef(terms[0].Text)); + DocsAndPositionsEnum tp = MultiFields.GetTermPositionsEnum(reader, + MultiFields.GetLiveDocs(reader), + terms[0].Field, + new BytesRef(terms[0].Text)); tp.NextDoc(); tp.NextPosition(); // NOTE: prior rev of this test was failing to first @@ -258,7 +261,10 @@ private void PerformTest(Directory dir) /* * Test different lengths at skip points */ - tp = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), terms[1].Field, new BytesRef(terms[1].Text)); + tp = MultiFields.GetTermPositionsEnum(reader, + MultiFields.GetLiveDocs(reader), + terms[1].Field, + new BytesRef(terms[1].Text)); tp.NextDoc(); tp.NextPosition(); Assert.AreEqual(1, tp.GetPayload().Length, "Wrong payload length."); @@ -304,9 
+310,7 @@ private void PerformTest(Directory dir) reader.Dispose(); } -#pragma warning disable 612, 618 - internal static readonly Encoding utf8 = IOUtils.CHARSET_UTF_8; -#pragma warning restore 612, 618 + internal static readonly Encoding utf8 = Encoding.UTF8; private void GenerateRandomData(byte[] data) { @@ -403,7 +407,7 @@ protected internal override TokenStreamComponents CreateComponents(string fieldN { fieldToData.TryGetValue(fieldName, out PayloadData payload); Tokenizer ts = new MockTokenizer(reader, MockTokenizer.WHITESPACE, false); - TokenStream tokenStream = (payload != null) ? (TokenStream)new PayloadFilter(ts, payload.Data, payload.Offset, payload.Length) : ts; + TokenStream tokenStream = (payload != null) ? new PayloadFilter(ts, payload.Data, payload.Offset, payload.Length) : ts; return new TokenStreamComponents(ts, tokenStream); } @@ -561,8 +565,6 @@ public override void Run() private class PoolingPayloadTokenStream : TokenStream { - private readonly TestPayloads outerInstance; - private readonly byte[] payload; private bool first; private readonly ByteArrayPool pool; @@ -573,10 +575,9 @@ private class PoolingPayloadTokenStream : TokenStream internal PoolingPayloadTokenStream(TestPayloads outerInstance, ByteArrayPool pool) { - this.outerInstance = outerInstance; this.pool = pool; payload = pool.Get(); - this.outerInstance.GenerateRandomData(payload); + outerInstance.GenerateRandomData(payload); term = Encoding.UTF8.GetString(payload); first = true; payloadAtt = AddAttribute<IPayloadAttribute>(); } @@ -607,7 +608,7 @@ protected override void Dispose(bool disposing) private class ByteArrayPool { - internal readonly IList<byte[]> pool; + private readonly IList<byte[]> pool; internal ByteArrayPool(int capacity, int size) { @@ -758,4 +759,4 @@ public virtual void TestMixupMultiValued() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs b/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs index 214c6407ad..e1b080690c 100644 --- a/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs +++ b/src/Lucene.Net.Tests/Index/TestPerSegmentDeletes.cs @@ -1,13 +1,16 @@ using J2N.Collections.Generic.Extensions; using Lucene.Net.Index.Extensions; using NUnit.Framework; -using RandomizedTesting.Generators; using System; using System.Collections.Generic; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; using JCG = J2N.Collections.Generic; +#if !FEATURE_RANDOM_NEXTINT64_NEXTSINGLE +using RandomizedTesting.Generators; +#endif + namespace Lucene.Net.Index { /* @@ -49,7 +52,7 @@ public virtual void TestDeletes1() iwc.SetMergeScheduler(new SerialMergeScheduler()); iwc.SetMaxBufferedDocs(5000); iwc.SetRAMBufferSizeMB(100); - RangeMergePolicy fsmp = new RangeMergePolicy(this, false); + RangeMergePolicy fsmp = new RangeMergePolicy(false); iwc.SetMergePolicy(fsmp); IndexWriter writer = new IndexWriter(dir, iwc); for (int x = 0; x < 5; x++) @@ -174,17 +177,17 @@ public virtual void TestDeletes1() dir.Dispose(); } - /// <summary> - /// static boolean hasPendingDeletes(SegmentInfos infos) { - /// for (SegmentInfo info : infos) { - /// if (info.deletes.Any()) { - /// return true; - /// } - /// } - /// return false; - /// } - /// - /// </summary> + // static boolean hasPendingDeletes(SegmentInfos infos) { + // for (SegmentInfo info : infos) { + // if (info.deletes.Any()) { + // return true; + // } + // } + // return false; + // } + // + + // ReSharper disable once UnusedMember.Global - used by commented-out code above, leaving for future reference internal 
virtual void Part2(IndexWriter writer, RangeMergePolicy fsmp) { for (int x = 20; x < 25; x++) @@ -221,6 +224,7 @@ internal virtual void Part2(IndexWriter writer, RangeMergePolicy fsmp) //System.out.println("segdels4:" + writer.docWriter.deletesToString()); } + // ReSharper disable once UnusedMember.Global - used by commented-out code above, leaving for future reference internal virtual bool SegThere(SegmentCommitInfo info, SegmentInfos infos) { foreach (SegmentCommitInfo si in infos.Segments) @@ -233,6 +237,7 @@ internal virtual bool SegThere(SegmentCommitInfo info, SegmentInfos infos) return false; } + // ReSharper disable once UnusedMember.Global - used by commented-out code above, leaving for future reference public static void PrintDelDocs(IBits bits) { if (bits is null) @@ -271,17 +276,14 @@ public static int[] ToArray(DocsEnum docsEnum) public class RangeMergePolicy : MergePolicy { - private readonly TestPerSegmentDeletes outerInstance; - internal bool doMerge = false; internal int start; internal int length; internal readonly bool useCompoundFile; - internal RangeMergePolicy(TestPerSegmentDeletes outerInstance, bool useCompoundFile) + internal RangeMergePolicy(bool useCompoundFile) { - this.outerInstance = outerInstance; this.useCompoundFile = useCompoundFile; } @@ -318,4 +320,4 @@ public override bool UseCompoundFile(SegmentInfos segments, SegmentCommitInfo ne } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs index b9d19367f2..18539d0e77 100644 --- a/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs +++ b/src/Lucene.Net.Tests/Index/TestPersistentSnapshotDeletionPolicy.cs @@ -45,13 +45,14 @@ public override void TearDown() private SnapshotDeletionPolicy GetDeletionPolicy(Directory dir) { - return new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE); + return new PersistentSnapshotDeletionPolicy( + new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE); } [Test] public virtual void TestExistingSnapshots() { - int numSnapshots = 3; + const int numSnapshots = 3; MockDirectoryWrapper dir = NewMockDirectory(); IndexWriter writer = new IndexWriter(dir, GetConfig(Random, GetDeletionPolicy(dir))); PersistentSnapshotDeletionPolicy psdp = (PersistentSnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy; @@ -100,7 +101,8 @@ public virtual void TestExistingSnapshots() public virtual void TestNoSnapshotInfos() { Directory dir = NewDirectory(); - new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE); + _ = new PersistentSnapshotDeletionPolicy( + new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE); dir.Dispose(); } @@ -110,7 +112,8 @@ public virtual void TestMissingSnapshots() Directory dir = NewDirectory(); try { - new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND); + _ = new PersistentSnapshotDeletionPolicy( + new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND); Assert.Fail("did not hit expected exception"); } catch (Exception ise) when (ise.IsIllegalStateException()) @@ -124,8 +127,9 @@ public virtual void TestMissingSnapshots() public virtual void TestExceptionDuringSave() { MockDirectoryWrapper dir = NewMockDirectory(); - dir.FailOn(new FailureAnonymousClass(this, dir)); - IndexWriter writer = new IndexWriter(dir, GetConfig(Random, new PersistentSnapshotDeletionPolicy(new 
KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE_OR_APPEND))); + dir.FailOn(new FailureAnonymousClass()); + IndexWriter writer = new IndexWriter(dir, GetConfig(Random, new PersistentSnapshotDeletionPolicy( + new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.CREATE_OR_APPEND))); writer.AddDocument(new Document()); writer.Commit(); @@ -153,21 +157,11 @@ public virtual void TestExceptionDuringSave() private sealed class FailureAnonymousClass : Failure { - private readonly TestPersistentSnapshotDeletionPolicy outerInstance; - - private MockDirectoryWrapper dir; - - public FailureAnonymousClass(TestPersistentSnapshotDeletionPolicy outerInstance, MockDirectoryWrapper dir) - { - this.outerInstance = outerInstance; - this.dir = dir; - } - public override void Eval(MockDirectoryWrapper dir) { // LUCENENET specific: for these to work in release mode, we have added [MethodImpl(MethodImplOptions.NoInlining)] // to each possible target of the StackTraceHelper. If these change, so must the attribute on the target methods. - if (StackTraceHelper.DoesStackTraceContainMethod(typeof(PersistentSnapshotDeletionPolicy).Name, "Persist")) + if (StackTraceHelper.DoesStackTraceContainMethod(nameof(PersistentSnapshotDeletionPolicy), "Persist")) { throw new IOException("now fail on purpose"); } @@ -185,7 +179,8 @@ public virtual void TestSnapshotRelease() psdp.Release(snapshots[0]); - psdp = new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND); + psdp = new PersistentSnapshotDeletionPolicy( + new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND); Assert.AreEqual(0, psdp.SnapshotCount, "Should have no snapshots !"); dir.Dispose(); } @@ -201,7 +196,8 @@ public virtual void TestSnapshotReleaseByGeneration() psdp.Release(snapshots[0].Generation); - psdp = new PersistentSnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND); + psdp = new PersistentSnapshotDeletionPolicy( + new KeepOnlyLastCommitDeletionPolicy(), dir, OpenMode.APPEND); Assert.AreEqual(0, psdp.SnapshotCount, "Should have no snapshots !"); dir.Dispose(); } @@ -256,4 +252,4 @@ public override void TestMissingCommits() #endregion } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs index 9949241337..1536a16654 100644 --- a/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs +++ b/src/Lucene.Net.Tests/Index/TestPostingsOffsets.cs @@ -7,7 +7,6 @@ using RandomizedTesting.Generators; using System; using System.Collections.Generic; -using System.Linq; using JCG = J2N.Collections.Generic; using Assert = Lucene.Net.TestFramework.Assert; @@ -82,7 +81,13 @@ public virtual void TestBasic() ft.StoreTermVectorPositions = Random.NextBoolean(); ft.StoreTermVectorOffsets = Random.NextBoolean(); } - Token[] tokens = new Token[] { MakeToken("a", 1, 0, 6), MakeToken("b", 1, 8, 9), MakeToken("a", 1, 9, 17), MakeToken("c", 1, 19, 50) }; + Token[] tokens = new Token[] + { + MakeToken("a", 1, 0, 6), + MakeToken("b", 1, 8, 9), + MakeToken("a", 1, 9, 17), + MakeToken("c", 1, 19, 50), + }; doc.Add(new Field("content", new CannedTokenStream(tokens), ft)); w.AddDocument(doc); @@ -138,7 +143,7 @@ public virtual void TestPayloads() public virtual void DoTestNumbers(bool withPayloads) { Directory dir = NewDirectory(); - Analyzer analyzer = withPayloads ? (Analyzer)new MockPayloadAnalyzer() : new MockAnalyzer(Random); + Analyzer analyzer = withPayloads ? 
new MockPayloadAnalyzer() : new MockAnalyzer(Random); iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer); iwc.SetMergePolicy(NewLogMergePolicy()); // will rely on docids a bit for skipping RandomIndexWriter w = new RandomIndexWriter(Random, dir, iwc); @@ -489,7 +494,12 @@ public virtual void TestBackwardsOffsets() { try { - CheckTokens(new Token[] { MakeToken("foo", 1, 0, 3), MakeToken("foo", 1, 4, 7), MakeToken("foo", 0, 3, 6) }); + CheckTokens(new Token[] + { + MakeToken("foo", 1, 0, 3), + MakeToken("foo", 1, 4, 7), + MakeToken("foo", 0, 3, 6) + }); Assert.Fail(); } catch (Exception expected) when (expected.IsIllegalArgumentException()) @@ -501,7 +511,12 @@ public virtual void TestBackwardsOffsets() [Test] public virtual void TestStackedTokens() { - CheckTokens(new Token[] { MakeToken("foo", 1, 0, 3), MakeToken("foo", 0, 0, 3), MakeToken("foo", 0, 0, 3) }); + CheckTokens(new Token[] + { + MakeToken("foo", 1, 0, 3), + MakeToken("foo", 0, 0, 3), + MakeToken("foo", 0, 0, 3) + }); } [Test] @@ -573,4 +588,4 @@ private Token MakeToken(string text, int posIncr, int startOffset, int endOffset return t; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestReaderClosed.cs b/src/Lucene.Net.Tests/Index/TestReaderClosed.cs index dfd1bb1379..1f3aca361e 100644 --- a/src/Lucene.Net.Tests/Index/TestReaderClosed.cs +++ b/src/Lucene.Net.Tests/Index/TestReaderClosed.cs @@ -44,7 +44,9 @@ public override void SetUp() { base.SetUp(); dir = NewDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000))); + RandomIndexWriter writer = new RandomIndexWriter(Random, dir, + NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false)) + .SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000))); Document doc = new Document(); Field field = NewStringField("field", "", Field.Store.NO); @@ -117,4 +119,4 @@ public override void TearDown() base.TearDown(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestRollingUpdates.cs b/src/Lucene.Net.Tests/Index/TestRollingUpdates.cs index 8ec8c04e81..d16d03b697 100644 --- a/src/Lucene.Net.Tests/Index/TestRollingUpdates.cs +++ b/src/Lucene.Net.Tests/Index/TestRollingUpdates.cs @@ -49,21 +49,21 @@ public virtual void TestRollingUpdates_Mem() LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues); //provider.register(new MemoryCodec()); - if ((!"Lucene3x".Equals(Codec.Default.Name, StringComparison.Ordinal)) && LuceneTestCase.Random.NextBoolean()) + if ((!"Lucene3x".Equals(Codec.Default.Name, StringComparison.Ordinal)) && Random.NextBoolean()) { Codec.Default = - TestUtil.AlwaysPostingsFormat(new MemoryPostingsFormat(LuceneTestCase.Random.nextBoolean(), random.NextSingle())); + TestUtil.AlwaysPostingsFormat(new MemoryPostingsFormat(Random.nextBoolean(), random.NextSingle())); } - MockAnalyzer analyzer = new MockAnalyzer(LuceneTestCase.Random); - analyzer.MaxTokenLength = TestUtil.NextInt32(LuceneTestCase.Random, 1, IndexWriter.MAX_TERM_LENGTH); + MockAnalyzer analyzer = new MockAnalyzer(Random); + analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH); IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer)); int SIZE = AtLeast(20); int id = 0; IndexReader r = null; IndexSearcher s = null; - int numUpdates = (int)(SIZE * (2 + 
(TestNightly ? 200 * LuceneTestCase.Random.NextDouble() : 5 * LuceneTestCase.Random.NextDouble()))); + int numUpdates = (int)(SIZE * (2 + (TestNightly ? 200 * Random.NextDouble() : 5 * Random.NextDouble()))); if (Verbose) { Console.WriteLine("TEST: numUpdates=" + numUpdates); @@ -72,7 +72,7 @@ public virtual void TestRollingUpdates_Mem() // TODO: sometimes update ids not in order... for (int docIter = 0; docIter < numUpdates; docIter++) { - Documents.Document doc = docs.NextDoc(); + Document doc = docs.NextDoc(); string myID = "" + id; if (id == SIZE - 1) { @@ -128,14 +128,14 @@ public virtual void TestRollingUpdates_Mem() w.AddDocument(doc); } - if (docIter >= SIZE && LuceneTestCase.Random.Next(50) == 17) + if (docIter >= SIZE && Random.Next(50) == 17) { if (r != null) { r.Dispose(); } - bool applyDeletions = LuceneTestCase.Random.NextBoolean(); + bool applyDeletions = Random.NextBoolean(); if (Verbose) { @@ -198,13 +198,14 @@ public virtual void TestUpdateSameDoc() LineFileDocs docs = new LineFileDocs(Random); for (int r = 0; r < 3; r++) { - IndexWriter w = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); + IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig( + TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2)); int numUpdates = AtLeast(20); int numThreads = TestUtil.NextInt32(Random, 2, 6); IndexingThread[] threads = new IndexingThread[numThreads]; for (int i = 0; i < numThreads; i++) { - threads[i] = new IndexingThread(docs, w, numUpdates, NewStringField); + threads[i] = new IndexingThread(docs, w, numUpdates); threads[i].Start(); } @@ -229,20 +230,11 @@ internal class IndexingThread : ThreadJob internal readonly IndexWriter writer; internal readonly int num; - private readonly Func newStringField; - - /// - /// LUCENENET specific - /// Passed in because - /// is no longer static. 
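// The delegate removed just above was a LUCENENET workaround: NewStringField was
// once instance-level, so nested thread classes received it as a Func from the
// enclosing test. A sketch of the field being deleted, with the generic type
// arguments reconstructed from the NewStringField(string, string, Field.Store)
// signature (the type arguments are an assumption, not visible as rendered):
//
//   private readonly Func<string, string, Field.Store, Field> newStringField;
//
// Now that the helper is reachable statically, IndexingThread calls
// NewStringField("id", "test", Field.Store.NO) directly and both the field and
// its constructor parameter disappear, as the rest of this hunk shows.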
- /// - public IndexingThread(LineFileDocs docs, IndexWriter writer, int num, Func newStringField) - : base() + public IndexingThread(LineFileDocs docs, IndexWriter writer, int num) { this.docs = docs; this.writer = writer; this.num = num; - this.newStringField = newStringField; } public override void Run() @@ -252,8 +244,8 @@ public override void Run() DirectoryReader open = null; for (int i = 0; i < num; i++) { - Documents.Document doc = new Documents.Document(); // docs.NextDoc(); - doc.Add(newStringField("id", "test", Field.Store.NO)); + Document doc = new Document(); // docs.NextDoc(); + doc.Add(NewStringField("id", "test", Field.Store.NO)); writer.UpdateDocument(new Term("id", "test"), doc); if (Random.Next(3) == 0) { @@ -282,4 +274,4 @@ public override void Run() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestSameTokenSamePosition.cs b/src/Lucene.Net.Tests/Index/TestSameTokenSamePosition.cs index ed08cbed4b..bcefea9011 100644 --- a/src/Lucene.Net.Tests/Index/TestSameTokenSamePosition.cs +++ b/src/Lucene.Net.Tests/Index/TestSameTokenSamePosition.cs @@ -70,7 +70,7 @@ internal sealed class BugReproTokenStream : TokenStream private readonly ICharTermAttribute termAtt; private readonly IOffsetAttribute offsetAtt; private readonly IPositionIncrementAttribute posIncAtt; - private readonly int tokenCount = 4; + private const int tokenCount = 4; private int nextTokenIndex = 0; private readonly string[] terms = new string[] { "six", "six", "drunken", "drunken" }; private readonly int[] starts = new int[] { 0, 0, 4, 4 }; @@ -106,4 +106,4 @@ public override void Reset() this.nextTokenIndex = 0; } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs b/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs index c439c42972..6f03af4566 100644 --- a/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs +++ b/src/Lucene.Net.Tests/Index/TestSegmentMerger.cs @@ -38,18 +38,15 @@ public class TestSegmentMerger : LuceneTestCase { //The variables for the new merged segment private Directory mergedDir; - private string mergedSegment = "test"; //First segment to be merged private Directory merge1Dir; - private Document doc1; private SegmentReader reader1; //Second Segment to be merged private Directory merge2Dir; - private Document doc2; private SegmentReader reader2; @@ -113,7 +110,12 @@ public virtual void TestMerge() Assert.IsTrue(newDoc2 != null); Assert.IsTrue(DocHelper.NumFields(newDoc2) == DocHelper.NumFields(doc2) - DocHelper.Unstored.Count); - DocsEnum termDocs = TestUtil.Docs(Random, mergedReader, DocHelper.TEXT_FIELD_2_KEY, new BytesRef("field"), MultiFields.GetLiveDocs(mergedReader), null, 0); + DocsEnum termDocs = TestUtil.Docs(Random, mergedReader, + DocHelper.TEXT_FIELD_2_KEY, + new BytesRef("field"), + MultiFields.GetLiveDocs(mergedReader), + null, + 0); Assert.IsTrue(termDocs != null); Assert.IsTrue(termDocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS); @@ -149,21 +151,22 @@ public virtual void TestMerge() mergedReader.Dispose(); } - private static bool Equals(MergeState.DocMap map1, MergeState.DocMap map2) - { - if (map1.MaxDoc != map2.MaxDoc) - { - return false; - } - for (int i = 0; i < map1.MaxDoc; ++i) - { - if (map1.Get(i) != map2.Get(i)) - { - return false; - } - } - return true; - } + // LUCENENET: commenting out as this method is unused, even in the upstream Java code + // private static bool Equals(MergeState.DocMap map1, MergeState.DocMap map2) + // { + // if (map1.MaxDoc != map2.MaxDoc) + // { + // return false; 
+ // } + // for (int i = 0; i < map1.MaxDoc; ++i) + // { + // if (map1.Get(i) != map2.Get(i)) + // { + // return false; + // } + // } + // return true; + // } [Test] public virtual void TestBuildDocMap() @@ -205,4 +208,4 @@ public virtual void TestBuildDocMap() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestSegmentReader.cs b/src/Lucene.Net.Tests/Index/TestSegmentReader.cs index bf067995a2..6b2a44f6f6 100644 --- a/src/Lucene.Net.Tests/Index/TestSegmentReader.cs +++ b/src/Lucene.Net.Tests/Index/TestSegmentReader.cs @@ -153,14 +153,27 @@ public virtual void TestTerms() } } - DocsEnum termDocs = TestUtil.Docs(Random, reader, DocHelper.TEXT_FIELD_1_KEY, new BytesRef("field"), MultiFields.GetLiveDocs(reader), null, 0); + DocsEnum termDocs = TestUtil.Docs(Random, reader, + DocHelper.TEXT_FIELD_1_KEY, + new BytesRef("field"), + MultiFields.GetLiveDocs(reader), + null, + 0); Assert.IsTrue(termDocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS); - termDocs = TestUtil.Docs(Random, reader, DocHelper.NO_NORMS_KEY, new BytesRef(DocHelper.NO_NORMS_TEXT), MultiFields.GetLiveDocs(reader), null, 0); + termDocs = TestUtil.Docs(Random, reader, + DocHelper.NO_NORMS_KEY, + new BytesRef(DocHelper.NO_NORMS_TEXT), + MultiFields.GetLiveDocs(reader), + null, + 0); Assert.IsTrue(termDocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS); - DocsAndPositionsEnum positions = MultiFields.GetTermPositionsEnum(reader, MultiFields.GetLiveDocs(reader), DocHelper.TEXT_FIELD_1_KEY, new BytesRef("field")); + DocsAndPositionsEnum positions = MultiFields.GetTermPositionsEnum(reader, + MultiFields.GetLiveDocs(reader), + DocHelper.TEXT_FIELD_1_KEY, + new BytesRef("field")); // NOTE: prior rev of this test was failing to first // call next here: Assert.IsTrue(positions.NextDoc() != DocIdSetIterator.NO_MORE_DOCS); @@ -267,4 +280,4 @@ public virtual void TestOutOfBoundsAccess() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestSegmentTermDocs.cs b/src/Lucene.Net.Tests/Index/TestSegmentTermDocs.cs index e613616e72..cb7cf2ec84 100644 --- a/src/Lucene.Net.Tests/Index/TestSegmentTermDocs.cs +++ b/src/Lucene.Net.Tests/Index/TestSegmentTermDocs.cs @@ -34,7 +34,7 @@ namespace Lucene.Net.Index [TestFixture] public class TestSegmentTermDocs : LuceneTestCase { - private Document testDoc; + private Document testDoc; // LUCENENET: = new Document(); moved to SetUp private Directory dir; private SegmentCommitInfo info; @@ -99,7 +99,12 @@ public virtual void TestBadSeek(int indexDivisor) //After adding the document, we should be able to read it back in SegmentReader reader = new SegmentReader(info, indexDivisor, NewIOContext(Random)); Assert.IsTrue(reader != null); - DocsEnum termDocs = TestUtil.Docs(Random, reader, "textField2", new BytesRef("bad"), reader.LiveDocs, null, 0); + DocsEnum termDocs = TestUtil.Docs(Random, reader, + "textField2", + new BytesRef("bad"), + reader.LiveDocs, + null, + 0); Assert.IsNull(termDocs); reader.Dispose(); @@ -108,7 +113,12 @@ public virtual void TestBadSeek(int indexDivisor) //After adding the document, we should be able to read it back in SegmentReader reader = new SegmentReader(info, indexDivisor, NewIOContext(Random)); Assert.IsTrue(reader != null); - DocsEnum termDocs = TestUtil.Docs(Random, reader, "junk", new BytesRef("bad"), reader.LiveDocs, null, 0); + DocsEnum termDocs = TestUtil.Docs(Random, reader, + "junk", + new BytesRef("bad"), + reader.LiveDocs, + null, + 0); Assert.IsNull(termDocs); reader.Dispose(); } @@ -149,7 +159,12 @@ public 
virtual void TestSkipTo(int indexDivisor) IndexReader reader = DirectoryReader.Open(dir, indexDivisor); - DocsEnum tdocs = TestUtil.Docs(Random, reader, ta.Field, new BytesRef(ta.Text), MultiFields.GetLiveDocs(reader), null, DocsFlags.FREQS); + DocsEnum tdocs = TestUtil.Docs(Random, reader, + ta.Field, + new BytesRef(ta.Text), + MultiFields.GetLiveDocs(reader), + null, + DocsFlags.FREQS); // without optimization (assumption skipInterval == 16) @@ -169,7 +184,12 @@ public virtual void TestSkipTo(int indexDivisor) Assert.IsFalse(tdocs.Advance(10) != DocIdSetIterator.NO_MORE_DOCS); // without next - tdocs = TestUtil.Docs(Random, reader, ta.Field, new BytesRef(ta.Text), MultiFields.GetLiveDocs(reader), null, 0); + tdocs = TestUtil.Docs(Random, reader, + ta.Field, + new BytesRef(ta.Text), + MultiFields.GetLiveDocs(reader), + null, + 0); Assert.IsTrue(tdocs.Advance(0) != DocIdSetIterator.NO_MORE_DOCS); Assert.AreEqual(0, tdocs.DocID); @@ -182,7 +202,12 @@ public virtual void TestSkipTo(int indexDivisor) // exactly skipInterval documents and therefore with optimization // with next - tdocs = TestUtil.Docs(Random, reader, tb.Field, new BytesRef(tb.Text), MultiFields.GetLiveDocs(reader), null, DocsFlags.FREQS); + tdocs = TestUtil.Docs(Random, reader, + tb.Field, + new BytesRef(tb.Text), + MultiFields.GetLiveDocs(reader), + null, + DocsFlags.FREQS); Assert.IsTrue(tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS); Assert.AreEqual(10, tdocs.DocID); @@ -201,7 +226,12 @@ public virtual void TestSkipTo(int indexDivisor) Assert.IsFalse(tdocs.Advance(26) != DocIdSetIterator.NO_MORE_DOCS); // without next - tdocs = TestUtil.Docs(Random, reader, tb.Field, new BytesRef(tb.Text), MultiFields.GetLiveDocs(reader), null, DocsFlags.FREQS); + tdocs = TestUtil.Docs(Random, reader, + tb.Field, + new BytesRef(tb.Text), + MultiFields.GetLiveDocs(reader), + null, + DocsFlags.FREQS); Assert.IsTrue(tdocs.Advance(5) != DocIdSetIterator.NO_MORE_DOCS); Assert.AreEqual(10, tdocs.DocID); @@ -216,7 +246,12 @@ public virtual void TestSkipTo(int indexDivisor) // much more than skipInterval documents and therefore with optimization // with next - tdocs = TestUtil.Docs(Random, reader, tc.Field, new BytesRef(tc.Text), MultiFields.GetLiveDocs(reader), null, DocsFlags.FREQS); + tdocs = TestUtil.Docs(Random, reader, + tc.Field, + new BytesRef(tc.Text), + MultiFields.GetLiveDocs(reader), + null, + DocsFlags.FREQS); Assert.IsTrue(tdocs.NextDoc() != DocIdSetIterator.NO_MORE_DOCS); Assert.AreEqual(26, tdocs.DocID); @@ -237,7 +272,12 @@ public virtual void TestSkipTo(int indexDivisor) Assert.IsFalse(tdocs.Advance(76) != DocIdSetIterator.NO_MORE_DOCS); //without next - tdocs = TestUtil.Docs(Random, reader, tc.Field, new BytesRef(tc.Text), MultiFields.GetLiveDocs(reader), null, 0); + tdocs = TestUtil.Docs(Random, reader, + tc.Field, + new BytesRef(tc.Text), + MultiFields.GetLiveDocs(reader), + null, + 0); Assert.IsTrue(tdocs.Advance(5) != DocIdSetIterator.NO_MORE_DOCS); Assert.AreEqual(26, tdocs.DocID); Assert.IsTrue(tdocs.Advance(40) != DocIdSetIterator.NO_MORE_DOCS); @@ -272,4 +312,4 @@ private void AddDoc(IndexWriter writer, string value) writer.AddDocument(doc); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestSizeBoundedForceMerge.cs b/src/Lucene.Net.Tests/Index/TestSizeBoundedForceMerge.cs index fc7a25dc1d..a853f5fcb6 100644 --- a/src/Lucene.Net.Tests/Index/TestSizeBoundedForceMerge.cs +++ b/src/Lucene.Net.Tests/Index/TestSizeBoundedForceMerge.cs @@ -51,7 +51,7 @@ private void AddDocs(IndexWriter writer, 
int numDocs, bool withID) writer.Commit(); } - private IndexWriterConfig NewWriterConfig() + private static IndexWriterConfig NewWriterConfig() { IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, null); conf.SetMaxBufferedDocs(IndexWriterConfig.DISABLE_AUTO_FLUSH); @@ -401,4 +401,4 @@ public virtual void TestSingleMergeableTooLargeSegment() Assert.IsTrue(sis[0].HasDeletions); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs b/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs index f0bdbffb99..2f94be2a55 100644 --- a/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs +++ b/src/Lucene.Net.Tests/Index/TestSnapshotDeletionPolicy.cs @@ -1,7 +1,6 @@ using J2N.Threading; using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; -using Lucene.Net.Support.Threading; using NUnit.Framework; using System; using System.Collections.Generic; @@ -44,9 +43,9 @@ namespace Lucene.Net.Index [TestFixture] public class TestSnapshotDeletionPolicy : LuceneTestCase { - public const string INDEX_PATH = "test.snapshots"; + // public const string INDEX_PATH = "test.snapshots"; // LUCENENET: unused constant - protected internal virtual IndexWriterConfig GetConfig(Random random, IndexDeletionPolicy dp) + protected IndexWriterConfig GetConfig(Random random, IndexDeletionPolicy dp) { IndexWriterConfig conf = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)); if (dp != null) @@ -56,13 +55,13 @@ protected internal virtual IndexWriterConfig GetConfig(Random random, IndexDelet return conf; } - protected internal virtual void CheckSnapshotExists(Directory dir, IndexCommit c) + protected void CheckSnapshotExists(Directory dir, IndexCommit c) { string segFileName = c.SegmentsFileName; Assert.IsTrue(SlowFileExists(dir, segFileName), "segments file not found in directory: " + segFileName); } - protected internal virtual void CheckMaxDoc(IndexCommit commit, int expectedMaxDoc) + protected void CheckMaxDoc(IndexCommit commit, int expectedMaxDoc) { IndexReader reader = DirectoryReader.Open(commit); try @@ -75,7 +74,9 @@ protected internal virtual void CheckMaxDoc(IndexCommit commit, int expectedMaxD } } - protected internal virtual void PrepareIndexAndSnapshots(SnapshotDeletionPolicy sdp, IndexWriter writer, int numSnapshots) + protected IList snapshots; // LUCENENET: = new JCG.List(); moved to SetUp + + protected void PrepareIndexAndSnapshots(SnapshotDeletionPolicy sdp, IndexWriter writer, int numSnapshots) { for (int i = 0; i < numSnapshots; i++) { @@ -86,9 +87,9 @@ protected internal virtual void PrepareIndexAndSnapshots(SnapshotDeletionPolicy } } - protected internal virtual SnapshotDeletionPolicy DeletionPolicy => new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()); + protected SnapshotDeletionPolicy DeletionPolicy => new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()); - protected internal virtual void AssertSnapshotExists(Directory dir, SnapshotDeletionPolicy sdp, int numSnapshots, bool checkIndexCommitSame) + protected void AssertSnapshotExists(Directory dir, SnapshotDeletionPolicy sdp, int numSnapshots, bool checkIndexCommitSame) { for (int i = 0; i < numSnapshots; i++) { @@ -106,8 +107,6 @@ protected internal virtual void AssertSnapshotExists(Directory dir, SnapshotDele } } - protected internal IList snapshots; - [SetUp] public override void SetUp() { @@ -130,7 +129,9 @@ private void RunTest(Random random, Directory dir) long stopTime = (J2N.Time.NanoTime() / 
J2N.Time.MillisecondsPerNanosecond) + 1000; // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results SnapshotDeletionPolicy dp = DeletionPolicy; - IndexWriter writer = new IndexWriter(dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetIndexDeletionPolicy(dp).SetMaxBufferedDocs(2)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random)) + .SetIndexDeletionPolicy(dp) + .SetMaxBufferedDocs(2)); // Verify we catch misuse: try @@ -145,7 +146,7 @@ private void RunTest(Random random, Directory dir) dp = (SnapshotDeletionPolicy)writer.Config.IndexDeletionPolicy; writer.Commit(); - ThreadJob t = new ThreadAnonymousClass(stopTime, writer, NewField); + ThreadJob t = new ThreadAnonymousClass(stopTime, writer); t.Start(); @@ -180,18 +181,11 @@ private sealed class ThreadAnonymousClass : ThreadJob { private readonly long stopTime; private readonly IndexWriter writer; - private readonly Func newFieldFunc; - - /// - /// LUCENENET specific - /// Passed in because - /// is no longer static. - /// - public ThreadAnonymousClass(long stopTime, IndexWriter writer, Func newFieldFunc) + + public ThreadAnonymousClass(long stopTime, IndexWriter writer) { this.stopTime = stopTime; this.writer = writer; - this.newFieldFunc = newFieldFunc; } public override void Run() @@ -201,7 +195,7 @@ public override void Run() customType.StoreTermVectors = true; customType.StoreTermVectorPositions = true; customType.StoreTermVectorOffsets = true; - doc.Add(newFieldFunc("content", "aaa", customType)); + doc.Add(NewField("content", "aaa", customType)); do { for (int i = 0; i < 27; i++) @@ -269,7 +263,7 @@ private void CopyFiles(Directory dir, IndexCommit cp) // we take to do the backup, the IndexWriter will // never delete the files in the snapshot: ICollection files = cp.FileNames; - foreach (String fileName in files) + foreach (string fileName in files) { // NOTE: in a real backup you would not use // readFile; you would need to use something else @@ -320,7 +314,7 @@ private void ReadFile(Directory dir, string name) [Test] public virtual void TestBasicSnapshots() { - int numSnapshots = 3; + const int numSnapshots = 3; // Create 3 snapshots: snapshot0, snapshot1, snapshot2 Directory dir = NewDirectory(); @@ -357,7 +351,7 @@ public virtual void TestMultiThreadedSnapshotting() for (int i = 0; i < threads.Length; i++) { int finalI = i; - threads[i] = new ThreadAnonymousClass2(this, writer, sdp, snapshots, finalI); + threads[i] = new ThreadAnonymousClass2(writer, sdp, snapshots, finalI); threads[i].Name = "t" + i; } @@ -387,16 +381,13 @@ public virtual void TestMultiThreadedSnapshotting() private sealed class ThreadAnonymousClass2 : ThreadJob { - private readonly TestSnapshotDeletionPolicy outerInstance; - private readonly IndexWriter writer; private readonly SnapshotDeletionPolicy sdp; private readonly IndexCommit[] snapshots; private readonly int finalI; - public ThreadAnonymousClass2(TestSnapshotDeletionPolicy outerInstance, IndexWriter writer, SnapshotDeletionPolicy sdp, IndexCommit[] snapshots, int finalI) + public ThreadAnonymousClass2(IndexWriter writer, SnapshotDeletionPolicy sdp, IndexCommit[] snapshots, int finalI) { - this.outerInstance = outerInstance; this.writer = writer; this.sdp = sdp; this.snapshots = snapshots; @@ -421,7 +412,7 @@ public override void Run() [Test] public virtual void TestRollbackToOldSnapshot() { - int numSnapshots = 2; + const int numSnapshots = 2; 
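// A minimal sketch of the snapshot/release cycle these TestSnapshotDeletionPolicy
// tests exercise, per the Lucene.NET 4.8 SnapshotDeletionPolicy API (directory,
// analyzer, and error handling are assumed from the surrounding tests):
//
//   var sdp = new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy());
//   using var writer = new IndexWriter(dir,
//       new IndexWriterConfig(LuceneVersion.LUCENE_48, new MockAnalyzer(random))
//           .SetIndexDeletionPolicy(sdp));
//   writer.Commit();
//   IndexCommit snapshot = sdp.Snapshot();  // pin the current commit on disk
//   try
//   {
//       // copy snapshot.FileNames somewhere safe; IndexWriter will not
//       // delete any file belonging to a snapshotted commit
//   }
//   finally
//   {
//       sdp.Release(snapshot);              // un-pin; files become deletable
//       writer.DeleteUnusedFiles();
//   }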
Directory dir = NewDirectory(); SnapshotDeletionPolicy sdp = DeletionPolicy; @@ -511,10 +502,10 @@ public virtual void TestMissingCommits() // open a new writer w/ KeepOnlyLastCommit policy, so it will delete "s1" // commit. - (new IndexWriter(dir, GetConfig(Random, null))).Dispose(); + new IndexWriter(dir, GetConfig(Random, null)).Dispose(); Assert.IsFalse(SlowFileExists(dir, s1.SegmentsFileName), "snapshotted commit should not exist"); dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestStoredFieldsFormat.cs b/src/Lucene.Net.Tests/Index/TestStoredFieldsFormat.cs index 6e88e45b4e..a7a980d203 100644 --- a/src/Lucene.Net.Tests/Index/TestStoredFieldsFormat.cs +++ b/src/Lucene.Net.Tests/Index/TestStoredFieldsFormat.cs @@ -20,7 +20,9 @@ namespace Lucene.Net.Index */ using Codec = Lucene.Net.Codecs.Codec; +#pragma warning disable 612, 618 using Lucene3xCodec = Lucene.Net.Codecs.Lucene3x.Lucene3xCodec; +#pragma warning restore 612, 618 /// /// Tests with the default randomized codec. Not really redundant with @@ -47,4 +49,4 @@ public override void TestWriteReadMerge() base.TestWriteReadMerge(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestStressAdvance.cs b/src/Lucene.Net.Tests/Index/TestStressAdvance.cs index c61e0ded99..1b10714801 100644 --- a/src/Lucene.Net.Tests/Index/TestStressAdvance.cs +++ b/src/Lucene.Net.Tests/Index/TestStressAdvance.cs @@ -44,7 +44,7 @@ public virtual void TestStressAdvance_Mem() Directory dir = NewDirectory(); RandomIndexWriter w = new RandomIndexWriter(Random, dir); ISet aDocs = new JCG.HashSet(); - Documents.Document doc = new Documents.Document(); + Document doc = new Document(); Field f = NewStringField("field", "", Field.Store.NO); doc.Add(f); Field idField = NewStringField("id", "", Field.Store.YES); @@ -166,9 +166,9 @@ private void TestOne(DocsEnum docs, IList expected) Console.WriteLine(" expect docID=" + expected[upto] + " actual=" + docID); } Assert.IsTrue(docID != DocIdSetIterator.NO_MORE_DOCS); - Assert.AreEqual((int)expected[upto], docID); + Assert.AreEqual(expected[upto], docID); } } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestStressIndexing.cs b/src/Lucene.Net.Tests/Index/TestStressIndexing.cs index effd7c9ae3..253060403a 100644 --- a/src/Lucene.Net.Tests/Index/TestStressIndexing.cs +++ b/src/Lucene.Net.Tests/Index/TestStressIndexing.cs @@ -38,8 +38,8 @@ private abstract class TimedThread : ThreadJob { internal volatile bool failed; internal int count; - internal static int RUN_TIME_MSEC = AtLeast(1000); - internal TimedThread[] allThreads; + private static int RUN_TIME_MSEC = AtLeast(1000); + private TimedThread[] allThreads; public abstract void DoWork(); @@ -89,30 +89,13 @@ internal virtual bool AnyErrors() private class IndexerThread : TimedThread { - private readonly Func newStringFieldFunc; - private readonly Func newTextFieldFunc; - internal IndexWriter writer; internal int nextID; - /// - /// LUCENENET specific - /// Passed in because - /// is no longer static. - /// - /// - /// LUCENENET specific - /// Passed in because - /// is no longer static. 
- /// - public IndexerThread(IndexWriter writer, TimedThread[] threads, - Func newStringField, - Func newTextField) + public IndexerThread(IndexWriter writer, TimedThread[] threads) : base(threads) { this.writer = writer; - newStringFieldFunc = newStringField; - newTextFieldFunc = newTextField; } public override void DoWork() @@ -120,10 +103,10 @@ public override void DoWork() // Add 10 docs: for (int j = 0; j < 10; j++) { - Documents.Document d = new Documents.Document(); + Document d = new Document(); int n = Random.Next(); - d.Add(newStringFieldFunc("id", Convert.ToString(nextID++), Field.Store.YES)); - d.Add(newTextFieldFunc("contents", English.Int32ToEnglish(n), Field.Store.NO)); + d.Add(NewStringField("id", Convert.ToString(nextID++), Field.Store.YES)); + d.Add(NewTextField("contents", English.Int32ToEnglish(n), Field.Store.NO)); writer.AddDocument(d); } @@ -139,18 +122,11 @@ public override void DoWork() private class SearcherThread : TimedThread { - internal Directory directory; - private readonly LuceneTestCase outerInstance; - - /// - /// LUCENENET specific - /// Passed in because - /// is no longer static. - /// - public SearcherThread(Directory directory, TimedThread[] threads, LuceneTestCase outerInstance) + private Directory directory; + + public SearcherThread(Directory directory, TimedThread[] threads) : base(threads) { - this.outerInstance = outerInstance; this.directory = directory; } @@ -159,7 +135,7 @@ public override void DoWork() for (int i = 0; i < 100; i++) { IndexReader ir = DirectoryReader.Open(directory); - IndexSearcher @is = NewSearcher(ir); + IndexSearcher _ = NewSearcher(ir); // LUCENENET: discarding unused `is` variable ir.Dispose(); } count += 100; @@ -181,21 +157,21 @@ public virtual void RunStressTest(Directory directory, IConcurrentMergeScheduler // One modifier that writes 10 docs then removes 5, over // and over: - IndexerThread indexerThread = new IndexerThread(modifier, threads, NewStringField, NewTextField); + IndexerThread indexerThread = new IndexerThread(modifier, threads); threads[numThread++] = indexerThread; indexerThread.Start(); - IndexerThread indexerThread2 = new IndexerThread(modifier, threads, NewStringField, NewTextField); + IndexerThread indexerThread2 = new IndexerThread(modifier, threads); threads[numThread++] = indexerThread2; indexerThread2.Start(); // Two searchers that constantly just re-instantiate the // searcher: - SearcherThread searcherThread1 = new SearcherThread(directory, threads, this); + SearcherThread searcherThread1 = new SearcherThread(directory, threads); threads[numThread++] = searcherThread1; searcherThread1.Start(); - SearcherThread searcherThread2 = new SearcherThread(directory, threads, this); + SearcherThread searcherThread2 = new SearcherThread(directory, threads); threads[numThread++] = searcherThread2; searcherThread2.Start(); @@ -236,4 +212,4 @@ public virtual void TestStressIndexAndSearching() directory.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs index 89c7d718bf..8944ae2bbc 100644 --- a/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs +++ b/src/Lucene.Net.Tests/Index/TestStressIndexing2.cs @@ -59,13 +59,6 @@ public class TestStressIndexing2 : LuceneTestCase public sealed class YieldTestPoint : ITestPoint { - private readonly TestStressIndexing2 outerInstance; - - public YieldTestPoint(TestStressIndexing2 outerInstance) - { - this.outerInstance = outerInstance; - } - public void 
Apply(string name) { // if (name.equals("startCommit")) { @@ -156,10 +149,11 @@ public virtual void TestMultiConfig() } } - internal static Term idTerm = new Term("id", ""); + // internal static Term idTerm = new Term("id", ""); // LUCENENET: unused internal IndexingThread[] threads; - internal static IComparer fieldNameComparer = Comparer.Create((o1, o2) => o1.Name.CompareToOrdinal(o2.Name)); + internal static IComparer fieldNameComparer = Comparer.Create((o1, o2) => + o1.Name.CompareToOrdinal(o2.Name)); // this test avoids using any extra synchronization in the multiple // indexing threads to test that IndexWriter does correctly synchronize @@ -174,7 +168,11 @@ public class DocsAndWriter public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, int range, Directory dir) { IDictionary docs = new Dictionary(); - IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetRAMBufferSizeMB(0.1).SetMaxBufferedDocs(maxBufferedDocs).SetMergePolicy(NewLogMergePolicy()), new YieldTestPoint(this)); + IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetRAMBufferSizeMB(0.1) + .SetMaxBufferedDocs(maxBufferedDocs) + .SetMergePolicy(NewLogMergePolicy()), new YieldTestPoint()); w.Commit(); LogMergePolicy lmp = (LogMergePolicy)w.Config.MergePolicy; lmp.NoCFSRatio = 0.0; @@ -189,7 +187,7 @@ public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, i threads = new IndexingThread[nThreads]; for (int i = 0; i < threads.Length; i++) { - IndexingThread th = new IndexingThread(this); + IndexingThread th = new IndexingThread(); th.w = w; th.@base = 1000000 * i; th.range = range; @@ -212,7 +210,7 @@ public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, i for (int i = 0; i < threads.Length; i++) { IndexingThread th = threads[i]; - UninterruptableMonitor.Enter(th); + UninterruptableMonitor.Enter(th); // LUCENENET: using UninterruptableMonitor instead of lock/synchronized, see docs for type try { docs.PutAll(th.docs); @@ -233,7 +231,13 @@ public virtual DocsAndWriter IndexRandomIWReader(int nThreads, int iterations, i public virtual IDictionary IndexRandom(int nThreads, int iterations, int range, Directory dir, int maxThreadStates, bool doReaderPooling) { IDictionary docs = new Dictionary(); - IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetOpenMode(OpenMode.CREATE).SetRAMBufferSizeMB(0.1).SetMaxBufferedDocs(maxBufferedDocs).SetIndexerThreadPool(new DocumentsWriterPerThreadPool(maxThreadStates)).SetReaderPooling(doReaderPooling).SetMergePolicy(NewLogMergePolicy()), new YieldTestPoint(this)); + IndexWriter w = RandomIndexWriter.MockIndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetOpenMode(OpenMode.CREATE) + .SetRAMBufferSizeMB(0.1) + .SetMaxBufferedDocs(maxBufferedDocs) + .SetIndexerThreadPool(new DocumentsWriterPerThreadPool(maxThreadStates)) + .SetReaderPooling(doReaderPooling) + .SetMergePolicy(NewLogMergePolicy()), new YieldTestPoint()); LogMergePolicy lmp = (LogMergePolicy)w.Config.MergePolicy; lmp.NoCFSRatio = 0.0; lmp.MergeFactor = mergeFactor; @@ -241,7 +245,7 @@ public virtual IDictionary IndexRandom(int nThreads, int itera threads = new IndexingThread[nThreads]; for (int i = 0; i < threads.Length; i++) { - IndexingThread 
th = new IndexingThread(this); + IndexingThread th = new IndexingThread(); th.w = w; th.@base = 1000000 * i; th.range = range; @@ -264,7 +268,7 @@ public virtual IDictionary IndexRandom(int nThreads, int itera for (int i = 0; i < threads.Length; i++) { IndexingThread th = threads[i]; - UninterruptableMonitor.Enter(th); + UninterruptableMonitor.Enter(th); // LUCENENET: using UninterruptableMonitor instead of lock/synchronized, see docs for type try { docs.PutAll(th.docs); @@ -281,11 +285,7 @@ public virtual IDictionary IndexRandom(int nThreads, int itera return docs; } - /// - /// LUCENENET specific - /// Is non-static because NewIndexWriterConfig is no longer static. - /// - public void IndexSerial(Random random, IDictionary docs, Directory dir) + public static void IndexSerial(Random random, IDictionary docs, Directory dir) { IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(random, TEST_VERSION_CURRENT, new MockAnalyzer(random)).SetMergePolicy(NewLogMergePolicy())); @@ -559,13 +559,13 @@ public virtual void VerifyEquals(DirectoryReader r1, DirectoryReader r2, string long[] info1 = new long[r1.NumDocs]; long[] info2 = new long[r2.NumDocs]; - for (; ; ) + for (;;) { BytesRef term1 = null, term2 = null; // iterate until we get some docs int len1; - for (; ; ) + for (;;) { len1 = 0; if (termsEnum1 is null) @@ -608,7 +608,7 @@ public virtual void VerifyEquals(DirectoryReader r1, DirectoryReader r2, string // iterate until we get some docs int len2; - for (; ; ) + for (;;) { len2 = 0; if (termsEnum2 is null) @@ -810,13 +810,6 @@ public static void VerifyEquals(Fields d1, Fields d2) internal class IndexingThread : ThreadJob { - private readonly TestStressIndexing2 outerInstance; - - public IndexingThread(TestStressIndexing2 outerInstance) - { - this.outerInstance = outerInstance; - } - internal IndexWriter w; internal int @base; internal int range; @@ -1055,7 +1048,7 @@ public override void Run() Assert.Fail(e.ToString()); } - UninterruptableMonitor.Enter(this); + UninterruptableMonitor.Enter(this); // LUCENENET: using UninterruptableMonitor instead of lock/synchronized, see docs for type try { int dummy = docs.Count; @@ -1067,4 +1060,4 @@ public override void Run() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestStressNRT.cs b/src/Lucene.Net.Tests/Index/TestStressNRT.cs index c3a4c5a3ef..5df2d89f0a 100644 --- a/src/Lucene.Net.Tests/Index/TestStressNRT.cs +++ b/src/Lucene.Net.Tests/Index/TestStressNRT.cs @@ -9,6 +9,7 @@ using System; using System.Collections.Concurrent; using System.Collections.Generic; +using System.Globalization; using System.Threading; using JCG = J2N.Collections.Generic; using Console = Lucene.Net.Util.SystemConsole; @@ -54,7 +55,7 @@ public class TestStressNRT : LuceneTestCase private long snapshotCount; private long committedModelClock; private volatile int lastId; - private readonly string field = "val_l"; + private const string field = "val_l"; private object[] syncArr; private void InitModel(int ndocs) @@ -208,7 +209,7 @@ public override void Run() long version; DirectoryReader oldReader; - UninterruptableMonitor.Enter(outerInstance); + UninterruptableMonitor.Enter(outerInstance); // LUCENENET: using UninterruptableMonitor instead of lock/synchronized; see docs for type try { newCommittedModel = new Dictionary(outerInstance.model); // take a snapshot @@ -267,7 +268,7 @@ public override void Run() oldReader.DecRef(); - UninterruptableMonitor.Enter(outerInstance); + UninterruptableMonitor.Enter(outerInstance); // LUCENENET: 
using UninterruptableMonitor instead of lock/synchronized; see docs for type try { // install the new reader if it's newest (and check the current version since another reader may have already been installed) @@ -286,7 +287,7 @@ public override void Run() // Silly: forces fieldInfos to be // loaded so we don't hit IOE on later // reader.toString - newReader.ToString(); + _ = newReader.ToString(); // LUCENENET: discard result // install this snapshot only if it's newer than the current one if (version >= outerInstance.committedModelClock) @@ -338,7 +339,7 @@ public override void Run() // We can't concurrently update the same document and retain our invariants of increasing values // since we can't guarantee what order the updates will be executed. - UninterruptableMonitor.Enter(sync); + UninterruptableMonitor.Enter(sync); // LUCENENET: using UninterruptableMonitor instead of lock/synchronized; see docs for type try { long val = outerInstance.model[id]; @@ -352,16 +353,16 @@ public override void Run() if (tombstones) { Document d = new Document(); - d.Add(NewStringField("id", "-" + Convert.ToString(id), Documents.Field.Store.YES)); - d.Add(NewField(outerInstance.field, Convert.ToString(nextVal), storedOnlyType)); - writer.UpdateDocument(new Term("id", "-" + Convert.ToString(id)), d); + d.Add(NewStringField("id", "-" + Convert.ToString(id, CultureInfo.InvariantCulture), Field.Store.YES)); + d.Add(NewField(field, Convert.ToString(nextVal, CultureInfo.InvariantCulture), storedOnlyType)); + writer.UpdateDocument(new Term("id", "-" + Convert.ToString(id, CultureInfo.InvariantCulture)), d); } if (Verbose) { Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": term delDocs id:" + id + " nextVal=" + nextVal); } - writer.DeleteDocuments(new Term("id", Convert.ToString(id))); + writer.DeleteDocuments(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); outerInstance.model[id] = -nextVal; } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) @@ -372,33 +373,33 @@ public override void Run() if (tombstones) { Document d = new Document(); - d.Add(NewStringField("id", "-" + Convert.ToString(id), Documents.Field.Store.YES)); - d.Add(NewField(outerInstance.field, Convert.ToString(nextVal), storedOnlyType)); - writer.UpdateDocument(new Term("id", "-" + Convert.ToString(id)), d); + d.Add(NewStringField("id", "-" + Convert.ToString(id, CultureInfo.InvariantCulture), Field.Store.YES)); + d.Add(NewField(field, Convert.ToString(nextVal, CultureInfo.InvariantCulture), storedOnlyType)); + writer.UpdateDocument(new Term("id", "-" + Convert.ToString(id, CultureInfo.InvariantCulture)), d); } if (Verbose) { Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": query delDocs id:" + id + " nextVal=" + nextVal); } - writer.DeleteDocuments(new TermQuery(new Term("id", Convert.ToString(id)))); + writer.DeleteDocuments(new TermQuery(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture)))); outerInstance.model[id] = -nextVal; } else { // assertU(adoc("id",Integer.toString(id), field, Long.toString(nextVal))); Document d = new Document(); - d.Add(NewStringField("id", Convert.ToString(id), Documents.Field.Store.YES)); - d.Add(NewField(outerInstance.field, Convert.ToString(nextVal), storedOnlyType)); + d.Add(NewStringField("id", Convert.ToString(id, CultureInfo.InvariantCulture), Field.Store.YES)); + d.Add(NewField(field, Convert.ToString(nextVal, CultureInfo.InvariantCulture), storedOnlyType)); if (Verbose) { Console.WriteLine("TEST: " + Thread.CurrentThread.Name + ": u 
id:" + id + " val=" + nextVal); } - writer.UpdateDocument(new Term("id", Convert.ToString(id)), d); + writer.UpdateDocument(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture)), d); if (tombstones) { // remove tombstone after new addition (this should be optional?) - writer.DeleteDocuments(new Term("id", "-" + Convert.ToString(id))); + writer.DeleteDocuments(new Term("id", "-" + Convert.ToString(id, CultureInfo.InvariantCulture))); } outerInstance.model[id] = nextVal; } @@ -461,7 +462,7 @@ public override void Run() long val; DirectoryReader r; - UninterruptableMonitor.Enter(outerInstance); + UninterruptableMonitor.Enter(outerInstance); // LUCENENET: using UninterruptableMonitor instead of lock/synchronized; see docs for type try { val = outerInstance.committedModel[id]; @@ -493,13 +494,13 @@ public override void Run() lastReader = r; lastSearcher = searcher; } - Query q = new TermQuery(new Term("id", Convert.ToString(id))); + Query q = new TermQuery(new Term("id", Convert.ToString(id, CultureInfo.InvariantCulture))); TopDocs results = searcher.Search(q, 10); if (results.TotalHits == 0 && tombstones) { // if we couldn't find the doc, look for its tombstone - q = new TermQuery(new Term("id", "-" + Convert.ToString(id))); + q = new TermQuery(new Term("id", "-" + Convert.ToString(id, CultureInfo.InvariantCulture))); results = searcher.Search(q, 1); if (results.TotalHits == 0) { @@ -526,12 +527,12 @@ public override void Run() foreach (ScoreDoc sd in results.ScoreDocs) { Document doc = r.Document(sd.Doc); - Console.WriteLine(" docID=" + sd.Doc + " id:" + doc.Get("id") + " foundVal=" + doc.Get(outerInstance.field)); + Console.WriteLine(" docID=" + sd.Doc + " id:" + doc.Get("id") + " foundVal=" + doc.Get(field)); } Assert.Fail("id=" + id + " reader=" + r + " totalHits=" + results.TotalHits); } Document doc_ = searcher.Doc(results.ScoreDocs[0].Doc); - long foundVal = Convert.ToInt64(doc_.Get(outerInstance.field)); + long foundVal = Convert.ToInt64(doc_.Get(field)); if (foundVal < Math.Abs(val)) { Assert.Fail("foundVal=" + foundVal + " val=" + val + " id=" + id + " reader=" + r); @@ -543,7 +544,7 @@ public override void Run() } catch (Exception e) when (e.IsThrowable()) { - operations.Value = ((int)-1L); + operations.Value = (int)-1L; Console.WriteLine(Thread.CurrentThread.Name + ": FAILED: unexpected exception"); Console.WriteLine(e.StackTrace); throw RuntimeException.Create(e); @@ -551,4 +552,4 @@ public override void Run() } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestSumDocFreq.cs b/src/Lucene.Net.Tests/Index/TestSumDocFreq.cs index f887d4ae1d..2e20add835 100644 --- a/src/Lucene.Net.Tests/Index/TestSumDocFreq.cs +++ b/src/Lucene.Net.Tests/Index/TestSumDocFreq.cs @@ -1,5 +1,4 @@ -using System; -using Lucene.Net.Documents; +using Lucene.Net.Documents; using NUnit.Framework; using Assert = Lucene.Net.TestFramework.Assert; using Console = Lucene.Net.Util.SystemConsole; @@ -30,7 +29,7 @@ namespace Lucene.Net.Index using TestUtil = Lucene.Net.Util.TestUtil; /// - /// Tests + /// Tests /// @lucene.experimental /// [TestFixture] @@ -110,4 +109,4 @@ private void AssertSumDocFreq(IndexReader ir) } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestTermVectorsFormat.cs b/src/Lucene.Net.Tests/Index/TestTermVectorsFormat.cs index 54b96eaa4c..c5b79e4d2b 100644 --- a/src/Lucene.Net.Tests/Index/TestTermVectorsFormat.cs +++ b/src/Lucene.Net.Tests/Index/TestTermVectorsFormat.cs @@ -21,7 +21,9 @@ namespace Lucene.Net.Index */ 
using Codec = Lucene.Net.Codecs.Codec; +#pragma warning disable 612, 618 using Lucene3xCodec = Lucene.Net.Codecs.Lucene3x.Lucene3xCodec; +#pragma warning restore 612, 618 /// /// Tests with the default randomized codec. Not really redundant with @@ -57,4 +59,4 @@ public override void TestMergeStability() AssumeTrue("The MockRandom PF randomizes content on the fly, so we can't check it", false); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs b/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs index ce55888c57..de2229b450 100644 --- a/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs +++ b/src/Lucene.Net.Tests/Index/TestTermVectorsReader.cs @@ -41,6 +41,7 @@ namespace Lucene.Net.Index [TestFixture] public class TestTermVectorsReader : LuceneTestCase { + // LUCENENET specific - required to initialize fields using testTerms field public TestTermVectorsReader() { positions = new int[testTerms.Length][]; @@ -48,26 +49,18 @@ public TestTermVectorsReader() } //Must be lexicographically sorted, will do in setup, versus trying to maintain here - private string[] testFields = new string[] { "f1", "f2", "f3", "f4" }; - - private bool[] testFieldsStorePos = new bool[] { true, false, true, false }; - private bool[] testFieldsStoreOff = new bool[] { true, false, false, true }; - private string[] testTerms = new string[] { "this", "is", "a", "test" }; + private string[] testFields = { "f1", "f2", "f3", "f4" }; + private bool[] testFieldsStorePos = { true, false, true, false }; + private bool[] testFieldsStoreOff = { true, false, false, true }; + private string[] testTerms = { "this", "is", "a", "test" }; private int[][] positions; private Directory dir; private SegmentCommitInfo seg; private FieldInfos fieldInfos = new FieldInfos(new FieldInfo[0]); - private static int TERM_FREQ = 3; + private const int TERM_FREQ = 3; internal class TestToken : IComparable { - private readonly TestTermVectorsReader outerInstance; - - public TestToken(TestTermVectorsReader outerInstance) - { - this.outerInstance = outerInstance; - } - internal string text; internal int pos; internal int startOffset; @@ -101,7 +94,7 @@ public override void SetUp() { // positions are always sorted in increasing order positions[i][j] = (int)(j * 10 + Random.NextDouble() * 10); // LUCENENET: Using Random because Math.random() doesn't exist in .NET and it seems to make sense to want this repeatable. 
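// The position formula in TestTermVectorsReader.SetUp above keeps positions
// strictly increasing without sorting: token j draws from [10*j, 10*j + 10),
// and since (int)(j*10 + r*10) <= 10*j + 9 for r < 1, every bucket ends below
// where the next one starts.
//
//   // e.g. j = 2 -> positions in [20, 30); j = 3 always starts at 30 or later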
- TestToken token = tokens[tokenUpto++] = new TestToken(this); + TestToken token = tokens[tokenUpto++] = new TestToken(); token.text = testTerms[i]; token.pos = positions[i][j]; token.startOffset = j * 10; @@ -111,7 +104,10 @@ public override void SetUp() Array.Sort(tokens); dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MyAnalyzer(this)).SetMaxBufferedDocs(-1).SetMergePolicy(NewLogMergePolicy(false, 10)).SetUseCompoundFile(false)); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MyAnalyzer(this)) + .SetMaxBufferedDocs(-1) + .SetMergePolicy(NewLogMergePolicy(false, 10)) + .SetUseCompoundFile(false)); Document doc = new Document(); for (int i = 0; i < testFields.Length; i++) @@ -471,4 +467,4 @@ public virtual void TestIllegalIndexableField() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs b/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs index cef2a7268b..8267833fec 100644 --- a/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs +++ b/src/Lucene.Net.Tests/Index/TestTermVectorsWriter.cs @@ -415,7 +415,11 @@ public virtual void TestTermVectorCorruption() Directory dir = NewDirectory(); for (int iter = 0; iter < 2; iter++) { - IndexWriter writer = new IndexWriter(dir, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(new LogDocMergePolicy())); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + .SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetMergeScheduler(new SerialMergeScheduler()) + .SetMergePolicy(new LogDocMergePolicy())); Document document = new Document(); FieldType customType = new FieldType(); @@ -447,9 +451,13 @@ public virtual void TestTermVectorCorruption() } reader.Dispose(); - writer = new IndexWriter(dir, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(new LogDocMergePolicy())); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + .SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetMergeScheduler(new SerialMergeScheduler()) + .SetMergePolicy(new LogDocMergePolicy())); - Directory[] indexDirs = new Directory[] { new MockDirectoryWrapper(Random, new RAMDirectory(dir, NewIOContext(Random))) }; + Directory[] indexDirs = { new MockDirectoryWrapper(Random, new RAMDirectory(dir, NewIOContext(Random))) }; writer.AddIndexes(indexDirs); writer.ForceMerge(1); writer.Dispose(); @@ -464,7 +472,11 @@ public virtual void TestTermVectorCorruption2() Directory dir = NewDirectory(); for (int iter = 0; iter < 2; iter++) { - IndexWriter writer = new IndexWriter(dir, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(new LogDocMergePolicy())); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + 
.SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetMergeScheduler(new SerialMergeScheduler()) + .SetMergePolicy(new LogDocMergePolicy())); Document document = new Document(); @@ -502,7 +514,11 @@ public virtual void TestTermVectorCorruption2() public virtual void TestTermVectorCorruption3() { Directory dir = NewDirectory(); - IndexWriter writer = new IndexWriter(dir, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(new LogDocMergePolicy())); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + .SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetMergeScheduler(new SerialMergeScheduler()) + .SetMergePolicy(new LogDocMergePolicy())); Document document = new Document(); FieldType customType = new FieldType(); @@ -522,7 +538,11 @@ public virtual void TestTermVectorCorruption3() } writer.Dispose(); - writer = new IndexWriter(dir, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMaxBufferedDocs(2).SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)).SetMergeScheduler(new SerialMergeScheduler()).SetMergePolicy(new LogDocMergePolicy())); + writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetMaxBufferedDocs(2) + .SetRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH) + .SetMergeScheduler(new SerialMergeScheduler()) + .SetMergePolicy(new LogDocMergePolicy())); for (int i = 0; i < 6; i++) { writer.AddDocument(document); @@ -605,4 +625,4 @@ public virtual void TestNoTermVectorAfterTermVectorMerge() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs b/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs index b857fe1e2e..d5819e6909 100644 --- a/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs +++ b/src/Lucene.Net.Tests/Index/TestTermdocPerf.cs @@ -3,11 +3,14 @@ using Lucene.Net.Documents; using Lucene.Net.Index.Extensions; using NUnit.Framework; -using RandomizedTesting.Generators; using System; using System.IO; using Console = Lucene.Net.Util.SystemConsole; +#if !FEATURE_RANDOM_NEXTINT64_NEXTSINGLE +using RandomizedTesting.Generators; +#endif + namespace Lucene.Net.Index { /* @@ -92,7 +95,10 @@ internal virtual void AddDocs(Random random, Directory dir, int ndocs, string fi Document doc = new Document(); doc.Add(NewStringField(field, val, Field.Store.NO)); - IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(100).SetMergePolicy(NewLogMergePolicy(100))); + IndexWriter writer = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, analyzer) + .SetOpenMode(OpenMode.CREATE) + .SetMaxBufferedDocs(100) + .SetMergePolicy(NewLogMergePolicy(100))); for (int i = 0; i < ndocs; i++) { @@ -129,7 +135,7 @@ public virtual int DoTest(int iter, int ndocs, int maxTF, float percentDocs) Directory dir = NewDirectory(); long start = J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond; // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results - AddDocs(LuceneTestCase.Random, dir, ndocs, "foo", "val", maxTF, percentDocs); + AddDocs(Random, dir, ndocs, "foo", "val", maxTF, percentDocs); long end = J2N.Time.NanoTime() / 
J2N.Time.MillisecondsPerNanosecond; // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results if (Verbose) { @@ -173,4 +179,4 @@ public virtual void TestTermDocPerf() DoTest(100000, 10000, 3, .1f); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs index 4531bf0c1a..cc8ca18981 100644 --- a/src/Lucene.Net.Tests/Index/TestTermsEnum.cs +++ b/src/Lucene.Net.Tests/Index/TestTermsEnum.cs @@ -55,9 +55,9 @@ public virtual void Test() Random random = new J2N.Randomizer(Random.NextInt64()); LineFileDocs docs = new LineFileDocs(random, DefaultCodecSupportsDocValues); Directory d = NewDirectory(); - MockAnalyzer analyzer = new MockAnalyzer(LuceneTestCase.Random); - analyzer.MaxTokenLength = TestUtil.NextInt32(LuceneTestCase.Random, 1, IndexWriter.MAX_TERM_LENGTH); - RandomIndexWriter w = new RandomIndexWriter(LuceneTestCase.Random, d, analyzer); + MockAnalyzer analyzer = new MockAnalyzer(Random); + analyzer.MaxTokenLength = TestUtil.NextInt32(Random, 1, IndexWriter.MAX_TERM_LENGTH); + RandomIndexWriter w = new RandomIndexWriter(Random, d, analyzer); int numDocs = AtLeast(10); for (int docCount = 0; docCount < numDocs; docCount++) { @@ -82,7 +82,7 @@ public virtual void Test() for (int iter = 0; iter < iters; iter++) { bool isEnd; - if (upto != -1 && LuceneTestCase.Random.NextBoolean()) + if (upto != -1 && Random.NextBoolean()) { // next if (Verbose) @@ -114,29 +114,29 @@ public virtual void Test() { BytesRef target; string exists; - if (LuceneTestCase.Random.NextBoolean()) + if (Random.NextBoolean()) { // likely fake term - if (LuceneTestCase.Random.NextBoolean()) + if (Random.NextBoolean()) { - target = new BytesRef(TestUtil.RandomSimpleString(LuceneTestCase.Random)); + target = new BytesRef(TestUtil.RandomSimpleString(Random)); } else { - target = new BytesRef(TestUtil.RandomRealisticUnicodeString(LuceneTestCase.Random)); + target = new BytesRef(TestUtil.RandomRealisticUnicodeString(Random)); } exists = "likely not"; } else { // real term - target = terms[LuceneTestCase.Random.Next(terms.Count)]; + target = terms[Random.Next(terms.Count)]; exists = "yes"; } upto = terms.BinarySearch(target); - if (LuceneTestCase.Random.NextBoolean()) + if (Random.NextBoolean()) { if (Verbose) { @@ -421,7 +421,7 @@ public virtual void TestIntersectRandom() docsEnum = TestUtil.Docs(Random, te, null, docsEnum, DocsFlags.NONE); int docID = docsEnum.NextDoc(); Assert.IsTrue(docID != DocIdSetIterator.NO_MORE_DOCS); - Assert.AreEqual(docIDToID.Get(docID), (int)termToID[expected]); + Assert.AreEqual(docIDToID.Get(docID), termToID[expected]); do { loc++; @@ -435,17 +435,26 @@ public virtual void TestIntersectRandom() dir.Dispose(); } - private readonly string FIELD = "field"; + private Directory d; + private IndexReader r; - private IndexReader MakeIndex(Directory d, params string[] terms) + private const string FIELD = "field"; + + private IndexReader MakeIndex(params string[] terms) { + // LUCENENET specific: clean up reader/directory before creating a new one, moved from below + if (r != null) { + Close(); + } + + d = NewDirectory(); var iwc = NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)); /* iwc.SetCodec(new StandardCodec(minTermsInBlock, maxTermsInBlock)); */ - using var w = new RandomIndexWriter(Random, d, iwc); + var w = new RandomIndexWriter(Random, d, iwc); foreach (string term in terms) { var doc = new Document(); @@ -454,7 +463,20 @@ private IndexReader 
MakeIndex(Directory d, params string[] terms) w.AddDocument(doc); } - return w.GetReader(); + // LUCENENET specific: moved above to clean up reader/directory before creating a new one + // if (r != null) { + // Close(); + // } + r = w.GetReader(); + w.Dispose(); + return r; + } + + // LUCENENET NOTE: note that this is not a true dispose pattern, see usage above + private void Close() + { + r?.Dispose(); + d?.Dispose(); } private int DocFreq(IndexReader r, string term) @@ -466,8 +488,8 @@ private int DocFreq(IndexReader r, string term) public virtual void TestEasy() { // No floor arcs: - using var d = NewDirectory(); - using var r = MakeIndex(d, "aa0", "aa1", "aa2", "aa3", "bb0", "bb1", "bb2", "bb3", "aa"); + r = MakeIndex("aa0", "aa1", "aa2", "aa3", "bb0", "bb1", "bb2", "bb3", "aa"); + // First term in block: Assert.AreEqual(1, DocFreq(r, "aa0")); @@ -517,6 +539,8 @@ public virtual void TestEasy() // Found, rewind: Assert.AreEqual(1, DocFreq(r, "bb0")); + + Close(); } // tests: @@ -527,9 +551,9 @@ public virtual void TestEasy() public virtual void TestFloorBlocks() { var terms = new[] { "aa0", "aa1", "aa2", "aa3", "aa4", "aa5", "aa6", "aa7", "aa8", "aa9", "aa", "xx" }; + r = MakeIndex(terms); + //r = makeIndex("aa0", "aa1", "aa2", "aa3", "aa4", "aa5", "aa6", "aa7", "aa8", "aa9"); - using var d = NewDirectory(); - using var r = MakeIndex(d, terms); // First term in first block: Assert.AreEqual(1, DocFreq(r, "aa0")); Assert.AreEqual(1, DocFreq(r, "aa4")); @@ -575,12 +599,13 @@ public virtual void TestFloorBlocks() Assert.AreEqual("xx", Next(te)); TestRandomSeeks(r, terms); + Close(); } [Test] public virtual void TestZeroTerms() { - var d = NewDirectory(); + d = NewDirectory(); RandomIndexWriter w = new RandomIndexWriter(Random, d); Document doc = new Document(); doc.Add(NewTextField("field", "one two three", Field.Store.NO)); @@ -656,9 +681,9 @@ public virtual void TestRandomTerms() } } - using var d = NewDirectory(); - using var r = MakeIndex(d, terms); + r = MakeIndex(terms); TestRandomSeeks(r, terms); + Close(); } // sugar @@ -887,7 +912,7 @@ public virtual void TestIntersectBasic() w.Dispose(); AtomicReader sub = GetOnlySegmentReader(r); Terms terms = sub.Fields.GetTerms("field"); - Automaton automaton = (new RegExp(".*", RegExpSyntax.NONE)).ToAutomaton(); + Automaton automaton = new RegExp(".*", RegExpSyntax.NONE).ToAutomaton(); CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false); TermsEnum te = terms.Intersect(ca, null); Assert.IsTrue(te.MoveNext()); @@ -953,7 +978,7 @@ public virtual void TestIntersectStartTerm() AtomicReader sub = GetOnlySegmentReader(r); Terms terms = sub.Fields.GetTerms("field"); - Automaton automaton = (new RegExp(".*d", RegExpSyntax.NONE)).ToAutomaton(); + Automaton automaton = new RegExp(".*d", RegExpSyntax.NONE).ToAutomaton(); CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false); TermsEnum te; @@ -1013,7 +1038,7 @@ public virtual void TestIntersectEmptyString() AtomicReader sub = GetOnlySegmentReader(r); Terms terms = sub.Fields.GetTerms("field"); - Automaton automaton = (new RegExp(".*", RegExpSyntax.NONE)).ToAutomaton(); // accept ALL + Automaton automaton = new RegExp(".*", RegExpSyntax.NONE).ToAutomaton(); // accept ALL CompiledAutomaton ca = new CompiledAutomaton(automaton, false, false); TermsEnum te = terms.Intersect(ca, null); @@ -1048,4 +1073,4 @@ public virtual void TestIntersectEmptyString() dir.Dispose(); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs 
b/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs index c53a099081..ccbb9f6674 100644 --- a/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs +++ b/src/Lucene.Net.Tests/Index/TestTermsEnum2.cs @@ -60,7 +60,8 @@ public override void SetUp() // but for preflex codec, the test can be very slow, so use less iterations. numIterations = Codec.Default.Name.Equals("Lucene3x", StringComparison.Ordinal) ? 10 * RandomMultiplier : AtLeast(50); dir = NewDirectory(); - RandomIndexWriter writer = new RandomIndexWriter(Random, dir, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false)).SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000))); + RandomIndexWriter writer = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random, MockTokenizer.KEYWORD, false)) + .SetMaxBufferedDocs(TestUtil.NextInt32(Random, 50, 1000))); Document doc = new Document(); Field field = NewStringField("field", "", Field.Store.YES); doc.Add(field); @@ -91,14 +92,15 @@ public override void TearDown() } /// <summary> - /// tests a pre-intersected automaton against the original + /// tests a pre-intersected automaton against the original + /// </summary> [Test] public virtual void TestFiniteVersusInfinite() { for (int i = 0; i < numIterations; i++) { string reg = AutomatonTestUtil.RandomRegexp(Random); - Automaton automaton = (new RegExp(reg, RegExpSyntax.NONE)).ToAutomaton(); + Automaton automaton = new RegExp(reg, RegExpSyntax.NONE).ToAutomaton(); IList<BytesRef> matchedTerms = new JCG.List<BytesRef>(); foreach (BytesRef t in terms) { @@ -119,7 +121,8 @@ public virtual void TestFiniteVersusInfinite() } /// <summary> - /// seeks to every term accepted by some automata + /// seeks to every term accepted by some automata + /// </summary> [Test] public virtual void TestSeeking() { @@ -153,7 +156,8 @@ public virtual void TestSeeking() } /// <summary> - /// mixes up seek and next for all terms + /// mixes up seek and next for all terms + /// </summary> [Test] public virtual void TestSeekingAndNexting() { @@ -190,7 +194,7 @@ public virtual void TestIntersect() for (int i = 0; i < numIterations; i++) { string reg = AutomatonTestUtil.RandomRegexp(Random); - Automaton automaton = (new RegExp(reg, RegExpSyntax.NONE)).ToAutomaton(); + Automaton automaton = new RegExp(reg, RegExpSyntax.NONE).ToAutomaton(); CompiledAutomaton ca = new CompiledAutomaton(automaton, SpecialOperations.IsFinite(automaton), false); TermsEnum te = MultiFields.GetTerms(reader, "field").Intersect(ca, null); Automaton expected = BasicOperations.Intersection(termsAutomaton, automaton); @@ -205,4 +209,4 @@ public virtual void TestIntersect() } } } -} \ No newline at end of file +}
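The tests above all funnel a random RegExp through CompiledAutomaton into TermsEnum.Intersect. A minimal sketch of that pattern outside the test harness, assuming an already-open IndexReader named reader with a populated "field" (both hypothetical here; types come from Lucene.Net.Index, Lucene.Net.Util, and Lucene.Net.Util.Automaton):

    // Enumerate only the terms of "field" accepted by a regular expression.
    Automaton automaton = new RegExp("ab.*", RegExpSyntax.NONE).ToAutomaton();
    CompiledAutomaton ca = new CompiledAutomaton(automaton,
        SpecialOperations.IsFinite(automaton), false);
    TermsEnum te = MultiFields.GetTerms(reader, "field").Intersect(ca, null);
    while (te.MoveNext())
    {
        Console.WriteLine(te.Term.Utf8ToString()); // each matching term
    }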
diff --git a/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs b/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs index 9071401712..56d4bdd323 100644 --- a/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs +++ b/src/Lucene.Net.Tests/Index/TestThreadedForceMerge.cs @@ -63,7 +63,10 @@ private void SetFailed() public virtual void RunTest(Random random, Directory directory) { - IndexWriter writer = new IndexWriter(directory, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2)).SetMergePolicy(NewLogMergePolicy())); + IndexWriter writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER) + .SetOpenMode(OpenMode.CREATE) + .SetMaxBufferedDocs(2) + .SetMergePolicy(NewLogMergePolicy())); for (int iter = 0; iter < NUM_ITER; iter++) { @@ -111,7 +114,9 @@ public virtual void RunTest(Random random, Directory directory) Assert.AreEqual(expectedDocCount, writer.MaxDoc, "index=" + writer.SegString() + " numDocs=" + writer.NumDocs + " maxDoc=" + writer.MaxDoc + " config=" + writer.Config); writer.Dispose(); - writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2)); + writer = new IndexWriter(directory, NewIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER) + .SetOpenMode(OpenMode.APPEND) + .SetMaxBufferedDocs(2)); DirectoryReader reader = DirectoryReader.Open(directory); Assert.AreEqual(1, reader.Leaves.Count, "reader=" + reader); @@ -182,4 +187,4 @@ public virtual void TestThreadedForceMerge_Mem() directory.Dispose(); } } -} \ No newline at end of file +}
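The hunks above also drop the (IndexWriterConfig) casts: as the removal demonstrates, the LUCENENET config setters evidently return IndexWriterConfig themselves, so the chain type-checks without casting. A sketch of the resulting fluent style, with analyzer and directory standing in for whatever the caller already has (both hypothetical):

    IndexWriterConfig config = new IndexWriterConfig(LuceneVersion.LUCENE_48, analyzer)
        .SetOpenMode(OpenMode.CREATE)       // recreate the index
        .SetMaxBufferedDocs(2)              // flush very eagerly (test-style value)
        .SetMergePolicy(new LogByteSizeMergePolicy());
    using IndexWriter writer = new IndexWriter(directory, config);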
diff --git a/src/Lucene.Net.Tests/Index/TestTransactionRollback.cs b/src/Lucene.Net.Tests/Index/TestTransactionRollback.cs index 2ad48ff02f..1e9df50b31 100644 --- a/src/Lucene.Net.Tests/Index/TestTransactionRollback.cs +++ b/src/Lucene.Net.Tests/Index/TestTransactionRollback.cs @@ -71,7 +71,9 @@ private void RollBackLast(int id) throw RuntimeException.Create("Couldn't find commit point " + id); } - IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(new RollbackDeletionPolicy(this, id)).SetIndexCommit(last)); + IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetIndexDeletionPolicy(new RollbackDeletionPolicy(id)) + .SetIndexCommit(last)); IDictionary<string, string> data = new Dictionary<string, string>(); data["index"] = "Rolled back to 1-" + id.ToString(CultureInfo.InvariantCulture); w.SetCommitData(data); @@ -140,7 +142,7 @@ public override void SetUp() dir = NewDirectory(); //Build index, of records 1 to 100, committing after each batch of 10 - IndexDeletionPolicy sdp = new KeepAllDeletionPolicy(this); + IndexDeletionPolicy sdp = new KeepAllDeletionPolicy(); IndexWriter w = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(sdp)); for (int currentRecordId = 1; currentRecordId <= 100; currentRecordId++) @@ -171,13 +173,10 @@ public override void TearDown() // Rolls back to previous commit point internal class RollbackDeletionPolicy : IndexDeletionPolicy { - private readonly TestTransactionRollback outerInstance; + private int rollbackPoint; - internal int rollbackPoint; - - public RollbackDeletionPolicy(TestTransactionRollback outerInstance, int rollbackPoint) + public RollbackDeletionPolicy(int rollbackPoint) { - this.outerInstance = outerInstance; this.rollbackPoint = rollbackPoint; } @@ -219,13 +218,6 @@ public override void OnInit<T>(IList<T> commits) internal class DeleteLastCommitPolicy : IndexDeletionPolicy { - private readonly TestTransactionRollback outerInstance; - - public DeleteLastCommitPolicy(TestTransactionRollback outerInstance) - { - this.outerInstance = outerInstance; - } - public override void OnCommit<T>(IList<T> commits) { } @@ -243,7 +235,9 @@ public virtual void TestRollbackDeletionPolicy() { // Unless you specify a prior commit point, rollback // should not work: - (new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetIndexDeletionPolicy(new DeleteLastCommitPolicy(this)))).Dispose(); + new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)) + .SetIndexDeletionPolicy(new DeleteLastCommitPolicy())) + .Dispose(); IndexReader r = DirectoryReader.Open(dir); Assert.AreEqual(100, r.NumDocs); r.Dispose(); @@ -253,13 +247,6 @@ public virtual void TestRollbackDeletionPolicy() // Keeps all commit points (used to build index) internal class KeepAllDeletionPolicy : IndexDeletionPolicy { - private readonly TestTransactionRollback outerInstance; - - public KeepAllDeletionPolicy(TestTransactionRollback outerInstance) - { - this.outerInstance = outerInstance; - } - public override void OnCommit<T>(IList<T> commits) { } @@ -269,4 +256,4 @@ public override void OnInit<T>(IList<T> commits) } } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net.Tests/Index/TestTransactions.cs b/src/Lucene.Net.Tests/Index/TestTransactions.cs index ebd885eb5c..4ff4e42731 100644 --- a/src/Lucene.Net.Tests/Index/TestTransactions.cs +++ b/src/Lucene.Net.Tests/Index/TestTransactions.cs @@ -47,13 +47,6 @@ public class TestTransactions : LuceneTestCase private class RandomFailure : Failure { - private readonly TestTransactions outerInstance; - - public RandomFailure(TestTransactions outerInstance) - { - this.outerInstance = outerInstance; - } - public override void Eval(MockDirectoryWrapper dir) { if (TestTransactions.doFail && Random.Next() % 10 <= 3) @@ -114,18 +107,16 @@ internal virtual bool AnyErrors() private class IndexerThread : TimedThread { - private readonly TestTransactions outerInstance; internal Directory dir1; internal Directory dir2; internal object @lock; internal int nextID; - public IndexerThread(TestTransactions outerInstance, object @lock, + public IndexerThread(object @lock, Directory dir1, Directory dir2, TimedThread[] threads) : base(threads) { - this.outerInstance = outerInstance; this.@lock = @lock; this.dir1 = dir1; this.dir2 = dir2; @@ -155,7 +146,7 @@ public override void DoWork() doFail = true; try { - UninterruptableMonitor.Enter(@lock); + UninterruptableMonitor.Enter(@lock); // LUCENENET: Using UninterruptableMonitor instead of lock/synchronized, see docs for type try { try @@ -237,7 +228,7 @@ public SearcherThread(object @lock, Directory dir1, Directory dir2, TimedThread[] threads) public override void DoWork() { IndexReader r1 = null, r2 = null; - UninterruptableMonitor.Enter(@lock); + UninterruptableMonitor.Enter(@lock); // LUCENENET: Using UninterruptableMonitor instead of lock/synchronized, see docs for type try { try @@ -297,8 +288,8 @@ public virtual void TestTransactions_Mem() MockDirectoryWrapper dir2 = new MockDirectoryWrapper(Random, new RAMDirectory()); dir1.PreventDoubleWrite = false; dir2.PreventDoubleWrite = false; - dir1.FailOn(new RandomFailure(this)); - dir2.FailOn(new RandomFailure(this)); + dir1.FailOn(new RandomFailure()); + dir2.FailOn(new RandomFailure()); dir1.FailOnOpenInput = false; dir2.FailOnOpenInput = false; @@ -313,7 +304,7 @@ public virtual void TestTransactions_Mem() TimedThread[] threads = new TimedThread[3]; int numThread = 0; - IndexerThread indexerThread = new IndexerThread(this, this, dir1, dir2, threads); + IndexerThread indexerThread = new IndexerThread(this, dir1, dir2, threads); threads[numThread++] = indexerThread; indexerThread.Start(); @@ -341,4 +332,4 @@ public virtual void TestTransactions_Mem() Console.WriteLine("End test"); } } -} \ No newline at end of file +}
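The new comments on UninterruptableMonitor.Enter flag a port-wide convention rather than a behavior change in this file. A hedged sketch of the full pattern as it appears in these worker threads, with syncRoot as a stand-in lock object; per the comment's pointer to the type's own docs, it behaves like C#'s lock statement but does not let Thread.Interrupt abort monitor acquisition:

    // Assumes Lucene.Net.Support.Threading; syncRoot is any private object.
    UninterruptableMonitor.Enter(syncRoot);
    try
    {
        // ... guarded work, e.g. opening readers against dir1/dir2 ...
    }
    finally
    {
        UninterruptableMonitor.Exit(syncRoot);
    }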
diff --git a/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs b/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs index 4797010399..dcf9dee112 100644 --- a/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs +++ b/src/Lucene.Net.Tests/Index/TestUniqueTermCount.cs @@ -6,7 +6,6 @@ using Lucene.Net.Store; using Lucene.Net.Util; using NUnit.Framework; -using System; using System.Collections.Generic; using System.Text; using JCG = J2N.Collections.Generic; @@ -79,10 +78,10 @@ public void Test() } } - /** - * Makes a bunch of single-char tokens (the max # unique terms will at most be 26). - * puts the # unique terms into expected, to be checked against the norm. - */ + /// <summary> + /// Makes a bunch of single-char tokens (the max # unique terms will at most be 26). + /// puts the # unique terms into expected, to be checked against the norm. + /// </summary> private string AddValue() { StringBuilder sb = new StringBuilder(); @@ -99,12 +98,11 @@ private string AddValue() return sb.toString(); } - /** - * Simple similarity that encodes maxTermFrequency directly - */ + /// <summary> + /// Simple similarity that encodes maxTermFrequency directly + /// </summary> internal class TestSimilarity : Similarity { - public override long ComputeNorm(FieldInvertState state) { return state.UniqueTermCount;
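For context, TestSimilarity works because the norm hook receives per-field statistics at index time. A hedged sketch of the same idea as a standalone class; the two query-time members are stubbed since the test never searches, and their exact shapes are assumed from the Lucene.NET 4.8 Similarity base class:

    internal sealed class UniqueTermCountSimilarity : Similarity
    {
        // Store the number of unique terms seen for the field directly as its norm.
        public override long ComputeNorm(FieldInvertState state)
            => state.UniqueTermCount;

        // Not needed for an indexing-only test.
        public override SimWeight ComputeWeight(float queryBoost,
            CollectionStatistics collectionStats, params TermStatistics[] termStats)
            => throw new NotSupportedException();

        public override SimScorer GetSimScorer(SimWeight weight,
            AtomicReaderContext context)
            => throw new NotSupportedException();
    }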
diff --git a/src/Lucene.Net/Index/IndexWriter.cs b/src/Lucene.Net/Index/IndexWriter.cs index fd81b70740..48e651ea92 100644 --- a/src/Lucene.Net/Index/IndexWriter.cs +++ b/src/Lucene.Net/Index/IndexWriter.cs @@ -126,9 +126,9 @@ namespace Lucene.Net.Index /// the <see cref="MergePolicy"/> and the <see cref="MergeScheduler"/>. /// The <see cref="MergePolicy"/> is invoked whenever there are /// changes to the segments in the index. Its role is to - /// select which merges to do, if any, and return a <see cref="MergePolicy.MergeSpecification"/> + /// select which merges to do, if any, and return a <see cref="MergePolicy.MergeSpecification"/> /// describing the merges. - /// The default is <see cref="TieredMergePolicy"/>. Then, the + /// The default is <see cref="TieredMergePolicy"/>. Then, the /// <see cref="MergeScheduler"/> is invoked with the requested merges and /// it decides when and how to run the merges. The default is /// <see cref="ConcurrentMergeScheduler"/>. @@ -145,7 +145,7 @@ namespace Lucene.Net.Index /// last commit. You can also just call /// <see cref="Commit()"/> directly. /// - /// NOTE: <see cref="IndexWriter"/> + /// NOTE: <see cref="IndexWriter"/> /// instances are completely thread /// safe, meaning multiple threads can call any of its /// methods, concurrently. If your application requires /// @@ -1074,24 +1074,24 @@ private void MessageState() /// something like this: /// /// <code> - /// try + /// try /// { /// writer.Dispose(); - /// } - /// finally + /// } + /// finally /// { - /// if (IndexWriter.IsLocked(directory)) + /// if (IndexWriter.IsLocked(directory)) /// { /// IndexWriter.Unlock(directory); /// } /// } /// </code> - /// + /// /// after which, you must be certain not to use the writer /// instance anymore. /// /// NOTE: if this method hits an <see cref="OutOfMemoryException"/> - /// you should immediately dispose the writer, again. See + /// you should immediately dispose the writer, again. See /// for details. /// /// <exception cref="IOException"> if there is a low-level IO error </exception> @@ -1109,7 +1109,7 @@ public void Dispose() /// threads. /// /// NOTE: If this method hits an <see cref="OutOfMemoryException"/> - /// you should immediately dispose the writer, again. See + /// you should immediately dispose the writer, again. See /// for details. /// /// NOTE: It is dangerous to always call @@ -1160,7 +1160,7 @@ public void Dispose(bool waitForMerges) /// set to true. /// /// NOTE: If this method hits an <see cref="OutOfMemoryException"/> - /// you should immediately dispose the writer, again. See + /// you should immediately dispose the writer, again. See /// for details. /// /// NOTE: It is dangerous to always call @@ -1594,7 +1594,7 @@ public virtual bool HasDeletions() /// U+FFFD. /// /// NOTE: if this method hits an <see cref="OutOfMemoryException"/> - /// you should immediately dispose the writer. See + /// you should immediately dispose the writer. See /// for details. /// /// <exception cref="CorruptIndexException"> if the index is corrupt </exception> /// <exception cref="IOException"> if there is a low-level IO error </exception> @@ -1658,8 +1658,8 @@ public virtual void AddDocument(IEnumerable<IIndexableField> doc, Analyzer analy /// NOTE: if this method hits an <see cref="OutOfMemoryException"/> /// you should immediately dispose the writer. See /// for details. - /// - /// @lucene.experimental + /// + /// @lucene.experimental /// /// <exception cref="CorruptIndexException"> if the index is corrupt </exception> /// <exception cref="IOException"> if there is a low-level IO error </exception> @@ -1746,7 +1746,7 @@ public virtual void UpdateDocuments(Term delTerm, IEnumerable. /// /// NOTE: if this method hits an <see cref="OutOfMemoryException"/> - /// you should immediately dispose the writer. See + /// you should immediately dispose the writer. See /// for details. /// /// <param name="term"> the term to identify the documents to be deleted </param> @@ -1770,7 +1770,7 @@ public virtual void DeleteDocuments(Term term) /// /// Expert: attempts to delete by document ID, as long as - /// the provided <paramref name="readerIn"/> is a near-real-time reader (from + /// the provided <paramref name="readerIn"/> is a near-real-time reader (from /// <see cref="DirectoryReader.Open(IndexWriter, bool)"/>. If the /// provided <paramref name="readerIn"/> is an NRT reader obtained from this /// writer, and its segment has not been merged away, then /// @@ -2604,7 +2604,7 @@ private bool UpdatePendingMerges(MergeTrigger trigger, int maxNumSegments) try { if (Debugging.AssertsEnabled) Debugging.Assert(maxNumSegments == -1 || maxNumSegments > 0); - //if (Debugging.AssertsEnabled) Debugging.Assert(trigger != null); // LUCENENET NOTE: Enum cannot be null in .NET + if (Debugging.AssertsEnabled) Debugging.Assert(trigger != MergeTrigger.NONE); // LUCENENET specific: using NONE instead of null if (stopMerges) { return false; @@ -2912,7 +2912,7 @@ private void RollbackInternal() /// documents as deleted. /// /// NOTE: this method will forcefully abort all merges - /// in progress. If other threads are running <see cref="ForceMerge(int)"/> + /// in progress. If other threads are running <see cref="ForceMerge(int)"/> /// <see cref="AddIndexes(IndexReader[])"/>, or <see cref="ForceMergeDeletes()"/> /// methods, they may receive /// <see cref="MergePolicy.MergeAbortedException"/>s. @@ -3852,8 +3852,8 @@ protected virtual void DoBeforeFlush() /// steps necessary to commit changes since this writer /// was opened: flushes pending added and deleted docs, /// syncs the index files, writes most of next segments_N - /// file. After calling this you must call either <see cref="Commit()"/> - /// to finish the commit, or <see cref="Rollback()"/> + /// file. After calling this you must call either <see cref="Commit()"/> + /// to finish the commit, or <see cref="Rollback()"/> /// to revert the commit and undo all changes /// done since the writer was opened. /// @@ -6136,7 +6136,7 @@ public static void Unlock(Directory directory) /// after a merge completes. /// /// @lucene.experimental - /// + /// /// NOTE: <see cref="IndexReaderWarmer.Warm(AtomicReader)"/> is called before any deletes have /// been carried over to the merged segment. /// @@ -6485,4 +6485,4 @@ private static bool SlowFileExists(Directory dir, string fileName) } } } -} \ No newline at end of file +}
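The PrepareCommit documentation this hunk touches describes a two-phase contract: once a commit is prepared, the only legal next steps are Commit() or Rollback(). A minimal sketch of honoring it, with writer as a hypothetical open IndexWriter:

    writer.PrepareCommit(); // phase 1: flush and sync; commit not yet visible
    try
    {
        writer.Commit();    // phase 2: make the prepared commit visible
    }
    catch (Exception)
    {
        writer.Rollback();  // abandon the prepared commit and all pending changes
        throw;
    }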
diff --git a/src/Lucene.Net/Index/MergeTrigger.cs b/src/Lucene.Net/Index/MergeTrigger.cs index ea8c80c16a..2aa3ebccf6 100644 --- a/src/Lucene.Net/Index/MergeTrigger.cs +++ b/src/Lucene.Net/Index/MergeTrigger.cs @@ -24,6 +24,11 @@ namespace Lucene.Net.Index /// </summary> public enum MergeTrigger { + /// <summary> + /// LUCENENET-specific value to be used instead of null. + /// </summary> + NONE = 0, + /// <summary> /// Merge was triggered by a segment flush. /// </summary> @@ -50,4 +55,4 @@ public enum MergeTrigger /// </summary> CLOSING } -} \ No newline at end of file +}
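With the sentinel in place, callers and policies can test for "no trigger" explicitly instead of casting int.MinValue. A hypothetical guard helper in the style of the UpdatePendingMerges assert shown earlier:

    private static void EnsureExplicitTrigger(MergeTrigger trigger)
    {
        // NONE plays the role Java's null trigger did; reject it wherever
        // a concrete trigger is required.
        if (trigger == MergeTrigger.NONE)
            throw new ArgumentException("A concrete MergeTrigger is required.", nameof(trigger));
    }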
diff --git a/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs b/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs index 0214c46b8f..62a1e359c6 100644 --- a/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs +++ b/src/Lucene.Net/Index/UpgradeIndexMergePolicy.cs @@ -83,8 +83,8 @@ public override void SetIndexWriter(IndexWriter writer) public override MergeSpecification FindMerges(MergeTrigger mergeTrigger, SegmentInfos segmentInfos) { - // LUCENENET specific - just use min value to indicate "null" for merge trigger - return m_base.FindMerges((MergeTrigger)int.MinValue, segmentInfos); + // LUCENENET specific - use NONE instead of null + return m_base.FindMerges(MergeTrigger.NONE, segmentInfos); } public override MergeSpecification FindForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, IDictionary<SegmentCommitInfo, bool?> segmentsToMerge) @@ -163,7 +163,7 @@ public override bool UseCompoundFile(SegmentInfos segments, SegmentCommitInfo ne protected override void Dispose(bool disposing) { if (disposing) - { + { m_base.Dispose(); } } @@ -184,4 +184,4 @@ private void Message(string message) m_writer.Get().infoStream.Message("UPGMP", message); } } -} \ No newline at end of file +} diff --git a/src/Lucene.Net/Util/IOUtils.cs b/src/Lucene.Net/Util/IOUtils.cs index a901108607..bbfdb1ccfa 100644 --- a/src/Lucene.Net/Util/IOUtils.cs +++ b/src/Lucene.Net/Util/IOUtils.cs @@ -39,9 +39,12 @@ public static class IOUtils // LUCENENET specific - made static { /// <summary> /// UTF-8 <see cref="Encoding"/> instance to prevent repeated - /// lookups - [Obsolete("Use Encoding.UTF8 instead.")] - public static readonly Encoding CHARSET_UTF_8 = Encoding.UTF8; + /// lookups and match Java's behavior + /// with respect to a lack of a byte-order mark (BOM). + /// </summary> + public static readonly Encoding CHARSET_UTF_8 = new UTF8Encoding( + encoderShouldEmitUTF8Identifier: false, + throwOnInvalidBytes: true); /// <summary> /// UTF-8 charset string. @@ -58,21 +61,21 @@ public static class IOUtils // LUCENENET specific - made static /// <code> /// IDisposable resource1 = null, resource2 = null, resource3 = null; /// ExpectedException priorE = null; - /// try + /// try /// { /// resource1 = ...; resource2 = ...; resource3 = ...; // Acquisition may throw ExpectedException /// ..do..stuff.. // May throw ExpectedException - /// } - /// catch (ExpectedException e) + /// } + /// catch (ExpectedException e) /// { /// priorE = e; - /// } - /// finally + /// } + /// finally /// { /// IOUtils.CloseWhileHandlingException(priorE, resource1, resource2, resource3); /// } /// </code> - /// + /// /// /// <param name="priorException"> null or an exception that will be rethrown after method completion. </param> /// <param name="objects"> Objects to call <see cref="IDisposable.Dispose()"/> on. </param> @@ -148,21 +151,21 @@ public static void CloseWhileHandlingException(IEnumerable<IDisposable> objects) /// <code> /// IDisposable resource1 = null, resource2 = null, resource3 = null; /// ExpectedException priorE = null; - /// try + /// try /// { /// resource1 = ...; resource2 = ...; resource3 = ...; // Acquisition may throw ExpectedException /// ..do..stuff.. // May throw ExpectedException - /// } - /// catch (ExpectedException e) + /// } + /// catch (ExpectedException e) /// { /// priorE = e; - /// } - /// finally + /// } + /// finally /// { /// IOUtils.DisposeWhileHandlingException(priorE, resource1, resource2, resource3); /// } /// </code> - /// + /// /// /// <param name="priorException"> null or an exception that will be rethrown after method completion. </param> /// <param name="objects"> Objects to call <see cref="IDisposable.Dispose()"/> on. </param> @@ -201,7 +204,7 @@ public static void DisposeWhileHandlingException(Exception priorException, param /// Disposes all given <see cref="IDisposable"/>s, suppressing all thrown exceptions. /// [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static void DisposeWhileHandlingException(Exception priorException, IEnumerable<IDisposable> objects) + public static void DisposeWhileHandlingException(Exception priorException, IEnumerable<IDisposable> objects) { Exception th = null; @@ -241,7 +244,7 @@ public static void DisposeWhileHandlingException(Exception priorException, IEnum /// /// <param name="objects"> Objects to call <see cref="IDisposable.Dispose()"/> on </param> [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static void Dispose(params IDisposable[] objects) + public static void Dispose(params IDisposable[] objects) { Exception th = null; @@ -298,7 +301,7 @@ public static void Dispose(IEnumerable<IDisposable> objects) /// /// <param name="objects"> Objects to call <see cref="IDisposable.Dispose()"/> on </param> [MethodImpl(MethodImplOptions.AggressiveInlining)] - public static void DisposeWhileHandlingException(params IDisposable[] objects) + public static void DisposeWhileHandlingException(params IDisposable[] objects) { foreach (var o in objects) { @@ -334,7 +337,7 @@ public static void DisposeWhileHandlingException(IEnumerable<IDisposable> object /// /// Since there's no C# equivalent of Java's Exception.AddSuppressed, we add the - /// suppressed exceptions to a data field via the + /// suppressed exceptions to a data field via the /// method. /// /// The exceptions can be retrieved by calling @@ -480,7 +483,7 @@ public static void Copy(FileInfo source, FileInfo target) /// /// Simple utilty method that takes a previously caught - /// and rethrows either <see cref="IOException"/> + /// and rethrows either <see cref="IOException"/> /// or an unchecked exception. If the /// argument is null then this method does nothing. /// @@ -513,8 +516,8 @@ public static void ReThrowUnchecked(Exception th) } } - // LUCENENET specific: Fsync is pointless in .NET, since we are + // LUCENENET specific: Fsync is pointless in .NET, since we are // calling FileStream.Flush(true) before the stream is disposed // which means we never need it at the point in Java where it is called. } -}
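Why the CHARSET_UTF_8 change matters: Encoding.UTF8 advertises a byte-order mark in its preamble and silently substitutes U+FFFD for invalid input, while Java's "UTF-8" charset (which CHARSET_UTF_8 mirrors) does neither. A small self-contained check of both differences:

    using System;
    using System.Text;

    Encoding bcl = Encoding.UTF8; // emits a BOM preamble, replaces bad bytes
    Encoding javaLike = new UTF8Encoding(
        encoderShouldEmitUTF8Identifier: false,
        throwOnInvalidBytes: true);

    Console.WriteLine(bcl.GetPreamble().Length);      // 3 (EF BB BF)
    Console.WriteLine(javaLike.GetPreamble().Length); // 0

    byte[] truncated = { 0xC3 }; // first byte of an unfinished two-byte sequence
    Console.WriteLine(bcl.GetString(truncated) == "\uFFFD"); // True: silent replacement
    try
    {
        javaLike.GetString(truncated); // strict decoder
    }
    catch (DecoderFallbackException)
    {
        Console.WriteLine("strict decode rejected the input");
    }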