public void AtomicInt64InitialisesWithDefaultValue()
        {
            const long expected = 0;

            var i = new AtomicInt64();

            Assert.AreEqual(expected, (long)i);
            Assert.AreEqual(expected, i.Value);
        }
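Most of the snippets in this listing exercise the same small surface: construct an AtomicInt64, read Value, call PreIncrement, GetAndAdd, or GetAndSet, and cast to long. As a point of reference, a minimal Interlocked-backed counter with that shape might look like the sketch below (illustration only, not the implementation of any AtomicInt64 type used in these examples):

using System.Threading;

// Illustrative sketch: a minimal counter exposing the members the tests in this
// listing exercise. The real AtomicInt64 types referenced below have richer APIs.
public sealed class SimpleAtomicInt64
{
    private long _value;

    public SimpleAtomicInt64(long initialValue = 0) => _value = initialValue;

    public long Value => Interlocked.Read(ref _value);

    // Increments and returns the new value.
    public long PreIncrement() => Interlocked.Increment(ref _value);

    // Adds delta and returns the value held before the add (Java-style getAndAdd).
    public long GetAndAdd(long delta) => Interlocked.Add(ref _value, delta) - delta;

    // Replaces the value and returns the previous one.
    public long GetAndSet(long newValue) => Interlocked.Exchange(ref _value, newValue);

    public static implicit operator long(SimpleAtomicInt64 counter) => counter.Value;
}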
Example #2
 public ThreadAnonymousInnerClassHelper2(TestStressNRT outerInstance, string str, int ndocs, bool tombstones, AtomicInt64 operations)
     : base(str)
 {
     this.OuterInstance = outerInstance;
     this.Ndocs         = ndocs;
     this.Tombstones    = tombstones;
     this.Operations    = operations;
     rand = new Random(Random().Next());
 }
Example #3
 public ThreadAnonymousClass2(TestStressNRT outerInstance, string str, int ndocs, bool tombstones, AtomicInt64 operations)
     : base(str)
 {
     this.outerInstance = outerInstance;
     this.ndocs         = ndocs;
     this.tombstones    = tombstones;
     this.operations    = operations;
     rand = new Random(Random.Next());
 }
Example #4
        public void AtomicInt64InitialisesWithSpecifiedValue()
        {
            const long expected = 47;

            var i = new AtomicInt64(expected);

            Assert.AreEqual(expected, (long)i);
            Assert.AreEqual(expected, i.Value);
        }
Example #5
        public void AtomicInt64SupportsPreIncrementMultiThreaded()
        {
            const long expected = 1000;

            var i = new AtomicInt64();

            System.Threading.Tasks.Parallel.For(0, expected, j => i.PreIncrement());

            Assert.AreEqual(expected, (long)i);
            Assert.AreEqual(expected, i.Value);
        }
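The multithreaded tests in this listing depend on the increment being atomic; a plain long incremented from Parallel.For can lose updates. A small standalone illustration (hypothetical code, not part of the test suite shown here):

using System;
using System.Threading;
using System.Threading.Tasks;

static class RacyVersusAtomicDemo
{
    static void Main()
    {
        long racy = 0, safe = 0;

        Parallel.For(0, 1_000_000, _ =>
        {
            racy++;                          // unsynchronized read-modify-write: updates can be lost
            Interlocked.Increment(ref safe); // atomic increment: always ends at 1,000,000
        });

        Console.WriteLine($"racy={racy} safe={safe}"); // racy frequently ends below 1,000,000
    }
}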
Example #6
        public void AtomicInt64SupportsGetAndAddNegativesMultiThreaded()
        {
            const long expected = 0;

            var i = new AtomicInt64();

            i.GetAndSet(1000);

            System.Threading.Tasks.Parallel.For(0, 1000, j => i.GetAndAdd(-1));

            Assert.AreEqual(expected, (long)i);
            Assert.AreEqual(expected, i.Value);
        }
Example #7
        /// <summary>
        /// Processes the rollover of this file.
        /// </summary>
        private void m_rolloverTask_Running(object sender, EventArgs <ScheduledTaskRunningReason> e)
        {
            //The nature of how the ScheduledTask works
            //guarantees that this function will not be called concurrently.

            //The worker can be disposed either via the Stop() method or
            //the Dispose() method.  If via the dispose method, then
            //don't do any cleanup.
            if (m_disposed && e.Argument == ScheduledTaskRunningReason.Disposing)
            {
                Log.Publish(MessageLevel.Info, "Rollover thread is Disposing");

                m_waitForEmptyActiveQueue.Dispose();
                return;
            }

            lock (m_syncRoot)
            {
                int count = m_activeQueue.Count;
                if (count == 0)
                {
                    m_waitForEmptyActiveQueue.Set();
                    return;
                }

                //Swap active and processing.
                SortedPointBuffer <TKey, TValue> swap = m_activeQueue;
                m_activeQueue               = m_processingQueue;
                m_processingQueue           = swap;
                m_activeQueue.IsReadingMode = false; //Should do nothing, but just to be sure.

                m_waitForEmptyActiveQueue.Set();
                m_currentTransactionIdRollingOver = m_latestTransactionId;
                m_currentlyRollingOverFullQueue   = m_processingQueue.IsFull;
            }

            //ToDo: The main cost when inserting data is the sorting process here.
            //ToDo:  If the current speed isn't fast enough, this can be multithreaded to improve
            //ToDo:  the insert performance. However, at this time, the added complexity is
            //ToDo:  not worth it since write speeds are already blazing fast.
            try
            {
                m_processingQueue.IsReadingMode = true; //Very CPU intensive. This does a sort on the incoming measurements. Profiling shows that about 33% of the time is spent sorting elements.
                PrebufferRolloverArgs <TKey, TValue> args = new PrebufferRolloverArgs <TKey, TValue>(m_processingQueue, m_currentTransactionIdRollingOver);
                m_onRollover(args);
                m_processingQueue.IsReadingMode = false; //Clears the queue
            }
            catch (Exception ex)
            {
                Log.Publish(MessageLevel.Critical, "Rollover process unhandled exception", "The rollover process threw an unhandled exception. Data loss will likely result from this exception", null, ex);
            }
            m_currentlyRollingOverFullQueue = false;
        }
Example #8
        public static void Main2(string[] args)
        {
            int nThreads = 2;
            int numIters = 200;

            string    fname1 = "facet1";
            FacetSpec fspec  = new FacetSpec();

            fspec.ExpandSelection = (true);
            fspec.MaxCount        = (50);
            fspec.MinHitCount     = (1);
            fspec.OrderBy         = FacetSpec.FacetSortSpec.OrderHitsDesc;

            List <IFacetAccessible> list1 = new List <IFacetAccessible>(numSegs);

            for (int i = 0; i < numSegs; ++i)
            {
                list1.Add(BuildSubAccessible(fname1, i, fspec));
            }

            AtomicInt64 timeCounter = new AtomicInt64();

            Thread[]       threads      = new Thread[nThreads];
            RunnerThread[] threadStates = new RunnerThread[nThreads];
            for (int i = 0; i < threads.Length; ++i)
            {
                var threadState = new RunnerThread(timeCounter, numIters, fspec, list1);
                threadStates[i] = threadState;
                threads[i]      = new Thread(new ThreadStart(threadState.Run));
            }


            //		System.out.println("press key to start load test... ");
            //		{
            //			BufferedReader br = new BufferedReader(new InputStreamReader(
            //					System.in));
            //			int ch = br.read();
            //			char c = (char) ch;
            //		}
            foreach (Thread t in threads)
            {
                t.Start();
            }

            foreach (Thread t in threads)
            {
                t.Join();
            }

            Console.WriteLine("average time: " + timeCounter.Get() / numIters / nThreads + " ms");
        }
Example #9
        public void AtomicInt64SupportsPreIncrement()
        {
            const long expected = 1000;

            var i = new AtomicInt64();

            for (int j = 0; j < expected; j++)
            {
                i.PreIncrement();
            }

            Assert.AreEqual(expected, (long)i);
            Assert.AreEqual(expected, i.Value);
        }
Example #10
        public void AtomicInt64SupportsGetAndAdd()
        {
            const long expected = 1000;

            var i = new AtomicInt64();

            for (int j = 0; j < expected; j++)
            {
                i.GetAndAdd(1);
            }

            Assert.AreEqual(expected, (long)i);
            Assert.AreEqual(expected, i.Value);
        }
Example #11
 public void TestAtomicInt64OperatorMinus()
 {
     for (var j = 0; j < 10; j++)
     {
         var x     = AtomicInt64.From(1000);
         var tasks = new Task[100];
         for (var i = 0; i < 100; i++)
         {
             tasks[i] = Task.Factory.StartNew(() => x.Minus(5));
         }
         Task.WaitAll(tasks);
         Assert.Equal(500, x.Value);
     }
 }
Example #12
 public void TestAtomicInt64OperatorIncrement()
 {
     for (var j = 0; j < 10; j++)
     {
         var atomic = AtomicInt64.From(0);
         var tasks  = new Task[100];
         for (var i = 0; i < 100; i++)
         {
             tasks[i] = Task.Factory.StartNew(() => atomic.Increment());
         }
         Task.WaitAll(tasks);
         Assert.Equal(100, atomic.Value);
     }
 }
Example #13
        public void AtomicInt64SupportsGetAndAddNegatives()
        {
            const long expected = 0;

            var i = new AtomicInt64();

            i.GetAndSet(1000);

            for (int j = 0; j < 1000; j++)
            {
                i.GetAndAdd(-1);
            }

            Assert.AreEqual(expected, (long)i);
            Assert.AreEqual(expected, i.Value);
        }
Example #14
 public ThreadAnonymousClass(TestStressNRT outerInstance, string str, int commitPercent, int softCommitPercent, int deletePercent, int deleteByQueryPercent, int ndocs, int maxConcurrentCommits, bool tombstones, AtomicInt64 operations, FieldType storedOnlyType, AtomicInt32 numCommitting, RandomIndexWriter writer)
     : base(str)
 {
     this.outerInstance        = outerInstance;
     this.commitPercent        = commitPercent;
     this.softCommitPercent    = softCommitPercent;
     this.deletePercent        = deletePercent;
     this.deleteByQueryPercent = deleteByQueryPercent;
     this.ndocs = ndocs;
     this.maxConcurrentCommits = maxConcurrentCommits;
     this.tombstones           = tombstones;
     this.operations           = operations;
     this.storedOnlyType       = storedOnlyType;
     this.numCommitting        = numCommitting;
     this.writer = writer;
     rand        = new J2N.Randomizer(Random.NextInt64());
 }
Example #15
 public ThreadAnonymousInnerClassHelper(TestStressNRT outerInstance, string str, int commitPercent, int softCommitPercent, int deletePercent, int deleteByQueryPercent, int ndocs, int maxConcurrentCommits, bool tombstones, AtomicInt64 operations, FieldType storedOnlyType, AtomicInt32 numCommitting, RandomIndexWriter writer)
     : base(str)
 {
     this.OuterInstance        = outerInstance;
     this.CommitPercent        = commitPercent;
     this.SoftCommitPercent    = softCommitPercent;
     this.DeletePercent        = deletePercent;
     this.DeleteByQueryPercent = deleteByQueryPercent;
     this.Ndocs = ndocs;
     this.MaxConcurrentCommits = maxConcurrentCommits;
     this.Tombstones           = tombstones;
     this.Operations           = operations;
     this.StoredOnlyType       = storedOnlyType;
     this.NumCommitting        = numCommitting;
     this.Writer = writer;
     rand        = new Random(Random().Next());
 }
Example #16
        public async Task LowerVsUpperHashcodeTest()
        {
            var msg        = string.Empty;
            var lowerChars = new string(Enumerable.Range('a', 26).Select(i => (char)i).ToArray());
            var upperChars = new string(Enumerable.Range('A', 26).Select(i => (char)i).ToArray());

            var max   = 10000000L;
            var count = new AtomicInt64();
            await Task.WhenAll(Enumerable.Range(0, Environment.ProcessorCount / 2).Select(n => Task.Run(() =>
            {
                var i = count.Increment();
                while (i <= max)
                {
                    var lower = i.ToBigInt().ToBase(lowerChars);
                    var upper = i.ToBigInt().ToBase(upperChars);
                    var e     = msg = $"Lower: {lower}, Upper: {upper}";
                    Assert.AreNotEqual(lower.GetHashCode(), upper.GetHashCode(), e);
                    i = count.Increment();
                }
            })));

            Console.WriteLine(msg);
        }
Example #17
 public POCopStats()
 {
     intervalCount = AtomicInt64.From(0);
     totalOpsDone  = AtomicInt64.From(0);
     slowOps       = AtomicInt64.From(0);
 }
Example #18
        public virtual void Test()
        {
            // update variables
            int commitPercent        = Random().Next(20);
            int softCommitPercent    = Random().Next(100); // what percent of the commits are soft
            int deletePercent        = Random().Next(50);
            int deleteByQueryPercent = Random().Next(25);
            int ndocs                = AtLeast(50);
            int nWriteThreads        = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5);
            int maxConcurrentCommits = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5); // number of committers at a time... needed if we want to avoid commit errors due to exceeding the max

            bool tombstones = Random().NextBoolean();

            // query variables
            AtomicInt64 operations = new AtomicInt64(AtLeast(10000)); // number of query operations to perform in total

            int nReadThreads = TestUtil.NextInt(Random(), 1, TEST_NIGHTLY ? 10 : 5);

            InitModel(ndocs);

            FieldType storedOnlyType = new FieldType();

            storedOnlyType.IsStored = true;

            if (VERBOSE)
            {
                Console.WriteLine("\n");
                Console.WriteLine("TEST: commitPercent=" + commitPercent);
                Console.WriteLine("TEST: softCommitPercent=" + softCommitPercent);
                Console.WriteLine("TEST: deletePercent=" + deletePercent);
                Console.WriteLine("TEST: deleteByQueryPercent=" + deleteByQueryPercent);
                Console.WriteLine("TEST: ndocs=" + ndocs);
                Console.WriteLine("TEST: nWriteThreads=" + nWriteThreads);
                Console.WriteLine("TEST: nReadThreads=" + nReadThreads);
                Console.WriteLine("TEST: maxConcurrentCommits=" + maxConcurrentCommits);
                Console.WriteLine("TEST: tombstones=" + tombstones);
                Console.WriteLine("TEST: operations=" + operations);
                Console.WriteLine("\n");
            }

            AtomicInt32 numCommitting = new AtomicInt32();

            IList <ThreadClass> threads = new List <ThreadClass>();

            Directory dir = NewDirectory();

            RandomIndexWriter writer = new RandomIndexWriter(Random(), dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())));

            writer.DoRandomForceMergeAssert = false;
            writer.Commit();
            Reader = DirectoryReader.Open(dir);

            for (int i = 0; i < nWriteThreads; i++)
            {
                ThreadClass thread = new ThreadAnonymousInnerClassHelper(this, "WRITER" + i, commitPercent, softCommitPercent, deletePercent, deleteByQueryPercent, ndocs, maxConcurrentCommits, tombstones, operations, storedOnlyType, numCommitting, writer);

                threads.Add(thread);
            }

            for (int i = 0; i < nReadThreads; i++)
            {
                ThreadClass thread = new ThreadAnonymousInnerClassHelper2(this, "READER" + i, ndocs, tombstones, operations);

                threads.Add(thread);
            }

            foreach (ThreadClass thread in threads)
            {
                thread.Start();
            }

            foreach (ThreadClass thread in threads)
            {
                thread.Join();
            }

            writer.Dispose();
            if (VERBOSE)
            {
                Console.WriteLine("TEST: close reader=" + Reader);
            }
            Reader.Dispose();
            dir.Dispose();
        }
Example #19
        internal Lucene42DocValuesProducer(SegmentReadState state, string dataCodec, string dataExtension, string metaCodec, string metaExtension)
        {
            maxDoc = state.SegmentInfo.DocCount;
            string metaName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, metaExtension);
            // read in the entries from the metadata file.
            ChecksumIndexInput @in = state.Directory.OpenChecksumInput(metaName, state.Context);
            bool success           = false;

            ramBytesUsed = new AtomicInt64(RamUsageEstimator.ShallowSizeOfInstance(this.GetType()));
            try
            {
                version  = CodecUtil.CheckHeader(@in, metaCodec, VERSION_START, VERSION_CURRENT);
                numerics = new Dictionary <int, NumericEntry>();
                binaries = new Dictionary <int, BinaryEntry>();
                fsts     = new Dictionary <int, FSTEntry>();
                ReadFields(@in, state.FieldInfos);

                if (version >= VERSION_CHECKSUM)
                {
                    CodecUtil.CheckFooter(@in);
                }
                else
                {
#pragma warning disable 612, 618
                    CodecUtil.CheckEOF(@in);
#pragma warning restore 612, 618
                }

                success = true;
            }
            finally
            {
                if (success)
                {
                    IOUtils.Dispose(@in);
                }
                else
                {
                    IOUtils.DisposeWhileHandlingException(@in);
                }
            }

            success = false;
            try
            {
                string dataName = IndexFileNames.SegmentFileName(state.SegmentInfo.Name, state.SegmentSuffix, dataExtension);
                data = state.Directory.OpenInput(dataName, state.Context);
                int version2 = CodecUtil.CheckHeader(data, dataCodec, VERSION_START, VERSION_CURRENT);
                if (version != version2)
                {
                    throw new CorruptIndexException("Format versions mismatch");
                }

                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.DisposeWhileHandlingException(this.data);
                }
            }
        }
Example #20
        public virtual void Test()
        {
            Directory         dir = NewDirectory();
            RandomIndexWriter w   = new RandomIndexWriter(
#if FEATURE_INSTANCE_TESTDATA_INITIALIZATION
                this,
#endif
                Random, dir);

            long startTime = Environment.TickCount;

            // TODO: replace w/ the @nightly test data; make this
            // into an optional @nightly stress test
            Document doc  = new Document();
            Field    body = NewTextField("body", "", Field.Store.NO);

            doc.Add(body);
            StringBuilder sb = new StringBuilder();

            for (int docCount = 0; docCount < NUM_DOCS; docCount++)
            {
                int numTerms = Random.Next(10);
                for (int termCount = 0; termCount < numTerms; termCount++)
                {
                    sb.Append(Random.NextBoolean() ? "aaa" : "bbb");
                    sb.Append(' ');
                }
                body.SetStringValue(sb.ToString());
                w.AddDocument(doc);
                sb.Remove(0, sb.Length);
            }
            IndexReader r = w.GetReader();

            w.Dispose();

            long endTime = Environment.TickCount;

            if (Verbose)
            {
                Console.WriteLine("BUILD took " + (endTime - startTime));
            }

            IndexSearcher s = NewSearcher(r);

            AtomicBoolean failed    = new AtomicBoolean();
            AtomicInt64   netSearch = new AtomicInt64();

            ThreadJob[] threads = new ThreadJob[NUM_SEARCH_THREADS];
            for (int threadID = 0; threadID < NUM_SEARCH_THREADS; threadID++)
            {
                threads[threadID] = new ThreadAnonymousClass(this, s, failed, netSearch);
                threads[threadID].IsBackground = (true);
            }

            foreach (ThreadJob t in threads)
            {
                t.Start();
            }

            foreach (ThreadJob t in threads)
            {
                t.Join();
            }

            if (Verbose)
            {
                Console.WriteLine(NUM_SEARCH_THREADS + " threads did " + netSearch + " searches");
            }

            r.Dispose();
            dir.Dispose();
        }
Example #21
 public AtomicSequence()
 {
     seq = AtomicInt64.From(0);
 }
Example #22
 public ThreadAnonymousClass(TestSearchWithThreads outerInstance, IndexSearcher s, AtomicBoolean failed, AtomicInt64 netSearch)
 {
     this.outerInstance = outerInstance;
     this.s             = s;
     this.failed        = failed;
     this.netSearch     = netSearch;
     col = new TotalHitCountCollector();
 }
Example #23
        // note: just like segmentreader in 3.x, we open up all the files here (including separate norms) up front.
        // but we just don't do any seeks or reading yet.
        public Lucene3xNormsProducer(Directory dir, SegmentInfo info, FieldInfos fields, IOContext context)
        {
            Directory separateNormsDir = info.Dir; // separate norms are never inside CFS

            maxdoc = info.DocCount;
            string segmentName = info.Name;
            bool   success     = false;

            try
            {
                long nextNormSeek = NORMS_HEADER.Length; //skip header (header unused for now)
                foreach (FieldInfo fi in fields)
                {
                    if (fi.HasNorms)
                    {
                        string    fileName = GetNormFilename(info, fi.Number);
                        Directory d        = HasSeparateNorms(info, fi.Number) ? separateNormsDir : dir;

                        // singleNormFile means multiple norms share this file
                        bool       singleNormFile = IndexFileNames.MatchesExtension(fileName, NORMS_EXTENSION);
                        IndexInput normInput      = null;
                        long       normSeek;

                        if (singleNormFile)
                        {
                            normSeek = nextNormSeek;
                            if (singleNormStream == null)
                            {
                                singleNormStream = d.OpenInput(fileName, context);
                                openFiles.Add(singleNormStream);
                            }
                            // All norms in the .nrm file can share a single IndexInput since
                            // they are only used in a synchronized context.
                            // If this were to change in the future, a clone could be done here.
                            normInput = singleNormStream;
                        }
                        else
                        {
                            normInput = d.OpenInput(fileName, context);
                            openFiles.Add(normInput);
                            // if the segment was created in 3.2 or after, we wrote the header for sure,
                            // and don't need to do the sketchy file size check. otherwise, we check
                            // if the size is exactly equal to maxDoc to detect a headerless file.
                            // NOTE: remove this check in Lucene 5.0!
                            string version       = info.Version;
                            bool   isUnversioned = (version == null || StringHelper.VersionComparer.Compare(version, "3.2") < 0) && normInput.Length == maxdoc;
                            if (isUnversioned)
                            {
                                normSeek = 0;
                            }
                            else
                            {
                                normSeek = NORMS_HEADER.Length;
                            }
                        }
                        NormsDocValues norm = new NormsDocValues(this, normInput, normSeek);
                        norms[fi.Name] = norm;
                        nextNormSeek  += maxdoc; // increment also if some norms are separate
                    }
                }
                // TODO: change to a real check? see LUCENE-3619
                if (Debugging.AssertsEnabled)
                {
                    Debugging.Assert(singleNormStream == null || nextNormSeek == singleNormStream.Length, singleNormStream != null ? "len: {0} expected: {1}" : "null", singleNormStream?.Length ?? 0, nextNormSeek);
                }
                success = true;
            }
            finally
            {
                if (!success)
                {
                    IOUtils.DisposeWhileHandlingException(openFiles);
                }
            }
            ramBytesUsed = new AtomicInt64();
        }
Example #24
 internal BufferedUpdates() // LUCENENET NOTE: Made internal rather than public, since this class is intended to be internal but couldn't be because it is exposed through a public API
 {
     this.bytesUsed = new AtomicInt64();
 }
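Both Lucene.NET snippets above (ramBytesUsed in Lucene42DocValuesProducer and bytesUsed in BufferedUpdates) use AtomicInt64 purely as a shared byte accumulator. A hedged sketch of that accounting pattern, using only the members exercised elsewhere in this listing and entirely hypothetical names:

// Hypothetical helper, not part of Lucene.NET: several writer threads report
// allocations and releases while another thread samples the running total.
internal sealed class BytesUsedTracker
{
    private readonly AtomicInt64 bytesUsed = new AtomicInt64();

    public void Allocated(long bytes) => bytesUsed.GetAndAdd(bytes);

    public void Released(long bytes) => bytesUsed.GetAndAdd(-bytes);

    public long CurrentBytesUsed => bytesUsed.Value;
}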
Example #25
        public virtual void Test()
        {
            Directory         dir = NewDirectory();
            RandomIndexWriter w   = new RandomIndexWriter(Random(), dir, Similarity, TimeZone);

            long startTime = Environment.TickCount;

            // TODO: replace w/ the @nightly test data; make this
            // into an optional @nightly stress test
            Document doc  = new Document();
            Field    body = NewTextField("body", "", Field.Store.NO);

            doc.Add(body);
            StringBuilder sb = new StringBuilder();

            for (int docCount = 0; docCount < NUM_DOCS; docCount++)
            {
                int numTerms = Random().Next(10);
                for (int termCount = 0; termCount < numTerms; termCount++)
                {
                    sb.Append(Random().NextBoolean() ? "aaa" : "bbb");
                    sb.Append(' ');
                }
                body.SetStringValue(sb.ToString());
                w.AddDocument(doc);
                sb.Remove(0, sb.Length);
            }
            IndexReader r = w.Reader;

            w.Dispose();

            long endTime = Environment.TickCount;

            if (VERBOSE)
            {
                Console.WriteLine("BUILD took " + (endTime - startTime));
            }

            IndexSearcher s = NewSearcher(r);

            AtomicBoolean failed    = new AtomicBoolean();
            AtomicInt64   netSearch = new AtomicInt64();

            ThreadClass[] threads = new ThreadClass[NUM_SEARCH_THREADS];
            for (int threadID = 0; threadID < NUM_SEARCH_THREADS; threadID++)
            {
                threads[threadID] = new ThreadAnonymousInnerClassHelper(this, s, failed, netSearch);
                threads[threadID].SetDaemon(true);
            }

            foreach (ThreadClass t in threads)
            {
                t.Start();
            }

            foreach (ThreadClass t in threads)
            {
                t.Join();
            }

            if (VERBOSE)
            {
                Console.WriteLine(NUM_SEARCH_THREADS + " threads did " + netSearch.Get() + " searches");
            }

            r.Dispose();
            dir.Dispose();
        }
Example #26
        public void TestAtomicInt64DefaultCtor()
        {
            var al = new AtomicInt64();

            Assert.Equal(0, al.Value);
        }