public virtual void TestCommitThreadSafety() { const int NUM_THREADS = 5; const double RUN_SEC = 0.5; var dir = NewDirectory(); var w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy())); TestUtil.ReduceOpenFiles(w.IndexWriter); w.Commit(); var failed = new AtomicBoolean(); var threads = new ThreadJob[NUM_THREADS]; long endTime = (J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) + ((long)(RUN_SEC * 1000)); // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results for (int i = 0; i < NUM_THREADS; i++) { int finalI = i; threads[i] = new ThreadAnonymousClass(dir, w, failed, endTime, finalI, NewStringField); threads[i].Start(); } for (int i = 0; i < NUM_THREADS; i++) { threads[i].Join(); } Assert.IsFalse(failed); w.Dispose(); dir.Dispose(); }
public virtual void TestGetDocsWithFieldThreadSafety() { IFieldCache cache = FieldCache.DEFAULT; cache.PurgeAllCaches(); int NUM_THREADS = 3; ThreadJob[] threads = new ThreadJob[NUM_THREADS]; AtomicBoolean failed = new AtomicBoolean(); AtomicInt32 iters = new AtomicInt32(); int NUM_ITER = 200 * RandomMultiplier; Barrier restart = new Barrier(NUM_THREADS, (barrier) => new RunnableAnonymousClass(this, cache, iters).Run()); for (int threadIDX = 0; threadIDX < NUM_THREADS; threadIDX++) { threads[threadIDX] = new ThreadAnonymousClass(this, cache, failed, iters, NUM_ITER, restart); threads[threadIDX].Start(); } for (int threadIDX = 0; threadIDX < NUM_THREADS; threadIDX++) { threads[threadIDX].Join(); } Assert.IsFalse(failed); }
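The test above relies on System.Threading.Barrier's post-phase action: the delegate passed to the constructor runs exactly once per phase, after every participant has signalled and before any of them is released. A minimal standalone sketch of that pattern follows; the worker bodies and names are illustrative and not taken from the test.

using System;
using System.Threading;
using System.Threading.Tasks;

public static class BarrierSketch
{
    public static void Main()
    {
        const int participants = 3;
        // The post-phase action runs exactly once per phase, after every
        // participant has called SignalAndWait for that phase.
        using (var restart = new Barrier(participants, b =>
            Console.WriteLine("phase " + b.CurrentPhaseNumber + " complete")))
        {
            Task[] workers = new Task[participants];
            for (int i = 0; i < participants; i++)
            {
                int id = i;
                workers[i] = Task.Run(() =>
                {
                    for (int iter = 0; iter < 2; iter++)
                    {
                        Console.WriteLine("worker " + id + " iteration " + iter);
                        restart.SignalAndWait(); // blocks until all participants arrive
                    }
                });
            }
            Task.WaitAll(workers);
        }
    }
}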
public ConcurrentIMDBNameParser(uint imdbID) { person = new ImdbPerson(imdbID); mainPageJob = new JobWebPageDownload(Regex.Replace(url, placeholder, imdbID.ToString())); this.addJob(mainPageJob); }
public override bool hasFinished(ThreadJob job) { if (job == mainPageJob) { JobWebPageDownload res = job as JobWebPageDownload; this.mainPage = res.getResult(); pictureLoadJob = getPictureLoadJob(); if (pictureLoadJob != null) { addJob(pictureLoadJob); } else { pictureLoadJobDone = true; } JobIMDBNameParser parseJob = new JobIMDBNameParser(mainPage, person); parseJob.run(); mainPageJobDone = true; } else if (job == pictureLoadJob) { person.image = ((JobLoadImage)job).getResult(); pictureLoadJobDone = true; } bool result = false; lock (this) { if (mainPageJobDone && pictureLoadJobDone) { result = true; mainPageJobDone = false; } } return result; }
public virtual void Collect(int doc) { int docId = doc + docBase; if (slowdown > 0) { //try //{ ThreadJob.Sleep(slowdown); // } //#if !FEATURE_THREAD_INTERRUPT // catch (Exception) // { // throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details) // } //#else // catch (ThreadInterruptedException) // LUCENENET NOTE: Senseless to catch and rethrow the same exception type // { // throw; // LUCENENET: CA2200: Rethrow to preserve stack details (https://docs.microsoft.com/en-us/visualstudio/code-quality/ca2200-rethrow-to-preserve-stack-details) // } //#endif } if (Debugging.AssertsEnabled) { Debugging.Assert(docId >= 0, " base={0} doc={1}", docBase, doc); } bits.Set(docId); lastDocCollected = docId; }
internal bool IsHealthy => !stalled; // volatile read! internal bool IsThreadQueued(ThreadJob t) // for tests { lock (this) { return waiting.ContainsKey(t); } }
public virtual void TestCommitThreadSafety() { const int NUM_THREADS = 5; const double RUN_SEC = 0.5; var dir = NewDirectory(); var w = new RandomIndexWriter(Random, dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random)).SetMergePolicy(NewLogMergePolicy())); TestUtil.ReduceOpenFiles(w.IndexWriter); w.Commit(); var failed = new AtomicBoolean(); var threads = new ThreadJob[NUM_THREADS]; long endTime = Environment.TickCount + ((long)(RUN_SEC * 1000)); for (int i = 0; i < NUM_THREADS; i++) { int finalI = i; threads[i] = new ThreadAnonymousInnerClassHelper(dir, w, failed, endTime, finalI, NewStringField); threads[i].Start(); } for (int i = 0; i < NUM_THREADS; i++) { threads[i].Join(); } Assert.IsFalse(failed); w.Dispose(); dir.Dispose(); }
public virtual void TestRAMDirectorySize() { Directory dir = NewFSDirectory(IndexDir); MockDirectoryWrapper ramDir = new MockDirectoryWrapper(Random, new RAMDirectory(dir, NewIOContext(Random))); dir.Dispose(); IndexWriter writer = new IndexWriter(ramDir, (new IndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random))).SetOpenMode(OpenMode.APPEND)); writer.ForceMerge(1); Assert.AreEqual(ramDir.GetSizeInBytes(), ramDir.GetRecomputedSizeInBytes()); ThreadJob[] threads = new ThreadJob[NumThreads]; for (int i = 0; i < NumThreads; i++) { int num = i; threads[i] = new ThreadAnonymousInnerClassHelper(this, writer, num); } for (int i = 0; i < NumThreads; i++) { threads[i].Start(); } for (int i = 0; i < NumThreads; i++) { threads[i].Join(); } writer.ForceMerge(1); Assert.AreEqual(ramDir.GetSizeInBytes(), ramDir.GetRecomputedSizeInBytes()); writer.Dispose(); }
public virtual void Collect(int doc) { int docId = doc + docBase; if (slowdown > 0) { //try //{ ThreadJob.Sleep(slowdown); // } //#if NETSTANDARD1_6 // catch (Exception) // { // throw; // } //#else // catch (ThreadInterruptedException) // LUCENENET NOTE: Senseless to catch and rethrow the same exception type // { // throw; // } //#endif } if (Debugging.AssertsEnabled) { Debugging.Assert(docId >= 0, () => " base=" + docBase + " doc=" + doc); } bits.Set(docId); lastDocCollected = docId; }
public virtual void TestRandom() { DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl(); ctrl.UpdateStalled(false); ThreadJob[] stallThreads = new ThreadJob[AtLeast(3)]; for (int i = 0; i < stallThreads.Length; i++) { int stallProbability = 1 + Random.Next(10); stallThreads[i] = new ThreadAnonymousInnerClassHelper(ctrl, stallProbability); } Start(stallThreads); long time = Environment.TickCount; /* * use a 100 sec timeout to make sure we do not hang forever; join will fail in * that case */ while ((Environment.TickCount - time) < 100 * 1000 && !Terminated(stallThreads)) { ctrl.UpdateStalled(false); if (Random.NextBoolean()) { Thread.Sleep(0); } else { Thread.Sleep(1); } } Join(stallThreads); }
protected virtual void RunSearchThreads(long stopTime) { int numThreads = TestUtil.NextInt32(Random, 1, 5); ThreadJob[] searchThreads = new ThreadJob[numThreads]; AtomicInt32 totHits = new AtomicInt32(); // silly starting guess: AtomicInt32 totTermCount = new AtomicInt32(100); // TODO: we should enrich this to do more interesting searches for (int thread = 0; thread < searchThreads.Length; thread++) { searchThreads[thread] = new ThreadAnonymousClass2(this, stopTime, totHits, totTermCount); searchThreads[thread].IsBackground = (true); searchThreads[thread].Start(); } for (int thread = 0; thread < searchThreads.Length; thread++) { searchThreads[thread].Join(); } if (Verbose) { Console.WriteLine("TEST: DONE search: totHits=" + totHits); } }
public virtual void Collect(int doc) { int docId = doc + docBase; if (slowdown > 0) { //try //{ ThreadJob.Sleep(slowdown); // } //#if !FEATURE_THREAD_INTERRUPT // catch (Exception) // { // throw; // } //#else // catch (ThreadInterruptedException) // LUCENENET NOTE: Senseless to catch and rethrow the same exception type // { // throw; // } //#endif } if (Debugging.AssertsEnabled) { Debugging.Assert(docId >= 0, " base={0} doc={1}", docBase, doc); } bits.Set(docId); lastDocCollected = docId; }
public override bool hasFinished(ThreadJob job) { bool result = false; if (job == mainPageJob) { JobWebPageDownload res = job as JobWebPageDownload; mainPageJobDone = true; this.mainPage = res.getResult(); this.addJob(getPictureLoadJob()); } else if (job == awardsPageJob) { JobWebPageDownload res = job as JobWebPageDownload; awardsPageJobDone = true; this.awardsPage = res.getResult(); } else if (job == creditsPageJob) { JobWebPageDownload res = job as JobWebPageDownload; creditsPageJobDone = true; this.creditsPage = res.getResult(); } else if (job == imageLoadJob) { this.movieData.poster = ((JobLoadImage)job).getResult(); imageLoadJobDone = true; } else if (job == parseJob) { parseJobDone = true; } lock (this) { if (!parseStared && mainPageJobDone && awardsPageJobDone && creditsPageJobDone && imageLoadJobDone) { parseStared = true; parseJob = new JobImdbMovieParser(mainPage, creditsPage, awardsPage, movieData); parseJob.run(); result = true; } } return result; }
public virtual void TestRandom() { DocumentsWriterStallControl ctrl = new DocumentsWriterStallControl(); ctrl.UpdateStalled(false); ThreadJob[] stallThreads = new ThreadJob[AtLeast(3)]; for (int i = 0; i < stallThreads.Length; i++) { int stallProbability = 1 + Random.Next(10); stallThreads[i] = new ThreadAnonymousClass(ctrl, stallProbability); } Start(stallThreads); long time = J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond; // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results /* * use a 100 sec timeout to make sure we do not hang forever; join will fail in * that case */ while (((J2N.Time.NanoTime() / J2N.Time.MillisecondsPerNanosecond) - time) < 100 * 1000 && !Terminated(stallThreads)) // LUCENENET: Use NanoTime() rather than CurrentTimeMilliseconds() for more accurate/reliable results { ctrl.UpdateStalled(false); if (Random.NextBoolean()) { Thread.Yield(); } else { Thread.Sleep(1); } } Join(stallThreads); }
public virtual void Collect(int doc) { int docId = doc + docBase; if (slowdown > 0) { try { ThreadJob.Sleep(slowdown); } catch (Exception ie) when(ie.IsInterruptedException()) { #pragma warning disable IDE0001 // Simplify name throw new Util.ThreadInterruptedException(ie); #pragma warning restore IDE0001 // Simplify name } } if (Debugging.AssertsEnabled) { Debugging.Assert(docId >= 0, " base={0} doc={1}", docBase, doc); } bits.Set(docId); lastDocCollected = docId; }
public virtual void Init(IndexReader reader) { this.reader = reader; timeElapsed = 0; t = new ThreadJob(new System.Threading.ThreadStart(this.Run)); t.Start(); }
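Several of these snippets spawn a ThreadJob the same way as Init() above. The following is a minimal sketch of that pattern, assuming only the Thread-like members the snippets themselves exercise (a ThreadStart-style constructor, Start, Join, IsBackground); in the Lucene.NET snippets this appears to be J2N.Threading.ThreadJob.

using System;
using J2N.Threading; // assumption: the ThreadJob type used by the Lucene.NET snippets

public static class ThreadJobSketch
{
    public static void Main()
    {
        // Constructor takes a ThreadStart-compatible delegate, as in Init() above.
        var worker = new ThreadJob(() => Console.WriteLine("background work running"));
        worker.IsBackground = true; // do not keep the process alive for this thread
        worker.Start();
        worker.Join();              // wait for the work to complete
    }
}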
/// <summary> /// Constructor /// </summary> /// <param name="job">The method to execute from the thread</param> /// <param name="threadData">The data object made available to the thread job</param> /// <param name="autostart">Whether to automatically or manually start the thread</param> /// <param name="joinTimeout">The time to wait for the thread join on disposal or stop</param> public WorkerT(ThreadJob job, T threadData, bool autostart, int joinTimeout) { JoinTimeout = joinTimeout; threadJob = job; ThreadData = threadData; Init(autostart); }
public virtual void RunTest(Random random, Directory directory) { IndexWriter writer = new IndexWriter(directory, ((IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER).SetOpenMode(OpenMode.CREATE).SetMaxBufferedDocs(2)).SetMergePolicy(NewLogMergePolicy())); for (int iter = 0; iter < NUM_ITER; iter++) { int iterFinal = iter; ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 1000; FieldType customType = new FieldType(StringField.TYPE_STORED); customType.OmitNorms = true; for (int i = 0; i < 200; i++) { Document d = new Document(); d.Add(NewField("id", Convert.ToString(i), customType)); d.Add(NewField("contents", English.Int32ToEnglish(i), customType)); writer.AddDocument(d); } ((LogMergePolicy)writer.Config.MergePolicy).MergeFactor = 4; ThreadJob[] threads = new ThreadJob[NUM_THREADS]; for (int i = 0; i < NUM_THREADS; i++) { int iFinal = i; IndexWriter writerFinal = writer; threads[i] = new ThreadAnonymousInnerClassHelper(this, iterFinal, customType, iFinal, writerFinal); } for (int i = 0; i < NUM_THREADS; i++) { threads[i].Start(); } for (int i = 0; i < NUM_THREADS; i++) { threads[i].Join(); } Assert.IsTrue(!Failed); int expectedDocCount = (int)((1 + iter) * (200 + 8 * NUM_ITER2 * (NUM_THREADS / 2.0) * (1 + NUM_THREADS))); Assert.AreEqual(expectedDocCount, writer.NumDocs, "index=" + writer.SegString() + " numDocs=" + writer.NumDocs + " maxDoc=" + writer.MaxDoc + " config=" + writer.Config); Assert.AreEqual(expectedDocCount, writer.MaxDoc, "index=" + writer.SegString() + " numDocs=" + writer.NumDocs + " maxDoc=" + writer.MaxDoc + " config=" + writer.Config); writer.Dispose(); writer = new IndexWriter(directory, (IndexWriterConfig)NewIndexWriterConfig(TEST_VERSION_CURRENT, ANALYZER).SetOpenMode(OpenMode.APPEND).SetMaxBufferedDocs(2)); DirectoryReader reader = DirectoryReader.Open(directory); Assert.AreEqual(1, reader.Leaves.Count, "reader=" + reader); Assert.AreEqual(expectedDocCount, reader.NumDocs); reader.Dispose(); } writer.Dispose(); }
void ThreadHandler(ThreadJob t) { Thread.Sleep(5000); // wait until everything is initialized, to avoid multithreaded access violations while (!t.IsDone) { ScanRobots(); Thread.Sleep(ScanPeriod); } }
internal static ThreadJob[] WaitThreads(int num, DocumentsWriterStallControl ctrl) { ThreadJob[] array = new ThreadJob[num]; for (int i = 0; i < array.Length; i++) { array[i] = new ThreadAnonymousInnerClassHelper2(ctrl); } return array; }
public static void Main(string[] args) { if (args.Length != 2) { // LUCENENET specific - our wrapper console shows the correct usage throw new ArgumentException(); //Console.WriteLine("Usage: java Lucene.Net.Store.LockVerifyServer bindToIp clients\n"); //Environment.FailFast("1"); } int arg = 0; string hostname = args[arg++]; int maxClients = Convert.ToInt32(args[arg++], CultureInfo.InvariantCulture); IPAddress ipAddress = IPAddress.Parse(hostname); using (Socket s = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp)) { s.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, 1); s.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReceiveTimeout, 30000);// SoTimeout = 30000; // initially 30 secs to give clients enough time to startup s.Bind(new IPEndPoint(ipAddress, 0)); s.Listen(maxClients); Console.WriteLine("Listening on " + ((IPEndPoint)s.LocalEndPoint).Port.ToString() + "..."); // we set the port as a sysprop, so the ANT task can read it. For that to work, this server must run in-process: SystemProperties.SetProperty("lockverifyserver.port", ((IPEndPoint)s.LocalEndPoint).Port.ToString(CultureInfo.InvariantCulture)); object localLock = new object(); int[] lockedID = new int[1]; lockedID[0] = -1; CountdownEvent startingGun = new CountdownEvent(1); ThreadJob[] threads = new ThreadJob[maxClients]; for (int count = 0; count < maxClients; count++) { Socket cs = s.Accept(); threads[count] = new ThreadAnonymousInnerClassHelper(localLock, lockedID, startingGun, cs); threads[count].Start(); } // start Console.WriteLine("All clients started, fire gun..."); startingGun.Signal(); // wait for all threads to finish foreach (ThreadJob t in threads) { t.Join(); } // cleanup sysprop SystemProperties.SetProperty("lockverifyserver.port", null); Console.WriteLine("Server terminated."); } }
// ************************** // Public functions // ************************** public void Start() // NOTE: Due to the current underlying C++ implementation being single-threaded, there can only be one of these { m_cppPlugin = new CppPlugin(this, kMaxNumTextures); m_textureIndexUsage = new int[kMaxNumTextures]; m_coroutineQueue = new CoroutineQueue(this); m_coroutineQueue.StartLoop(); m_threadJob = new ThreadJob(this); }
public void Start() { m_pictureFilePaths = new List <string>(); m_coroutineQueue = new CoroutineQueue(this); m_coroutineQueue.StartLoop(); m_threadJob = new ThreadJob(this); m_cppPlugin = new CppPlugin(this); }
public async Task ExecuteInDefaultScheduler() { IJobScheduler scheduler = new JobScheduler(_builder); var job = new ThreadJob(Thread.CurrentThread); var jobRunner = scheduler.ScheduleJobInternal(new JobScheduler.JobContainer(job)); await jobRunner.WaitForJob(); job.HasRun.Should().BeTrue(); jobRunner.Retries.Should().Be(0); job.InitThread.Should().Be(job.RunThread); }
public RTPRobotScanner() { _robotCommunicator = new RTPRobotCommunicator(); _dataCommunicator = new RTPDataCommunicator(); _dataCommunicator.Start(); _dataCommunicator.OnRobotInfoDetected += OnRobotInfoDetected; _thread = new ThreadJob(); _thread.ThreadHandler += ThreadHandler; _thread.Start(); }
internal bool IsHealthy => !stalled; // volatile read! internal bool IsThreadQueued(ThreadJob t) // for tests { UninterruptableMonitor.Enter(this); try { return waiting.ContainsKey(t); } finally { UninterruptableMonitor.Exit(this); } }
public ConcurrentImdbMovieParser(uint imdbID) { movieData = new ImdbMovie(imdbID); this.mainPageJob = new JobWebPageDownload(IMDBUtil.getURLToMovie(imdbID)); this.awardsPageJob = new JobWebPageDownload(IMDBUtil.getAwardsURLToMovie(imdbID)); this.creditsPageJob = new JobWebPageDownload(IMDBUtil.getFullcreditsURLToMovie(imdbID)); this.addJob(mainPageJob); this.addJob(awardsPageJob); this.addJob(creditsPageJob); }
public virtual void AssertThreadSafe(Analyzer analyzer) { int numTestPoints = 100; int numThreads = TestUtil.NextInt32(Random, 3, 5); Dictionary<string, BytesRef> map = new Dictionary<string, BytesRef>(); // create a map<String,SortKey> up front. // then with multiple threads, generate sort keys for all the keys in the map // and ensure they are the same as the ones we produced in serial fashion. for (int i = 0; i < numTestPoints; i++) { string term = TestUtil.RandomSimpleString(Random); Exception priorException = null; // LUCENENET: No need to cast to IOException TokenStream ts = analyzer.GetTokenStream("fake", new StringReader(term)); try { ITermToBytesRefAttribute termAtt = ts.AddAttribute<ITermToBytesRefAttribute>(); BytesRef bytes = termAtt.BytesRef; ts.Reset(); Assert.IsTrue(ts.IncrementToken()); termAtt.FillBytesRef(); // ensure we make a copy of the actual bytes too map[term] = BytesRef.DeepCopyOf(bytes); Assert.IsFalse(ts.IncrementToken()); ts.End(); } catch (Exception e) when(e.IsIOException()) { priorException = e; } finally { IOUtils.DisposeWhileHandlingException(priorException, ts); } } ThreadJob[] threads = new ThreadJob[numThreads]; for (int i = 0; i < numThreads; i++) { threads[i] = new ThreadAnonymousClass(analyzer, map); } for (int i = 0; i < numThreads; i++) { threads[i].Start(); } for (int i = 0; i < numThreads; i++) { threads[i].Join(); } }
public void TestMultiThreaded() { FileInfo file = new FileInfo(Path.Combine(getWorkDir().FullName, "one-line")); PerfRunData runData = createPerfRunData(file, false, typeof(ThreadingDocMaker).AssemblyQualifiedName); ThreadJob[] threads = new ThreadJob[10]; using (WriteLineDocTask wldt = new WriteLineDocTask(runData)) { for (int i = 0; i < threads.Length; i++) { threads[i] = new ThreadAnonymousHelper("t" + i, wldt); } foreach (ThreadJob t in threads) { t.Start(); } foreach (ThreadJob t in threads) { t.Join(); } } // wldt.Dispose(); ISet <String> ids = new JCG.HashSet <string>(); TextReader br = new StreamReader(new FileStream(file.FullName, FileMode.Open, FileAccess.Read, FileShare.None), Encoding.UTF8); try { String line = br.ReadLine(); assertHeaderLine(line); // header line is written once, no matter how many threads there are for (int i = 0; i < threads.Length; i++) { line = br.ReadLine(); assertNotNull($"line for index {i} is missing", line); // LUCENENET specific - ensure the line is there before splitting String[] parts = line.Split(WriteLineDocTask.SEP).TrimEnd(); assertEquals(line, 3, parts.Length); // check that all thread names written are the same in the same line String tname = parts[0].Substring(parts[0].IndexOf('_')); ids.add(tname); assertEquals(tname, parts[1].Substring(parts[1].IndexOf('_'))); assertEquals(tname, parts[2].Substring(parts[2].IndexOf('_'))); } // only threads.length lines should exist assertNull(br.ReadLine()); assertEquals(threads.Length, ids.size()); } finally { br.Dispose(); } }
public override bool hasFinished(ThreadJob job) { if (job == mainPageJob) { mainPage = (job as JobWebPageDownload).getResult(); } else if (job == weekEndPageJob) { weekendPage = (job as JobWebPageDownload).getResult(); } else if (job == foreignPageJob) { foreignPage = (job as JobWebPageDownload).getResult(); } return false; }
internal string[] Next() { if (t == null) { threadDone = false; t = new ThreadJob(Run); t.IsBackground = true; t.Start(); } string[] result; UninterruptableMonitor.Enter(this); try { while (tuple == null && nmde == null && !threadDone && !stopped) { try { UninterruptableMonitor.Wait(this); } catch (Exception ie) when(ie.IsInterruptedException()) { throw new Util.ThreadInterruptedException(ie); } } if (tuple != null) { result = tuple; tuple = null; Monitor.Pulse(this); // notify(); return result; } if (nmde != null) { // Set to null so we will re-start the thread in case // we are re-used: t = null; throw nmde; } // The thread has exited but did not hit the end of the // data, so this means it hit an exception. We // throw NoMoreDataException here to force the // benchmark to stop the current alg: throw new NoMoreDataException(); } finally { UninterruptableMonitor.Exit(this); } }
protected virtual void Start(int numNodes, double runTimeSec, int maxSearcherAgeSeconds) { endTimeNanos = J2N.Time.NanoTime() + (long)(runTimeSec * 1000000000); this.maxSearcherAgeSeconds = maxSearcherAgeSeconds; m_nodes = new NodeState[numNodes]; for (int nodeID = 0; nodeID < numNodes; nodeID++) { m_nodes[nodeID] = new NodeState(this, Random, nodeID, numNodes); } long[] nodeVersions = new long[m_nodes.Length]; for (int nodeID = 0; nodeID < numNodes; nodeID++) { IndexSearcher s = m_nodes[nodeID].Mgr.Acquire(); try { nodeVersions[nodeID] = m_nodes[nodeID].Searchers.Record(s); } finally { m_nodes[nodeID].Mgr.Release(s); } } for (int nodeID = 0; nodeID < numNodes; nodeID++) { IndexSearcher s = m_nodes[nodeID].Mgr.Acquire(); if (Debugging.AssertsEnabled) { Debugging.Assert(nodeVersions[nodeID] == m_nodes[nodeID].Searchers.Record(s)); } if (Debugging.AssertsEnabled) { Debugging.Assert(s != null); } try { BroadcastNodeReopen(nodeID, nodeVersions[nodeID], s); } finally { m_nodes[nodeID].Mgr.Release(s); } } changeIndicesThread = new ChangeIndices(this); changeIndicesThread.Start(); }
public async Task ExecuteInOwnScheduler() { IJobScheduler scheduler = new JobScheduler(_builder); using var taskScheduler = new MockTaskScheduler(); var job = new ThreadJob(Thread.CurrentThread); var jobRunner = scheduler.ScheduleJobInternal(new JobScheduler.JobContainer(job), taskScheduler); await jobRunner.WaitForJob(); job.HasRun.Should().BeTrue(); jobRunner.Retries.Should().Be(0); taskScheduler.Scheduled.Should().Be(1); job.InitThread.Should().NotBe(job.RunThread); job.RunThread.Should().Be(taskScheduler.MainThread); }
// ************************** // Public functions // ************************** public void Start() { AndroidJNI.AttachCurrentThread(); m_javaPluginClass = new AndroidJavaClass("io.vreel.vreel.JavaPlugin"); m_galleryImageFilePaths = new List <string>(); m_coroutineQueue = new CoroutineQueue(this); m_coroutineQueue.StartLoop(); m_threadJob = new ThreadJob(this); m_backEndAPI = new BackEndAPI(this, m_user.GetErrorMessage(), m_user); m_uploadConfirmation.SetActive(false); }
// ************************** // Public functions // ************************** public void Start() { // Version dependent code m_vreelAnalyticsFile = GetSaveFile(); m_analyticsFilePath = Application.persistentDataPath + m_vreelAnalyticsFile; m_analyticsData = new AnalyticsData(); m_analyticsData.m_uid = ""; m_threadJob = new ThreadJob(this); m_coroutineQueue = new CoroutineQueue(this); m_coroutineQueue.StartLoop(); m_coroutineQueue.EnqueueAction(IdentifyInternal()); }
internal static void MTAExecute(WaitCallback callback, object state) { if (Thread.CurrentThread.GetApartmentState() != ApartmentState.MTA) { using (ThreadJob job = new ThreadJob(callback, state)) { Thread thread = new Thread(new ThreadStart(job.Run)); thread.SetApartmentState(ApartmentState.MTA); thread.IsBackground = true; thread.Start(); Exception innerException = job.Wait(); if (innerException != null) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ApplicationException(System.ServiceModel.SR.GetString("AdminMTAWorkerThreadException"), innerException)); } return; } } callback(state); }
internal static void MTAExecute(WaitCallback callback, object state) { if (Thread.CurrentThread.GetApartmentState() != ApartmentState.MTA) { using (ThreadJob job = new ThreadJob(callback, state)) { Thread thread = new Thread(new ThreadStart(job.Run)); thread.SetApartmentState(ApartmentState.MTA); thread.IsBackground = true; thread.Start(); Exception exception = job.Wait(); if (null != exception) { throw DiagnosticUtility.ExceptionUtility.ThrowHelperError(new ApplicationException(SR.GetString(SR.AdminMTAWorkerThreadException), exception)); } } } else { callback(state); } }
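Both MTAExecute variants above implement the same pattern: if the caller is not already on an MTA thread, run the callback on a dedicated background MTA thread, block until it finishes, and rethrow any failure wrapped for the caller. Below is a self-contained sketch of that pattern under illustrative names (RunOnMtaThread is not part of the source, and SetApartmentState is only meaningful on Windows).

using System;
using System.Threading;

public static class MtaHelper
{
    // Runs the callback on an MTA thread, mirroring the MTAExecute pattern above.
    public static void RunOnMtaThread(Action callback)
    {
        if (Thread.CurrentThread.GetApartmentState() == ApartmentState.MTA)
        {
            callback(); // already MTA: run inline
            return;
        }

        Exception captured = null;
        var thread = new Thread(() =>
        {
            try { callback(); }
            catch (Exception e) { captured = e; } // marshal the failure back to the caller
        });
        thread.SetApartmentState(ApartmentState.MTA); // must be set before Start
        thread.IsBackground = true;
        thread.Start();
        thread.Join();

        if (captured != null)
        {
            throw new ApplicationException("MTA worker thread failed.", captured);
        }
    }
}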
private ThreadJob getPictureLoadJob() { Match m = Regex.Match(mainPage, mediaURLRegex); imageURL = m.Groups["url"].Value + ".jpg"; imageLoadJob = new JobLoadImage(imageURL, null); return imageLoadJob; }
public abstract bool hasFinished(ThreadJob job);
public override bool hasFinished(ThreadJob job) { return true; }
public void addJob(ThreadJob job) { /* lock (rather than bare Monitor.Enter/Exit) guarantees the monitor is released even if Add throws */ lock (lockvar) { jobs.Add(job); } }