/// <summary>
/// Tests streaming additions and removals: single entries, KeyValuePair entries,
/// collection batches, and an interleaved add/remove sequence. AllowOverwrite must
/// be enabled for RemoveData to take effect.
/// </summary>
public void TestAddRemove()
{
    using (IDataStreamer<int, int> ldr = _grid.GetDataStreamer<int, int>(CacheName))
    {
        // Removal via the streamer requires overwrite mode.
        ldr.AllowOverwrite = true;

        // Additions: single key/value overload.
        ldr.AddData(1, 1);
        ldr.Flush();
        Assert.AreEqual(1, _cache.Get(1));

        // Additions: KeyValuePair overload.
        ldr.AddData(new KeyValuePair<int, int>(2, 2));
        ldr.Flush();
        Assert.AreEqual(2, _cache.Get(2));

        // Additions: collection overload.
        ldr.AddData(new List<KeyValuePair<int, int>>
        {
            new KeyValuePair<int, int>(3, 3),
            new KeyValuePair<int, int>(4, 4)
        });
        ldr.Flush();
        Assert.AreEqual(3, _cache.Get(3));
        Assert.AreEqual(4, _cache.Get(4));

        // Removal.
        ldr.RemoveData(1);
        ldr.Flush();
        Assert.IsFalse(_cache.ContainsKey(1));

        // Mixed: interleave additions (keys 5..12) with removals of previously
        // added keys 2, 3 and 4. Order matters: each removal targets a key that
        // is already in the cache from the earlier flushed additions.
        ldr.AddData(5, 5);
        ldr.RemoveData(2);
        ldr.AddData(new KeyValuePair<int, int>(7, 7));
        ldr.AddData(6, 6);
        ldr.RemoveData(4);
        ldr.AddData(new List<KeyValuePair<int, int>>
        {
            new KeyValuePair<int, int>(9, 9),
            new KeyValuePair<int, int>(10, 10)
        });
        ldr.AddData(new KeyValuePair<int, int>(8, 8));
        ldr.RemoveData(3);
        ldr.AddData(new List<KeyValuePair<int, int>>
        {
            new KeyValuePair<int, int>(11, 11),
            new KeyValuePair<int, int>(12, 12)
        });
        ldr.Flush();

        // Keys 2..4 were removed; keys 5..12 must be present with value == key.
        for (int i = 2; i < 5; i++)
        {
            Assert.IsFalse(_cache.ContainsKey(i));
        }

        for (int i = 5; i < 13; i++)
        {
            Assert.AreEqual(i, _cache.Get(i));
        }
    }
}
/// <summary>
/// Bulk-loads every customer account from the database into the cache via the
/// supplied data streamer, keyed by a sequential counter, and prints timing stats.
/// </summary>
/// <param name="streamer">Streamer targeting the customer-account cache.
/// NOTE(review): the caller is responsible for flushing/closing the streamer;
/// this method only buffers entries into it.</param>
public void Process(IDataStreamer<long, CustomerAccount_ignite> streamer)
{
    long cnt = 0;
    var stopwatch = new Stopwatch();
    stopwatch.Start();

    // FIX: DataContext is IDisposable — dispose it so the underlying database
    // connection is released (it was previously leaked).
    using (CustomerAccountsDataContext data = new CustomerAccountsDataContext())
    {
        // Read-only bulk load: change tracking is pure overhead here.
        data.ObjectTrackingEnabled = false;

        foreach (var customer in data.CustomerAccounts)
        {
            streamer.AddData(cnt, new CustomerAccount_ignite(customer));
            cnt += 1;
        }
    }

    stopwatch.Stop();
    long elapsed_time = stopwatch.ElapsedMilliseconds;

    Console.WriteLine("Finish Data Loading\n");
    Console.WriteLine("Preload Time on {0} rows using {1} ms", cnt, elapsed_time);
}
/// <summary>
/// Creates a continuation action that streams the given flows into the cache as
/// Artifact payloads and then reports the chunk as stored.
/// </summary>
/// <param name="dataStreamer">Streamer used to push artifacts into the cache.</param>
/// <param name="list">Flows to store; each flow's frames are concatenated into a single payload.</param>
/// <param name="currentChunkNumber">Ordinal of the chunk being stored (passed to OnChunkStored).</param>
/// <param name="currentChunkBytes">Size of the chunk in bytes (passed to OnChunkStored).</param>
/// <returns>An action intended for use as a task continuation.</returns>
private Action<Task> StreamData(IDataStreamer<string, Artifact> dataStreamer, IEnumerable<KeyValuePair<FlowKey, IList<FrameData>>> list, int currentChunkNumber, int currentChunkBytes)
{
    // Materialize the items now (ToList) so the deferred action below does not
    // re-enumerate 'list' when it eventually runs.
    var items = list.Select(x => KeyValuePair.Create(x.Key.ToString(), new Artifact { PayloadBin = x.Value.SelectMany(y => y.GetBytes()).ToArray() })).ToList();

    // NOTE(review): an async lambda converted to Action<Task> compiles to async void,
    // so exceptions thrown by AddData are unobservable by the continuation's caller.
    // Consider Func<Task, Task> + Unwrap; that changes the signature, so flagged only.
    return (async (t) =>
    {
        await dataStreamer.AddData(items);
        this.OnChunkStored(currentChunkNumber, currentChunkBytes);
    });
}
/// <summary>
/// Tests the obsolete auto-flush frequency API: entries stay buffered while
/// auto-flush is off, and every frequency change flushes buffered data.
/// </summary>
public void TestAutoFlushObsolete()
{
#pragma warning disable 618 // Type or member is obsolete
    using (IDataStreamer<int, int> streamer = _grid.GetDataStreamer<int, int>(CacheName))
    {
        // With auto-flush off, a single entry stays buffered.
        var pending = streamer.AddData(1, 1);
        Thread.Sleep(100);
        Assert.IsFalse(pending.IsCompleted);

        // Enabling auto-flush completes the pending operation.
        streamer.AutoFlushFrequency = 1000;
        pending.Wait();

        // Raising the frequency forces a flush of buffered data.
        pending = streamer.AddData(2, 2);
        streamer.AutoFlushFrequency = long.MaxValue;
        pending.Wait();

        // Lowering the frequency forces a flush as well.
        pending = streamer.AddData(3, 3);
        streamer.AutoFlushFrequency = 1000;
        pending.Wait();

        // Disabling auto-flush flushes buffered data before stopping.
        pending = streamer.AddData(4, 4);
        streamer.AutoFlushFrequency = 0;
        pending.Wait();

        // Auto-flush still works after being turned off and on again.
        pending = streamer.AddData(5, 5);
        streamer.AutoFlushFrequency = 1000;
        pending.Wait();

        // All five entries must have landed in the cache.
        for (int key = 1; key <= 5; key++)
        {
            Assert.AreEqual(key, _cache.Get(key));
        }
    }
#pragma warning restore 618 // Type or member is obsolete
}
/// <summary>
/// Tests that closing the streamer with cancel=true discards buffered entries.
/// </summary>
public void TestCancel()
{
    using (IDataStreamer<int, int> streamer = _grid.GetDataStreamer<int, int>(CacheName))
    {
        var pending = streamer.AddData(1, 1);

        // Cancelling close: buffered data must be dropped, not flushed.
        streamer.Close(true);
        pending.Wait();

        Assert.IsFalse(_cache.ContainsKey(1));
    }
}
/// <summary>
/// Tests that closing the streamer with cancel=false flushes buffered entries.
/// </summary>
public void TestClose()
{
    using (IDataStreamer<int, int> streamer = _grid.GetDataStreamer<int, int>(CacheName))
    {
        var pending = streamer.AddData(1, 1);

        // Non-cancelling close: buffered data must be flushed to the cache.
        streamer.Close(false);
        pending.Wait();

        Assert.AreEqual(1, _cache.Get(1));
    }
}
/// <summary>
/// Tests that closing the streamer with cancel=true discards buffered entries
/// (legacy DataStreamer / IFuture API).
/// </summary>
public void TestCancel()
{
    using (IDataStreamer<int, int> streamer = _grid.DataStreamer<int, int>(CacheName))
    {
        var pending = streamer.AddData(1, 1);

        // Cancelling close: the buffered entry must never reach the cache.
        streamer.Close(true);
        pending.Get();

        Assert.IsNull(_cache.Get(1));
    }
}
/// <summary>
/// Tests that TryFlush pushes buffered entries to the cache
/// (legacy DataStreamer / IFuture API).
/// </summary>
public void TestTryFlush()
{
    using (IDataStreamer<int, int> streamer = _grid.DataStreamer<int, int>(CacheName))
    {
        var pending = streamer.AddData(1, 1);

        // TryFlush starts an asynchronous flush; wait on the entry's future.
        streamer.TryFlush();
        pending.Get();

        Assert.AreEqual(1, _cache.Get(1));
    }
}
/// <summary>
/// Tests streaming from multiple threads: each of 8 threads loads a disjoint
/// 100k-key range, repeated for 5 iterations, then the cache contents are verified.
/// </summary>
public void TestMultithreaded()
{
    // Entries loaded per thread, and the number of concurrent loader threads.
    int entriesPerThread = 100000;
    int threadCnt = 8;

    for (int i = 0; i < 5; i++)
    {
        _cache.Clear();
        Assert.AreEqual(0, _cache.GetSize());

        Stopwatch watch = new Stopwatch();
        watch.Start();

        using (IDataStreamer<int, int> ldr = _grid.GetDataStreamer<int, int>(CacheName))
        {
            ldr.PerNodeBufferSize = 1024;

            int ctr = 0;

            TestUtils.RunMultiThreaded(() =>
            {
                // Each thread claims a unique index and loads its own key range.
                int threadIdx = Interlocked.Increment(ref ctr);
                int startIdx = (threadIdx - 1) * entriesPerThread;
                int endIdx = startIdx + entriesPerThread;

                for (int j = startIdx; j < endIdx; j++)
                {
                    // ReSharper disable once AccessToDisposedClosure
                    ldr.AddData(j, j);

                    if (j % 100000 == 0)
                    {
                        Console.WriteLine("Put [thread=" + threadIdx + ", cnt=" + j + ']');
                    }
                }
            }, threadCnt);
        }

        Console.WriteLine("Iteration " + i + ": " + watch.ElapsedMilliseconds);
        watch.Reset();

        // FIX: was Assert.AreEqual(j, j) — a tautology that verified nothing.
        // Check that every streamed key is actually present with value == key.
        for (int j = 0; j < threadCnt * entriesPerThread; j++)
        {
            Assert.AreEqual(j, _cache.Get(j));
        }
    }
}
/// <summary>
/// Tests the auto-flush frequency API (legacy DataStreamer / IFuture version):
/// entries stay buffered while auto-flush is off, and every frequency change
/// flushes buffered data.
/// </summary>
public void TestAutoFlush()
{
    using (IDataStreamer<int, int> streamer = _grid.DataStreamer<int, int>(CacheName))
    {
        // With auto-flush off, a single entry stays buffered.
        var pending = streamer.AddData(1, 1);
        Thread.Sleep(100);
        Assert.IsFalse(pending.IsDone);

        // Enabling auto-flush completes the pending future.
        streamer.AutoFlushFrequency = 1000;
        pending.Get();

        // Raising the frequency forces a flush of buffered data.
        pending = streamer.AddData(2, 2);
        streamer.AutoFlushFrequency = long.MaxValue;
        pending.Get();

        // Lowering the frequency forces a flush as well.
        pending = streamer.AddData(3, 3);
        streamer.AutoFlushFrequency = 1000;
        pending.Get();

        // Disabling auto-flush flushes buffered data before stopping.
        pending = streamer.AddData(4, 4);
        streamer.AutoFlushFrequency = 0;
        pending.Get();

        // Auto-flush still works after being turned off and on again.
        pending = streamer.AddData(5, 5);
        streamer.AutoFlushFrequency = 1000;
        pending.Get();

        // All five entries must have landed in the cache.
        for (int key = 1; key <= 5; key++)
        {
            Assert.AreEqual(key, _cache.Get(key));
        }
    }
}
/// <summary>
/// Tests PerNodeBufferSize: a single entry stays buffered until the buffer fills,
/// and batch additions are flushed when they exceed the buffer size.
/// </summary>
public void TestBufferSize()
{
    using (IDataStreamer<int, int> streamer = _grid.GetDataStreamer<int, int>(CacheName))
    {
        // One entry stays buffered under the default buffer size.
        var first = streamer.AddData(1, 1);
        Thread.Sleep(100);
        Assert.IsFalse(first.IsCompleted);

        // Shrink the buffer so every two entries trigger a flush.
        streamer.PerNodeBufferSize = 2;

        streamer.AddData(2, 2);
        streamer.AddData(3, 3);
        streamer.AddData(4, 4).Wait();
        first.Wait();

        for (int key = 1; key <= 4; key++)
        {
            Assert.AreEqual(key, _cache.Get(key));
        }

        // A batch larger than the buffer is flushed as well.
        var batch = new List<KeyValuePair<int, int>>();
        for (int key = 5; key <= 8; key++)
        {
            batch.Add(new KeyValuePair<int, int>(key, key));
        }

        streamer.AddData(batch).Wait();

        for (int key = 5; key <= 8; key++)
        {
            Assert.AreEqual(key, _cache.Get(key));
        }
    }
}
/// <summary>
/// Tests that the obsolete TryFlush method pushes buffered entries to the cache.
/// </summary>
public void TestTryFlushObsolete()
{
#pragma warning disable 618 // Type or member is obsolete
    using (IDataStreamer<int, int> streamer = _grid.GetDataStreamer<int, int>(CacheName))
    {
        var pending = streamer.AddData(1, 1);

        // TryFlush starts an asynchronous flush; wait on the entry's task.
        streamer.TryFlush();
        pending.Wait();

        Assert.AreEqual(1, _cache.Get(1));
    }
#pragma warning restore 618 // Type or member is obsolete
}
/// <summary>
/// Adds a single key-value pair by delegating to the wrapped streamer.
/// </summary>
/// <param name="key">Cache key.</param>
/// <param name="value">Cache value.</param>
public void AddData(TK key, TV value) => _streamer.AddData(key, value);
/// <summary>
/// Streams a chunk of frames into the cache and signals completion to subscribers.
/// </summary>
/// <param name="dataStreamer">Streamer used to push frames into the cache.</param>
/// <param name="frameArray">Frames belonging to this chunk.</param>
/// <param name="currentChunkNumber">Ordinal of the chunk (passed to OnChunkStored).</param>
/// <param name="currentChunkBytes">Size of the chunk in bytes (passed to OnChunkStored).</param>
private async Task StoreChunk(IDataStreamer<FrameKey, FrameData> dataStreamer, KeyValuePair<FrameKey, FrameData>[] frameArray, int currentChunkNumber, int currentChunkBytes)
{
    // Push the whole chunk and wait for the streamer to accept it.
    var streamTask = dataStreamer.AddData(frameArray);
    await streamTask;

    // Notify listeners (if any) that this chunk has been stored.
    OnChunkStored?.Invoke(this, currentChunkNumber, currentChunkBytes);
}
/// <summary>
/// Entry point: starts an Ignite client node, optionally preloads the customer-account
/// cache, then runs concurrent write and read workloads against it and prints
/// latency/throughput statistics for each phase.
/// </summary>
static void Main(string[] args)
{
    //string conn_string = ConfigurationManager.ConnectionStrings["CacheService.Properties.Settings.testConnectionString"].Name;

    // Join the cluster as a client (no data is hosted locally).
    Ignition.ClientMode = true;

    using (ignite = Ignition.StartFromApplicationConfiguration())
    {
        ICluster cluster = ignite.GetCluster();
        ICollection<IClusterNode> t = cluster.ForRemotes().GetNodes();
        List<IClusterNode> t1 = t.ToList<IClusterNode>();

        // NOTE(review): List<T>.ToString() prints the type name, not the node list —
        // this line does not display the remote nodes as presumably intended.
        Console.WriteLine("{0}", t1.ToString());

        PreLoad preload = new PreLoad();
        String CacheName = "Cache";

        // Snapshot of cache names taken BEFORE GetOrCreateCache, so the Contains
        // check below reflects whether the cache existed prior to this run.
        var caches = ignite.GetCacheNames();

        cache = ignite.GetOrCreateCache<long, CustomerAccount_ignite>(
            new CacheConfiguration(CacheName, typeof(CustomerAccount_ignite)) { CacheMode = CacheMode.Partitioned }
            );//"Cache"

        // NOTE(review): this streamer is never flushed or closed anywhere in this
        // method, so entries buffered by AddData (in the preload and in the write
        // tasks below) may never reach the cache — confirm whether an explicit
        // using/Flush is required here.
        IDataStreamer<long, CustomerAccount_ignite> streamer = ignite.GetDataStreamer<long, CustomerAccount_ignite>("Cache");
        streamer.AllowOverwrite = true;

        IQueryable<ICacheEntry<long, CustomerAccount_ignite>> queryable = cache.AsCacheQueryable();

        // Preload only when the cache did not exist before this run.
        if (!caches.Contains(CacheName))
        {
            preload.Process(streamer);
        }

        Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.fff") + " Populating Account IDs");
        PopulateAccountIDs();
        Console.WriteLine(accountIDs.Count);

        Console.WriteLine(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.fff") + " Start writing tasks");

        // Writer tasks: each repeatedly picks a random account, queries matching
        // entries, mutates the agency name, and re-streams the entry. Shared
        // counters are updated via Interlocked.
        CancellationTokenSource writeTokenSource = new CancellationTokenSource();
        CancellationToken writeToken = writeTokenSource.Token;
        List<Task> writeTasks = new List<Task>();
        for (int i = 0; i < numWriteThread; i++)
        {
            writeTasks.Add(new Task(() =>
            {
                Thread.Sleep(2000);
                ///Console.Write("tasks1 start ", i);
                while (!writeToken.IsCancellationRequested)
                {
                    Stopwatch sw = Stopwatch.StartNew();
                    try
                    {
                        long p = Helper.GetRandomAccountID(accountIDs);
                        var accounts = queryable.Where(customer => customer.Value._AccountId == p).ToArray();
                        foreach (var account in accounts)
                        {
                            account.Value._AgencyName = Helper.GetRandomString(20);
                            streamer.AddData(account.Key, account.Value);
                        }
                        Interlocked.Increment(ref writeSucceeded);
                    }
                    catch (Exception ex)
                    {
                        Console.WriteLine(ex);
                        Interlocked.Increment(ref writeFailed);
                    }
                    sw.Stop();
                    Interlocked.Add(ref writeTotalLatency, sw.ElapsedMilliseconds);
                    Interlocked.Increment(ref writeCalls);
                }
            }, writeToken));
        }

        // NOTE(review): tasks are created cold and started here; Parallel.ForEach
        // only parallelizes the Start() calls, not the tasks themselves.
        Parallel.ForEach(writeTasks, task => task.Start());

        // First read workload: query-only tasks measured for numDuration minutes.
        {
            CancellationTokenSource tokenSource = new CancellationTokenSource();
            CancellationToken token = tokenSource.Token;
            List<Task> tasks = new List<Task>();
            for (int i = 0; i < numReadThread; i++)
            {
                tasks.Add(new Task(() =>
                {
                    Thread.Sleep(2000);
                    //Console.Write("{0} tasks start" ,i);
                    while (!token.IsCancellationRequested)
                    {
                        Stopwatch sw = Stopwatch.StartNew();
                        try
                        {
                            long p = Helper.GetRandomAccountID(accountIDs);
                            var accounts = queryable.Where(customer => customer.Value._AccountId == p).ToArray();
                            foreach (var account in accounts)
                            {
                                // Intentionally empty: ToArray() above already executed
                                // the query, which is the operation being measured.
                            }
                            Interlocked.Increment(ref succeeded);
                        }
                        catch (Exception ex)
                        {
                            Console.WriteLine(ex);
                            Interlocked.Increment(ref failed);
                        }
                        sw.Stop();
                        Interlocked.Add(ref totalLatency, sw.ElapsedMilliseconds);
                        Interlocked.Increment(ref calls);
                    }
                }, token));
            }
            Parallel.ForEach(tasks, task => task.Start());

            // Let the readers run for the configured duration, then stop and report.
            Thread.Sleep(numDuration * 1000 * 60);
            tokenSource.Cancel();
            Console.Write(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.fff"));
            Console.WriteLine(" Reads:");
            // NOTE(review): "averge" is a typo in the output text; left unchanged
            // because it is runtime output.
            Console.WriteLine("averge latency: " + 1.0 * totalLatency / calls + " ms");
            Console.WriteLine("throughput: {0} calls/sec", 1.0 * calls / (numDuration * 60));
            Console.WriteLine("success rate: {0}, success: {1}, failed: {2}, calls: {3}", 1.0 * succeeded / calls, succeeded, failed, calls);
        }

        // Reset the shared read counters before the second measurement pass.
        Reset();

        // Second read workload: identical to the first pass — presumably to measure
        // read latency after the writers have been running longer; TODO confirm intent.
        {
            CancellationTokenSource tokenSource = new CancellationTokenSource();
            CancellationToken token = tokenSource.Token;
            List<Task> tasks = new List<Task>();
            for (int i = 0; i < numReadThread; i++)
            {
                tasks.Add(new Task(() =>
                {
                    Thread.Sleep(2000);
                    while (!token.IsCancellationRequested)
                    {
                        Stopwatch sw = Stopwatch.StartNew();
                        try
                        {
                            long p = Helper.GetRandomAccountID(accountIDs);
                            var accounts = queryable.Where(customer => customer.Value._AccountId == p).ToArray();
                            foreach (var account in accounts)
                            {
                                // Intentionally empty: ToArray() above already executed
                                // the query, which is the operation being measured.
                            }
                            Interlocked.Increment(ref succeeded);
                        }
                        catch (Exception ex)
                        {
                            Console.WriteLine(ex);
                            Interlocked.Increment(ref failed);
                        }
                        sw.Stop();
                        Interlocked.Add(ref totalLatency, sw.ElapsedMilliseconds);
                        Interlocked.Increment(ref calls);
                    }
                }, token));
            }
            Parallel.ForEach(tasks, task => task.Start());

            Thread.Sleep(numDuration * 1000 * 60);
            tokenSource.Cancel();
            Console.Write(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.fff"));
            Console.WriteLine(" Reads:");
            Console.WriteLine("averge latency: " + 1.0 * totalLatency / calls + " ms");
            Console.WriteLine("throughput: {0} calls/sec", 1.0 * calls / (numDuration * 60));
            Console.WriteLine("success rate: {0}, success: {1}, failed: {2}, calls: {3}", 1.0 * succeeded / calls, succeeded, failed, calls);
        }

        // Stop the writers and report write-side statistics.
        writeTokenSource.Cancel();
        Console.Write(DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss.fff"));
        Console.WriteLine(" Writes:");
        Console.WriteLine("averge latency: " + 1.0 * writeTotalLatency / writeCalls + " ms");
        Console.WriteLine("throughput: {0} calls/sec", 1.0 * writeCalls / (numDuration * 60));
        Console.WriteLine("success rate: {0}, success: {1}, failed: {2}, calls: {3}", 1.0 * writeSucceeded / writeCalls, writeSucceeded, writeFailed, writeCalls);

        /*long id = 1;
         * int times = 10000;
         * var stopwatch = new Stopwatch();
         * stopwatch.Start();
         * for (int i=0;i<times;i++)
         * {
         * query.Process(cache,id);
         * id = (id + 3) % 100000;
         * }
         * stopwatch.Stop();
         * long elapsed_time = stopwatch.ElapsedMilliseconds;
         * Console.WriteLine("Finish SQL querying\n");
         * Console.WriteLine("Preload Time on {0} times using {1} ms", times, elapsed_time);
         * var cache = ignite.GetOrCreateCache<int, string>("myCache");*/

        // Store keys in cache (values will end up on different cache nodes).
        /*for (int i = 0; i < 10; i++)
         * cache.Put(i, i.ToString());
         *
         * for (int i = 0; i < 10; i++)
         * Console.WriteLine("Got [key={0}, val={1}]", i, cache.Get(i));*/
    }
}