private void CheckExpire(TaskTimer timer) {

    // get the next scheduled item
    UpdateRecord data = null;
    lock(_data) {
        if(_queue.Count == 0) {
            _queueTimer.Change(_delay, TaskEnv.Current);
            return;
        }
        Tuplet<string, DateTime> key = _queue.Peek();
        if(key.Item2 > DateTime.UtcNow) {
            _queueTimer.Change(key.Item2, TaskEnv.Current);
            return;
        }
        data = _data[key.Item1];
        _queue.Dequeue();
        _data.Remove(key.Item1);
    }
    _dispatcher.Dispatch(data);

    // check for optimal sleep interval
    lock(_data) {
        if(_queue.Count == 0) {
            _queueTimer.Change(_delay, TaskEnv.Current);
            return;
        }
        Tuplet<string, DateTime> key = _queue.Peek();
        _queueTimer.Change(key.Item2, TaskEnv.Current);
    }
}
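Every snippet in this listing uses the same Tuplet type: its ItemN members are passed as out arguments and assigned to directly, so they must be public, mutable fields rather than read-only properties. A minimal two-element sketch of that shape, purely for orientation and not the library source (the real class covers many more arities, at least up to 17 items judging by the nested ToString test further down):

// Orientation sketch only (assumption about the Tuplet shape used throughout this listing).
public class Tuplet<T1, T2> {

    //--- Fields ---
    public T1 Item1;
    public T2 Item2;

    //--- Constructors ---
    public Tuplet() { }

    public Tuplet(T1 item1, T2 item2) {
        Item1 = item1;
        Item2 = item2;
    }

    //--- Methods ---
    public override string ToString() {
        return string.Format("({0}, {1})", Item1, Item2);
    }
}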
public void Many_consumers_with_timeouts() { BlockingQueue<string> q = new BlockingQueue<string>(); Thread c1 = new Thread(MultiConsumer); Thread c2 = new Thread(MultiConsumer); Thread c3 = new Thread(MultiConsumer); c1.IsBackground = true; c2.IsBackground = true; c3.IsBackground = true; Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v1 = new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>(q, "x", TimeSpan.FromSeconds(1), new ManualResetEvent(false)); c1.Start(v1); Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v2 = new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>(q, "x", TimeSpan.FromSeconds(1), new ManualResetEvent(false)); c2.Start(v2); Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v3 = new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>(q, "x", TimeSpan.FromSeconds(1), new ManualResetEvent(false)); c3.Start(v3); q.Enqueue("foo"); Assert.IsTrue(v1.Item4.WaitOne(2000, false), "thread 1 did not finish"); Assert.IsTrue(v2.Item4.WaitOne(2000, false), "thread 2 did not finish"); Assert.IsTrue(v3.Item4.WaitOne(2000, false), "thread 3 did not finish"); bool gotValue = false; foreach(Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v in new Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>[] { v1, v2, v3 }) { if(v.Item2 == "foo") { gotValue = true; Assert.Less(v.Item3.TotalSeconds, 1); } else { Assert.IsNull(v.Item2); Assert.GreaterOrEqual(v.Item3.TotalSeconds, 0.95); } } Assert.IsTrue(gotValue); }
internal Yield CreateSubscriptionSet(DreamContext context, DreamMessage request, Result <DreamMessage> response) { XDoc subscriptionSet = request.ToDocument(); Tuplet <PubSubSubscriptionSet, bool> set = _dispatcher.RegisterSet(subscriptionSet); XUri locationUri = Self.At("subscribers", set.Item1.Location).Uri.AsPublicUri(); DreamMessage msg = null; if (set.Item2) { // existing subs cause a Conflict with ContentLocation of the sub msg = DreamMessage.Conflict("The specified owner already has a registered subscription set"); msg.Headers.ContentLocation = locationUri; } else { // new subs cause a Created with Location of the sub, plus XDoc containing the location XDoc responseDoc = new XDoc("subscription-set") .Elem("uri.location", locationUri) .Elem("access-key", set.Item1.AccessKey); msg = DreamMessage.Created(locationUri, responseDoc); msg.Headers.Location = locationUri.With("access-key", set.Item1.AccessKey); } response.Return(msg); yield break; }
private void MultiConsumer(object state) {
    Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent> v = (Tuplet<BlockingQueue<string>, string, TimeSpan, ManualResetEvent>)state;

    // Item3 is reused: it carries the dequeue timeout in and the elapsed wait time out
    DateTime start = DateTime.Now;
    v.Item1.TryDequeue(v.Item3, out v.Item2);
    v.Item3 = DateTime.Now.Subtract(start);

    // signal the test that this consumer has finished
    v.Item4.Set();
}
private void SingleConsumerForeachLoopAndStop(object obj) {
    Tuplet<IBlockingQueue<string>, List<string>> state = (Tuplet<IBlockingQueue<string>, List<string>>)obj;

    // drain the queue until it is closed, collecting every dequeued item
    foreach(string guid in state.Item1) {
        state.Item2.Add(guid);
    }
}
//--- Methods ---
public void Add(uint pageId, DateTime modificationDate, bool delete) {
    Tuplet<DateTime, bool> record;
    if(!_pages.TryGetValue(pageId, out record)) {
        record = new Tuplet<DateTime, bool>(modificationDate, delete);
        _pages.Add(pageId, record);
    }

    // once a page is flagged for deletion, the flag sticks; later non-delete updates cannot clear it
    if(!record.Item2 && delete) {
        record.Item2 = true;
    }
}
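Because the stored Tuplet<DateTime, bool> is shared by reference, Add can flip the delete flag on a record that is already in _pages without re-inserting it. A tiny usage sketch, assuming a hypothetical PageChangeSet class hosting the method above:

// Hypothetical usage: the second call only mutates Item2 on the record already stored.
var changes = new PageChangeSet();
changes.Add(42, DateTime.UtcNow, false);    // page 42 modified
changes.Add(42, DateTime.UtcNow, true);     // page 42 later deleted; the existing record's Item2 flips to true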
public static XDoc GenerateLicense(string[] licenseArgs) {
    Init();

    // exit code plus output/error streams from the license generator process
    Tuplet<int, Stream, Stream> exitValues = CallLicenseGenerator(licenseArgs);
    Assert.AreEqual(0, exitValues.Item1, "Unexpected return code\n" + GetErrorMsg(exitValues.Item2) + GetErrorMsg(exitValues.Item3));

    // Retrieve generated license
    return !File.Exists(_genLicensePath) ? null : XDocFactory.LoadFrom(_genLicensePath, MimeType.XML);
}
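GetErrorMsg above presumably just drains one of the process streams into a string for the assertion message. A plausible sketch under that assumption (the real helper may differ):

// Hypothetical helper: read a process output/error stream into a string for diagnostics.
private static string GetErrorMsg(Stream stream) {
    if(stream == null) {
        return string.Empty;
    }
    using(StreamReader reader = new StreamReader(stream)) {
        return reader.ReadToEnd();
    }
}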
/// <summary> /// Retrieves image dimensions. /// </summary> /// <param name="file"></param> /// <param name="width"></param> /// <param name="height"></param> /// <returns>True if image too large to be identified or identifying succeeded. False if identification attempted but failed</returns> public static bool RetrieveImageDimensions(StreamInfo file, out int width, out int height, out int frames) { width = 0; height = 0; frames = 0; if (file == null) { return(false); } using (file) { // prevent manipulation of large images if (DekiContext.Current.Instance.MaxImageSize < (ulong)file.Length) { return(true); } Stopwatch sw = Stopwatch.StartNew(); // execute imagemagick-identify Tuplet <int, Stream, Stream> exitValues = Async.ExecuteProcess(DekiContext.Current.Deki.ImageMagickIdentifyPath, "-format \"%wx%h \" -", file.Stream, new Result <Tuplet <int, Stream, Stream> >(TimeSpan.FromMilliseconds(DekiContext.Current.Deki.ImageMagickTimeout))).Wait(); // record stats about this imagemagick execution sw.Stop(); AddToStats(HEADERSTAT_IDENTIFY, sw.ElapsedMilliseconds); int status = exitValues.Item1; Stream outputStream = exitValues.Item2; Stream errorStream = exitValues.Item3; // parse output string output = new StreamReader(outputStream).ReadToEnd(); string[] dimensions = output.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries); foreach (string dimension in dimensions) { int tmpWidth; int tmpHeight; string[] parts = dimension.Split(new char[] { 'x' }, 2); if (parts.Length == 2) { int.TryParse(parts[0], out tmpWidth); int.TryParse(parts[1], out tmpHeight); if ((tmpWidth > 0) && (tmpHeight > 0)) { ++frames; width = Math.Max(width, tmpWidth); height = Math.Max(height, tmpHeight); } } } _log.InfoFormat("Imagemagick identify finished in {0}ms", sw.ElapsedMilliseconds); } return(frames > 0); }
public void CombineWithTuplet16() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int>(1643898504, 1101583865, 894414248, 1480982783, 1214195623, 118740352, 818421871, 1331036488, 2130610971, 534555286, 632810145, 1113625852, 1205077457, 1357564654, 1823479611, 328001364); var args2 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int>(1304559037, 2023074705, 183081128, 625056566, 1755071974, 1740918513, 1416915494, 1333914416, 1701436564, 2013887079, 1214719941, 1240461550, 1703844286, 14536526, 500777339, 163989410); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet9() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int, int, int, int>(1735673413, 1377064747, 1113116636, 959489802, 634977204, 1414209729, 1427780075, 968886160, 793414413); var args2 = new Tuplet <int, int, int, int, int, int, int, int, int>(2021882954, 2066152910, 698498224, 1066770888, 2070193431, 677361390, 1737011068, 19426017, 495129728); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet10() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int, int, int, int, int>(973357632, 1544018695, 353570791, 1226995964, 49262773, 840989145, 559776888, 1747912691, 1364683612, 1046410302); var args2 = new Tuplet <int, int, int, int, int, int, int, int, int, int>(938708246, 2097133255, 405223749, 1828685152, 968247685, 1179698740, 1084353933, 327516175, 2017329178, 715567833); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet2() {
    var eq = EqualityComparerExtensions.CombineWithTuplet<int, int>(EqualityComparer<int>.Default, EqualityComparer<int>.Default);
    var args1 = new Tuplet<int, int>(1493878331, 1431177679);
    var args2 = new Tuplet<int, int>(1849429681, 1751256899);
    Assert.IsTrue(eq.Equals(args1, args1));
    Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2));
    Assert.IsFalse(eq.Equals(args1, args2));
    Assert.IsFalse(eq.Equals(args2, args1));
}
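The CombineWithTuplet tests here (the two-argument case is shown just above) exercise an extension that builds an equality comparer for a Tuplet out of one comparer per component. A minimal sketch of what the two-argument overload might look like, assuming the mutable-field Tuplet shape shown earlier; this is an illustration, not the library's implementation:

// Sketch only: combine two component comparers into a comparer for Tuplet<T1, T2>.
// Equality holds when both components are equal; the hash mixes both component hashes.
public static IEqualityComparer<Tuplet<T1, T2>> CombineWithTuplet<T1, T2>(IEqualityComparer<T1> comparer1, IEqualityComparer<T2> comparer2) {
    return new TupletEqualityComparer<T1, T2>(comparer1, comparer2);
}

private sealed class TupletEqualityComparer<T1, T2> : IEqualityComparer<Tuplet<T1, T2>> {

    //--- Fields ---
    private readonly IEqualityComparer<T1> _comparer1;
    private readonly IEqualityComparer<T2> _comparer2;

    //--- Constructors ---
    public TupletEqualityComparer(IEqualityComparer<T1> comparer1, IEqualityComparer<T2> comparer2) {
        _comparer1 = comparer1;
        _comparer2 = comparer2;
    }

    //--- Methods ---
    public bool Equals(Tuplet<T1, T2> x, Tuplet<T1, T2> y) {
        return _comparer1.Equals(x.Item1, y.Item1) && _comparer2.Equals(x.Item2, y.Item2);
    }

    public int GetHashCode(Tuplet<T1, T2> obj) {
        return (_comparer1.GetHashCode(obj.Item1) * 397) ^ _comparer2.GetHashCode(obj.Item2);
    }
}

The higher-arity overloads exercised by the surrounding tests presumably repeat this pairing once per additional component.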
public void CombineWithTuplet7() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int, int>(1817177677, 1375769249, 1472875537, 1455274364, 893442372, 222821153, 2139036453); var args2 = new Tuplet <int, int, int, int, int, int, int>(1122443037, 1875720571, 672543699, 1704580922, 982962167, 1675057635, 1027430421); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet6() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int>(595253483, 234458665, 572489588, 1694483652, 1522560149, 1510948432); var args2 = new Tuplet <int, int, int, int, int, int>(137241414, 1508071593, 1389568860, 1703299212, 2009351709, 1956271747); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet5() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int>(768277902, 304671234, 642495331, 753171965, 276676877); var args2 = new Tuplet <int, int, int, int, int>(1686856785, 97220460, 2057705716, 1820941261, 795007812); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet11() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int>(392453217, 1270864718, 1460409938, 185614642, 1516642130, 1779637132, 1249693301, 697996635, 1149977745, 88375853, 1896587492); var args2 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int>(927038697, 924366747, 1469951323, 1484477316, 757660304, 342451040, 508677449, 1327801974, 272127491, 399712914, 1034548160); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet12() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int, int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int>(49526117, 381369773, 30177914, 843764805, 1616659205, 237467758, 1877734186, 2034555795, 985839498, 349845215, 1867165537, 1451341831); var args2 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int>(102676511, 1849976561, 83608757, 167956149, 1857837481, 417109288, 1738779491, 2009263900, 597934946, 1276307759, 1297306457, 11669549); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet15() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int, int>(951796389, 1699741592, 1862964908, 709707081, 1684019481, 662752809, 1924836516, 236505162, 1064370040, 1925405905, 1068775490, 300636596, 1994976849, 136928363, 1001879028); var args2 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int, int>(1776558442, 1118965621, 1448082938, 65597502, 839856988, 196611263, 1085392957, 485021651, 622503358, 1153346442, 1669482955, 161975470, 1760847106, 656561325, 2101761346); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet14() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int>(798323615, 910692686, 680831647, 537932661, 436613214, 920702005, 1231546963, 18508365, 1818481944, 533903132, 2040743569, 1667146805, 1197308095, 410163014); var args2 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int>(1350131014, 121963850, 1135486912, 744771454, 628637960, 1152052239, 12519421, 1184960156, 254131361, 726553948, 1041517804, 1482512996, 26287757, 1898047827); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet13() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int, int, int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int>(1172766508, 1082756073, 344207836, 210587381, 837247700, 575676484, 1147197848, 1745201687, 315854919, 1505388704, 1221338601, 1079040165, 155436728); var args2 = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int>(672877325, 162977927, 1012225543, 967746096, 1262905597, 1250020002, 1546742277, 1207356807, 1620508563, 1367274812, 1781984402, 674051547, 174494891); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public static Tuplet CreateTuplet(int number, string type) { ObjectFactory factory = new ObjectFactory(); Tuplet tuplet = factory.createTuplet(); if (type == "start") { tuplet.setBracket(YesNo.YES); } tuplet.setNumber(new Integer(number)); tuplet.setType(StartStop.fromValue(type)); return(tuplet); }
public void CombineWithTuplet3() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int>(1078584633, 1069952473, 864272798); var args2 = new Tuplet <int, int, int>(606339120, 660923900, 1089055706); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet8() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int, int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int, int, int, int, int>(537590800, 740706366, 1154500802, 162572896, 1654036439, 1168362564, 1396494859, 69264986); var args2 = new Tuplet <int, int, int, int, int, int, int, int>(11085637, 426465235, 516566118, 943792764, 1965081051, 310351301, 154195352, 696021831); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void CombineWithTuplet4() { var eq = EqualityComparerExtensions.CombineWithTuplet <int, int, int, int>(EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default, EqualityComparer <int> .Default); var args1 = new Tuplet <int, int, int, int>(490792769, 1340157553, 1821299733, 291436766); var args2 = new Tuplet <int, int, int, int>(56609777, 977758626, 932880312, 974984698); Assert.IsTrue(eq.Equals(args1, args1)); Assert.AreNotEqual(eq.GetHashCode(args1), eq.GetHashCode(args2)); Assert.IsFalse(eq.Equals(args1, args2)); Assert.IsFalse(eq.Equals(args2, args1)); }
public void One_producer_many_consumers_loop_with_foreach() { int n = 500; var enqueued = new List <string>(); var dequeued = new List <string>(); var q = new BlockingQueue <string>(); var c1 = new Thread(MultiConsumerForeachLoop) { IsBackground = true }; var c2 = new Thread(MultiConsumerForeachLoop) { IsBackground = true }; var c3 = new Thread(MultiConsumerForeachLoop) { IsBackground = true }; var v1 = new Tuplet <BlockingQueue <string>, List <string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false)); c1.Start(v1); var v2 = new Tuplet <BlockingQueue <string>, List <string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false)); c2.Start(v2); var v3 = new Tuplet <BlockingQueue <string>, List <string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false)); c3.Start(v3); Thread.Sleep(1000); for (int i = 0; i < n; i++) { string guid = Guid.NewGuid().ToString(); q.Enqueue(guid); enqueued.Add(guid); } q.Close(); Assert.IsTrue(v1.Item4.WaitOne(10000, false), "thread 1 did not finish"); Assert.IsTrue(v2.Item4.WaitOne(10000, false), "thread 2 did not finish"); Assert.IsTrue(v3.Item4.WaitOne(10000, false), "thread 3 did not finish"); _log.DebugFormat("Thread 1 processed {0}", v1.Item3); _log.DebugFormat("Thread 2 processed {0}", v2.Item3); _log.DebugFormat("Thread 3 processed {0}", v3.Item3); Console.WriteLine("Thread 1 processed {0}", v1.Item3); Console.WriteLine("Thread 2 processed {0}", v2.Item3); Console.WriteLine("Thread 3 processed {0}", v3.Item3); Assert.GreaterOrEqual(v1.Item3, n / 4); Assert.GreaterOrEqual(v2.Item3, n / 4); Assert.GreaterOrEqual(v3.Item3, n / 4); Assert.AreEqual(n, dequeued.Count); Assert.AreEqual(dequeued.OrderBy(x => x).ToArray(), enqueued.OrderBy(x => x).ToArray()); }
public void Tuplet_Nested_ToString() { var args = new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, Tuplet <int> > >( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, new Tuplet <int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, int, Tuplet <int> >( 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, new Tuplet <int>( 33 ) ) ); Assert.AreEqual("(" + string.Join(", ", Enumerable.Range(1, 33)) + ")", args.ToString()); }
private void Process(string hint, string application, string cmdline) {

    // run the external process and wait at most 30 seconds for (exit code, stdout, stderr)
    Tuplet<int, Stream, Stream> exitValues = Async.ExecuteProcess(application, cmdline, Stream.Null, new Result<Tuplet<int, Stream, Stream>>(TimeSpan.FromSeconds(30))).Wait();
    int status = exitValues.Item1;
    Stream output = exitValues.Item2;
    Stream error = exitValues.Item3;

    // exit codes 0 and 1 are treated as success; anything else surfaces stderr as an internal error
    if((status < 0) || (status >= 2)) {
        string message;
        using(StreamReader reader = new StreamReader(error, Encoding.ASCII)) {
            message = reader.ReadToEnd();
        }
        throw new DreamAbortException(DreamMessage.InternalError(string.Format("{0} failed with status {1}:\n{2}", hint, status, message)));
    }
}
private void CheckExpire(TaskTimer timer) { while (true) { // get the next scheduled item UpdateRecord data = null; lock (_data) { if (_queue.Count == 0) { _queueTimer.Change(_delay, TaskEnv.None); return; } Tuplet <DateTime, XUri> key = _queue.Peek(); if (key.Item1 > DateTime.UtcNow) { _queueTimer.Change(key.Item1, TaskEnv.None); return; } data = _data[key.Item2]; _queue.Dequeue(); _data.Remove(key.Item2); } Interlocked.Increment(ref _pendingCount); _dispatcher.Dispatch(data, new Result(TimeSpan.MaxValue)).WhenDone(r => { // cleanup items from the queue var poll = false; foreach (var itemId in data.QueueIds) { if (!_persistentQueue.CommitDequeue(itemId)) { // if we couldn't take an item, it must have gone back to the queue, so we better poll again poll = true; } } if (poll) { _poll = true; } Interlocked.Decrement(ref _pendingCount); if (r.HasException) { _log.Error(string.Format("dispatch of '{0}' encountered an error", data.Id), r.Exception); } }); } }
public static StreamInfo BuildThumb(StreamInfo file, FormatType format, RatioType ratio, uint width, uint height) { using (file) { //The mimetype of the thumb is based on the formattype. The MimeType mime = ResolvePreviewMime(ref format); //Some basic DoS protection. if (!IsPreviewSizeAllowed(width, height)) { throw new Exceptions.ImagePreviewOversizedInvalidArgumentException(); } string thumbnailArgs = string.Empty; if (width > 0 || height > 0) { thumbnailArgs = string.Format("-colorspace RGB -thumbnail {0}{1}{2}{3}", width == 0 ? "" : width.ToString(), height > 0 ? "x" : "", height == 0 ? string.Empty : height.ToString(), ratio == RatioType.FIXED || ratio == RatioType.UNDEFINED ? "" : "!"); } // NOTE (steveb): the '-[0]' option means that we only want to convert the first frame if there are multiple frames string args = string.Format("{0} -[0] {1}-", thumbnailArgs, (format == FormatType.UNDEFINED) ? string.Empty : format.ToString() + ":"); Stopwatch sw = Stopwatch.StartNew(); // run ImageMagick application Tuplet <int, Stream, Stream> exitValues = Async.ExecuteProcess(DekiContext.Current.Deki.ImageMagickConvertPath, args, file.Stream, new Result <Tuplet <int, Stream, Stream> >(TimeSpan.FromMilliseconds(DekiContext.Current.Deki.ImageMagickTimeout))).Wait(); // record stats about this imagemagick execution sw.Stop(); AddToStats(HEADERSTAT_CONVERT, sw.ElapsedMilliseconds); int status = exitValues.Item1; Stream outputStream = exitValues.Item2; Stream errorStream = exitValues.Item3; if (outputStream.Length == 0) { using (StreamReader error = new StreamReader(errorStream)) { _log.WarnMethodCall("Imagemagick convert failed", args, status, error.ReadToEnd()); } return(null); } _log.InfoFormat("Imagemagick convert finished in {0}ms. Args:{1}", sw.ElapsedMilliseconds, args); return(new StreamInfo(outputStream, outputStream.Length, mime)); } }
public DekiScriptDom Visit(DekiScriptDomIfElse expr, DekiScriptOptimizerState state) { List <Tuplet <DekiScriptExpression, DekiScriptDom> > conditionals = new List <Tuplet <DekiScriptExpression, DekiScriptDom> >(); for (int i = 0; i < expr.Conditionals.Length; i++) { Tuplet <DekiScriptExpression, DekiScriptDom> conditional = expr.Conditionals[i]; // evaluate current branch DekiScriptExpression inner = conditional.Item1.VisitWith(DekiScriptExpressionOptimizer.Instance, state); DekiScriptDom node = conditional.Item2.VisitWith(this, state); // check if test has a constant outcome if (inner is DekiScriptLiteral) { if (((DekiScriptLiteral)inner).IsNilFalseZero) { // NOTE (steveb): this conditional will never be successful; skip it } else { // NOTE (steveb): this conditional will is always successful; make it the "else" branch // check if there are any previous branches if (conditionals.Count == 0) { // just return the inner node structure return(node); } // add branch as final "else" branch conditionals.Add(new Tuplet <DekiScriptExpression, DekiScriptDom>(null, node)); break; } } else { conditionals.Add(new Tuplet <DekiScriptExpression, DekiScriptDom>(inner, node)); } } return(new DekiScriptDomIfElse(expr.Location, conditionals.ToArray())); }
protected override XDoc GetConfigForWikiId(string wikiId) { XDoc configDoc = XDoc.Empty; DreamMessage p = _directory.At(wikiId).GetAsync().Wait(); if (p.IsSuccessful && p.ContentType.IsXml) { XDoc wikiDoc = p.ToDocument(); configDoc = wikiDoc["config"]; List<string> hosts = new List<string>(); foreach (XDoc hostDoc in configDoc["host"]) { string host = hostDoc.AsText; hosts.Add(host); lock (_hostsToWikiIds) { _hostsToWikiIds[host] = new Tuplet<string, DateTime>(wikiId, DateTime.UtcNow); } } configDoc["host"].RemoveAll(); configDoc.Elem("hosts", string.Join(",", hosts.ToArray())); string status = wikiDoc["status"].AsText; if (!string.IsNullOrEmpty(status)) { switch (status.ToLowerInvariant()) { case "active": break; default: //TODO: define custom errors for more statuses throw new DreamAbortException(new DreamMessage(DreamStatus.Gone, null, MimeType.TEXT, string.Format("Site is currently unavailable. Status: {0}", status))); } } DateTime updated = wikiDoc["date.updated"].AsDate ?? DateTime.MinValue; if(updated != DateTime.MinValue) { configDoc.Attr("updated", updated); } } else { _log.WarnFormat("Unable to lookup config for site '{0}'. Return status: '{1}'", wikiId, p.Status); } return configDoc; }
protected override XDoc GetConfigForWikiId(string wikiId) { if(string.IsNullOrEmpty(wikiId)) { return null; } var p = DirectoryGetConfigForWikiId(wikiId); if(p.IsSuccessful && p.ContentType.IsXml) { var configDoc = XDoc.Empty; var wikiDoc = p.ToDocument(); configDoc = wikiDoc["config"]; foreach(XDoc hostDoc in configDoc["host"]) { string host = hostDoc.AsText; lock(_hostsToWikiIds) { _hostsToWikiIds[host] = new Tuplet<string, DateTime>(wikiId, DateTime.UtcNow); } } var updated = wikiDoc["date.updated"].AsDate ?? DateTime.MinValue; if(updated != DateTime.MinValue) { configDoc.Attr("updated", updated); } return configDoc; } _log.WarnFormat("Unable to lookup config for site '{0}'. Return status: '{1}'", wikiId, p.Status); return null; }
protected override string GetWikiIdByHostname(string hostname) { Tuplet<string, DateTime> wikiId = null; lock(_hostsToWikiIds) { _hostsToWikiIds.TryGetValue(hostname, out wikiId); //Associations between a hostname and a wiki id should timeout at least every 5 minutes to allow hostnames to be switched. if (wikiId != null && wikiId.Item2.Add(TimeSpan.FromSeconds(Math.Min((int) InactiveInstanceTimeOut.TotalSeconds, HOST_WIKIID_TIMEOUT))) < DateTime.UtcNow) { _hostsToWikiIds.Remove(hostname); wikiId = null; } } if (wikiId == null) { DreamMessage p = _directory.At("="+hostname).GetAsync().Wait(); if (p.IsSuccessful) { XDoc wikiDoc = p.ToDocument(); wikiId = new Tuplet<string, DateTime>(wikiDoc["@id"].AsText, DateTime.UtcNow); lock (_hostsToWikiIds) { _hostsToWikiIds[hostname] = wikiId; } } } if( wikiId == null) return null; else return wikiId.Item1; }
private Result<IssueData[]> ProcessIssueBatch(ElasticThreadPool pool, string projectId, string filterId, int pageNumber, int issuesInBatch, Tuplet<bool> canceled, Result<IssueData[]> result) { pool.QueueWorkItem(HandlerUtil.WithEnv(delegate { // TODO (steveb): use result.IsCanceled instead of shared tuple once cancellation is supported on the result object // check if request has been canceled if(!canceled.Item1) { IssueData[] issuesForBatch; if(!string.IsNullOrEmpty(filterId)) { issuesForBatch = _service.mc_filter_get_issues(_username, _password, projectId, filterId, pageNumber.ToString(), issuesInBatch.ToString()); } else { issuesForBatch = _service.mc_project_get_issues(_username, _password, projectId, pageNumber.ToString(), issuesInBatch.ToString()); } result.Return(issuesForBatch); } else { // TODO (steveb): throw a more specific exception result.Throw(new Exception("unspecified error")); } },TaskEnv.Clone())); return result; }
private IssueData[] RetrieveIssueData(string username, string password, string projectId, string filterId, int pageNumber, int numberPerPage) { using(ElasticThreadPool pool = new ElasticThreadPool(0, 2)) { List<IssueData> result = new List<IssueData>(); List<Result<IssueData[]>> results = new List<Result<IssueData[]>>(); Tuplet<bool> canceled = new Tuplet<bool>(false); for(int issuesRemaining = numberPerPage; issuesRemaining > 0; issuesRemaining -= MAX_ISSUES_IN_REQUEST, ++pageNumber) { int issuesInBatch = Math.Min(issuesRemaining, MAX_ISSUES_IN_REQUEST); results.Add(ProcessIssueBatch(pool, projectId, filterId, pageNumber, issuesInBatch, canceled, new Result<IssueData[]>(TimeSpan.FromSeconds(30)))); } Dictionary<string, IssueData> tempHash = new Dictionary<string, IssueData>(); foreach(Result<IssueData[]> r in results) { IssueData[] batch = r.Wait(); //HACK: Workaround for Mantis's broken paging: Asking for a batch at a page number that doesnt exist // will return the first page's results. // This takes care of the case when the #of tickets is evenly divisible by the batch size. (i.e 100 tix, 20/page) foreach(IssueData bug in batch) { if(!tempHash.ContainsKey(bug.id)) { tempHash[bug.id] = bug; result.Add(bug); } } if(batch.Length < MAX_ISSUES_IN_REQUEST) { //the current batch did not fill up, don't go to the next batch canceled.Item1 = true; break; } } return result.ToArray(); } }
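RetrieveIssueData and ProcessIssueBatch above share a single-element Tuplet<bool> as a stand-in cancellation token: because the tuplet is a reference type with a mutable Item1 field, setting it on the consumer side is visible to work items that have not started yet. A condensed sketch of that pattern with hypothetical names (QueueCancellableWork, workItems, and queueWork are not from the original code):

// Sketch of the shared-flag cancellation pattern (hypothetical helper, not the original code).
private static void QueueCancellableWork(List<Action> workItems, Tuplet<bool> canceled, Action<Action> queueWork) {
    foreach(Action item in workItems) {
        Action current = item;
        queueWork(() => {

            // the flag is checked as late as possible, right before the work runs
            if(canceled.Item1) {
                return;
            }
            current();
        });
    }
}

The caller keeps the same canceled instance and sets canceled.Item1 = true as soon as a short batch shows there are no further pages to fetch, which is exactly what the loop in RetrieveIssueData does.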
public XDoc WikiLanugages( [DekiExtParam("Language to title map of all page versions written in other languages")] Hashtable languages ) { // Extract the language link information and sort List<Tuplet<string, string>> languageLinks = new List<Tuplet<string, string>>(languages.Count); foreach(DictionaryEntry entry in languages) { CultureInfo ci = new CultureInfo((string)entry.Key); Tuplet<string, string> languageLink = new Tuplet<string, string> { Item1 = ci.TextInfo.ToTitleCase(ci.NativeName), Item2 = Utils.AsPublicUiUri(Title.FromUriPath((string)entry.Value)) }; languageLinks.Add(languageLink); } languageLinks.Sort((left, right) => string.Compare(left.Item1, right.Item1, StringComparison.InvariantCulture)); // Generate an HTML list of the language data XDoc result = new XDoc("html").Start("body").Attr("target", "languages"); result.Start("ul"); for(int i = 0; i < languageLinks.Count; i++) { result.Start("li").Start("a").Attr("href", languageLinks[i].Item2).Value(languageLinks[i].Item1).End().End(); } return result.End().End(); }
protected override string GetWikiIdByHostname(string hostname) { Tuplet<string, DateTime> wikiId; lock(_hostsToWikiIds) { _hostsToWikiIds.TryGetValue(hostname, out wikiId); // Associations between a hostname and a wiki id should timeout at least every 5 minutes to allow hostnames to be switched. if(wikiId != null) { TimeSpan timeSpanSinceLastCheck = DateTime.UtcNow - wikiId.Item2; if(timeSpanSinceLastCheck > InactiveInstanceTimeOut || timeSpanSinceLastCheck > TimeSpan.FromSeconds(HOST_WIKIID_TIMEOUT)) { _hostsToWikiIds.Remove(hostname); wikiId = null; } } } if(wikiId == null) { DreamMessage p = DirectoryGetWikiIdByHostname(hostname); if(p.IsSuccessful) { XDoc wikiDoc = p.ToDocument(); wikiId = new Tuplet<string, DateTime>(wikiDoc["@id"].AsText, DateTime.UtcNow); lock(_hostsToWikiIds) { _hostsToWikiIds[hostname] = wikiId; } } else { _log.WarnFormat("unable find a wikiid for hostname '{0}'", hostname); } } return wikiId == null ? null : wikiId.Item1; }
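Both GetWikiIdByHostname variants above follow the same caching idiom: store the wiki id together with the UTC time it was resolved in a Tuplet<string, DateTime>, and evict the entry once it is older than the allowed window so hostname reassignments are picked up. A condensed sketch of just that freshness check (GetFreshWikiId and maxAge are hypothetical names):

// Hypothetical helper: return a cached wiki id only while it is younger than maxAge.
private static string GetFreshWikiId(IDictionary<string, Tuplet<string, DateTime>> cache, string hostname, TimeSpan maxAge) {
    lock(cache) {
        Tuplet<string, DateTime> entry;
        if(cache.TryGetValue(hostname, out entry)) {
            if(DateTime.UtcNow - entry.Item2 <= maxAge) {
                return entry.Item1;
            }

            // stale entry: drop it so the caller falls back to a directory lookup
            cache.Remove(hostname);
        }
    }
    return null;
}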
private static void Highlight_InlineTextChanges(Tuplet<ArrayDiffKind, Token>[] diff, int index, List<Tuplet<ArrayDiffKind, Token>> combinedChanges, List<Tuplet<ArrayDiffKind, Token>> beforeChanges, List<Tuplet<ArrayDiffKind, Token>> afterChanges, out int next) { int lastAdded = index; int lastRemoved = index; int firstAdded = -1; int firstRemoved = -1; Tuplet<ArrayDiffKind, Token> item; // determine how long the chain of intermingled changes is for(int i = index, sameCounter = 0; (i < diff.Length) && ((diff[i].Item2.Type == XmlNodeType.Text) || (diff[i].Item2.Type == XmlNodeType.Whitespace) || diff[i].Item2.Type == XmlNodeType.SignificantWhitespace) && (sameCounter <= MAX_SAME_COUNTER); ++i) { item = diff[i]; Token token = item.Item2; if((token.Value.Length > 0) && !char.IsWhiteSpace(token.Value[0])) { if(item.Item1 == ArrayDiffKind.Added) { sameCounter = 0; if(firstAdded == -1) { firstAdded = i; } lastAdded = i; } else if(item.Item1 == ArrayDiffKind.Removed) { sameCounter = 0; if(firstRemoved == -1) { firstRemoved = i; } lastRemoved = i; } else { // we count the number of non-changed elements to break-up long runs with no changes ++sameCounter; } } } // set index of next element next = Math.Max(lastAdded, lastRemoved) + 1; // check if any text was added if(firstAdded != -1) { // add all unchanged text before the first added text for(int i = index; i < firstAdded; ++i) { if(diff[i].Item1 == ArrayDiffKind.Same) { item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, diff[i].Item2); combinedChanges.Add(item); afterChanges.Add(item); } } // add all text nodes that were added in a row object key = new object(); item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, new Token(XmlNodeType.Element, INSERTED, key)); combinedChanges.Add(item); afterChanges.Add(item); item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, new Token(XmlNodeType.EndElement, string.Empty, null)); combinedChanges.Add(item); afterChanges.Add(item); for(int i = firstAdded; i <= lastAdded; ++i) { if(diff[i].Item1 != ArrayDiffKind.Removed) { item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, diff[i].Item2); combinedChanges.Add(item); afterChanges.Add(item); } } item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, new Token(XmlNodeType.None, INSERTED, key)); combinedChanges.Add(item); afterChanges.Add(item); // add all unchanged text after the last added text for(int i = lastAdded + 1; i < next; ++i) { if(diff[i].Item1 == ArrayDiffKind.Same) { item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, diff[i].Item2); combinedChanges.Add(item); afterChanges.Add(item); } } } else { // add all unchanged text before the first added text for(int i = index; i < next; ++i) { if(diff[i].Item1 == ArrayDiffKind.Same) { item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, diff[i].Item2); combinedChanges.Add(item); afterChanges.Add(item); } } } // check if any text was removed if(firstRemoved != -1) { // add all unchanged text before the first removed text for(int i = index; i < firstRemoved; ++i) { if(diff[i].Item1 == ArrayDiffKind.Same) { item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, diff[i].Item2); if((item.Item2.Value.Length > 0) && !char.IsWhiteSpace(item.Item2.Value[0])) { combinedChanges.Add(item); } beforeChanges.Add(item); } } // add all text nodes that were removed in a row object key = new object(); item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, new Token(XmlNodeType.Element, DELETED, key)); combinedChanges.Add(item); beforeChanges.Add(item); item = new 
Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, new Token(XmlNodeType.EndElement, string.Empty, null)); combinedChanges.Add(item); beforeChanges.Add(item); for(int i = firstRemoved; i <= lastRemoved; ++i) { if(diff[i].Item1 != ArrayDiffKind.Added) { item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, diff[i].Item2); combinedChanges.Add(item); beforeChanges.Add(item); } } item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, new Token(XmlNodeType.None, DELETED, key)); combinedChanges.Add(item); beforeChanges.Add(item); // add all unchanged text after the last removed text for(int i = lastRemoved + 1; i < next; ++i) { if(diff[i].Item1 == ArrayDiffKind.Same) { item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, diff[i].Item2); combinedChanges.Add(item); beforeChanges.Add(item); } } } else { // add all unchanged text before the first removed text for(int i = index; i < next; ++i) { if(diff[i].Item1 == ArrayDiffKind.Same) { item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, diff[i].Item2); if((item.Item2.Value.Length > 0) && !char.IsWhiteSpace(item.Item2.Value[0])) { combinedChanges.Add(item); } beforeChanges.Add(item); } } } }
/// <summary>
/// Write a difference set.
/// </summary>
/// <param name="diffset">Difference set.</param>
/// <param name="writer">TextWriter to write the set to.</param>
public static void Write(Tuplet<ArrayDiffKind, Token>[] diffset, TextWriter writer) {
    foreach(Tuplet<ArrayDiffKind, Token> entry in diffset) {
        switch(entry.Item1) {
        case ArrayDiffKind.Same:
            writer.WriteLine(" " + entry.Item2);
            break;
        case ArrayDiffKind.Removed:
            writer.WriteLine("-" + entry.Item2);
            break;
        case ArrayDiffKind.Added:
            writer.WriteLine("+" + entry.Item2);
            break;
        case ArrayDiffKind.AddedLeft:
            writer.WriteLine("+<" + entry.Item2);
            break;
        case ArrayDiffKind.AddedRight:
            writer.WriteLine("+>" + entry.Item2);
            break;
        case ArrayDiffKind.RemovedLeft:
            writer.WriteLine("-<" + entry.Item2);
            break;
        case ArrayDiffKind.RemovedRight:
            writer.WriteLine("->" + entry.Item2);
            break;
        }
    }
}
/// <summary>
/// Create a highlight document from a set of differences.
/// </summary>
/// <param name="diff">Difference set.</param>
/// <returns>Highlight document.</returns>
public static XDoc Highlight(Tuplet<ArrayDiffKind, Token>[] diff) {
    XDoc combined;
    List<Tuplet<string, string, string>> invisibleChanges;
    XDoc before;
    XDoc after;
    Highlight(diff, out combined, out invisibleChanges, out before, out after);
    return combined;
}
public Yield GetQueuedItem(DreamContext context, DreamMessage request, Result<DreamMessage> response) { uint pageId = context.GetParam<uint>("pageid", 0); string containerId = context.GetParam("containerId", null); string authtoken = null; foreach(DreamCookie cookie in request.Cookies) { if(StringUtil.EqualsInvariantIgnoreCase("authtoken", cookie.Name)) { authtoken = cookie.Value; break; } } if(pageId == 0) { _log.WarnFormat("Bad pageId"); response.Return(DreamMessage.BadRequest("Bad pageId")); yield break; } if(string.IsNullOrEmpty(containerId)) { _log.WarnFormat("Missing containerId"); response.Return(DreamMessage.BadRequest("Missing containerId")); yield break; } if(string.IsNullOrEmpty(authtoken)) { _log.WarnFormat("Unable to retrieve subscriber credentials from cookie"); response.Return(DreamMessage.BadRequest("Unable to retrieve subscriber credentials from cookie")); yield break; } Tuplet<string, DateTime> userCache; if(!_userCache.TryGetValue(authtoken, out userCache)) { Result<DreamMessage> userResult; yield return userResult = _deki.At("users", "current").WithHeader("X-Authtoken", authtoken).GetAsync(); if(!userResult.Value.IsSuccessful) { _log.WarnFormat("Unable to retrieve user info for provided credentials"); response.Return(DreamMessage.BadRequest("Unable to retrieve user info for provided credentials")); yield break; } XDoc userDoc = userResult.Value.ToDocument(); _log.DebugFormat("caching user info for '{0}': {1}", userDoc["username"].AsText, userDoc["@href"].AsUri.AsPublicUri()); userCache = new Tuplet<string, DateTime>(userDoc["@href"].AsUri.AsPublicUri().Path, DateTime.UtcNow); lock(_userCache) { _userCache[authtoken] = userCache; } } string subscriber = userCache.Item1; lock(_subscriptions) { Subscription subscription; if(!_subscriptions.TryGetValue(pageId, out subscription)) { subscription = new Subscription(); _subscriptions[pageId] = subscription; _log.DebugFormat("created subscription for {0}", pageId); } _log.DebugFormat("checking subscription for {0}", subscriber); if(!subscription.HasChanged(subscriber)) { response.Return(DreamMessage.Ok()); yield break; } } XDoc doc = new XDoc("div") .Attr("class", "systemmsg") .Start("div") .Attr("class", "inner") .Value("The page has changed. Click ") .Start("a") .Attr("rel", "custom") .Attr("href", "") .Value("here") .End() .Value(" to reload.") .End() .Start("script") .Attr("type", "text/javascript") .Value(string.Format("$('#{0}').slideDown('slow');", containerId)) .End(); _log.DebugFormat("page {0} changed deliverd", pageId); response.Return(DreamMessage.Ok(doc)); yield break; }
private List<XDoc> CalculateSubscriptions() { var subscriptions = new List<XDoc>(); var wikiis = new List<Tuplet<string, SiteInfo>>(); lock(_subscriptions) { foreach(KeyValuePair<string, SiteInfo> wiki in _subscriptions) { wikiis.Add(new Tuplet<string, SiteInfo>(wiki.Key, wiki.Value)); } } int pageSubscriptions = 0; int subscribedUsers = 0; foreach(Tuplet<string, SiteInfo> wiki in wikiis) { var subscriptionLookup = new Dictionary<uint, Tuplet<string, List<uint>>>(); var key = wiki.Item1; var siteInfo = wiki.Item2; lock(siteInfo) { foreach(UserInfo info in siteInfo.Users.Values) { subscribedUsers++; foreach(Tuplet<uint, string> resource in info.Resources) { Tuplet<string, List<uint>> subs; if(!subscriptionLookup.TryGetValue(resource.Item1, out subs)) { subs = new Tuplet<string, List<uint>>(resource.Item2, new List<uint>()); subscriptionLookup.Add(resource.Item1, subs); } else if(resource.Item2 == "infinity") { subs.Item1 = resource.Item2; } subs.Item2.Add(info.Id); } } } foreach(KeyValuePair<uint, Tuplet<string, List<uint>>> kvp in subscriptionLookup) { XDoc subscription = new XDoc("subscription") .Elem("channel", string.Format("event://{0}/deki/pages/create", key)) .Elem("channel", string.Format("event://{0}/deki/pages/update", key)) .Elem("channel", string.Format("event://{0}/deki/pages/delete", key)) .Elem("channel", string.Format("event://{0}/deki/pages/revert", key)) .Elem("channel", string.Format("event://{0}/deki/pages/move", key)) .Elem("channel", string.Format("event://{0}/deki/pages/tags/update", key)) .Elem("channel", string.Format("event://{0}/deki/pages/dependentschanged/comments/create", key)) .Elem("channel", string.Format("event://{0}/deki/pages/dependentschanged/comments/update", key)) .Elem("channel", string.Format("event://{0}/deki/pages/dependentschanged/comments/delete", key)) .Elem("channel", string.Format("event://{0}/deki/pages/dependentschanged/files/create", key)) .Elem("channel", string.Format("event://{0}/deki/pages/dependentschanged/files/update", key)) .Elem("channel", string.Format("event://{0}/deki/pages/dependentschanged/files/delete", key)) .Elem("channel", string.Format("event://{0}/deki/pages/dependentschanged/files/properties/*", key)) .Elem("channel", string.Format("event://{0}/deki/pages/dependentschanged/files/restore", key)) .Elem("uri.resource", string.Format("deki://{0}/pages/{1}#depth={2}", key, kvp.Key, kvp.Value.Item1)) .Elem("uri.proxy", _destination); pageSubscriptions++; foreach(int userId in kvp.Value.Item2) { subscription .Start("recipient") .Attr("userid", userId) .Elem("uri", string.Format("deki://{0}/users/{1}", key, userId)) .End(); } subscriptions.Add(subscription); } } _log.DebugFormat("calculated subscription set with {0} page subscriptions for {1} users", pageSubscriptions, subscribedUsers); return subscriptions; }
//--- Methods --- protected override Yield Start(XDoc config, Result result) { yield return Coroutine.Invoke(base.Start, config, new Result()); // set up plug for phpscript that will handle the notifications _emailer = Plug.New(config["uri.emailer"].AsUri); // set up plug deki, so we can validate users _deki = Plug.New(config["uri.deki"].AsUri); // get the apikey, which we will need as a subscription auth token for subscriptions not done on behalf of a user _apikey = config["apikey"].AsText; _cache = new PageChangeCache(_deki.With("apikey", _apikey), TimeSpan.FromSeconds(config["page-cache-ttl"].AsInt ?? 2)); // resource manager for email template string resourcePath = Config["resources-path"].AsText; if(!string.IsNullOrEmpty(resourcePath)) { _resourceManager = new PlainTextResourceManager(Environment.ExpandEnvironmentVariables(resourcePath)); } else { // creating a test resource manager _log.WarnFormat("'resource-path' was not defined in Config, using a test resource manager for email templating"); TestResourceSet testSet = new TestResourceSet(); testSet.Add("Notification.Page.email-subject", "Page Modified"); testSet.Add("Notification.Page.email-header", "The following pages have changed:"); _resourceManager = new PlainTextResourceManager(testSet); } // get persisted subscription storage List<Tuplet<string, List<XDoc>>> allWikiSubs = new List<Tuplet<string, List<XDoc>>>(); Result<DreamMessage> storageCatalog; yield return storageCatalog = Storage.At("subscriptions").GetAsync(); foreach(XDoc wikiSubs in storageCatalog.Value.ToDocument()["folder/name"]) { string wikihost = wikiSubs.AsText; Tuplet<string, List<XDoc>> wikiDoc = new Tuplet<string, List<XDoc>>(wikihost, new List<XDoc>()); allWikiSubs.Add(wikiDoc); Result<DreamMessage> wikiUsers; yield return wikiUsers = Storage.At("subscriptions", wikihost).GetAsync(); foreach(XDoc userDocname in wikiUsers.Value.ToDocument()["file/name"]) { string userFile = userDocname.AsText; if(!userFile.EndsWith(".xml")) { _log.WarnFormat("Found stray file '{0}' in wiki '{1}' store, ignoring", userFile, wikihost); continue; } Result<DreamMessage> userDoc; yield return userDoc = Storage.At("subscriptions", wikihost, userFile).GetAsync(); try { wikiDoc.Item2.Add(userDoc.Value.ToDocument()); } catch(InvalidDataException e) { _log.Error(string.Format("Unable to retrieve subscription store for user {0}/{1}", wikihost, userFile), e); } } } _subscriptions = new SubscriptionManager(Self.Uri.AsServerUri().At("notify"), allWikiSubs); _subscriptions.RecordsChanged += PersistSubscriptions; _subscriptions.SubscriptionsChanged += PushSubscriptionSetUpstream; // set up subscription for pubsub _baseSubscriptionSet = new XDoc("subscription-set") .Elem("uri.owner", Self.Uri.AsServerUri().ToString()) .Start("subscription") .Elem("channel", "event://*/deki/users/*") .Add(DreamCookie.NewSetCookie("service-key", InternalAccessKey, Self.Uri).AsSetCookieDocument) .Start("recipient") .Attr("authtoken", _apikey) .Elem("uri", Self.Uri.AsServerUri().At("updateuser").ToString()) .End() .End(); XDoc subSet = _baseSubscriptionSet.Clone(); foreach(XDoc sub in _subscriptions.Subscriptions) { subSet.Add(sub); } Result<DreamMessage> subscribe; yield return subscribe = PubSub.At("subscribers").PostAsync(subSet); string accessKey = subscribe.Value.ToDocument()["access-key"].AsText; XUri location = subscribe.Value.Headers.Location; Cookies.Update(DreamCookie.NewSetCookie("access-key", accessKey, location), null); _subscriptionLocation = Plug.New(location.AsLocalUri().WithoutQuery()); 
_log.DebugFormat("set up initial subscription location at {0}", _subscriptionLocation.Uri); // set up notification accumulator queue TimeSpan accumulationMinutes = TimeSpan.FromSeconds(config["accumulation-time"].AsInt ?? 10 * 60); _log.DebugFormat("Initializing queue with {0:0.00} minute accumulation", accumulationMinutes.TotalMinutes); _notificationQueue = new NotificationDelayQueue(accumulationMinutes, SendEmail); result.Return(); }
private void PutFileInternal(string s3Filename, string filename, StreamInfo file) { string tmpfile; using(file) { tmpfile = Path.GetTempFileName(); // create tmp file try { // copy stream to tmp file using(Stream stream = File.Create(tmpfile)) { file.Stream.CopyTo(stream, file.Length, new Result<long>(TimeSpan.MaxValue)).Wait(); } // create cached entry TaskTimer timer = TaskTimer.New(TimeSpan.FromSeconds(CACHE_TTL), OnTimer, s3Filename, TaskEnv.None); lock(_cache) { // cach everything we know about the file _cache[s3Filename] = new Tuplet<string, TaskTimer, DateTime?>(tmpfile, timer, file.Modified); } } catch(Exception e) { try { // delete tmp file if it exists SafeFileDelete(tmpfile); lock(_cache) { _cache.Remove(s3Filename); } } catch { } throw new DreamInternalErrorException(string.Format("Unable to cache file attachment to '{0}' ({1})", s3Filename, e.Message)); } } // forward cached file to S3 Stream filestream = File.Open(tmpfile, FileMode.Open, FileAccess.Read, FileShare.Read); file = new StreamInfo(filestream, file.Length, file.Type); DreamMessage s3Msg = DreamMessage.Ok(file.Type, file.Length, file.Stream); s3Msg.Headers.ContentDisposition = new ContentDisposition(true, DateTime.UtcNow, null, null, filename, file.Length); // Note (arnec): The timeout is just a workaround Plug not having some kind of heartbeat on progress. Ideally 30 seconds of inactivity // should be perfectly fine, as long as we track uploads that are proceeding as active _s3.AtPath(s3Filename).WithTimeout(TimeSpan.FromMinutes(30)).Put(s3Msg); }
/// <summary> /// Create before, after and combined highlight documents for a set of differences. /// </summary> /// <param name="diff">Difference set.</param> /// <param name="combined">Output of combined highlight document.</param> /// <param name="combinedInvisible">Output of the combined invisible differences.</param> /// <param name="before">Output of before difference highlight document.</param> /// <param name="after">Output of after difference highlight document.</param> public static void Highlight(Tuplet<ArrayDiffKind, Token>[] diff, out XDoc combined, out List<Tuplet<string, string, string>> combinedInvisible /* tuple(xpath, before, after) */, out XDoc before, out XDoc after) { if(diff == null) { throw new ArgumentNullException("diff"); } List<Tuplet<ArrayDiffKind, Token>> combinedChanges = new List<Tuplet<ArrayDiffKind, Token>>(diff.Length); combinedInvisible = new List<Tuplet<string, string, string>>(); List<Tuplet<ArrayDiffKind, Token>> beforeChanges = new List<Tuplet<ArrayDiffKind, Token>>(diff.Length); List<Tuplet<ArrayDiffKind, Token>> afterChanges = new List<Tuplet<ArrayDiffKind, Token>>(diff.Length); bool changedElement = false; Stack<List<string>> path = new Stack<List<string>>(); Dictionary<string, Tuplet<string, string, string>> invisibleChangesLookup = new Dictionary<string, Tuplet<string, string, string>>(); path.Push(new List<string>()); for(int i = 0; i < diff.Length; ++i) { Tuplet<ArrayDiffKind, Token> item = diff[i]; Token token = item.Item2; switch(item.Item1) { case ArrayDiffKind.Added: switch(token.Type) { case XmlNodeType.Text: if((token.Value.Length > 0) && !char.IsWhiteSpace(token.Value[0])) { Highlight_InlineTextChanges(diff, i, combinedChanges, beforeChanges, afterChanges, out i); // adjust iterator since it will be immediately increased again --i; continue; } break; case XmlNodeType.Attribute: if(!changedElement) { string[] parts = token.Value.Split(new char[] { '=' }, 2); string xpath = ComputeXPath(path, "@" + parts[0]); Tuplet<string, string, string> beforeAfter; if(invisibleChangesLookup.TryGetValue(xpath, out beforeAfter)) { beforeAfter.Item3 = parts[1]; } else { beforeAfter = new Tuplet<string, string, string>(xpath, null, parts[1]); combinedInvisible.Add(beforeAfter); invisibleChangesLookup[xpath] = beforeAfter; } } break; case XmlNodeType.Element: // NOTE (steveb): this check shouldn't be needed, but just in case, it's better to have a wrong path than an exception! if(path.Count > 0) { path.Peek().Add(token.Value); } path.Push(new List<string>()); changedElement = true; break; case XmlNodeType.None: // NOTE (steveb): this check shouldn't be needed, but just in case, it's better to have a wrong path than an exception! 
if(path.Count > 0) { path.Pop(); } break; } item = new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, token); afterChanges.Add(item); combinedChanges.Add(item); break; case ArrayDiffKind.Removed: switch(token.Type) { case XmlNodeType.Text: if((token.Value.Length > 0) && !char.IsWhiteSpace(token.Value[0])) { Highlight_InlineTextChanges(diff, i, combinedChanges, beforeChanges, afterChanges, out i); // adjust iterator since it will be immediately increased again --i; continue; } else { // keep whitespace text combinedChanges.Add(new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, token)); } break; case XmlNodeType.Attribute: if(!changedElement) { string[] parts = token.Value.Split(new char[] { '=' }, 2); string xpath = ComputeXPath(path, "@" + parts[0]); Tuplet<string, string, string> beforeAfter; if(invisibleChangesLookup.TryGetValue(xpath, out beforeAfter)) { beforeAfter.Item2 = parts[1]; } else { beforeAfter = new Tuplet<string, string, string>(xpath, parts[1], null); combinedInvisible.Add(beforeAfter); invisibleChangesLookup[xpath] = beforeAfter; } } break; case XmlNodeType.Whitespace: case XmlNodeType.SignificantWhitespace: // keep whitespace text combinedChanges.Add(new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, token)); break; case XmlNodeType.Element: changedElement = true; break; } beforeChanges.Add(new Tuplet<ArrayDiffKind, Token>(ArrayDiffKind.Same, token)); break; case ArrayDiffKind.Same: switch(token.Type) { case XmlNodeType.Element: changedElement = false; // NOTE (steveb): this check shouldn't be needed, but just in case, it's better to have a wrong path than an exception! if(path.Count > 0) { path.Peek().Add(token.Value); } path.Push(new List<string>()); break; case XmlNodeType.None: // NOTE (steveb): this check shouldn't be needed, but just in case, it's better to have a wrong path than an exception! if(path.Count > 0) { path.Pop(); } break; } combinedChanges.Add(item); beforeChanges.Add(item); afterChanges.Add(item); break; case ArrayDiffKind.AddedLeft: case ArrayDiffKind.AddedRight: case ArrayDiffKind.RemovedLeft: case ArrayDiffKind.RemovedRight: // TODO (steveb): process conflicting changes throw new NotImplementedException("cannot highlight changes for a diff with conflicts"); } } before = Detokenize(beforeChanges.ToArray()); after = Detokenize(afterChanges.ToArray()); combined = Detokenize(combinedChanges.ToArray()); }
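// Hedged usage sketch (not from the source): how the outputs of Highlight above can be consumed.
// The local variable 'diff' and the '_log' field are assumptions; only Highlight, its out
// parameters, and the (xpath, before, after) meaning of the invisible tuples come from the code above.
XDoc combined;
XDoc before;
XDoc after;
List<Tuplet<string, string, string>> invisible;
Highlight(diff, out combined, out invisible, out before, out after);
foreach(Tuplet<string, string, string> change in invisible) {
    // per the Highlight code, Item1 is the xpath, Item2 the old value, Item3 the new value
    _log.DebugFormat("invisible change at {0}: '{1}' -> '{2}'", change.Item1, change.Item2, change.Item3);
}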
public void UpdateInfoMessage(string source, string message) { lock(_infos) { Tuplet<int, string> info; if(!_infos.TryGetValue(source, out info)) { info = new Tuplet<int, string>(0, null); _infos[source] = info; } ++info.Item1; info.Item2 = message; } }
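// Hedged companion sketch (hypothetical, not part of the source): reads back the per-source
// counters maintained by UpdateInfoMessage above. Only _infos and the mutable Tuplet<int, string>
// fields come from the code above; the method name and _log are assumptions.
public void ReportInfoMessages() {
    lock(_infos) {
        foreach(KeyValuePair<string, Tuplet<int, string>> pair in _infos) {
            // Item1 counts how often the source reported, Item2 holds its most recent message
            _log.DebugFormat("{0}: {1} update(s), last message: {2}", pair.Key, pair.Value.Item1, pair.Value.Item2);
        }
    }
}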
private static int Detokenize(Tuplet<ArrayDiffKind, Token>[] tokens, int index, XmlElement current, XmlDocument doc) { for(; index < tokens.Length; ++index) { Tuplet<ArrayDiffKind, Token> token = tokens[index]; switch(token.Item1) { case ArrayDiffKind.Same: case ArrayDiffKind.Added: switch(token.Item2.Type) { case XmlNodeType.CDATA: if(current == null) { throw new ArgumentNullException("current"); } current.AppendChild(doc.CreateCDataSection(token.Item2.Value)); break; case XmlNodeType.Comment: if(current == null) { throw new ArgumentNullException("current"); } current.AppendChild(doc.CreateComment(token.Item2.Value)); break; case XmlNodeType.SignificantWhitespace: if(current == null) { throw new ArgumentNullException("current"); } current.AppendChild(doc.CreateSignificantWhitespace(token.Item2.Value)); break; case XmlNodeType.Text: if(current == null) { throw new ArgumentNullException("current"); } current.AppendChild(doc.CreateTextNode(token.Item2.Value)); break; case XmlNodeType.Whitespace: if(current == null) { throw new ArgumentNullException("current"); } current.AppendChild(doc.CreateWhitespace(token.Item2.Value)); break; case XmlNodeType.Element: XmlElement next = doc.CreateElement(token.Item2.Value); if(current == null) { doc.AppendChild(next); } else { current.AppendChild(next); } index = Detokenize(tokens, index + 1, next, doc); break; case XmlNodeType.Attribute: if(current == null) { throw new ArgumentNullException("current"); } string[] parts = token.Item2.Value.Split(new char[] { '=' }, 2); current.SetAttribute(parts[0], parts[1]); break; case XmlNodeType.EndElement: // nothing to do break; case XmlNodeType.None: if(current == null) { throw new ArgumentNullException("current"); } // ensure we're closing the intended element if(token.Item2.Value != current.Name) { throw new InvalidOperationException(string.Format("mismatched element ending; found </{0}>, expected </{1}>", token.Item2.Value, current.Name)); } // we're done with this sequence return index; default: throw new InvalidOperationException("unhandled node type: " + token.Item2.Type); } break; case ArrayDiffKind.Removed: // ignore removed nodes break; default: throw new InvalidOperationException("invalid diff kind: " + token.Item1); } } if(current != null) { throw new InvalidOperationException("unexpected end of tokens"); } return index; }
//--- Class Methods --- private static void Match(Tuplet<ArrayDiffKind, XDocDiff.Token> item, ArrayDiffKind change, XmlNodeType type, string value) { Assert.AreEqual(change, item.Item1); Assert.AreEqual(type, item.Item2.Type); Assert.AreEqual(value, item.Item2.Value); }
/// <summary> /// Create a document from a difference set. /// </summary> /// <param name="tokens">Difference set.</param> /// <returns>Detokenized document.</returns> public static XDoc Detokenize(Tuplet<ArrayDiffKind, Token>[] tokens) { XmlDocument doc = XDoc.NewXmlDocument(); Detokenize(tokens, 0, null, doc); return new XDoc(doc); }
private static DekiResource GetChangeSummary(Tuplet<ArrayDiffKind, XDocDiff.Token>[] diff, out int added, out int removed, out int attributes, out int structural) { added = 0; removed = 0; attributes = 0; structural = 0; // count changes for(int i = 0; i < diff.Length; ++i) { switch(diff[i].Item1) { case ArrayDiffKind.Added: if((diff[i].Item2.Type == XmlNodeType.Text) && (diff[i].Item2.Value.Length > 0) && !char.IsWhiteSpace(diff[i].Item2.Value[0])) { ++added; } else if(diff[i].Item2.Type == XmlNodeType.Attribute) { ++attributes; } else if((diff[i].Item2.Type == XmlNodeType.Element) || (diff[i].Item2.Type == XmlNodeType.None)) { ++structural; } break; case ArrayDiffKind.Removed: if((diff[i].Item2.Type == XmlNodeType.Text) && (diff[i].Item2.Value.Length > 0) && !char.IsWhiteSpace(diff[i].Item2.Value[0])) { ++removed; } else if(diff[i].Item2.Type == XmlNodeType.Attribute) { ++attributes; } else if((diff[i].Item2.Type == XmlNodeType.Element) || (diff[i].Item2.Type == XmlNodeType.None)) { ++structural; } break; } } // compute summary DekiResource result; if((added > 0) && (removed > 0)) { result = DekiResources.PAGE_DIFF_SUMMARY(added, removed); } else if(added > 0) { result = DekiResources.PAGE_DIFF_SUMMARY_ADDED(added); } else if(removed > 0) { result = DekiResources.PAGE_DIFF_SUMMARY_REMOVED(removed); } else if((attributes > 0) || (structural > 0)) { result = DekiResources.PAGE_DIFF_SUMMARY_NOT_VISIBLE(); } else { result = DekiResources.PAGE_DIFF_SUMMARY_NOTHING(); } return result; }
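// Hedged caller sketch (assumption, not from the source): 'diff' is assumed to already hold the
// Tuplet<ArrayDiffKind, XDocDiff.Token>[] produced elsewhere, and '_log' is assumed to exist;
// only GetChangeSummary and the meaning of its out parameters come from the code above.
int added, removed, attributes, structural;
DekiResource summary = GetChangeSummary(diff, out added, out removed, out attributes, out structural);
_log.DebugFormat("diff summary: {0} added, {1} removed, {2} attribute, {3} structural change(s)", added, removed, attributes, structural);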
public void Many_producers_many_consumers_loop_with_foreach() { int n = 200; List<string> enqueued = new List<string>(); List<string> dequeued = new List<string>(); BlockingQueue<string> q = new BlockingQueue<string>(); Thread c1 = new Thread(MultiConsumerForeachLoop); Thread c2 = new Thread(MultiConsumerForeachLoop); Thread c3 = new Thread(MultiConsumerForeachLoop); c1.IsBackground = true; c2.IsBackground = true; c3.IsBackground = true; Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> v1 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false)); c1.Start(v1); Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> v2 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false)); c2.Start(v2); Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> v3 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false)); c3.Start(v3); Thread p1 = new Thread(MultiProducer); Thread p2 = new Thread(MultiProducer); Thread p3 = new Thread(MultiProducer); p1.IsBackground = true; p2.IsBackground = true; p3.IsBackground = true; Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> p1v = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, enqueued, n, new ManualResetEvent(false)); p1.Start(p1v); Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> p2v = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, enqueued, n, new ManualResetEvent(false)); p2.Start(p2v); Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent> p3v = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, enqueued, n, new ManualResetEvent(false)); p3.Start(p3v); Assert.IsTrue(p1v.Item4.WaitOne(5000, false), "producer 1 did not finish"); Assert.IsTrue(p2v.Item4.WaitOne(5000, false), "producer 2 did not finish"); Assert.IsTrue(p3v.Item4.WaitOne(5000, false), "producer 3 did not finish"); q.Close(); Assert.IsTrue(v1.Item4.WaitOne(15000, false), "consumer 1 did not finish"); Assert.IsTrue(v2.Item4.WaitOne(15000, false), "consumer 2 did not finish"); Assert.IsTrue(v3.Item4.WaitOne(15000, false), "consumer 3 did not finish"); _log.DebugFormat("consumer 1 processed {0}", v1.Item3); _log.DebugFormat("consumer 2 processed {0}", v2.Item3); _log.DebugFormat("consumer 3 processed {0}", v3.Item3); Assert.GreaterOrEqual(v1.Item3, n * 3 / 4); Assert.GreaterOrEqual(v2.Item3, n * 3 / 4); Assert.GreaterOrEqual(v3.Item3, n * 3 / 4); Assert.AreEqual(enqueued.Count, dequeued.Count); for(int i = 0; i < n; i++) { Assert.Contains(dequeued[i], enqueued); } }
public void One_producer_many_consumers_loop_with_foreach() { int n = 500; var enqueued = new List<string>(); var dequeued = new List<string>(); var q = new BlockingQueue<string>(); var c1 = new Thread(MultiConsumerForeachLoop) { IsBackground = true }; var c2 = new Thread(MultiConsumerForeachLoop) { IsBackground = true }; var c3 = new Thread(MultiConsumerForeachLoop) { IsBackground = true }; var v1 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false)); c1.Start(v1); var v2 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false)); c2.Start(v2); var v3 = new Tuplet<BlockingQueue<string>, List<string>, int, ManualResetEvent>(q, dequeued, 0, new ManualResetEvent(false)); c3.Start(v3); Thread.Sleep(1000); for(int i = 0; i < n; i++) { string guid = Guid.NewGuid().ToString(); q.Enqueue(guid); enqueued.Add(guid); } q.Close(); Assert.IsTrue(v1.Item4.WaitOne(10000, false), "thread 1 did not finish"); Assert.IsTrue(v2.Item4.WaitOne(10000, false), "thread 2 did not finish"); Assert.IsTrue(v3.Item4.WaitOne(10000, false), "thread 3 did not finish"); _log.DebugFormat("Thread 1 processed {0}", v1.Item3); _log.DebugFormat("Thread 2 processed {0}", v2.Item3); _log.DebugFormat("Thread 3 processed {0}", v3.Item3); Console.WriteLine("Thread 1 processed {0}", v1.Item3); Console.WriteLine("Thread 2 processed {0}", v2.Item3); Console.WriteLine("Thread 3 processed {0}", v3.Item3); Assert.GreaterOrEqual(v1.Item3, n / 4); Assert.GreaterOrEqual(v2.Item3, n / 4); Assert.GreaterOrEqual(v3.Item3, n / 4); Assert.AreEqual(n, dequeued.Count); Assert.AreEqual(dequeued.OrderBy(x => x).ToArray(), enqueued.OrderBy(x => x).ToArray()); }
public void AddActivityDescription(object key, string description) { lock(_activities) { _activities[key] = new Tuplet<DateTime, string>(DateTime.UtcNow, description); } }
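// Hedged companion sketch (hypothetical): enumerates the entries recorded by AddActivityDescription
// above and reports long-running ones. Only _activities and Tuplet<DateTime, string> come from the
// code above; the method name, the maxAge parameter, and _log are assumptions.
public void LogStaleActivities(TimeSpan maxAge) {
    lock(_activities) {
        foreach(KeyValuePair<object, Tuplet<DateTime, string>> pair in _activities) {
            if(DateTime.UtcNow - pair.Value.Item1 > maxAge) {
                // Item1 is the UTC start time, Item2 the human-readable description
                _log.WarnFormat("activity '{0}' started at {1:u} is still running", pair.Value.Item2, pair.Value.Item1);
            }
        }
    }
}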
public void SubscriptionManager_with_initial_subscriptions() { List<Tuplet<string, List<XDoc>>> subs = new List<Tuplet<string, List<XDoc>>>(); List<XDoc> x = new List<XDoc>(); Tuplet<string, List<XDoc>> xSubs = new Tuplet<string, List<XDoc>>("x", x); subs.Add(xSubs); x.Add(new XDoc("user") .Attr("userid", 1) .Elem("email", "foo") .Start("subscription.page").Attr("id", 1).Attr("depth", 0).End()); x.Add(new XDoc("user") .Attr("userid", 2) .Elem("email", "foo") .Start("subscription.page").Attr("id", 1).Attr("depth", 0).End() .Start("subscription.page").Attr("id", 2).Attr("depth", 0).End()); x.Add(new XDoc("user") .Attr("userid", 3) .Elem("email", "foo") .Start("subscription.page").Attr("id", 2).Attr("depth", 0).End()); List<XDoc> y = new List<XDoc>(); Tuplet<string, List<XDoc>> ySubs = new Tuplet<string, List<XDoc>>("y", y); subs.Add(ySubs); y.Add(new XDoc("user") .Attr("userid", 10) .Elem("email", "foo") .Start("subscription.page").Attr("id", 1).Attr("depth", 0).End()); SubscriptionManager subscriptionManager = new SubscriptionManager(new XUri("test://"), subs); List<XDoc> subscriptions = new List<XDoc>(subscriptionManager.Subscriptions); Assert.AreEqual(3, subscriptions.Count); bool foundXa = false; bool foundXb = false; bool foundYa = false; foreach(XDoc sub in subscriptions) { switch(sub["channel"].AsText) { case "event://x/deki/pages/create": switch(sub["uri.resource"].AsText) { case "deki://x/pages/1#depth=0": Assert.AreEqual(2, sub["recipient"].ListLength); foundXa = true; break; case "deki://x/pages/2#depth=0": Assert.AreEqual(2, sub["recipient"].ListLength); foundXb = true; break; default: Assert.Fail("bad resource for deki X"); break; } break; case "event://y/deki/pages/create": if(sub["uri.resource"].AsText != "deki://y/pages/1#depth=0") { Assert.Fail("bad resource for deki Y"); } XDoc recipient = sub["recipient"]; Assert.AreEqual(1, recipient.ListLength); Assert.AreEqual("10", recipient["@userid"].AsText); foundYa = true; break; } } Assert.IsTrue(foundXa); Assert.IsTrue(foundXb); Assert.IsTrue(foundYa); Assert.IsNotNull(subscriptionManager.GetUser("x", 1, false)); Assert.IsNotNull(subscriptionManager.GetUser("x", 2, false)); Assert.IsNotNull(subscriptionManager.GetUser("x", 3, false)); Assert.IsNull(subscriptionManager.GetUser("x", 10, false)); Assert.IsNotNull(subscriptionManager.GetUser("y", 10, false)); }
private void PutFileInternal(string s3Filename, string filename, StreamInfo file) { var tmpfile = Path.Combine(_tempDirectory, Guid.NewGuid() + ".cache"); try { using(file) { Tuplet<string, TaskTimer, DateTime?> entry = null; // create tmp file try { // copy stream to tmp file using(Stream stream = File.Create(tmpfile)) { file.Stream.CopyTo(stream, file.Length, new Result<long>(TimeSpan.MaxValue)).Wait(); } // create cached entry if(_cacheTtl != TimeSpan.Zero) { lock(_cache) { if(_cache.TryGetValue(s3Filename, out entry)) { entry.Item2.Change(_cacheTtl, TaskEnv.None); entry.Item3 = file.Modified; } else { var timer = _timerFactory.New(_cacheTtl, OnTimer, s3Filename, TaskEnv.None); _cache[s3Filename] = entry = new Tuplet<string, TaskTimer, DateTime?>(tmpfile, timer, file.Modified); } } } } catch(Exception e) { try { // delete tmp file and clear out timer and cache, if any exist SafeFileDelete(tmpfile); if(entry != null) { lock(_cache) { entry.Item2.Cancel(); _cache.Remove(s3Filename); } } } catch(Exception e2) { _log.WarnFormat("Failed to clean up after tmp file creation failure for attachment {0}: {1}", s3Filename, e2.Message); } throw new DreamInternalErrorException(string.Format("Unable to cache file attachment to '{0}' ({1})", s3Filename, e.Message)); } } // forward cached file to S3 Stream filestream = File.Open(tmpfile, FileMode.Open, FileAccess.Read, FileShare.Read); file = new StreamInfo(filestream, file.Length, file.Type); var s3Msg = DreamMessage.Ok(file.Type, file.Length, file.Stream); s3Msg.Headers.ContentDisposition = new ContentDisposition(true, DateTime.UtcNow, null, null, filename, file.Length); // Note (arnec): The timeout is just a workaround for Plug not having some kind of heartbeat on progress. Ideally 30 seconds of inactivity should be perfectly fine, as long as uploads that are still making progress are tracked as active _s3.AtPath(s3Filename).WithTimeout(TimeSpan.FromMinutes(30)).Put(s3Msg); } finally { if(_cacheTtl == TimeSpan.Zero) { SafeFileDelete(tmpfile); } } }
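// Hedged sketch of the expiration side (hypothetical; the real OnTimer body is not shown): when the
// TaskTimer created in PutFileInternal fires, the cached tmp file is dropped from _cache and deleted.
// The use of TaskTimer.State to carry the s3Filename is an assumption.
private void OnTimer(TaskTimer timer) {
    var s3Filename = (string)timer.State;
    Tuplet<string, TaskTimer, DateTime?> entry;
    lock(_cache) {
        if(!_cache.TryGetValue(s3Filename, out entry)) {
            return;
        }
        _cache.Remove(s3Filename);
    }
    // Item1 holds the tmp file path written by PutFileInternal
    SafeFileDelete(entry.Item1);
}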
private Yield ConvertToText(string extension, XUri contentUri, Result<Tuplet<string, int>> result) { Tuplet<string, int> value = new Tuplet<string, int>(string.Empty, 0); SearchFilter filter; _searchFilters.TryGetValue(extension, out filter); if(filter == null) { // see if a wildcard was defined _searchFilters.TryGetValue(".*", out filter); } if(filter != null) { // fetch content from source Result<DreamMessage> contentResult; yield return contentResult = Plug.New(contentUri).With("apikey", _apikey).InvokeEx("GET", DreamMessage.Ok(), new Result<DreamMessage>()); DreamMessage content = contentResult.Value; if(!content.IsSuccessful) { content.Close(); throw new DreamBadRequestException(string.Format("unable to fetch content from '{0}'", contentUri)); } value.Item2 = (int)content.ContentLength; // check filter type if(filter.FileName == string.Empty) { // file is already in text format value.Item1 = content.AsText(); } else { // convert source document to text Stream output = null; Stream error = null; // invoke converter string processArgs = string.Format(PhpUtil.ConvertToFormatString(filter.Arguments), extension); _log.DebugFormat("executing: {0} {1}", filter.FileName, processArgs); Result<Tuplet<int, Stream, Stream>> exitResult; // TODO (steveb): use WithCleanup() to dispose of resources in case of failure yield return exitResult = Async.ExecuteProcess(filter.FileName, processArgs, content.AsStream(), new Result<Tuplet<int, Stream, Stream>>(_filterTimeout)).Catch(); content.Close(); if(exitResult.HasException) { result.Throw(exitResult.Exception); yield break; } try { Tuplet<int, Stream, Stream> exitValues = exitResult.Value; int status = exitValues.Item1; output = exitValues.Item2; error = exitValues.Item3; // check if converter was successful if(status == 0) { // capture converter output as text for indexing using(StreamReader sr = new StreamReader(output)) { value.Item1 = sr.ReadToEnd(); } } else { // log converter error string stderr = string.Empty; try { using(StreamReader sr = new StreamReader(error)) { stderr = sr.ReadToEnd(); } } catch { stderr = "(unable to read stderr from converter)"; } _log.WarnFormat("error converting content at '{0}', exitCode: {1}, stderr: {2}", contentUri, status, stderr); } } finally { // make sure the output stream gets closed try { if(output != null) { output.Close(); } } catch { } // make sure the error stream gets closed try { if(error != null) { error.Close(); } } catch { } } } } result.Return(value); yield break; }
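// Hedged caller sketch (assumption, not from the source): invokes the ConvertToText coroutine above
// and unpacks the resulting Tuplet<string, int>. The Coroutine.Invoke plumbing, the IndexDocument
// name, and _log are assumptions about the surrounding Dream service, not taken from this code.
private Yield IndexDocument(string extension, XUri contentUri, Result result) {
    Result<Tuplet<string, int>> textResult;
    yield return textResult = Coroutine.Invoke(ConvertToText, extension, contentUri, new Result<Tuplet<string, int>>());
    Tuplet<string, int> converted = textResult.Value;
    // Item1 carries the extracted text, Item2 the size in bytes of the original content
    _log.DebugFormat("extracted {0} character(s) from {1} byte(s) at '{2}'", converted.Item1.Length, converted.Item2, contentUri);
    result.Return();
    yield break;
}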