public void RiakObjectLinksAreTheSameAsLinksRetrievedViaMapReduce()
{
    // Fetch the object directly and capture the links stored on it;
    // these are the expected values for the map/reduce link phase below.
    var jeremiah = Client.Get(TestBucket, Jeremiah).Value;
    var jLinks = jeremiah.Links;

    var input = new RiakBucketKeyInput()
        .Add(new RiakObjectId(TestBucket, Jeremiah));

#pragma warning disable 618
    var query = new RiakMapReduceQuery().Inputs(input).Link(l => l.AllLinks().Keep(true));
#pragma warning restore 618

    var mrResult = Client.MapReduce(query);
    mrResult.IsSuccess.ShouldBeTrue();

    // TODO: FUTURE - Is *this* chunk of code acceptable?
    // This should probably be taken care of in the RiakClient.WalkLinks
    var listOfLinks = mrResult.Value.PhaseResults.OrderBy(pr => pr.Phase)
        .ElementAt(0).Values
        .Select(v => RiakLink.ParseArrayFromJsonString(v.FromRiakString()));

    var mrLinks = listOfLinks.SelectMany(l => l).ToList();

    // mrLinks is a List<T>: use the Count property rather than the
    // Enumerable.Count() extension method (which the original called).
    mrLinks.Count.ShouldEqual(jLinks.Count);

    foreach (var link in jLinks)
    {
        mrLinks.ShouldContain(link);
    }
}
/// <summary>
/// Walks the supplied chain of links starting from <paramref name="riakObject"/> and
/// streams each object reached by the final link phase to subscribers.
/// </summary>
/// <param name="riakObject">The object whose links form the start of the walk.</param>
/// <param name="riakLinks">The link definitions to follow, in order; must be non-empty.</param>
/// <returns>
/// An observable sequence of either an exception or a fetched object, one element per
/// link target resolved by the last phase of the map/reduce job.
/// </returns>
public IObservable<Either<RiakException, RiakObject>> WalkLinks(RiakObject riakObject, IList<RiakLink> riakLinks)
{
    var observables = Observable.Create<Either<RiakException, RiakObject>>(async obs =>
    {
        try
        {
            System.Diagnostics.Debug.Assert(riakLinks.Count > 0, "Link walking requires at least one link");

            // Seed the map/reduce job with the starting object's bucket/key.
            var input = new RiakBucketKeyInput()
                .Add(riakObject.Bucket, riakObject.Key);

            var query = new RiakMapReduceQuery()
                .Inputs(input);

            // One link phase per requested link; only the phase built from the final
            // entry keeps its results (identified via reference equality below).
            var lastLink = riakLinks.Last();

            foreach (var riakLink in riakLinks)
            {
                // Copy the loop variable so each lambda captures its own link.
                var link = riakLink;
                var keep = ReferenceEquals(link, lastLink);

                query.Link(l => l.FromRiakLink(link).Keep(keep));
            }

            var result = await MapReduce(query).ConfigureAwait(false);

            // Phases are zero-indexed, so the kept (final) link phase is Count - 1.
            var linkResults = result.PhaseResults
                .GroupBy(r => r.Phase)
                .Where(g => g.Key == riakLinks.Count - 1);

            // Flatten each phase result's raw values into JSON strings.
            var linkResultStrings = linkResults
                .SelectMany(lr => lr.ToList(), (lr, r) => new { lr, r })
                .SelectMany(@t => @t.r.Values, (@t, s) => s.FromRiakString());

            //var linkResultStrings = linkResults.SelectMany(g => g.Select(r => r.Values.Value.FromRiakString()));

            // Parse the JSON link arrays and de-duplicate targets before fetching.
            var rawLinks = linkResultStrings
                .SelectMany(RiakLink.ParseArrayFromJsonString)
                .Distinct();
            var oids = rawLinks
                .Select(l => new RiakObjectId(l.Bucket, l.Key))
                .ToList();

            var source = Get(oids, new RiakGetOptions());

            // Hand the fetch stream straight through to the subscriber; its
            // subscription becomes the disposable for this observable.
            return (source.Subscribe(obs.OnNext, obs.OnError, obs.OnCompleted));
        }
        catch (Exception exception)
        {
            obs.OnError(exception);
        }

        // Reached only on the error path above; nothing left to dispose.
        return (Disposable.Empty);
    });

    return (observables);
}
public void LotsOfConcurrentStreamingMapRedRequestsShouldWork()
{
    // Seed ten documents, "key1".."key10", carrying the values 1..10.
    var keys = new List<string>();

    foreach (var i in Enumerable.Range(1, 10))
    {
        var key = "key" + i;
        var doc = new RiakObject(MapReduceBucket, key, new { value = i });
        keys.Add(key);

        var result = Client.Put(doc, new RiakPutOptions { ReturnBody = true });
        result.ShouldNotBeNull();
    }

    var input = new RiakBucketKeyInput();
    foreach (var k in keys)
    {
        input.Add(MapReduceBucket, k);
    }

    // Map every object to 1 and sum the ones; a fully successful run yields [10].
    var query = new RiakMapReduceQuery()
        .Inputs(input)
        .MapJs(m => m.Source(@"function(o){return[1];}"))
        .ReduceJs(r => r.Name(@"Riak.reduceSum").Keep(true));
    query.Compile();

    var results = new List<RiakMapReduceResultPhase>[ThreadCount];

    // Issue ThreadCount streaming requests concurrently and time the whole batch.
    var stopwatch = Stopwatch.StartNew();

    Parallel.For(0, ThreadCount, idx =>
    {
        results[idx] = DoStreamingMapRed(query);
    });

    stopwatch.Stop();
    var executionTime = stopwatch.Elapsed;

    var failures = 0;

    foreach (var phases in results)
    {
        if (phases.Count == 0)
        {
            // An empty phase list means the request never completed
            // (connection contention); tallied and reported below.
            ++failures;
            continue;
        }

        var finalPhase = phases.OrderByDescending(p => p.Phase).First();
        var summed = JsonConvert.DeserializeObject<int[]>(finalPhase.Values.First().FromRiakString());

        //var resultValue = JsonConvert.DeserializeObject<int[]>(r.Value.PhaseResults.ElementAt(1).Values.First().FromRiakString())[0];

        // due to the speed which things happen at, we can't gaurantee all 10 will be in the result set
        summed[0].IsAtLeast(5);

        //lastResult.GetObject<int[]>()[0].ShouldEqual(10);
    }

    Console.WriteLine("Total of {0} out of {1} failed to execute due to connection contention. Execution time = {2} milliseconds, for an average of {3} milliseconds",
        failures, ThreadCount * ActionCount, executionTime.TotalMilliseconds, (executionTime.TotalMilliseconds / (ThreadCount * ActionCount)));
}
public void LotsOfConcurrentMapRedRequestsShouldWork()
{
    // Seed ten documents, "key1".."key10", carrying the values 1..10.
    var keys = new List<string>();

    for (var i = 1; i < 11; i++)
    {
        var key = "key" + i;
        var doc = new RiakObject(MapReduceBucket, key, new { value = i });
        keys.Add(key);

        var result = Client.Put(doc, new RiakPutOptions { ReturnBody = true });
        result.ShouldNotBeNull();
    }

    var input = new RiakBucketKeyInput();
    keys.ForEach(k => input.Add(MapReduceBucket, k));

    // Map every object to 1 and sum the ones; a successful run yields [10].
    var query = new RiakMapReduceQuery()
        .Inputs(input)
        .MapJs(m => m.Source(@"function(o){return[1];}"))
        .ReduceJs(r => r.Name(@"Riak.reduceSum").Keep(true));
    query.Compile();

    var results = new List<RiakMapReduceResult>[ThreadCount];

    // Issue ThreadCount map/reduce requests concurrently and time the whole batch.
    var watch = Stopwatch.StartNew();

    Parallel.For(0, ThreadCount, i =>
    {
        results[i] = DoMapRed(query);
    });

    watch.Stop();
    var executionTime = watch.Elapsed;
    var failures = 0;

    foreach (var r in results.SelectMany(l => l))
    {
        if (r != null)
        {
            // Phase 1 (the reduce) holds the summed count of mapped objects.
            var resultValue = JsonConvert.DeserializeObject<int[]>(r.PhaseResults.ElementAt(1).Values.First().FromRiakString())[0];
            resultValue.ShouldEqual(10);

            //r.Value.PhaseResults.ElementAt(1).GetObject<int[]>()[0].ShouldEqual(10);
        }
        else
        {
            // the only acceptable result is that it ran out of retries when
            // talking to the cluster (trying to get a connection)
            //r.ResultCode.ShouldEqual(ResultCode.NoRetries);
            ++failures;
        }
    }

    // NOTE(review): the denominator uses ThreadCount * ActionCount, but only
    // ThreadCount requests are issued above — confirm ActionCount still applies here.
    Console.WriteLine("Total of {0} out of {1} failed to execute due to connection contention. Execution time = {2} milliseconds, for an average of {3} milliseconds",
        failures, ThreadCount * ActionCount, executionTime.TotalMilliseconds, (executionTime.TotalMilliseconds / (ThreadCount * ActionCount)));
}
/// <summary>
/// Runs the given index query and packages the matching keys for use as inputs to a
/// <see cref="CorrugatedIron.Models.MapReduce.MapReduceQuery"/>.
/// </summary>
/// <returns>
/// A <see cref="RiakBucketKeyInput"/> built from the object ids matched by the index query.
/// </returns>
/// <param name='indexQuery'>
/// The secondary-index query to execute.
/// </param>
public RiakBucketKeyInput GetIndex(RiakIndexInput indexQuery)
{
    // riak_kv_mapreduce:reduce_identity passes the matched bucket/key pairs
    // through unchanged, so the phase output is directly consumable as ids.
    var identityQuery = new RiakMapReduceQuery()
        .Inputs(indexQuery)
        .ReduceErlang(r => r.ModFun("riak_kv_mapreduce", "reduce_identity").Keep(true));

    var queryResult = MapReduce(identityQuery);

    var objectIds = queryResult.Value.PhaseResults
        .OrderBy(pr => pr.Phase)
        .First()
        .GetObjects<RiakObjectId>();

    return RiakBucketKeyInput.FromRiakObjectIds(objectIds);
}
public void RiakBucketKeyInputSeralisesCorrectly()
{
    // Three bucket/key pairs added via the (bucket, key) overload.
    var input = new RiakBucketKeyInput()
        .Add("foo", "bar")
        .Add("foo", "baz")
        .Add("dooby", "scooby");

    var s = Serialize(input.WriteJson);

    // NUnit's Assert.AreEqual takes (expected, actual); the original call had
    // them reversed, which produces misleading failure messages.
    Assert.AreEqual("\"inputs\":[[\"foo\",\"bar\"],[\"foo\",\"baz\"],[\"dooby\",\"scooby\"]]", s);
}
public void FromRiakObjectIdsHelperMethodSerializesCorrectly()
{
    var ids = new List<RiakObjectId>
    {
        new RiakObjectId("bazType", "foo", "bar"),
        new RiakObjectId("bazType", "foo", "baz"),
        new RiakObjectId("bazType", "dooby", "scooby")
    };

    var input = RiakBucketKeyInput.FromRiakObjectIds(ids);

    var s = Serialize(input.WriteJson);

    // NUnit's Assert.AreEqual takes (expected, actual); the original call had
    // them reversed, which produces misleading failure messages. Note the
    // serialized form is [bucket, key, bucketType].
    Assert.AreEqual("\"inputs\":[[\"foo\",\"bar\",\"bazType\"],[\"foo\",\"baz\",\"bazType\"],[\"dooby\",\"scooby\",\"bazType\"]]", s);
}
/// <summary>
/// Walks the supplied chain of links starting from <paramref name="riakObject"/> and
/// asynchronously fetches the objects reached by the final link phase.
/// </summary>
/// <param name="riakObject">The object whose links form the start of the walk.</param>
/// <param name="riakLinks">The link definitions to follow, in order; must be non-empty.</param>
/// <returns>
/// A task producing a successful result holding the fetched objects, or an error result
/// mirroring the failed map/reduce call.
/// </returns>
public Task<RiakResult<IList<RiakObject>>> WalkLinks(RiakObject riakObject, IList<RiakLink> riakLinks)
{
    System.Diagnostics.Debug.Assert(riakLinks.Count > 0, "Link walking requires at least one link");

    // Seed the map/reduce job with the starting object's bucket/key.
    var input = new RiakBucketKeyInput();
    input.AddBucketKey(riakObject.Bucket, riakObject.Key);

    var query = new RiakMapReduceQuery()
        .Inputs(input);

    // One link phase per requested link; only the phase built from the final
    // entry keeps its results (identified via reference equality below).
    var lastLink = riakLinks.Last();

    foreach (var riakLink in riakLinks)
    {
        // Copy the loop variable so each lambda captures its own link.
        var link = riakLink;
        var keep = ReferenceEquals(link, lastLink);
        query.Link(l => l.FromRiakLink(link).Keep(keep));
    }

    // The outer ContinueWith returns a nested task; Unwrap() at the end
    // flattens Task<Task<...>> into the advertised Task<...>.
    return (MapReduce(query)
        .ContinueWith((Task<RiakResult<RiakMapReduceResult>> finishedTask) =>
        {
            var result = finishedTask.Result;

            if (result.IsSuccess)
            {
                // Phases are zero-indexed, so the kept (final) link phase is Count - 1.
                var linkResults = result.Value.PhaseResults.GroupBy(r => r.Phase).Where(g => g.Key == riakLinks.Count - 1);

                // Flatten each phase result's raw values into JSON strings.
                var linkResultStrings = linkResults.SelectMany(lr => lr.ToList(), (lr, r) => new { lr, r })
                    .SelectMany(@t => @t.r.Values, (@t, s) => s.FromRiakString());

                //var linkResultStrings = linkResults.SelectMany(g => g.Select(r => r.Values.Value.FromRiakString()));

                // Parse the JSON link arrays and de-duplicate targets before fetching.
                var rawLinks = linkResultStrings.SelectMany(RiakLink.ParseArrayFromJsonString).Distinct();
                var oids = rawLinks.Select(l => new RiakObjectId(l.Bucket, l.Key)).ToList();

                return Get(oids, new RiakGetOptions())
                    .ContinueWith((Task<IEnumerable<RiakResult<RiakObject>>> getTask) =>
                    {
                        var objects = getTask.Result;

                        // FIXME
                        // we could be discarding results here. Not good?
                        // This really should be a multi-phase map/reduce
                        return RiakResult<IList<RiakObject>>.Success(objects.Where(r => r.IsSuccess).Select(r => r.Value).ToList());
                    });
            }

            // Propagate the map/reduce failure as an error result.
            return RiakResult<IList<RiakObject>>.ErrorTask(result.ResultCode, result.ErrorMessage, result.NodeOffline);
        }).Unwrap());
}
public void RiakBucketKeyInputSerializesCorrectly()
{
    var inputList = new List<RiakObjectId>
    {
        new RiakObjectId("foo", "baz"),
        new RiakObjectId("dooby", "scooby")
    };

    // Exercise all three Add overloads: single id, enumerable, and params.
    var input = new RiakBucketKeyInput()
        .Add(new RiakObjectId("foo", "bar"))
        .Add(inputList)
        .Add(inputList[0], inputList[1]);

    var s = Serialize(input.WriteJson);

    // NUnit's Assert.AreEqual takes (expected, actual); the original call had
    // them reversed, which produces misleading failure messages.
    Assert.AreEqual(SerializedRiakBucketKeyInput, s);
}
public void RiakBucketKeyInputSerializesCorrectlyOldInterface()
{
    var inputList = new List<Tuple<string, string>>
    {
        new Tuple<string, string>("foo", "baz"),
        new Tuple<string, string>("dooby", "scooby")
    };

    // Exercise all three deprecated Add overloads: single pair, enumerable, and params.
#pragma warning disable 612, 618
    var input = new RiakBucketKeyInput()
        .Add("foo", "bar")
        .Add(inputList)
        .Add(inputList[0], inputList[1]);
#pragma warning restore 612, 618

    var s = Serialize(input.WriteJson);

    // NUnit's Assert.AreEqual takes (expected, actual); the original call had
    // them reversed, which produces misleading failure messages.
    Assert.AreEqual(SerializedRiakBucketKeyInput, s);
}
/// <summary>
/// Retrieve arbitrarily deep list of links for a <see cref="RiakObject"/>
/// </summary>
/// <returns>
/// A list of <see cref="RiakObject"/> identified by the list of links.
/// </returns>
/// <param name='riakObject'>
/// The initial object to use for the beginning of the link walking.
/// </param>
/// <param name='riakLinks'>
/// A list of link definitions; must be non-empty.
/// </param>
/// <remarks>Refer to http://wiki.basho.com/Links-and-Link-Walking.html for more information.</remarks>
public RiakResult<IList<RiakObject>> WalkLinks(RiakObject riakObject, IList<RiakLink> riakLinks)
{
    // Matches the guard in the async/observable WalkLinks overloads.
    System.Diagnostics.Debug.Assert(riakLinks.Count > 0, "Link walking requires at least one link");

    var input = new RiakBucketKeyInput();
    input.AddBucketKey(riakObject.Bucket, riakObject.Key);

    var query = new RiakMapReduceQuery()
        .Inputs(input);

    // Hoist Last() out of the loop (the original re-evaluated it every
    // iteration) and use ReferenceEquals like the sibling overloads do:
    // only the phase built from the final link entry keeps its results.
    var lastLink = riakLinks.Last();

    foreach (var riakLink in riakLinks)
    {
        // Copy the loop variable so each lambda captures its own link.
        var link = riakLink;
        var keep = ReferenceEquals(link, lastLink);
        query.Link(l => l.FromRiakLink(link).Keep(keep));
    }

    var result = MapReduce(query);

    if (result.IsSuccess)
    {
        // Phases are zero-indexed, so the kept (final) link phase is Count - 1.
        var linkResults = result.Value.PhaseResults.GroupBy(r => r.Phase).Where(g => g.Key == riakLinks.Count - 1);

        // Flatten each phase result's raw values into JSON strings.
        var linkResultStrings = linkResults.SelectMany(lr => lr.ToList(), (lr, r) => new { lr, r })
            .SelectMany(@t => @t.r.Values, (@t, s) => s.FromRiakString());

        //var linkResultStrings = linkResults.SelectMany(g => g.Select(r => r.Values.Value.FromRiakString()));

        // Parse the JSON link arrays and de-duplicate targets before fetching.
        var rawLinks = linkResultStrings.SelectMany(RiakLink.ParseArrayFromJsonString).Distinct();
        var oids = rawLinks.Select(l => new RiakObjectId(l.Bucket, l.Key)).ToList();

        var objects = Get(oids);

        // FIXME
        // we could be discarding results here. Not good?
        // This really should be a multi-phase map/reduce
        return (RiakResult<IList<RiakObject>>.Success(objects.Where(r => r.IsSuccess).Select(r => r.Value).ToList()));
    }

    return (RiakResult<IList<RiakObject>>.Error(result.ResultCode, result.ErrorMessage));
}
/// <summary>
/// Use the supplied bucket/key pairs as the inputs for this map/reduce query.
/// </summary>
/// <param name="riakBucketKeyInputs">The <see cref="RiakBucketKeyInput"/> to use as the query inputs.</param>
/// <returns>This instance, to allow fluent chaining.</returns>
public RiakMapReduceQuery Inputs(RiakBucketKeyInput riakBucketKeyInputs)
{
    // Replaces any previously configured inputs.
    _inputs = riakBucketKeyInputs;
    return this;
}
/// <summary>
/// Use the supplied collection of <see cref="RiakObjectId"/>'s as the inputs for this
/// map/reduce query.
/// </summary>
/// <param name="riakBucketKeyInputs">The <see cref="RiakBucketKeyInput"/> to use as the query inputs.</param>
/// <returns>A reference to this updated instance, for fluent chaining.</returns>
public RiakMapReduceQuery Inputs(RiakBucketKeyInput riakBucketKeyInputs)
{
    // Replaces any previously configured inputs.
    inputs = riakBucketKeyInputs;
    return this;
}