public EventListViewModel(IConcurrencyService concurrencyService) { var eventsCollection = new ObservableCollectionExtended <EventItem>(); Events = new ReadOnlyObservableCollection <EventItem>(eventsCollection); var selectionChangeSubject = new Subject <Func <Selection, Selection> >(); mSubmitSelectionChange = selectionChangeSubject.OnNext; WhenEventSelectionChanges = selectionChangeSubject .ObserveOn(concurrencyService.TaskPoolRxScheduler); this.WhenActivated((CompositeDisposable disposables) => { var whenIncludeInputsChanges = IncludeInputObservables ?.ObserveOn(concurrencyService.TaskPoolRxScheduler) .DistinctUntilChanged() ?? Observable.Return(false); var eventsProcessedUntilInvalid = whenIncludeInputsChanges.Publish(whenIncludeInputsChangesPub => whenIncludeInputsChangesPub.SelectMany(includeInputs => Observables .ObserveOn(concurrencyService.TaskPoolRxScheduler) .Replay(observablesPub => { var observablesExpanded = observablesPub .MergeMany(obs => Observable.Return(obs)) .ExpandDistinct(obs => includeInputs ? obs.Inputs : Observable.Empty <IObservableInstance>()); var events = observablesExpanded .SelectMany(obs => Observable.Return(EventItem.FromObservableInstance(obs)) .Concat(obs.Subscriptions.SelectMany(sub => sub.Events.Select(e => EventItem.FromStreamEvent(sub, e))))) .Merge(ClientEvents?.Select(EventItem.FromClientEvent) ?? Observable.Empty <EventItem>()); var eventsProcessed = events .ToObservableChangeSet(e => e.SequenceId) .Filter(SequenceIdRange?.Select(CreateFilter) ?? Observable.Return <Func <EventItem, bool> >(_ => true)) .Batch(TimeSpan.FromMilliseconds(100)) .Sort(Utility.Comparer <EventItem> .ByKey(e => e.SequenceId)); // Terminate the stream if an observable is removed or the include inputs // flag changes, as in both cases we need to rebuild the output from scratch. return(eventsProcessed .TakeUntil(observablesPub.Where(chg => chg.Removes > 0)) .TakeUntil(whenIncludeInputsChangesPub.Where(ii => ii != includeInputs))); })) ) .SubscribeOn(concurrencyService.TaskPoolRxScheduler); Observable.Defer(() => { eventsCollection.Clear(); return(eventsProcessedUntilInvalid .ObserveOn(concurrencyService.DispatcherRxScheduler) .Bind(eventsCollection, new SortedObservableCollectionAdaptor <EventItem, long>(int.MaxValue))); }) .SubscribeOn(concurrencyService.DispatcherRxScheduler) .Repeat() .Subscribe() .DisposeWith(disposables); }); Func <EventItem, bool> CreateFilter((long start, long end) range) { return(e => e.SequenceId >= range.start && e.SequenceId <= range.end); } }
public override IObservable <Mat> Generate() { return(Observable.Defer(() => Observable.Return(CreateBuffer()))); }
public override IObservable <Normal> Generate() { return(Observable.Defer(() => Observable.Return(new Normal(Mean, StdDev)))); }
IObservable <UserAndScopes> GetUserFromApi() { return(Observable.Defer(() => ApiClient.GetUser())); }
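// The generators above all share one shape: the real work is wrapped in Observable.Defer so it runs
// once per subscription instead of once when the observable is constructed. The standalone sketch
// below is illustrative only — the DeferSketch class and Value field are hypothetical, not part of
// the surrounding projects — and assumes plain System.Reactive.
using System;
using System.Reactive.Linq;

class DeferSketch
{
    static int Value = 1;

    static void Main()
    {
        var eager = Observable.Return(Value);                             // captures Value now (1)
        var deferred = Observable.Defer(() => Observable.Return(Value));  // factory runs at subscribe time

        Value = 2;

        eager.Subscribe(x => Console.WriteLine($"eager: {x}"));        // prints 1
        deferred.Subscribe(x => Console.WriteLine($"deferred: {x}"));  // prints 2
    }
}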
public void CombineShouldActuallyPerformCombination() { var sub = new Subject <int>(); var subCount = 0; var unsubCount = 0; var source = Observable.Defer(() => { Interlocked.Increment(ref subCount); return(sub.AsObservable().Finally(() => { Interlocked.Increment(ref unsubCount); })); }); Func <long, IObservable <int> > sourceFactory = x => source.Where(i => i <= x); long Aggregate(long acc, long next) => Math.Max(acc, next); var c = sourceFactory.CombinedSubscriptions(Aggregate, -1L); var c0 = c(0); var l0 = new List <int>(); var c3 = c(3); var l3 = new List <int>(); subCount.Should().Be(0); unsubCount.Should().Be(0); using (c0.Subscribe(x => l0.Add(x))) { subCount.Should().Be(1); unsubCount.Should().Be(0); sub.OnNext(0); l0.Should().BeEquivalentTo(new[] { 0 }); sub.OnNext(1); l0.Should().BeEquivalentTo(new[] { 0 }); using (c3.Subscribe(x => l3.Add(x))) { subCount.Should().Be(2); unsubCount.Should().Be(1); sub.OnNext(3); l0.Should().BeEquivalentTo(new[] { 0, 3 }); l3.Should().BeEquivalentTo(new[] { 3 }); sub.OnNext(4); l0.Should().BeEquivalentTo(new[] { 0, 3 }); l3.Should().BeEquivalentTo(new[] { 3 }); // Resubscribing with same key should have no effect using (c3.Subscribe(x => { })) { subCount.Should().Be(2); unsubCount.Should().Be(1); // Resubscribing with another key to the same aggregate using (c0.Subscribe(x => { })) { subCount.Should().Be(2); unsubCount.Should().Be(1); } } subCount.Should().Be(2); unsubCount.Should().Be(1); } subCount.Should().Be(3); unsubCount.Should().Be(2); sub.OnNext(3); l0.Should().BeEquivalentTo(new[] { 0, 3 }); l3.Should().BeEquivalentTo(new[] { 3 }); } subCount.Should().Be(3); unsubCount.Should().Be(3); }
protected override void OnCreate(Bundle bundle) { base.OnCreate(bundle); var baseDir = Path.Combine(Application.ApplicationInfo.DataDir, "image_cache"); if (!Directory.Exists(baseDir)) { Directory.CreateDirectory(baseDir); } var from = new string [] { "Text", "Name", "Icon" }; var to = new int [] { Resource.Id.textMessage, Resource.Id.textName, Resource.Id.iconView }; var data = new List <IDictionary <string, object> > (); data.Add(new JavaDictionary <string, object> () { { "Text", "loading" }, { "Name", "" } }); var urls = new Dictionary <Uri, List <string> > (); var getUrl = new Uri("https://api.github.com/repos/mono/mono/commits"); #if REACTIVE var hr = new HttpWebRequest(getUrl); var req = Observable.FromAsyncPattern <WebResponse> (hr.BeginGetResponse, hr.EndGetResponse); Observable.Defer(req).Subscribe(v => { var json = (IEnumerable <JsonValue>)JsonValue.Load(v.GetResponseStream()); #else var wc = new WebClient(); wc.Headers ["USER-AGENT"] = "Xamarin Android sample HTTP client"; wc.DownloadStringCompleted += (sender, e) => { data.Clear(); var v = e.Result; var json = (IEnumerable <JsonValue>)JsonValue.Parse(v); #endif #if REACTIVE json.ToObservable().Select(j => j.AsDynamic()).Subscribe(item => { #else foreach (var item in json.Select(j => j.AsDynamic())) { #endif var uri = new Uri(((string)item.author.avatar_url) ?? "http://www.gravatar.com/avatar/default.jpg"); var file = Path.Combine(baseDir, (string)item.author.id + new FileInfo(uri.LocalPath).Extension); if (!urls.ContainsKey(uri)) { urls.Add(uri, new List <string> () { file }); } else { urls [uri].Add(file); } data.Add(new JavaDictionary <string, object> () { { "Text", item.commit.message }, { "Name", item.author.login }, { "Icon", Path.Combine(baseDir, file) } }); #if REACTIVE }); #else } #endif urls.ToList().ForEach(p => { var iwc = new WebClient(); iwc.DownloadDataCompleted += (isender, ie) => p.Value.ForEach(s => { using (var fs = File.Create(s)) if (ie.Result != null) { fs.Write(ie.Result, 0, ie.Result.Length); } }); iwc.DownloadDataAsync(p.Key); }); this.RunOnUiThread(() => { ListAdapter = new SimpleAdapter(this, data, Resource.Layout.ListItem, from, to); }); #if REACTIVE }); #else }; #endif }
public override IObservable <Point2d> Generate() { return(Observable.Defer(() => Observable.Return(new Point2d(X, Y)))); }
private IObservable <Unit> ActionAnimations(CharacterAction[] actions) { if (!actions.Any()) { return(Observable.ReturnUnit()); } var actionList = new List <IObservable <Unit> >(); foreach (var action in actions) { //Attack var attackAction = action as CharacterAttackAction; if (attackAction != null) { actionList.Add(AttackAnimation(attackAction)); continue; } //Pick up var pickupAction = action as CharacterItemPickupAction; if (pickupAction != null) { actionList.Add(Observable.Defer(() => { if (pickupAction.IsSuccess) { StaticData.Message.ShowMessage(string.Format("{0}を拾った", pickupAction.TargetItem.Name), false); } else { StaticData.Message.ShowMessage(string.Format("持ち物がいっぱいで{0}を拾えなかった", pickupAction.TargetItem.Name), false); } return(Observable.ReturnUnit()); })); continue; } //Put down var putAction = action as CharacterItemPutAction; if (putAction != null) { actionList.Add(Observable.Defer(() => { if (putAction.IsSuccess) { StaticData.Message.ShowMessage(string.Format("{0}を置いた", putAction.TargetItem.Name), false); } else { StaticData.Message.ShowMessage(string.Format("ここには置けない"), false); } return(Observable.ReturnUnit()); })); continue; } //Throw var throwAction = action as CharacterThrowAction; if (throwAction != null) { actionList.Add(ThrowAnimation(throwAction)); continue; } //Drink var drinkAction = action as CharacterDrinkPotionAction; if (drinkAction != null) { actionList.Add(Observable.Defer(() => { StaticData.Message.ShowMessage(string.Format("{0}を飲んだ", drinkAction.TargetItem.Name), false); return(Observable.ReturnUnit()); }) .SelectMany(ActionAnimations(drinkAction.SubActions.ToArray())) .Last()); continue; } //Message var messageAction = action as MessageAction; if (messageAction != null) { actionList.Add(Observable.Defer(() => { StaticData.Message.ShowMessage(messageAction.Message, messageAction.IsWait); return(Observable.ReturnUnit()); })); continue; } //No matching action actionList.Add(Observable.ReturnUnit()); } return(actionList.Concat().Last()); }
public override IObservable <Mat> Process(IObservable <Mat> source) { return(Observable.Defer(() => { int rows = 0; double[] data = null; double[] feedforwardCoefficients = null; double[] feedbackCoefficients = null; double[] dataWeights = null; double[] dataMemory = null; double[] outputWeights = null; double[] outputMemory = null; return source.Select(input => { if (FeedforwardCoefficients != feedforwardCoefficients || FeedbackCoefficients != feedbackCoefficients || rows != input.Rows || data != null && data.Length != rows * input.Cols) { rows = input.Rows; feedforwardCoefficients = FeedforwardCoefficients; feedbackCoefficients = FeedbackCoefficients; dataWeights = InitializeWeights(feedforwardCoefficients); outputWeights = InitializeWeights(feedbackCoefficients); for (int i = 0; i < outputWeights.Length - 1; i++) { outputWeights[i] = -outputWeights[i]; } if (dataWeights != IdentityWeight || outputWeights != IdentityWeight) { data = new double[rows * input.Cols]; dataMemory = new double[rows * (dataWeights.Length - 1)]; outputMemory = new double[rows * (outputWeights.Length - 1)]; } } if (dataWeights == IdentityWeight && outputWeights == IdentityWeight) { return input; } else { var dataHandle = GCHandle.Alloc(data, GCHandleType.Pinned); try { var output = new Mat(input.Size, input.Depth, input.Channels); using (var dataHeader = new Mat(input.Size, Depth.F64, 1, dataHandle.AddrOfPinnedObject())) { CV.Convert(input, dataHeader); ProcessData(rows, data, dataWeights, dataMemory, outputWeights, outputMemory); CV.Convert(dataHeader, output); } return output; } finally { dataHandle.Free(); } } }); })); }
public IObservable <Unit> Load() { return(Observable.Defer(() => Observable.Start(() => Settings.Default.Reload()))); }
public static IObservable <T> WhenApplication <T>(this ApplicationModulesManager manager, Func <XafApplication, IObservable <T> > retriedExecution) => manager.WhereApplication().ToObservable(ImmediateScheduler.Instance) .SelectMany(application => Observable.Defer(() => retriedExecution(application)).Retry(application));
public IObservable <Unit> Save() { return(Observable.Defer(() => Observable.Start(() => Settings.Default.Save()))); }
public override IObservable <Vector2> Generate() { return(Observable.Defer(() => Observable.Return(new Vector2(X, Y)))); }
public override IObservable <TSource> Process <TSource>(IObservable <TSource> source) { return(Observable.Defer(() => { var fbo = 0; var faceSize = 0; var depthRenderbuffer = 0; var colorTarget = default(Texture); var name = TextureName; if (string.IsNullOrEmpty(name)) { throw new InvalidOperationException("A texture name must be specified."); } return source.CombineEither( ShaderManager.WindowSource.Do(window => { window.Update(() => { GL.GenFramebuffers(1, out fbo); GL.GenRenderbuffers(1, out depthRenderbuffer); colorTarget = window.ResourceManager.Load <Texture>(name); GL.BindTexture(TextureTarget.TextureCubeMap, colorTarget.Id); GL.GetTexLevelParameter(TextureTarget, 0, GetTextureParameter.TextureWidth, out faceSize); GL.BindFramebuffer(FramebufferTarget.Framebuffer, fbo); GL.BindRenderbuffer(RenderbufferTarget.Renderbuffer, depthRenderbuffer); GL.RenderbufferStorage(RenderbufferTarget.Renderbuffer, RenderbufferStorage.DepthComponent, faceSize, faceSize); GL.FramebufferRenderbuffer(FramebufferTarget.Framebuffer, FramebufferAttachment.DepthAttachment, RenderbufferTarget.Renderbuffer, depthRenderbuffer); GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferAttachment.ColorAttachment0, TextureTarget, colorTarget.Id, 0); GL.BindTexture(TextureTarget.TextureCubeMap, 0); GL.BindRenderbuffer(RenderbufferTarget.Renderbuffer, 0); GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0); }); }), (input, window) => { foreach (var state in renderState) { state.Execute(window); } var clearMask = ClearMask; GL.BindFramebuffer(FramebufferTarget.Framebuffer, fbo); window.UpdateViewport(faceSize, faceSize); window.UpdateScissor(faceSize, faceSize); if (clearMask != ClearBufferMask.None) { GL.ClearColor(ClearColor); GL.Clear(clearMask); } foreach (var shader in window.Shaders) { shader.Dispatch(); } window.UpdateViewport(); window.UpdateScissor(); GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0); return input; }).Finally(() => { if (fbo > 0) { GL.DeleteFramebuffers(1, ref fbo); GL.DeleteRenderbuffers(1, ref depthRenderbuffer); } }); })); }
/// <summary> /// Initializes a new instance of the <see cref="ObservableSocket"/> class. /// </summary> /// <param name="tcpClient">The TCP client to wrap.</param> protected ObservableSocket(TcpClient tcpClient) { Log = LogProvider.GetLogger(GetType()); id = Interlocked.Increment(ref IdCounter); this.tcpClient = tcpClient; subject = new Subject <byte[]>(); receiver = Observable.Defer( () => { if (isStarted.EnsureCalledOnce()) { return(subject.AsObservable()); } Task.Run( async() => { SafeLog(LogLevel.Trace, () => "{0} Worker Thread {1} started".Fmt(GetType(), id)); int bytesRead = 1; var stream = tcpClient.GetStream(); byte[] buffer = SharedPools.ByteArray.Allocate(); try { while (bytesRead > 0) { bytesRead = await stream.ReadAsync(buffer, 0, buffer.Length, readCancellationToken.Token); if (bytesRead > 0) { if (bytesRead == buffer.Length) { subject.OnNext(buffer); } else { subject.OnNext(buffer.Take(bytesRead).ToArray()); } } else { subject.OnCompleted(); } } } catch (ObjectDisposedException) { //expected - normal shutdown subject.OnCompleted(); } catch (TaskCanceledException) { //expected - normal shutdown subject.OnCompleted(); } catch (IOException ex) { if (ex.InnerException is ObjectDisposedException) { //expected - normal shutdown subject.OnCompleted(); } else { //socket comms interrupted - propagate the error up the layers SafeLog(LogLevel.Error, () => "IO Error reading from stream", ex); subject.OnError(ex); } } catch (SocketException ex) { //socket comms interrupted - propagate the error up the layers SafeLog(LogLevel.Error, () => "Socket Error reading from stream", ex); subject.OnError(ex); } catch (Exception ex) { //unexpected error SafeLog(LogLevel.Error, () => "Unexpected Error reading from stream", ex); subject.OnError(ex); } finally { SharedPools.ByteArray.Free(buffer); SafeLog(LogLevel.Trace, () => "{0} Worker Thread {1} completed".Fmt(GetType(), id)); Dispose(); } }); return(subject.AsObservable()); }); }
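// A reduced sketch of the pattern used by the receiver above: Observable.Defer plus a once-only
// guard means the background read loop starts with the first subscriber, while every subscriber
// shares the same Subject. This is an illustration under assumptions — LazyWorker and its members
// are hypothetical, not the ObservableSocket API — using only System.Reactive and the TPL.
using System;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using System.Threading;
using System.Threading.Tasks;

class LazyWorker
{
    readonly Subject<int> subject = new Subject<int>();
    int started; // 0 = not yet started, 1 = worker running

    public IObservable<int> Values => Observable.Defer(() =>
    {
        // Only the first subscription launches the worker;
        // later subscriptions simply attach to the shared subject.
        if (Interlocked.Exchange(ref started, 1) == 0)
        {
            Task.Run(async () =>
            {
                for (var i = 0; i < 3; i++)
                {
                    await Task.Delay(100);
                    subject.OnNext(i);
                }
                subject.OnCompleted();
            });
        }

        return subject.AsObservable();
    });
}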
public override IObservable <Matrix4> Generate() { return(Observable.Defer(() => Observable.Return(Matrix4.CreateOrthographic(Width, Height, NearClip, FarClip)))); }
public override IObservable <Mat> Process(IObservable <Mat> source) { return(Observable.Defer(() => { Mat kernel = null; Mat overlap = null; Mat overlapInput = null; Mat overlapEnd = null; Mat overlapStart = null; Mat overlapFilter = null; Rect overlapOutput = default(Rect); float[] currentKernel = null; return source.Select(input => { if (Kernel != currentKernel || currentKernel != null && (input.Rows != overlapOutput.Height || input.Cols != overlapOutput.Width)) { currentKernel = Kernel; if (currentKernel == null || currentKernel.Length == 0) { kernel = null; } else { kernel = new Mat(1, currentKernel.Length, Depth.F32, 1); Marshal.Copy(currentKernel, 0, kernel.Data, currentKernel.Length); var anchor = Anchor; if (anchor == -1) { anchor = kernel.Cols / 2; } overlap = new Mat(input.Rows, input.Cols + kernel.Cols - 1, input.Depth, input.Channels); overlapInput = overlap.GetSubRect(new Rect(kernel.Cols - 1, 0, input.Cols, input.Rows)); overlapFilter = new Mat(overlap.Rows, overlap.Cols, overlap.Depth, overlap.Channels); if (kernel.Cols > 1) { overlapEnd = overlap.GetSubRect(new Rect(overlap.Cols - kernel.Cols + 1, 0, kernel.Cols - 1, input.Rows)); overlapStart = overlap.GetSubRect(new Rect(0, 0, kernel.Cols - 1, input.Rows)); } overlapOutput = new Rect(anchor, 0, input.Cols, input.Rows); CV.CopyMakeBorder(input, overlap, new Point(kernel.Cols - 1, 0), IplBorder.Reflect); } } if (kernel == null) { return input; } else { CV.Copy(input, overlapInput); CV.Filter2D(overlap, overlapFilter, kernel, new Point(Anchor, -1)); if (overlapEnd != null) { CV.Copy(overlapEnd, overlapStart); } return overlapFilter.GetSubRect(overlapOutput).Clone(); } }); })); }
public async Task LaunchBuildServerInfoFetchOperationAsync() { await TaskScheduler.Default; CancelBuildStatusFetchOperation(); var launchToken = _launchCancellation.Next(); var buildServerAdapter = await GetBuildServerAdapterAsync().ConfigureAwait(false); await _revisionGridView.SwitchToMainThreadAsync(launchToken); _buildServerAdapter?.Dispose(); _buildServerAdapter = buildServerAdapter; await TaskScheduler.Default; if (buildServerAdapter is null || launchToken.IsCancellationRequested) { return; } var scheduler = NewThreadScheduler.Default; // Run this first as it (may) force start queries var runningBuildsObservable = buildServerAdapter.GetRunningBuilds(scheduler); var fullDayObservable = buildServerAdapter.GetFinishedBuildsSince(scheduler, DateTime.Today - TimeSpan.FromDays(3)); var fullObservable = buildServerAdapter.GetFinishedBuildsSince(scheduler); bool anyRunningBuilds = false; var delayObservable = Observable.Defer(() => Observable.Empty <BuildInfo>() .DelaySubscription(anyRunningBuilds ? ShortPollInterval : LongPollInterval)); var shouldLookForNewlyFinishedBuilds = false; DateTime nowFrozen = DateTime.Now; // All finished builds have already been retrieved, // so looking for newly finished builds makes sense only if running builds have been found previously var fromNowObservable = Observable.If(() => shouldLookForNewlyFinishedBuilds, buildServerAdapter.GetFinishedBuildsSince(scheduler, nowFrozen) .Finally(() => shouldLookForNewlyFinishedBuilds = false)); var cancellationToken = new CompositeDisposable { fullDayObservable.OnErrorResumeNext(fullObservable) .OnErrorResumeNext(Observable.Empty <BuildInfo>() .DelaySubscription(LongPollInterval) .OnErrorResumeNext(fromNowObservable) .Retry() .Repeat()) .ObserveOn(MainThreadScheduler.Instance) .Subscribe(OnBuildInfoUpdate), runningBuildsObservable.Do(buildInfo => { anyRunningBuilds = true; shouldLookForNewlyFinishedBuilds = true; }) .OnErrorResumeNext(delayObservable) .Finally(() => anyRunningBuilds = false) .Retry() .Repeat() .ObserveOn(MainThreadScheduler.Instance) .Subscribe(OnBuildInfoUpdate) }; await _revisionGridView.SwitchToMainThreadAsync(launchToken); CancelBuildStatusFetchOperation(); _buildStatusCancellationToken = cancellationToken; }
public static IObservable <Unit> OnMessage(this Socket clientSocket) { return(Observable.Defer(() => new MessagePump(clientSocket))); }
/// <summary> /// Creates an observable sequence by reading lines to the end of the specified <paramref name="reader"/> each time /// the <paramref name="textAvailable"/> sequence notifies that additional text is available to be read /// and advances the position within the reader to the end of the stream. /// </summary> /// <typeparam name="TOther">The type of elements in the sequence that notifies when text is available to be read.</typeparam> /// <param name="reader">The object from which lines are read as they become available.</param> /// <param name="textAvailable">An observable sequence that notifies when additional text is available to be read.</param> /// <param name="scheduler">An object used to schedule reads.</param> /// <remarks> /// <para> /// The <paramref name="textAvailable"/> sequence does not have to notify when new lines are available. It only needs to notify when /// new text is available, which may or may not contain new lines. Characters that are read up to the end of the stream are automatically /// buffered until a new line sequence is encountered in a subsequent read. A consequence of this behavior is that if the stream does /// not end with a new line sequence and it's not going to receive any more text, then the last line will not be read until /// <paramref name="textAvailable"/> calls <strong>OnCompleted</strong>. /// </para> /// <para> /// The generated sequence is intended to match the underlying stream; however, this behavior /// depends on the reader being well-behaved and not being shared. Reading always starts from the /// current position of the reader in the underlying stream. The reader is expected to advance its position in the stream /// as it is read. Each time that the <paramref name="textAvailable"/> sequence notifies that additional text is available, /// reading is expected to begin at the previous position in the stream, but if the reader is shared or not well-behaved, /// then the generated sequence may contain unexpected data. 
/// </para> /// </remarks> /// <returns>An observable sequence of lines read from the specified <paramref name="reader"/>.</returns> public static IObservable <string> ToObservableLines <TOther>(this TextReader reader, IObservable <TOther> textAvailable, IScheduler scheduler) { Contract.Requires(reader != null); Contract.Requires(textAvailable != null); Contract.Requires(scheduler != null); Contract.Ensures(Contract.Result <IObservable <string> >() != null); var buffer = new char[1024]; var remainder = new StringBuilder(1024); var lines = new Queue <Tuple <int, int> >(); var ignoreLeadingNewLine = false; return(textAvailable.Consume( _ => { if (lines.Count > 0) { var line = lines.Dequeue(); return Maybe.Return(new string(buffer, line.Item1, line.Item2)); } int read = reader.Read(buffer, 0, buffer.Length); int nextLineStart = 0; if (read > 0 && ignoreLeadingNewLine) { if (buffer[0] == '\n') { nextLineStart = 1; } ignoreLeadingNewLine = false; } for (int i = nextLineStart; i < read; i++) { var c = buffer[i]; switch (c) { case '\r': case '\n': lines.Enqueue(Tuple.Create(nextLineStart, i - nextLineStart)); if (c == '\r') { if (i + 1 < read) { if (buffer[i + 1] == '\n') { i++; } } else { ignoreLeadingNewLine = true; } } nextLineStart = i + 1; break; } } Maybe <string> value; if (lines.Count > 0) { var line = lines.Dequeue(); if (remainder.Length > 0) { remainder.Append(buffer, line.Item1, line.Item2); value = Maybe.Return(remainder.ToString()); remainder.Length = 0; } else { value = Maybe.Return(new string(buffer, line.Item1, line.Item2)); } } else { value = Maybe.Empty <string>(); } if (nextLineStart < read) { remainder.Append(buffer, nextLineStart, read - nextLineStart); } return value; }, scheduler) .Concat(Observable.Defer(() => remainder.Length > 0 ? Observable.Return(remainder.ToString(), scheduler) : Observable.Empty <string>(scheduler)))); }
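// A usage sketch for ToObservableLines under stated assumptions: the StringReader, the Subject<Unit>
// used as the textAvailable signal, and the sample strings are all hypothetical, the extension above
// is assumed to be available, and the behavior shown follows the remarks documented above (complete
// lines per notification, the unterminated tail only when the signal calls OnCompleted).
using System;
using System.IO;
using System.Reactive;
using System.Reactive.Concurrency;
using System.Reactive.Subjects;

class ToObservableLinesSketch
{
    static void Main()
    {
        var reader = new StringReader("first\nsecond\nno trailing newline");
        var textAvailable = new Subject<Unit>();

        reader.ToObservableLines(textAvailable, Scheduler.CurrentThread)
              .Subscribe(line => Console.WriteLine($"line: {line}"),
                         () => Console.WriteLine("completed"));

        textAvailable.OnNext(Unit.Default);  // reads to end of stream: "first" and "second" are emitted
        textAvailable.OnCompleted();         // flushes the buffered remainder: "no trailing newline"
    }
}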
public override IObservable <Categorical> Generate() { return(Observable.Defer(() => Observable.Return(new Categorical(ProbabilityMass)))); }
public override IObservable <Extrinsics> Generate() { return(Observable.Defer(() => Observable.Return(Create()))); }
public override IObservable <KeyPointOpticalFlow> Process(IObservable <Tuple <KeyPointCollection, IplImage> > source) { return(Observable.Defer(() => { IplImage previousImage = null; IplImage previousPyramid = null; IplImage currentPyramid = null; return source.Select(input => { var previous = input.Item1; var currentImage = input.Item2; var currentKeyPoints = new KeyPointCollection(currentImage); if (previous.Count == 0) { return new KeyPointOpticalFlow(previous, currentKeyPoints); } if (currentPyramid == null || currentPyramid.Size != currentImage.Size) { previousImage = null; previousPyramid = new IplImage(currentImage.Size, currentImage.Depth, currentImage.Channels); currentPyramid = new IplImage(currentImage.Size, currentImage.Depth, currentImage.Channels); } var maxIterations = MaxIterations; var epsilon = Epsilon; var terminationType = TermCriteriaType.None; if (maxIterations > 0) { terminationType |= TermCriteriaType.MaxIter; } if (epsilon > 0) { terminationType |= TermCriteriaType.Epsilon; } var termCriteria = new TermCriteria(terminationType, maxIterations, epsilon); var flags = previousImage == previous.Image ? LKFlowFlags.PyrAReady : LKFlowFlags.None; var previousFeatures = new Point2f[previous.Count]; for (int i = 0; i < previousFeatures.Length; i++) { previousFeatures[i] = previous[i]; } var currentFeatures = new Point2f[previousFeatures.Length]; var status = new byte[previousFeatures.Length]; var trackError = new float[previousFeatures.Length]; CV.CalcOpticalFlowPyrLK( previous.Image, currentImage, previousPyramid, currentPyramid, previousFeatures, currentFeatures, WindowSize, Level, status, trackError, termCriteria, flags); var previousKeyPoints = new KeyPointCollection(previous.Image); for (int i = 0; i < status.Length; i++) { if (status[i] == 0 || trackError[i] > MaxError || currentFeatures[i].X <0 || currentFeatures[i].Y <0 || currentFeatures[i].X> currentImage.Width - 1 || currentFeatures[i].Y> currentImage.Height - 1) { continue; } previousKeyPoints.Add(previousFeatures[i]); currentKeyPoints.Add(currentFeatures[i]); } var temp = currentPyramid; currentPyramid = previousPyramid; previousPyramid = temp; previousImage = currentImage; return new KeyPointOpticalFlow(previousKeyPoints, currentKeyPoints); }); })); }
/// <summary> /// Publishes a gist to GitHub. /// </summary> /// <param name="apiClient">The client to use to post to GitHub.</param> /// <param name="gist">The new gist to post.</param> /// <returns>The created gist.</returns> public IObservable <Gist> PublishGist(IApiClient apiClient, NewGist gist) { return(Observable.Defer(() => apiClient.CreateGist(gist))); }
public static IObservable <T> ToObservable <T>(this IEnumeratorAsync <T> enumeratorAsync) { var nextItem = Observable.Defer(() => enumeratorAsync.MoveNextAsync().ToObservable()); return(nextItem.Repeat().TakeUntil(b => !b).Select(b => enumeratorAsync.Current)); }
public IObservable <ResourceEvent <TResource> > GetResource(ResourceStreamType type) { var childScheduler = new EventLoopScheduler(); // dedicated thread for the child on which all messages are synchronized return(Observable.Defer(async() => { AddSubscriber(); _logger.LogTrace("Subscriber awaiting cache synchronization before attaching"); var isCacheSynchronized = await _cacheSynchronized.Task; if (!isCacheSynchronized) // really this only happens if the master completes before the first reset, in which case the downstream subscriber gets nothing { return Observable.Empty <ResourceEvent <TResource> >(); } // we use lock to pause any processing of the broadcaster while we're attaching to the stream so proper alignment can be made _logger.LogTrace("Subscriber attaching to broadcaster"); return Observable.Create <ResourceEvent <TResource> >(observer => { var broadcasterAttachment = Disposable.Empty; var cacheSnapshot = _cache.Snapshot(); if (type.HasFlag(ResourceStreamType.List)) { _logger.LogTrace($"Flushing contents of cache version {cacheSnapshot.Version}"); _cache.Values .ToReset(type == ResourceStreamType.ListWatch) .ToObservable() .Concat(Observable.Never <ResourceEvent <TResource> >()) .ObserveOn(Scheduler.Immediate) .Subscribe(observer); } if (type.HasFlag(ResourceStreamType.Watch)) { broadcasterAttachment = _masterObservable // we could be ahead of the broadcaster because we initialized from the cache, which gets updated before the messages are sent to the broadcaster // this logic realigns us at the correct point with the broadcaster .Do(x => _logger.LogTrace($"Received from broadcaster {x}")) .SkipWhile(x => x.MessageNumber <= cacheSnapshot.Version) .Select(x => x.Value) .Do(x => _logger.LogTrace($"Aligned with broadcaster {x}")) .SubscribeOn(_masterScheduler) .ObserveOn(childScheduler) .Subscribe(observer, () => { _logger.LogTrace("Child OnComplete"); RemoveSubscriber(); }); } else { observer.OnCompleted(); } // let the broadcaster know we're done attaching to the stream so it can resume its regular work _logger.LogTrace("Finished attaching to stream - signalling to resume"); lock (_lock) { _waitingSubscribers.Signal(); } return broadcasterAttachment; }) .ObserveOn(childScheduler) .SubscribeOn(childScheduler); }) .SubscribeOn(childScheduler) // ensures that when we attach the master observer it's done on the child thread, as we plan on awaiting cache synchronization .Do(_ => _logger.LogTrace($"Shared informer out: {_}"))); }
public override IObservable <Pose> Process(IObservable <IplImage> source) { return(Observable.Defer(() => { TFSessionOptions options = new TFSessionOptions(); unsafe { byte[] GPUConfig = new byte[] { 0x32, 0x02, 0x20, 0x01 }; fixed(void *ptr = &GPUConfig[0]) { options.SetConfig(new IntPtr(ptr), GPUConfig.Length); } } var graph = new TFGraph(); var session = new TFSession(graph, options, null); var bytes = File.ReadAllBytes(ModelFileName); graph.Import(bytes); TFTensor tensor = null; var config = ConfigHelper.PoseConfig(PoseConfigFileName); return source.Select(input => { if (tensor == null || tensor.GetTensorDimension(1) != input.Height || tensor.GetTensorDimension(2) != input.Width) { tensor = new TFTensor( TFDataType.Float, new long[] { 1, input.Height, input.Width, 3 }, input.WidthStep * input.Height * 4); } using (var image = new IplImage(input.Size, IplDepth.F32, 3, tensor.Data)) { CV.Convert(input, image); } var runner = session.GetRunner(); runner.AddInput(graph["Placeholder"][0], tensor); runner.Fetch(graph["concat_1"][0]); // Run the model var output = runner.Run(); // Fetch the results from output: var poseTensor = output[0]; var pose = new Mat((int)poseTensor.Shape[0], (int)poseTensor.Shape[1], Depth.F32, 1, poseTensor.Data); var result = new Pose(input); var threshold = MinConfidence; for (int i = 0; i < pose.Rows; i++) { BodyPart bodyPart; bodyPart.Name = config[i]; bodyPart.Confidence = (float)pose.GetReal(i, 2); if (bodyPart.Confidence < threshold) { bodyPart.Position = new Point2f(float.NaN, float.NaN); } else { bodyPart.Position.X = (float)pose.GetReal(i, 1); bodyPart.Position.Y = (float)pose.GetReal(i, 0); } result.Add(bodyPart); } return result; }); })); }
public override IObservable <Matrix4> Generate() { return(Observable.Defer(() => Observable.Return(Matrix4.CreateRotationZ(Angle)))); }
public static IObservable <T> StartWith <T>(this IObservable <T> source, Func <T> value, IScheduler scheduler) => Observable.Defer(() => Observable.Start(value, scheduler).Concat(source));
private static IObservable <KeyedOperation> ProcessOperation(KeyedOperation operation) { return(Observable.Defer(operation.EvaluateFunc) .Select(_ => operation) .Catch(Observable.Return(operation))); }
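// A standalone sketch of the Defer/Select/Catch shape used by ProcessOperation above: evaluate
// lazily, then yield the originating operation whether evaluation succeeds or fails. The names here
// (Process, the sample strings) are illustrative, not the queue's real API; only System.Reactive is
// assumed.
using System;
using System.Reactive.Linq;

static class OperationSketch
{
    static IObservable<string> Process(string operation, Func<IObservable<int>> evaluate) =>
        Observable.Defer(evaluate)
                  .Select(_ => operation)
                  .Catch(Observable.Return(operation));

    static void Main()
    {
        Process("succeeded", () => Observable.Return(42))
            .Subscribe(Console.WriteLine);   // prints "succeeded"

        Process("failed but still surfaced", () => Observable.Throw<int>(new InvalidOperationException()))
            .Subscribe(Console.WriteLine);   // the error is swallowed; the operation is still emitted
    }
}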