/// <summary>
/// Creates a view over a fixed region of an underlying sink.
/// </summary>
/// <param name="sink">The sink that ultimately receives the data.</param>
/// <param name="offset">Offset within the underlying sink where this sub-sink begins.</param>
/// <param name="size">Length of the region this sub-sink covers.</param>
public SubSink(ISink sink, long offset, long size)
{
    m_sink = sink;
    m_offset = offset;
    Size = size;
    m_status = new SinkStatus();
}
/// <summary>
/// Smoke test: the null-sink factory produces a <c>NullSink&lt;int&gt;</c>.
/// </summary>
public void SmokeTest()
{
    var factory = new NullSinkFactory<int>();

    // GetAwaiter().GetResult() instead of .Result: .Result wraps any failure
    // in an AggregateException, which obscures the real test failure cause.
    ISink<int> sink = factory.CreateAsync("hub").GetAwaiter().GetResult();

    Assert.IsType<NullSink<int>>(sink);
}
/// <summary>
/// Splitter connection handler. On the very first connection, seeds the
/// container dictionary from a single-resource module's matter type; on later
/// connections, fills the two splitter outputs in connection order.
/// </summary>
public override void OnSinkConnected(ISink s)
{
    if (this.Data.Containers.Keys.Count == 0)
    {
        // Pattern match once instead of the original's repeated unchecked `as` casts.
        if (s is SingleResourceModuleGameplay singleResource)
        {
            var matterType = singleResource.Data.Container.MatterType;
            this.Data.Containers.Add(matterType, new ResourceContainer()
            {
                MatterType = matterType,
                TotalCapacity = 1f
            });
        }
        RefreshValveTags();
    }
    else
    {
        // HasContainerFor is evaluated first, preserving the original call order.
        if (s.HasContainerFor(this.SplitterMatterType) && s is ModuleGameplay module)
        {
            if (OutputOne == null)
            {
                OutputOne = module;
            }
            else if (OutputTwo == null)
            {
                OutputTwo = module;
            }
        }
    }
}
/// <summary>
/// Registers <paramref name="sink"/> as an input for <paramref name="type"/>,
/// but only when the sink actually holds a container for that matter.
/// </summary>
public void AddSink(ISink sink, Matter type)
{
    if (!sink.HasContainerFor(type))
    {
        return;
    }

    AddInput(sink.Get(type));
}
/// <summary>
/// Adds a log sink to the collection; null sinks are silently ignored.
/// </summary>
public void AddSink(ISink sink)
{
    if (sink == null)
    {
        return;
    }

    logSinks.Add(sink);
}
/// <summary>
/// Signals the given item to the observer in a serialized fashion
/// allowing a concurrent OnError or OnCompleted emission to be delayed until
/// the observer.OnNext returns.
/// Do not call OnNext from multiple threads as it may lead to ignored items.
/// Use a full SerializedObserver wrapper for merging multiple sequences.
/// </summary>
/// <typeparam name="T">The element type of the observer.</typeparam>
/// <param name="sink">The observer to signal events in a serialized fashion.</param>
/// <param name="item">The item to signal.</param>
/// <param name="wip">Indicates there is an emission going on currently.</param>
/// <param name="error">The field containing an error or terminal indicator.</param>
public static void ForwardOnNext<T>(ISink<T> sink, T item, ref int wip, ref Exception? error)
{
    // Win the emission race only when no other emission is in flight (wip: 0 -> 1).
    if (Interlocked.CompareExchange(ref wip, 1, 0) == 0)
    {
        sink.ForwardOnNext(item);

        // If wip is still non-zero after the decrement, a concurrent
        // OnError/OnCompleted incremented it while OnNext ran; deliver the
        // deferred terminal signal now.
        if (Interlocked.Decrement(ref wip) != 0)
        {
            var ex = error!; // NB: A concurrent OnError or OnCompleted will either set Terminated or the original exception, so never null here.
            if (ex != ExceptionHelper.Terminated)
            {
                // Mark the stream terminated so the error is forwarded at most once.
                error = ExceptionHelper.Terminated;
                sink.ForwardOnError(ex);
            }
            else
            {
                sink.ForwardOnCompleted();
            }
        }
    }
#if !NO_TRACE
    // Losing the race while no terminal signal is pending means a second OnNext
    // was invoked concurrently — a contract violation; the item is dropped.
    else if (error == null)
    {
        Trace.TraceWarning("OnNext called while another OnNext call was in progress on the same Observer.");
    }
#endif
}
/// <summary>
/// Signals the given item to the observer in a serialized fashion
/// allowing a concurrent OnError or OnCompleted emission to be delayed until
/// the observer.OnNext returns.
/// Do not call OnNext from multiple threads as it may lead to ignored items.
/// Use a full SerializedObserver wrapper for merging multiple sequences.
/// </summary>
/// <typeparam name="T">The element type of the observer.</typeparam>
/// <param name="sink">The observer to signal events in a serialized fashion.</param>
/// <param name="item">The item to signal.</param>
/// <param name="wip">Indicates there is an emission going on currently.</param>
/// <param name="error">The field containing an error or terminal indicator.</param>
public static void ForwardOnNext<T>(ISink<T> sink, T item, ref int wip, ref Exception error)
{
    // Win the emission race only when no other emission is in flight (wip: 0 -> 1).
    if (Interlocked.CompareExchange(ref wip, 1, 0) == 0)
    {
        sink.ForwardOnNext(item);

        // A non-zero value after the decrement means a concurrent
        // OnError/OnCompleted deferred its terminal signal; deliver it now.
        if (Interlocked.Decrement(ref wip) != 0)
        {
            // error is set by the concurrent terminator before it increments wip,
            // so it is expected to be non-null here.
            var ex = error;
            if (ex != ExceptionHelper.Terminated)
            {
                // Mark the stream terminated so the error is forwarded at most once.
                error = ExceptionHelper.Terminated;
                sink.ForwardOnError(ex);
            }
            else
            {
                sink.ForwardOnCompleted();
            }
        }
    }
#if (HAS_TRACE)
    // Losing the race while no terminal signal is pending means a second OnNext
    // was invoked concurrently — a contract violation; the item is dropped.
    else if (error == null)
    {
        Trace.TraceWarning("OnNext called while another OnNext call was in progress on the same Observer.");
    }
#endif
}
/// <summary>
/// Parses a PO file in two passes: pass one scans with a byte-transparent
/// ISO-8859-1 decoding to detect the declared charset, pass two rewinds and
/// parses for real with that charset, feeding events to <paramref name="sink"/>.
/// </summary>
public void Parse(string filename, ISink sink)
{
    using (var stream = File.OpenRead(filename))
    {
        // Pass 1: charset detection.
        Scanner = new PoScanner(stream, "ISO-8859-1");
        var detector = new EncodingDetectorSink();
        Sink = detector;
        try
        {
            Parse();
        }
        catch (EncodingDetectorSink.DetectionTerminatedException)
        {
            // Expected: the detector aborts the scan once it has its answer.
        }

        // Pass 2: rewind and parse with the detected charset (UTF-8 fallback).
        var charset = detector.Charset ?? "UTF-8";
        stream.Seek(0, SeekOrigin.Begin);
        Scanner = new PoScanner(stream, charset) { SourceFileName = filename };
        Sink = sink;
        Parse();
    }
}
/// <summary>
/// Registers (or replaces — last registration wins) the sender channel for
/// <paramref name="senderName"/>. No decoration chain is supplied.
/// </summary>
void INotifyRegistry.RegisterSender(string senderName, ISink senderSink)
{
    var channel = new SenderChannel(this, senderName, null, senderSink);
    lock (channels)
    {
        channels[senderName] = channel;
    }
}
/// <summary>
/// Captures a carbon-dioxide output container from any connecting sink that has one.
/// </summary>
public override void OnSinkConnected(ISink s)
{
    var co2 = RedHomestead.Simulation.Matter.CarbonDioxide;
    if (s.HasContainerFor(co2))
    {
        co2Out = s.Get(co2);
    }
}
/// <summary>
/// Factory for sink models keyed by identifier.
/// Returns null when <paramref name="ModelID"/> is not recognized.
/// </summary>
public static ISink GetSinkModel(string ModelID)
{
    switch (ModelID)
    {
        case "SmallLakesSink":
            return new SmallLakesSink();
        case "StreamSink":
            return new StreamSink();
        case "LakeSink":
            return new LakeSink();
        case "ConstructedWetland":
            return new ConstructedWetlandSink();
        case "ConceptualSourceReducer":
            return new ConceptualSourceReducer();
        default:
            return null;
    }
}
/// <summary>
/// Writes one row of work item field values to the sink.
/// The column order must match the header emitted by InitialiseSink.
/// </summary>
private async Task WriteWorkItemDetails(WorkItem workitem, ISink sink)
{
    var row = new string[]
    {
        workitem.Id.ToString(),
        GetField(workitem, TeamProjectFieldName),
        GetField(workitem, TitleFieldName),
        GetField(workitem, AreaPathFieldName),
        GetField(workitem, IterationPathFieldName),
        GetField(workitem, WorkItemTypeFieldName),
        GetField(workitem, StateFieldName),
        GetField(workitem, ReasonFieldName),
        GetField(workitem, AssignedToFieldName),
        GetField(workitem, CreatedDateFieldName),
        GetField(workitem, CreatedByFieldName),
        GetField(workitem, ChangedDateFieldName),
        GetField(workitem, ChangedByFieldName),
        GetField(workitem, ResolvedDateFieldName),
        GetField(workitem, ResolvedByFieldName),
        GetField(workitem, ClosedDateFieldName),
        GetField(workitem, ClosedByFieldName),
        GetField(workitem, PriorityFieldName),
        GetField(workitem, SeverityFieldName),
    };

    await sink.Write(row);
}
/// <summary>
/// Emits the header row for work item exports; "Id" is declared as the key column.
/// Column order must match the rows written by WriteWorkItemDetails.
/// </summary>
private async Task InitialiseSink(ISink sink)
{
    string[] columns =
    {
        "Id", "Project", "Title", "AreaPath", "IterationPath",
        "WorkItemType", "State", "Reason", "AssignedTo",
        "CreatedDate", "CreatedBy", "ChangedDate", "ChangedBy",
        "ResolvedDate", "ResolvedBy", "ClosedDate", "ClosedBy",
        "Priority", "Severity"
    };

    await sink.InitialiseSink(columns, "Id");
}
/// <summary>
/// Running-reduction operator: folds incoming items into an accumulator
/// starting from <paramref name="seed"/> and forwards results to <paramref name="sink"/>.
/// </summary>
public Scan(ISink<T> sink, Func<T, T, T> reducer, T seed)
{
    Sink = sink;
    _reducer = reducer;
    _accumulator = seed;
    // A seed was supplied, so the accumulator starts out initialized.
    _accumulatorInitialized = true;
}
/// <summary>
/// Builds the connection plan for a production-encrypted XCI image: the card
/// key area (the first CardKeyAreaPageCount pages of the source) is encrypted
/// and overlaid on top of the original source, which is then routed to
/// <paramref name="outSink"/>.
/// </summary>
public ProdEncryptedXciArchive(ISink outSink, ISource xciSource, KeyConfiguration keyConfig)
{
    this.ConnectionList = new List<Connection>();
    this.m_rng = new RNGCryptoServiceProvider();
    this.m_keyConfig = keyConfig;
    this.SetCryptor(this.m_keyConfig);

    // Pull the card key area from the start of the source. The original
    // (decompiled) code shuffled the ArraySegment through several redundant
    // temporaries; one segment read is equivalent.
    int keyAreaSize = (int)XciInfo.PageSize * (int)XciInfo.CardKeyAreaPageCount;
    byte[] keyArea = new byte[keyAreaSize];
    ByteData byteData = xciSource.PullData(0L, keyAreaSize);
    ArraySegment<byte> segment = byteData.Buffer;
    Buffer.BlockCopy(segment.Array, segment.Offset, keyArea, 0, segment.Count);

    // Encrypt the key area and overlay it on the plain source.
    byte[] encryptedKeyArea = this.EncryptKeyArea(keyArea);
    MemorySource memorySource = new MemorySource(encryptedKeyArea, 0, encryptedKeyArea.Length);
    AdaptedSource adaptedSource = new AdaptedSource(xciSource, memorySource, 0L, memorySource.Size);

    outSink.SetSize(adaptedSource.Size);
    this.ConnectionList.Add(new Connection(adaptedSource, outSink));
}
/// <summary>
/// Static initializer: routes logging to a file sink when a log directory is
/// available, otherwise falls back to a no-op sink.
/// </summary>
static OtelLogging()
{
    ISink sink = null;
    try
    {
        var logDirectory = GetLogDirectory();
        if (logDirectory != null)
        {
            sink = new FileSink(Path.Combine(logDirectory, GetLogFileName()));
        }
    }
    catch (Exception)
    {
        // Best effort only: if file logging cannot be configured, fall through
        // to the no-op sink rather than failing type initialization.
    }

    Logger = new Logger(sink ?? new NoopSink());
}
/// <summary>
/// Builds a <see cref="DiffInfo"/> preview from an update package: streams the
/// package entries through an installer sink to collect commands, lets the
/// data engine process them, and copies the package metadata onto the result.
/// </summary>
/// <param name="path">Path to the update package file.</param>
/// <returns>The diff describing the package's commands and metadata.</returns>
protected virtual DiffInfo LoadFromPackage(string path)
{
    IProcessingContext context = new SimpleProcessingContext();
    CommandInstallerContext.Setup(
        context,
        Path.GetFileNameWithoutExtension(path),
        UpgradeAction.Preview,
        Sitecore.Update.Utils.InstallMode.Install,
        null,
        new List<ContingencyEntry>());

    // Replay the package entries through the installer sink to collect commands.
    ISource<PackageEntry> source = new PackageReader(path);
    ISink<PackageEntry> installer = DoCreateInstallerSink(context);
    var sorter = new PackageDumper(source);
    sorter.Initialize(context);
    sorter.Populate(installer);
    installer.Flush();

    // Process the raw command list without filtering or optimization.
    var commands = new List<ICommand>(sorter.Commands);
    var engine = new DataEngine { FilterCommands = false, OptimizeCommands = false };
    engine.ProcessCommands(ref commands);

    var info = new DiffInfo(commands, string.Empty, string.Empty, "Generated by ConvertFromPackage command.");

    // Carry the package metadata over when present. (Original had a stray
    // empty statement after this block; removed.)
    MetadataView view = UpdateHelper.LoadMetadata(path);
    if (view != null)
    {
        info.Readme = view.Readme;
        info.InstallMode = view.Comment;
        info.Title = view.PackageName;
    }

    return info;
}
/// <summary>
/// Seeds the sink list with the single injected metric sink and creates a
/// fresh, reset aggregator.
/// </summary>
protected BaseMetricsEventHandler(ISink metricSink)
{
    // Collection initializer replaces the original construct-then-Add pair.
    Sinks = new List<ISink> { metricSink };

    Aggregator = new RingbufferPerformanceMetricsAggregator(new DateService());
    Aggregator.Reset();
}
/// <summary>
/// Queries all work items via WIQL, writes the header row to the sink, then
/// streams each returned work item's details to the sink one at a time.
/// </summary>
/// <param name="sink">Destination sink for the header and the per-item rows.</param>
public async Task WriteWorkItemDetails(ISink sink)
{
    // The WIQL selects every field the export writes; the query itself only
    // returns work item references — fields are fetched per item below.
    WorkItemQueryResult queryResult = await witClient.QueryByWiqlAsync(new Wiql()
    {
        Query = $"SELECT {TitleFieldName}, " +
                $" {TeamProjectFieldName}, " +
                $" {AreaPathFieldName}, " +
                $" {IterationPathFieldName}, " +
                $" {WorkItemTypeFieldName}, " +
                $" {StateFieldName}, " +
                $" {ReasonFieldName}, " +
                $" {AssignedToFieldName}, " +
                $" {CreatedDateFieldName}, " +
                $" {CreatedByFieldName}, " +
                $" {ChangedDateFieldName}, " +
                $" {ChangedByFieldName}, " +
                $" {ResolvedDateFieldName}, " +
                $" {ResolvedByFieldName}, " +
                $" {ClosedDateFieldName}, " +
                $" {ClosedByFieldName}, " +
                $" {PriorityFieldName}, " +
                $" {SeverityFieldName} " +
                "FROM workitems"
    });

    // Header first, then one row per referenced work item.
    await InitialiseSink(sink);
    foreach (WorkItemReference workItemReference in queryResult.WorkItems)
    {
        await WriteWorkItemDetails(witClient, workItemReference.Id, sink);
    }
}
/// <summary>
/// Emits the header row for build exports; "Id" is declared as the key column.
/// </summary>
private async Task InitialiseSink(ISink sink)
{
    string[] columns =
    {
        "Id", "BuildNumber", "Definition", "RequestedBy", "RequestedFor",
        "Repository", "QueueTime", "StartTime", "FinishTime",
        "SourceBranch", "SourceVersion", "Result",
        "TotalTests", "TestsPassed", "TestsIgnored",
        "CoveredLines", "TotalLines"
    };

    await sink.InitialiseSink(columns, "Id");
}
/// <summary>
/// Captures the first connecting sink that can supply oxygen and refreshes the
/// status sprites to reflect the new connection.
/// </summary>
public override void OnSinkConnected(ISink s)
{
    if (!s.HasContainerFor(Matter.Oxygen))
    {
        return;
    }

    OxygenIn = s;
    this.SyncStatusSprites();
}
/// <summary>
/// Serializes this object: the base class writes its fields first, then the
/// fields declared at this level are appended.
/// </summary>
/// <param name="s">Destination sink.</param>
/// <param name="msk">Field-selection mask; forwarded to the base — the fields below are written unconditionally.</param>
public override void Write(ISink s, short msk = 0xff)
{
    base.Write(s, msk);
    s.Put(nameof(id), id);
    s.Put(nameof(weburl), weburl);
    s.Put(nameof(secret), secret);
}
/// <summary>
/// Appends a connection that fills the region [offset, offset + size) of the
/// output sink with padding. A zero-length request is a no-op.
/// </summary>
private void AddPaddingConnection(ISink outSink, long offset, long size)
{
    if (size != 0L)
    {
        var padding = new PaddingSource(size);
        var target = new SubSink(outSink, offset, size);
        this.ConnectionList.Add(new Connection(padding, target));
    }
}
/// <summary>
/// Serializes the account fields to the sink.
/// </summary>
/// <param name="s">Destination sink.</param>
/// <param name="msk">Field-selection mask; not consulted here — all fields are always written.</param>
public void Write(ISink s, short msk = 0xff)
{
    s.Put(nameof(seq), seq);
    s.Put(nameof(acct), acct);
    s.Put(nameof(name), name);
    s.Put(nameof(amt), amt);
    s.Put(nameof(bal), bal);
}
/// <summary>
/// Base controller wiring. A scene generator factory is taken (rather than a
/// generator instance) because a specific SceneGenerator might be shared if
/// given as an argument.
/// </summary>
public ControllerBase(IUniverse universe, ISceneTranslator sceneTranslator, ISceneGeneratorFactory sceneGeneratorFactory, ISink channelSink)
{
    Universe = universe;
    SceneTranslator = sceneTranslator;
    _channelSink = channelSink;

    // Order preserved from the original: create, subscribe, then signal startup.
    SceneGenerator = sceneGeneratorFactory.CreateGenerator(Universe);
    SceneGenerator.SceneChanged += SceneChanged;
    SceneGenerator.Signal(StartupSignal.Value);
}
/// <summary>
/// Bundles the routing rule, the processing pipeline, and the destination sink
/// for one record type.
/// </summary>
public RecordPipeline(
    Func<UnparsedRecord, bool> routingPredicate,
    IPropagatorBlock<UnparsedRecord, Validity<ParsedRecord>> pipeline,
    ISink<ParsedRecord> validRecordSink)
{
    Pipeline = pipeline;
    ValidRecordSink = validRecordSink;
    RoutingPredicate = routingPredicate;
}
/// <summary>
/// Log generator wiring: tokens come from <paramref name="extractor"/>, are
/// reshaped by <paramref name="transformer"/>, and are emitted to <paramref name="sink"/>.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when any injected dependency is null.</exception>
public LogGenerator(ITokenExtractor extractor, ITokenTransformer transformer, ISink sink, ILogger<LogGenerator> logger)
{
    // Fail fast on mis-wired DI instead of a NullReferenceException later.
    this.extractor = extractor ?? throw new ArgumentNullException(nameof(extractor));
    this.transformer = transformer ?? throw new ArgumentNullException(nameof(transformer));
    this.sink = sink ?? throw new ArgumentNullException(nameof(sink));
    this.logger = logger ?? throw new ArgumentNullException(nameof(logger));

    // One Random per instance; randomLock presumably guards it because
    // System.Random is not thread-safe — confirm against call sites.
    this.random = new Random();
    this.randomLock = new Object();
}
/// <summary>
/// Configuration bundle for the dataflow builder: the file parser, the
/// per-record-type pipelines, and the sink that receives invalid records.
/// </summary>
public DataflowBuilderConfiguration(
    IFileParser fileParser,
    List<RecordPipeline> recordPipelines,
    ISink<InvalidRecord> invalidRecordSink)
{
    InvalidRecordSink = invalidRecordSink;
    RecordPipelines = recordPipelines;
    FileParser = fileParser;
}
/// <summary>
/// Starts a timer that invokes <c>Next</c> once per <paramref name="period"/>
/// and forwards ticks to <paramref name="sink"/>.
/// </summary>
public Interval(ISink<int> sink, double period)
{
    _sink = sink;

    _timer = new Timer();
    _timer.Interval = period;
    _timer.Elapsed += Next;  // handler wired before the timer is enabled
    _timer.Enabled = true;   // starts ticking immediately
}
/// <summary>
/// Composes a tap → transformer → sink data flow.
/// </summary>
public DbDataFlow(
    ITap<TSource> tap,
    ITransformer<TSource, TTransformed> transformer,
    ISink<TTransformed> sink)
{
    _sink = sink;
    _transformer = transformer;
    _tap = tap;
}
/// <summary>
/// Appends <paramref name="addedSink"/> to the end of the chain rooted at
/// <paramref name="firstSink"/>. Either argument may be null, in which case
/// the other one is returned unchanged.
/// </summary>
/// <returns>The head of the combined chain.</returns>
private ISink AddSink(ISink firstSink, ISink addedSink)
{
    if (firstSink == null)
    {
        return addedSink;
    }
    if (addedSink == null)
    {
        return firstSink;
    }

    // Walk to the tail of the chain and hang the new sink off it.
    var tail = firstSink;
    while (tail.NextSink != null)
    {
        tail = tail.NextSink;
    }
    tail.NextSink = addedSink;

    return firstSink;
}
/// <summary>
/// A named sender channel: an optional decoration chain followed by the
/// mandatory sender sink, with a dispatch sink appended to the chain's tail.
/// </summary>
/// <exception cref="ArgumentNullException">When <paramref name="senderName"/> or <paramref name="context"/> is null.</exception>
/// <exception cref="ApplicationException">When <paramref name="senderSink"/> is null (type kept for caller compatibility).</exception>
public SenderChannel(Context context, string senderName, ISink decorateSink, ISink senderSink)
{
    // nameof keeps the parameter names in the exceptions refactor-safe
    // (the original used string literals).
    if (senderName == null)
        throw new ArgumentNullException(nameof(senderName));
    if (context == null)
        throw new ArgumentNullException(nameof(context));
    if (senderSink == null)
        throw new ApplicationException(string.Format("channel with tag {0} not created sender sink", senderName));

    this.context = context;
    this.SenderName = senderName;
    this.firstSink = decorateSink;
    this.senderSink = senderSink;

    // Append a dispatch sink to the (possibly empty) decoration chain.
    var dispatcherSink = new DispatchSink(SenderName, this.context.DispatchEngine);
    this.firstSink = AddSink(firstSink, dispatcherSink);
}
/// <summary>
/// Main job loop: each cycle reads a data set from the source, runs it through
/// the intermedia filters in order, writes each resulting item to the sink,
/// then waits <paramref name="interval"/> ms. An empty result at any stage
/// skips the rest of the cycle (the delay still runs, as in the original
/// goto-based flow). Runs until the token is cancelled; any other exception
/// stops the job and is logged as an error.
/// </summary>
public async Task JobEntry(string name, int interval, ISource source, IEnumerable<IIntermedia> intermedias, ISink sink, CancellationToken ct)
{
    try
    {
        while (true)
        {
            // Read data set from source filter.
            DataSet dataset = await source.Read(ct);

            // Process only non-empty reads (null also signals the filter is stopping).
            if (dataset != null && dataset.Any())
            {
                foreach (var intermedia in intermedias)
                {
                    // DeviceId is passed so the processed output carries its origin.
                    dataset = await intermedia.Process(dataset, ct, DeviceId);
                    if (dataset == null || !dataset.Any())
                    {
                        // Filter drained the set — skip the remaining work this cycle.
                        break;
                    }
                }

                if (dataset != null && dataset.Any())
                {
                    // Individually sink each data, to help down-stream Job in aggregating.
                    foreach (var data in dataset)
                    {
                        await sink.Write(data, ct);
                    }
                }
            }

            await Task.Delay(interval, ct);
        }
    }
    catch (OperationCanceledException)
    {
        Trace.TraceInformation(string.Format(CultureInfo.InvariantCulture, "Job {0}.{1} finished", DeviceId, name));
    }
    catch (Exception ex)
    {
        Trace.TraceError(string.Format(CultureInfo.InvariantCulture, "Job {0}.{1} stopped due to exception {2}", DeviceId, name, ex.ToString()));
    }
}
/// <summary>
/// Creates a sender channel (no decoration chain) for the given name and
/// registers it with the context's sender holder.
/// </summary>
public void RegisterSender(string senderName, ISink senderSink)
{
    var channel = new SenderChannel(context, senderName, null, senderSink);
    context.SenderHolder.RegisterSender(channel);
}
/// <summary>
/// Registers a sink under the given overlay GUID. The console output is
/// diagnostic tracing and is preserved byte-for-byte.
/// </summary>
public void register(Guid guid, ISink sink)
{
    Console.WriteLine("TransportLayerCommunicator::register ADD to overlayRegistry");

    overlayRegistry.Add(guid, sink);

    // Single WriteLine produces the same bytes as the original Write + WriteLine pair.
    Console.WriteLine("TransportLayerCommunicator::register overlayRegistry count = " + overlayRegistry.Count);
}