public void PsiStoreStreamReaderLargeStream()
{
    var count = 10;
    var name = nameof(this.PsiStoreStreamReaderLargeStream);
    var size = 10240;
    var bytes = new byte[size];

    using (var p = Pipeline.Create("write"))
    {
        var writeStore = PsiStore.Create(p, name, this.path);
        var seq = Generators.Sequence(p, 0, i => i + 1, count, TimeSpan.FromTicks(1));
        var big = seq.Select(i => bytes.Select(_ => i).ToArray());
        seq.Write("seq", writeStore);
        big.Write("big", writeStore, largeMessages: true);
        p.Run();
    }

    // now replay the contents and verify we get something
    var index = new List<Func<IStreamReader, int[]>>();

    // now read using the store stream reader
    using (var reader = new PsiStoreStreamReader(name, this.path))
    {
        reader.OpenStreamIndex<int[]>("big", (ie, e) => index.Add(ie));
        reader.ReadAll(ReplayDescriptor.ReplayAll);
        Assert.AreEqual(count, index.Count());

        var probe = count / 2;
        var entry = index[probe];
        var result = entry(reader);
        Assert.AreEqual(result.Sum(x => x), probe * size);
    }
}
public void AudioBuffer_Persist()
{
    byte[] rawBytes = new byte[] { 1, 2, 3, 4, 5, 6 };
    var wf = WaveFormatEx.Create(WaveFormatTag.WAVE_FORMAT_APTX, 16000, 16, 2, 0, 16000);
    AudioBuffer buffer = new AudioBuffer(rawBytes, wf);
    AudioBuffer bresult = default(AudioBuffer);

    var p1 = Pipeline.Create();
    var store = PsiStore.Create(p1, "audio", null);
    Generators.Return(p1, buffer).Write("audio", store);
    p1.RunAsync();

    var p2 = Pipeline.Create();
    var store2 = PsiStore.Open(p2, "audio", null);
    store2.OpenStream<AudioBuffer>("audio").Do(b => bresult = b);
    p2.RunAsync();

    System.Threading.Thread.Sleep(100);
    p1.Dispose();
    p2.Dispose();

    Assert.AreEqual(6, bresult.Length);
    Assert.AreEqual(6, bresult.Data.Length);
    Assert.AreEqual(wf, bresult.Format);
    CollectionAssert.AreEqual(rawBytes, bresult.Data);
}
private static void GenerateTestStore(string storeName, string storePath)
{
    using var p = Pipeline.Create();
    var store = PsiStore.Create(p, storeName, storePath);
    var root = Generators.Sequence(p, 0, i => i + 1, 10, TimeSpan.FromTicks(1)).Write("Root", store);
    p.Run();
}
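// A minimal read-back sketch for the store generated above, assuming the same
// storeName/storePath values are passed in; the VerifyTestStore method name is
// illustrative only and not part of the original test suite.
private static void VerifyTestStore(string storeName, string storePath)
{
    using var p = Pipeline.Create();
    var store = PsiStore.Open(p, storeName, storePath);
    var values = store.OpenStream<int>("Root").ToEnumerable();
    p.Run();

    // GenerateTestStore wrote the sequence 0..9 on the "Root" stream
    Assert.IsTrue(values.SequenceEqual(Enumerable.Range(0, 10)));
}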
public void PsiStoreStreamReader()
{
    var count = 100;
    var before = new Envelope[count];
    var after = new Envelope[count];
    var name = nameof(this.PsiStoreStreamReader);

    using (var p = Pipeline.Create("write"))
    {
        var writeStore = PsiStore.Create(p, name, this.path);
        var seq = Generators.Sequence(p, 0, i => i + 1, count, TimeSpan.FromTicks(1));
        seq.Write("seq", writeStore);
        seq.Do((m, e) => before[m] = e);
        p.Run();
    }

    // now read using the store stream reader
    using (var reader = new PsiStoreStreamReader(name, this.path))
    {
        reader.OpenStream<int>("seq", (s, e) => after[s] = e);
        reader.ReadAll(ReplayDescriptor.ReplayAll);
    }

    for (int i = 0; i < count; i++)
    {
        Assert.AreEqual(before[i], after[i]);
    }
}
/// <summary>
/// Builds and runs a webcam pipeline and records the data to a Psi store.
/// </summary>
/// <param name="pathToStore">The path to the directory where the store should be saved.</param>
public static void RecordAudioVideo(string pathToStore)
{
    // Create the pipeline object.
    using (Pipeline pipeline = Pipeline.Create())
    {
        // Register an event handler to catch pipeline errors
        pipeline.PipelineExceptionNotHandled += Pipeline_PipelineException;

        // Register an event handler to be notified when the pipeline completes
        pipeline.PipelineCompleted += Pipeline_PipelineCompleted;

        // Create store
        var store = PsiStore.Create(pipeline, ApplicationName, pathToStore);

        // Create our webcam
        var webcam = new MediaCapture(pipeline, 1920, 1080, 30);

        // Create the AudioCapture component to capture audio from the default device in 16 kHz, 1-channel format
        IProducer<AudioBuffer> audioInput = new AudioCapture(pipeline, WaveFormat.Create16kHz1Channel16BitPcm());

        var images = webcam.Out.EncodeJpeg(90, DeliveryPolicy.LatestMessage).Out;

        // Attach the webcam's image output to the store. We will write the images to the store as compressed JPEGs.
        images.Write("Image", store, true, DeliveryPolicy.LatestMessage);

        // Attach the audio input to the store
        audioInput.Out.Write("Audio", store, true, DeliveryPolicy.LatestMessage);

        // Run the pipeline
        pipeline.RunAsync();

        Console.WriteLine("Press any key to finish recording");
        Console.ReadKey();
    }
}
/// <summary>
/// Method that generates a store for cross-serialization tests.
/// </summary>
/// <remarks>
/// This method will be invoked in a separate process by the <see cref="CrossFrameworkDeserialize"/>
/// test method to generate a store to test deserialization across different .NET frameworks.
/// </remarks>
public void CrossFrameworkSerialize()
{
    int intValue = 0x7777AAA;
    byte byteValue = 0xBB;
    bool boolValue = true;
    short shortValue = 0x7CDD;
    long longValue = 0x77777777EEEEEEEE;
    char charValue = 'G';
    string stringValue = "This is a test.";
    double doubleValue = Math.PI;
    float floatValue = -1.234f;
    float[] floatArray = new[] { 0.1f, 2.3f };
    List<string> stringList = new List<string> { "one", "two" };
    ArraySegment<string> stringArraySegment = new ArraySegment<string>(new[] { "aaa", "bbb", "ccc" }, 1, 2);
    Queue<TimeSpan> queue = new Queue<TimeSpan>(new[] { TimeSpan.Zero, TimeSpan.FromSeconds(1) });
    EqualityComparer<int> intComparer = EqualityComparer<int>.Default;
    Tuple<long, string> tuple = Tuple.Create(0x77777777EEEEEEEE, "This is a tuple.");
    (DateTime, Stack<int>) valueTuple = (new DateTime(2020, 1, 2), new Stack<int>(new[] { 33, 782 }));
    Array intArray = new[] { 0, 3 };
    ICollection stringArray = new[] { "three", "four" };
    IEqualityComparer enumComparer = EqualityComparer<DayOfWeek>.Default;
    Dictionary<string, int> dictionary = new Dictionary<string, int> { { "one", 1 }, { "two", 2 } };

    using (var p = Pipeline.Create())
    {
        var store = PsiStore.Create(p, "Store1", this.testPath);
        Generators.Return(p, intValue).Write("int", store);
        Generators.Return(p, byteValue).Write("byte", store);
        Generators.Return(p, boolValue).Write("bool", store);
        Generators.Return(p, shortValue).Write("short", store);
        Generators.Return(p, longValue).Write("long", store);
        Generators.Return(p, charValue).Write("char", store);
        Generators.Return(p, stringValue).Write("string", store);
        Generators.Return(p, doubleValue).Write("double", store);
        Generators.Return(p, floatValue).Write("float", store);
        Generators.Return(p, floatArray).Write("floatArray", store);
        Generators.Return(p, stringList).Write("stringList", store);
        Generators.Return(p, stringArraySegment).Write("stringArraySegment", store);
        Generators.Return(p, queue).Write("queue", store);
        Generators.Return(p, intComparer).Write("intComparer", store);
        Generators.Return(p, tuple).Write("tuple", store);
        Generators.Return(p, valueTuple).Write("dateStackTuple", store);
        Generators.Return(p, intArray).Write("intArray", store);
        Generators.Return(p, stringArray).Write("stringArray", store);
        Generators.Return(p, enumComparer).Write("enumComparer", store);
        Generators.Return(p, dictionary).Write("dictionary", store);
        p.Run();
    }

    // retain test store for cross-framework tests to run against after this process exits
    this.cleanupTestFolder = false;
}
/// <summary>
/// Creates a data store to log stream data to. A data store may be persisted on disk (if outputLogPath is defined),
/// or it may be an in-memory volatile store. The latter is only required if we are visualizing live data, and
/// only if we are not already logging data to a persisted store.
/// </summary>
/// <param name="pipeline">The Psi pipeline associated with the store.</param>
/// <param name="outputLogPath">The path to a folder in which a persistent store will be created.</param>
/// <returns>The store Exporter object if a store was successfully created; otherwise null.</returns>
private static Exporter CreateDataStore(Pipeline pipeline, string outputLogPath = null)
{
    // If this is a persisted store, use the application name as the store name. Otherwise, generate
    // a unique temporary name for the volatile store only if we are visualizing live data.
    string dataStoreName = (outputLogPath != null) ? Program.AppName : null;

    // Create the store only if it is needed (logging to disk).
    return (dataStoreName != null) ? PsiStore.Create(pipeline, dataStoreName, outputLogPath) : null;
}
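// A minimal caller sketch for CreateDataStore: write a stream only when a store was
// actually created (i.e., outputLogPath was given). The RunWithOptionalLogging method
// name, the Generators.Sequence stream, and the "Counter" stream name are illustrative
// assumptions, not part of the original sample.
private static void RunWithOptionalLogging(string outputLogPath = null)
{
    using var pipeline = Pipeline.Create();
    var store = CreateDataStore(pipeline, outputLogPath);
    var counter = Generators.Sequence(pipeline, 0, i => i + 1, 10, TimeSpan.FromMilliseconds(100));

    // CreateDataStore returns null when no logging is requested, so guard the Write
    if (store != null)
    {
        counter.Write("Counter", store);
    }

    pipeline.Run();
}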
#pragma warning disable IDE0060 // Remove unused parameter
static void Main(string[] args)
#pragma warning restore IDE0060 // Remove unused parameter
{
    var storeName = ConfigurationManager.AppSettings["PsiStoreName"];
    var storePath = ConfigurationManager.AppSettings["PsiStorePath"];

    using var msPipe = Pipeline.Create(deliveryPolicy: DeliveryPolicy.LatestMessage);
    var storeRSG = PsiStore.Create(msPipe, storeName, storePath);

    RealSenseGenerator rsg = new RealSenseGenerator(msPipe);
    rsg.OutDepthImage.Write(nameof(rsg.OutDepthImage), storeRSG);
    rsg.OutDepthImageColorized.Write(nameof(rsg.OutDepthImageColorized), storeRSG);
    rsg.OutRBGImage.Write(nameof(rsg.OutRBGImage), storeRSG);

    var subKey = ConfigurationManager.AppSettings["CognitiveSubKey"];
    var region = ConfigurationManager.AppSettings["CognitiveRegion"];
    VisualFeatureTypes[] featViz = { VisualFeatureTypes.Objects };
    ImageAnalyzerConfiguration zerConfig = new ImageAnalyzerConfiguration(subKey, region, featViz);
    ImageAnalyzer zer = new ImageAnalyzer(msPipe, zerConfig);

    rsg.OutRBGImage
        .Where(shImg => shImg != null && shImg.Resource != null)
        .PipeTo(zer.In);

    zer.Out
        .ExtractDetectedObjects()
        .Write("DetectedObjects.AllFeatures", storeRSG)
        .Select(tupleList => tupleList.Count)
        .Write("DetectedObjects.Count", storeRSG);

    msPipe.RunAsync();
    Console.WriteLine("Press any key to exit...");
    Console.ReadKey();
}
public void Enumerables()
{
    try
    {
        using (var p = Pipeline.Create())
        {
            var store = PsiStore.Create(p, "Store", null);
            Generators.Return(p, new double[] { 1, 2, 3 }).Select(l => l.Select(d => d + 1)).Write("Test", store);
            p.Run();
        }
    }
    catch (AggregateException ex)
    {
        Assert.AreEqual(1, ex.InnerExceptions.Count);
        Assert.IsTrue(ex.InnerExceptions[0].GetType() == typeof(NotSupportedException));
        Assert.IsTrue(ex.InnerExceptions[0].Message.StartsWith("Cannot clone Func"));
        return;
    }

    Assert.Fail("Should have thrown above");
}
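// The failure above occurs because the lazy LINQ enumerable captures a Func that the
// serializer cannot clone. A sketch of the usual fix, materializing the projection into
// an array before writing; the Enumerables_Materialized method name is illustrative,
// but arrays are known to serialize (see the floatArray stream in CrossFrameworkSerialize).
public void Enumerables_Materialized()
{
    using (var p = Pipeline.Create())
    {
        var store = PsiStore.Create(p, "Store", null);
        Generators.Return(p, new double[] { 1, 2, 3 })
            .Select(l => l.Select(d => d + 1).ToArray()) // double[] serializes without issue
            .Write("Test", store);
        p.Run();
    }
}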
private dynamic InstanceToDynamic<T>(T instance)
{
    // Rube Goldberg machine to convert instance to dynamic by
    // writing to a store (typed) and reading back as dynamic
    using (var p = Pipeline.Create())
    {
        var gen = Generators.Return(p, instance);
        var exporter = PsiStore.Create(p, "Test", this.path);
        exporter.Write(gen.Out, "Data", true);
        p.Run();
    }

    using (var p = Pipeline.Create())
    {
        var importer = PsiStore.Open(p, "Test", this.path);
        var data = importer.OpenDynamicStream("Data");
        var result = data.ToEnumerable();
        p.Run();
        return result.First();
    }
}
/// <summary>
/// Method that generates a store for cross-serialization tests.
/// </summary>
/// <remarks>
/// This method will be invoked in a separate process by the <see cref="CrossFrameworkDeserializeMembers"/>
/// test method to generate a store to test deserialization across different .NET frameworks.
/// </remarks>
public void CrossFrameworkSerializeMembers()
{
    using (var p = Pipeline.Create())
    {
        var testObj = new TypeMembers
        {
            IntValue = 0x7777AAA,
            ByteValue = 0xBB,
            BoolValue = true,
            ShortValue = 0x7CDD,
            LongValue = 0x77777777EEEEEEEE,
            CharValue = 'G',
            StringValue = "This is a test.",
            DoubleValue = Math.PI,
            FloatValue = -1.234f,
            FloatArray = new[] { 0.1f, 2.3f },
            StringList = new List<string> { "one", "two" },
            StringArraySegment = new ArraySegment<string>(new[] { "aaa", "bbb", "ccc" }, 1, 2),
            Queue = new Queue<TimeSpan>(new[] { TimeSpan.Zero, TimeSpan.FromSeconds(1) }),
            IntComparer = EqualityComparer<int>.Default,
            Tuple = Tuple.Create(0x77777777EEEEEEEE, "This is a tuple."),
            ValueTuple = (new DateTime(2020, 1, 2), new Stack<int>(new[] { 33, 782 })),
            IntArray = new[] { 0, 3 },
            StringArray = new[] { "three", "four" },
            EnumComparer = EqualityComparer<DayOfWeek>.Default,
            Dictionary = new Dictionary<string, int> { { "one", 1 }, { "two", 2 } },
        };

        var store = PsiStore.Create(p, "Store2", this.testPath);
        Generators.Return(p, testObj).Write("TypeMembers", store);
        p.Run();
    }

    // retain test store for cross-framework tests to run against after this process exits
    this.cleanupTestFolder = false;
}
public void RetrieveStreamSupplementalMetadata()
{
    var name = nameof(this.RetrieveStreamSupplementalMetadata);

    // create store with supplemental meta
    using (var p = Pipeline.Create("write"))
    {
        var store = PsiStore.Create(p, name, this.path);
        var stream0 = Generators.Range(p, 0, 10, TimeSpan.FromTicks(1));
        var stream1 = Generators.Range(p, 0, 10, TimeSpan.FromTicks(1));
        stream0.Write("NoMeta", store, true);
        stream1.Write(("Favorite irrational number", Math.E), "WithMeta", store);
    }

    // read it back with a store stream reader
    var reader = new PsiStoreStreamReader(name, this.path);
    Assert.IsNull(reader.GetStreamMetadata("NoMeta").SupplementalMetadataTypeName);
    Assert.AreEqual(typeof(ValueTuple<string, double>).AssemblyQualifiedName, reader.GetStreamMetadata("WithMeta").SupplementalMetadataTypeName);

    var supplemental1 = reader.GetSupplementalMetadata<(string, double)>("WithMeta");
    Assert.AreEqual("Favorite irrational number", supplemental1.Item1);
    Assert.AreEqual(Math.E, supplemental1.Item2);
}
/// <summary>
/// Initializes a new instance of the <see cref="RemoteExporter"/> class.
/// </summary>
/// <param name="pipeline">Pipeline to which to attach.</param>
/// <param name="port">TCP port on which to listen (default 11411).</param>
/// <param name="transport">Transport kind to use.</param>
/// <param name="maxBytesPerSecond">Maximum bytes/sec quota (default infinite).</param>
/// <param name="bytesPerSecondSmoothingWindowSeconds">Smoothing window over which to compute bytes/sec (default 5 sec.).</param>
public RemoteExporter(Pipeline pipeline, int port = DefaultPort, TransportKind transport = DefaultTransport, long maxBytesPerSecond = long.MaxValue, double bytesPerSecondSmoothingWindowSeconds = 5.0)
    : this(PsiStore.Create(pipeline, $"RemoteExporter_{Guid.NewGuid().ToString()}", null, true), port, transport, maxBytesPerSecond, bytesPerSecondSmoothingWindowSeconds)
{
}
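// A hedged usage sketch: the constructor above backs the exporter with a transient
// volatile store, so streams written through it are relayed to remote clients rather
// than persisted. The "Ticks" stream name is illustrative; the Exporter property and
// exact importer-side API are assumptions to be confirmed against the
// Microsoft.Psi.Remoting documentation.
// using (var p = Pipeline.Create())
// {
//     var remoteExporter = new RemoteExporter(p, transport: TransportKind.Tcp);
//     var ticks = Generators.Sequence(p, 0, i => i + 1, 100, TimeSpan.FromSeconds(1));
//     remoteExporter.Exporter.Write(ticks.Out, "Ticks");
//     p.RunAsync();
//     Console.ReadKey();
// }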
public async Task TranscribeConversationsAsync(IEnumerable<string> voiceSignatureStringUsers)
{
    uint samplesPerSecond = 16000;
    byte bitsPerSample = 16;
    byte channels = 8; // 7 + 1 channels

    var config = SpeechConfig.FromSubscription(this.SubscriptionKey, this.Region);
    config.SetProperty("ConversationTranscriptionInRoomAndOnline", "true");
    var stopRecognition = new TaskCompletionSource<int>();

    using (var audioInput = AudioInputStream.CreatePushStream(AudioStreamFormat.GetWaveFormatPCM(samplesPerSecond, bitsPerSample, channels)))
    {
        var meetingID = Guid.NewGuid().ToString();
        using (var conversation = await Conversation.CreateConversationAsync(config, meetingID))
        {
            // create a conversation transcriber using audio stream input
            using (this.conversationTranscriber = new ConversationTranscriber(AudioConfig.FromStreamInput(audioInput)))
            {
                conversationTranscriber.Transcribing += (s, e) =>
                {
                    this.SetText($"TRANSCRIBING: Text={e.Result.Text} SpeakerId={e.Result.UserId}");
                };

                conversationTranscriber.Transcribed += (s, e) =>
                {
                    if (e.Result.Reason == ResultReason.RecognizedSpeech)
                    {
                        this.SetText($"TRANSCRIBED: Text={e.Result.Text} SpeakerId={e.Result.UserId}");
                    }
                    else if (e.Result.Reason == ResultReason.NoMatch)
                    {
                        this.SetText($"NOMATCH: Speech could not be recognized.");
                    }
                };

                conversationTranscriber.Canceled += (s, e) =>
                {
                    this.SetText($"CANCELED: Reason={e.Reason}");
                    if (e.Reason == CancellationReason.Error)
                    {
                        this.SetText($"CANCELED: ErrorCode={e.ErrorCode}");
                        this.SetText($"CANCELED: ErrorDetails={e.ErrorDetails}");
                        this.SetText($"CANCELED: Did you update the subscription info?");
                        stopRecognition.TrySetResult(0);
                    }
                };

                conversationTranscriber.SessionStarted += (s, e) =>
                {
                    this.SetText($"\nSession started event. SessionId={e.SessionId}");
                };

                conversationTranscriber.SessionStopped += (s, e) =>
                {
                    this.SetText($"\nSession stopped event. SessionId={e.SessionId}");
                    this.SetText("\nStop recognition.");
                    stopRecognition.TrySetResult(0);
                };

                // Add participants to the conversation.
                int i = 1;
                foreach (var voiceSignatureStringUser in voiceSignatureStringUsers)
                {
                    var speaker = Participant.From($"User{i++}", "en-US", voiceSignatureStringUser);
                    await conversation.AddParticipantAsync(speaker);
                }

                // Join the conversation and start transcribing
                await conversationTranscriber.JoinConversationAsync(conversation);
                await conversationTranscriber.StartTranscribingAsync().ConfigureAwait(false);

                using (var p = Pipeline.Create())
                {
                    var store = PsiStore.Create(p, "Transcribe", @"D:\Temp");
                    var capture = new AudioCapture(p, WaveFormat.CreatePcm((int)samplesPerSecond, bitsPerSample, channels)).Write("Audio", store);
                    capture.Do(audio => audioInput.Write(audio.Data));
                    p.RunAsync();

                    // wait for completion, then stop transcription
                    await stopRecognition.Task;
                }

                await conversationTranscriber.StopTranscribingAsync().ConfigureAwait(false);
            }
        }
    }
}
/// <summary>
/// This is the main code for our Multimodal Speech Detection demo.
/// </summary>
private void PerformMultiModalSpeechDetection()
{
    Console.WriteLine("Initializing Psi.");

    bool detected = false;

    // First create our \Psi pipeline
    using (var pipeline = Pipeline.Create("MultiModalSpeechDetection"))
    {
        // Register an event handler to catch pipeline errors
        pipeline.PipelineExceptionNotHandled += Pipeline_PipelineException;

        // Register an event handler to be notified when the pipeline completes
        pipeline.PipelineCompleted += Pipeline_PipelineCompleted;

        // Next create our Kinect sensor. We will be using the color images, face tracking, and audio from the Kinect sensor
        var kinectSensorConfig = new KinectSensorConfiguration
        {
            OutputColor = true,
            OutputAudio = true,
            OutputBodies = true, // In order to detect faces using Kinect you must also enable detection of bodies
        };
        var kinectSensor = new KinectSensor(pipeline, kinectSensorConfig);
        var kinectFaceDetector = new Microsoft.Psi.Kinect.Face.KinectFaceDetector(pipeline, kinectSensor, Microsoft.Psi.Kinect.Face.KinectFaceDetectorConfiguration.Default);

        // Create our Voice Activity Detector
        var speechDetector = new SystemVoiceActivityDetector(pipeline);
        var convertedAudio = kinectSensor.Audio.Resample(WaveFormat.Create16kHz1Channel16BitPcm());
        convertedAudio.PipeTo(speechDetector);

        // Use the Kinect's face tracking to determine if the mouth is open
        var mouthOpenAsFloat = kinectFaceDetector.Faces.Where(faces => faces.Count > 0).Select((List<Microsoft.Psi.Kinect.Face.KinectFace> list) =>
        {
            if (!detected)
            {
                detected = true;
                Console.WriteLine("Found your face");
            }

            bool open = (list[0] != null) ? list[0].FaceProperties[Microsoft.Kinect.Face.FaceProperty.MouthOpen] == Microsoft.Kinect.DetectionResult.Yes : false;
            return open ? 1.0 : 0.0;
        });

        // Next take the "mouthOpen" value and create a hold on that value (so that we don't see 1,0,1,0,1 but instead would see 1,1,1,1,0.8,0.6,0.4)
        var mouthOpen = mouthOpenAsFloat.Hold(0.1);

        // Next join the results of the speechDetector with the mouthOpen generator and only select samples
        // where we have detected speech and the mouth was open. The definition of the join tolerance was
        // elided from the original snippet; a 100 ms tolerance is assumed here.
        var hundredMs = TimeSpan.FromSeconds(0.1);
        var mouthAndSpeechDetector = speechDetector.Join(mouthOpen, hundredMs).Select((t, e) => t.Item1 && t.Item2);

        // Convert our speech into text
        var speechRecognition = convertedAudio.SpeechToText(mouthAndSpeechDetector);
        speechRecognition.Do((s, t) =>
        {
            if (s.Item1.Length > 0)
            {
                Console.WriteLine("You said: " + s.Item1);
            }
        });

        // Create a stream of landmarks (points) from the face detector
        var facePoints = new List<Tuple<System.Windows.Point, string>>();
        var landmarks = kinectFaceDetector.Faces.Where(faces => faces.Count > 0).Select((List<Microsoft.Psi.Kinect.Face.KinectFace> list) =>
        {
            facePoints.Clear();
            System.Windows.Point pt1 = new System.Windows.Point(
                list[0].FacePointsInColorSpace[Microsoft.Kinect.Face.FacePointType.EyeLeft].X,
                list[0].FacePointsInColorSpace[Microsoft.Kinect.Face.FacePointType.EyeLeft].Y);
            facePoints.Add(Tuple.Create(pt1, string.Empty));

            System.Windows.Point pt2 = new System.Windows.Point(
                list[0].FacePointsInColorSpace[Microsoft.Kinect.Face.FacePointType.EyeRight].X,
                list[0].FacePointsInColorSpace[Microsoft.Kinect.Face.FacePointType.EyeRight].Y);
            facePoints.Add(Tuple.Create(pt2, string.Empty));

            System.Windows.Point pt3 = new System.Windows.Point(
                list[0].FacePointsInColorSpace[Microsoft.Kinect.Face.FacePointType.MouthCornerLeft].X,
                list[0].FacePointsInColorSpace[Microsoft.Kinect.Face.FacePointType.MouthCornerLeft].Y);
            facePoints.Add(Tuple.Create(pt3, string.Empty));

            System.Windows.Point pt4 = new System.Windows.Point(
                list[0].FacePointsInColorSpace[Microsoft.Kinect.Face.FacePointType.MouthCornerRight].X,
                list[0].FacePointsInColorSpace[Microsoft.Kinect.Face.FacePointType.MouthCornerRight].Y);
            facePoints.Add(Tuple.Create(pt4, string.Empty));

            System.Windows.Point pt5 = new System.Windows.Point(
                list[0].FacePointsInColorSpace[Microsoft.Kinect.Face.FacePointType.Nose].X,
                list[0].FacePointsInColorSpace[Microsoft.Kinect.Face.FacePointType.Nose].Y);
            facePoints.Add(Tuple.Create(pt5, string.Empty));

            return facePoints;
        });

        // ********************************************************************
        // Finally create a Live Visualizer using PsiStudio.
        // We must persist our streams to a store in order for Live Viz to work properly
        // ********************************************************************

        // Create store for the data. Live Visualizer can only read data from a store.
        var pathToStore = Environment.GetFolderPath(Environment.SpecialFolder.MyVideos);
        var store = PsiStore.Create(pipeline, ApplicationName, pathToStore);

        mouthOpen.Select(v => v ? 1d : 0d).Write("MouthOpen", store);
        speechDetector.Select(v => v ? 1d : 0d).Write("VAD", store);
        mouthAndSpeechDetector.Write("Join(MouthOpen,VAD)", store);
        kinectSensor.Audio.Write("Audio", store);

        var images = kinectSensor.ColorImage.EncodeJpeg(90, DeliveryPolicy.LatestMessage).Out;
        images.Write("Images", store, true, DeliveryPolicy.LatestMessage);
        landmarks.Write("FaceLandmarks", store);

        // Run the pipeline
        pipeline.RunAsync();

        Console.WriteLine("Press any key to finish recording");
        Console.ReadKey();
    }
}
public void CreateAndStartPipeline()
{
    this.pipeline = Pipeline.Create("Teams Pipeline", enableDiagnostics: this.botSettings.EnablePsiDiagnostics);
    this.frameSourceComponent = new FrameSourceComponent(this.pipeline, logger);

    var mpegConfig = Mpeg4WriterConfiguration.Default;
    mpegConfig.ContainsAudio = false;
    mpegConfig.ImageWidth = (uint)this.botSettings.Resize.Width;
    mpegConfig.ImageHeight = (uint)this.botSettings.Resize.Height;
    mpegConfig.PixelFormat = PixelFormat.BGR_24bpp;

    var basePath = this.botSettings.RecordingFilePath;
    var endpointUrl = this.botSettings.ModelEndpointUrl;

    // input is stream of frames
    var resized = frameSourceComponent
        .Video
        .Select(v => v.First().Value) // discarding participant id - this means that no information of participant is carried forward
        .Resize(this.botSettings.Resize.Width, this.botSettings.Resize.Height)
        .Name("Resized Frames");

    var fileNames = resized
        .WriteMP4InBatches(TimeSpan.FromSeconds(this.botSettings.VideoSegmentationIntervalInSeconds), basePath, mpegConfig)
        .Name("FileNames");

    // output is stream of labels
    var labelStream = fileNames
        .CallModel(endpointUrl, basePath, logger).Name("Model Result")
        .Do(l => this.logger.Info($"file: {l.filename} label: {l.label}"));

    labelStream.Item2()
        .PerformTextToSpeech(this.ttsSettings, this.logger).Name("Text To Speech")
        .Do(bytes => this.sendAudioToBot(CreateAudioMediaBuffers(DateTime.UtcNow.Ticks, bytes))).Name("Send Audio To Bot");

    Generators
        .Repeat(pipeline, true, TimeSpan.FromSeconds(1.0 / 15)).Name("15fps generation event")
        .Pair(labelStream, DeliveryPolicy.LatestMessage, DeliveryPolicy.LatestMessage)
        .Do(f =>
        {
            try
            {
                var text = f.Item3;
                using (var sharedImage = ProduceScreenShare(text))
                {
                    var image = sharedImage.Resource;
                    var nv12 = BGRAtoNV12(image.ImageData, image.Width, image.Height);
                    this.sendScreenShareToBot(nv12);
                }
            }
            catch (Exception ex)
            {
                this.logger.Error(ex, "Error while screen sharing");
            }
        }).Name("Screen Share to bot");

    if (this.botSettings.EnablePsiStore)
    {
        var store = PsiStore.Create(pipeline, "Bot", this.botSettings.PsiStorePath);

        // resized.Write("video", store);
        labelStream.Write("label", store);

        if (this.botSettings.EnablePsiDiagnostics)
        {
            pipeline.Diagnostics.Write("Diagnostics", store);
        }
    }

    this.pipeline.PipelineExceptionNotHandled += (_, ex) =>
    {
        this.logger.Error(ex.Exception, $"PSI PIPELINE ERROR: {ex.Exception.Message}");
    };

    pipeline.RunAsync();
}