        protected override void OpenMediaAsync()
        {
            // Initialize data structures to pass to the Media pipeline via the MediaStreamSource
            Dictionary<MediaSourceAttributesKeys, string> mediaSourceAttributes = new Dictionary<MediaSourceAttributesKeys, string>();
            Dictionary<MediaStreamAttributeKeys, string>  mediaStreamAttributes = new Dictionary<MediaStreamAttributeKeys, string>();
            List<MediaStreamDescription> mediaStreamDescriptions = new List<MediaStreamDescription>();

            CameraStreamSourceDataSingleton dataSource = CameraStreamSourceDataSingleton.Instance;

            mediaStreamAttributes[MediaStreamAttributeKeys.VideoFourCC] = "RGBA";
            mediaStreamAttributes[MediaStreamAttributeKeys.Width]       = dataSource.FrameWidth.ToString();
            mediaStreamAttributes[MediaStreamAttributeKeys.Height]      = dataSource.FrameHeight.ToString();

            videoStreamDescription = new MediaStreamDescription(MediaStreamType.Video, mediaStreamAttributes);
            mediaStreamDescriptions.Add(videoStreamDescription);

            // A zero duration indicates an infinite (live) video stream.
            mediaSourceAttributes[MediaSourceAttributesKeys.Duration] =
                TimeSpan.FromSeconds(0).Ticks.ToString(CultureInfo.InvariantCulture);

            mediaSourceAttributes[MediaSourceAttributesKeys.CanSeek] = false.ToString();

            // Time per frame in ticks; zero here, so the sample timestamps reported in
            // GetSampleAsync do not advance between frames.
            frameTime = (int)TimeSpan.FromSeconds(0).Ticks;

            // Report that the media source has finished initializing its internal
            // state and can now accept frame samples.
            ReportOpenMediaCompleted(mediaSourceAttributes, mediaStreamDescriptions);
        }
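
        // Not shown in this excerpt: the member fields used by the methods in this class.
        // A minimal sketch of how they might be declared (the names come from their usage
        // here; the exact types and initial values are assumptions):
        private MediaStreamDescription videoStreamDescription;      // video stream reported in OpenMediaAsync
        private readonly Dictionary<MediaSampleAttributeKeys, string> emptySampleDict =
            new Dictionary<MediaSampleAttributeKeys, string>();     // no per-sample attributes are used
        private int frameTime;                                      // time increment per frame, in ticks
        private long currentTime;                                   // timestamp of the current sample
        private long frameStreamOffset;                             // read offset within the frame stream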
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            CameraStreamSourceDataSingleton dataSource = CameraStreamSourceDataSingleton.Instance;

            // Wrap back to the beginning of the frame stream when the next frame
            // would not fit into the remaining part of the stream.
            if (frameStreamOffset + dataSource.FrameBufferSize > dataSource.FrameStreamSize)
            {
                dataSource.FrameStream.Seek(0, SeekOrigin.Begin);
                frameStreamOffset = 0;
            }

            // Ask the camera effect to capture and process the next frame asynchronously.
            Task tsk = dataSource.CameraEffect.GetNewFrameAndApplyEffect().AsTask();

            // Wait until the asynchronous call completes, then report to the MediaElement
            // that a new sample is ready.
            tsk.ContinueWith((task) =>
            {
                dataSource.FrameStream.Position = 0;

                MediaStreamSample msSamp = new MediaStreamSample(
                    videoStreamDescription,
                    dataSource.FrameStream,
                    frameStreamOffset,
                    dataSource.FrameBufferSize,
                    currentTime,
                    emptySampleDict);

                ReportGetSampleCompleted(msSamp);
                currentTime       += frameTime;
                frameStreamOffset += dataSource.FrameBufferSize;
            });
        }
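
        // The MediaStreamSource base class also requires SeekAsync, GetDiagnosticAsync,
        // SwitchMediaStreamAsync and CloseMedia overrides, which are not part of this
        // excerpt. A minimal sketch of how they could be stubbed for a live, non-seekable
        // source (an assumption, not necessarily the sample's actual implementation):
        protected override void SeekAsync(long seekToTime)
        {
            // Nothing to seek in a live stream; simply acknowledge the requested position.
            ReportSeekCompleted(seekToTime);
        }

        protected override void CloseMedia()
        {
            // No additional cleanup is performed in this sketch.
        }

        protected override void GetDiagnosticAsync(MediaStreamSourceDiagnosticKind diagnosticKind)
        {
            throw new NotImplementedException();
        }

        protected override void SwitchMediaStreamAsync(MediaStreamDescription mediaStreamDescription)
        {
            throw new NotImplementedException();
        }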
        public CameraStreamSource(ICameraEffect cameraEffect, Size targetMediaElementSize)
        {
            CameraStreamSourceDataSingleton dataSource = CameraStreamSourceDataSingleton.Instance;

            // Size the shared buffers to the target MediaElement and point the effect's
            // output at the singleton's image buffer, so that processed frames end up in
            // the stream that GetSampleAsync reads from.
            dataSource.Initialize(targetMediaElementSize);
            dataSource.CameraEffect       = cameraEffect;
            cameraEffect.OutputBufferSize = targetMediaElementSize;
            cameraEffect.OutputBuffer     = dataSource.ImageBuffer.AsBuffer();
        }
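
        // Not part of the excerpt: a sketch of how this source might be hooked up to a
        // MediaElement from page code-behind. "myCameraEffect" and "MyMediaElement" are
        // hypothetical names used only for illustration:
        //
        //     CameraStreamSource source = new CameraStreamSource(
        //         myCameraEffect,
        //         new Size(MyMediaElement.ActualWidth, MyMediaElement.ActualHeight));
        //     MyMediaElement.SetSource(source);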