// Creates a MediaStreamSource that serves raw I420 video frames of the given size and frame rate
        private MediaStreamSource CreateI420VideoStreamSource(uint width, uint height, int framerate)
        {
            if (width == 0)
            {
                throw new ArgumentException("Invalid zero width for video", "width");
            }
            if (height == 0)
            {
                throw new ArgumentException("Invalid zero height for video", "height");
            }

            VideoEncodingProperties videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Iyuv, width, height);
            VideoStreamDescriptor   videoStreamDesc = new VideoStreamDescriptor(videoProperties);

            videoStreamDesc.EncodingProperties.FrameRate.Numerator   = (uint)framerate;
            videoStreamDesc.EncodingProperties.FrameRate.Denominator = 1;
            // Bitrate in bits per second : framerate * frame pixel size * I420=12bpp
            videoStreamDesc.EncodingProperties.Bitrate = ((uint)framerate * width * height * 12);

            MediaStreamSource videoStreamSource = new MediaStreamSource(videoStreamDesc)
            {
                BufferTime = TimeSpan.Zero,
                // Enables optimizations for live sources
                IsLive = true,
                // Cannot seek live WebRTC video stream
                CanSeek = false
            };

            // Raised whenever the media pipeline requests the next video frame
            videoStreamSource.SampleRequested += OnMediaStreamSourceRequested;

            return(videoStreamSource);
        }
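The handler wired to SampleRequested above is not shown in this example. The following is a minimal sketch, assuming a hypothetical _frameQueue of I420 frame buffers and a _timestamp field maintained elsewhere; a real handler would pull frames from the WebRTC track.
        // Minimal sketch of the SampleRequested handler wired above (hypothetical
        // _frameQueue and _timestamp; not part of the original example).
        private void OnMediaStreamSourceRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
        {
            var request  = args.Request;
            var deferral = request.GetDeferral();
            try
            {
                // Wrap the next raw I420 frame in a sample stamped with the running time.
                Windows.Storage.Streams.IBuffer frameBuffer = _frameQueue.Dequeue();
                var sample = MediaStreamSample.CreateFromBuffer(frameBuffer, _timestamp);
                sample.Duration = TimeSpan.FromSeconds(1.0 / 30.0); // assumed 30 fps
                _timestamp     += sample.Duration;
                request.Sample  = sample;
            }
            finally
            {
                deferral.Complete();
            }
        }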
Example #2
        public override Windows.Media.Core.IMediaStreamDescriptor CreateMediaStreamDescriptor()
        {
            var mep = MediaEncodingProfile.CreateWmv(VideoEncodingQuality.Auto).Video;

            mep.Subtype = MediaEncodingSubtypes.Wmv3;
            mep.Width   = (uint)TrackEntry.Video.DisplayWidth;
            mep.Height  = (uint)TrackEntry.Video.PixelHeight;
            var descriptor = new VideoStreamDescriptor(mep);

            descriptor.EncodingProperties.SetFormatUserData(videoPrivateData.GetUserData());

            //mep.SetFormatUserData(videoPrivateData.GetUserData());

            //var fourCC = BitConverterLE.GetBytes(vih.BmiHeader.BiCompression);
            //var fourCCMap = Encoding.UTF8.GetString(fourCC, 0, 4);

            //var properties = VideoEncodingProperties.CreateUncompressed(
            //                        MediaEncodingSubtypes.Wmv3,
            //                        (uint)videoPrivateData.Width,
            //                        (uint)videoPrivateData.Height);

            ////properties.Subtype = MediaEncodingSubtypes.Asf;
            //properties.SetFormatUserData(videoPrivateData.GetUserData());
            //var descriptor = new VideoStreamDescriptor(properties);

            return(descriptor);
        }
Example #4
        void InitializeMediaPlayer()
        {
            int iWidth  = (int)Window.Current.Bounds.Width;
            int iHeight = (int)Window.Current.Bounds.Height;

            // Even frame size with a 16:9 ratio
            iWidth  = Math.Min(iWidth, ((iHeight * 16 / 9) >> 1) * 2);
            iHeight = Math.Min(iHeight, ((iWidth * 9 / 16) >> 1) * 2);

            VideoEncodingProperties videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)iWidth, (uint)iHeight);

            _videoDesc = new VideoStreamDescriptor(videoProperties);
            _videoDesc.EncodingProperties.FrameRate.Numerator   = c_frameRateN;
            _videoDesc.EncodingProperties.FrameRate.Denominator = c_frameRateD;
            // Bitrate hint in bits per second: fps (N/D) * pixels per frame * 32 bpp (BGRA8)
            _videoDesc.EncodingProperties.Bitrate = (uint)((long)c_frameRateN * iWidth * iHeight * 32 / c_frameRateD);

            _mss = new Windows.Media.Core.MediaStreamSource(_videoDesc);
            TimeSpan spanBuffer = new TimeSpan(0, 0, 0, 0, 250);

            _mss.BufferTime       = spanBuffer;
            _mss.Starting        += _mss_Starting;
            _mss.SampleRequested += _mss_SampleRequested;

            _sampleGenerator = new DXSurfaceGenerator.SampleGenerator();

            mediaPlayer.AutoPlay             = false;
            mediaPlayer.CurrentStateChanged += mediaPlayer_CurrentStateChanged;
            mediaPlayer.SetMediaStreamSource(_mss);
            _hasSetMediaSource = true;
        }
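The _mss_Starting handler referenced above typically just reports where playback begins; a minimal sketch for a live/generated source:
        // Minimal sketch of the Starting handler wired above: a generated source
        // always starts playback at time zero.
        void _mss_Starting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
        {
            args.Request.SetActualStartPosition(TimeSpan.Zero);
        }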
Example #5
        private void CreateMediaObjects()
        {
            // Create our encoding profile based on the size of the item
            // TODO: This only really makes sense for monitors, we need
            //       to change this to make sense in all cases.
            int width  = _captureItem.Size.Width;
            int height = _captureItem.Size.Height;

            // Describe our input: uncompressed BGRA8 buffers coming in at the monitor's refresh rate
            // TODO: We pick 60Hz here because it applies to most monitors. However this should be
            //       more robust.
            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)width, (uint)height);

            _videoDescriptor = new VideoStreamDescriptor(videoProperties);
            _videoDescriptor.EncodingProperties.FrameRate.Numerator   = c_frameRateN;
            _videoDescriptor.EncodingProperties.FrameRate.Denominator = c_frameRateD;
            // Bitrate hint in bits per second: fps (N/D) * pixels per frame * 32 bpp (BGRA8)
            _videoDescriptor.EncodingProperties.Bitrate = (uint)((long)c_frameRateN * width * height * 32 / c_frameRateD);

            // Create our MediaStreamSource
            _mediaStreamSource                  = new MediaStreamSource(_videoDescriptor);
            _mediaStreamSource.BufferTime       = TimeSpan.FromSeconds(0);
            _mediaStreamSource.Starting        += OnMediaStreamSourceStarting;
            _mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;

            // Create our device manager
            _mediaGraphicsDevice = MediaGraphicsDevice.CreateFromMediaStreamSource(_mediaStreamSource);
            _mediaGraphicsDevice.RenderingDevice = _device;

            // Create our transcoder
            _transcoder = new MediaTranscoder();
            _transcoder.HardwareAccelerationEnabled = true;
        }
Example #6
        //<SnippetGetMediaEncodingProfile>
        public MediaEncodingProfile CreateProfileForTranscoder(VideoStreamDescriptor videoStream1, VideoStreamDescriptor videoStream2, AudioStreamDescriptor audioStream, TimedMetadataStreamDescriptor timedMetadataStream)
        {
            ContainerEncodingProperties container = new ContainerEncodingProperties()
            {
                Subtype = MediaEncodingSubtypes.Mpeg4
            };

            MediaEncodingProfile profile = new MediaEncodingProfile()
            {
                Container = container
            };


            VideoStreamDescriptor encodingVideoStream1 = videoStream1.Copy();

            encodingVideoStream1.EncodingProperties.Subtype = MediaEncodingSubtypes.H264;
            encodingVideoStream1.Label = videoStream1.Name;

            VideoStreamDescriptor encodingVideoStream2 = videoStream2.Copy();

            encodingVideoStream2.EncodingProperties.Subtype = MediaEncodingSubtypes.H264;
            encodingVideoStream2.Label = videoStream2.Name;

            AudioStreamDescriptor encodingAudioStream = audioStream.Copy();

            encodingAudioStream.EncodingProperties.Subtype = MediaEncodingSubtypes.Ac3;
            encodingAudioStream.Label = audioStream.Name;

            TimedMetadataStreamDescriptor encodingTimedMetadataStream = timedMetadataStream.Copy();

            profile.SetTimedMetadataTracks(new TimedMetadataStreamDescriptor[] { encodingTimedMetadataStream });
            profile.SetVideoTracks(new VideoStreamDescriptor[] { encodingVideoStream1, encodingVideoStream2 });
            profile.SetAudioTracks(new AudioStreamDescriptor[] { encodingAudioStream });
            return(profile);
        }
        //</SnippetGetMediaEncodingProfile>
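A profile assembled this way is typically handed to a MediaTranscoder. The following usage sketch assumes hypothetical source and destination files:
        // Usage sketch (hypothetical files): feed the multi-track profile to a transcoder.
        public async Task TranscodeWithProfileAsync(StorageFile sourceFile, StorageFile destinationFile, MediaEncodingProfile profile)
        {
            var transcoder = new MediaTranscoder();
            PrepareTranscodeResult result = await transcoder.PrepareFileTranscodeAsync(sourceFile, destinationFile, profile);
            if (result.CanTranscode)
            {
                await result.TranscodeAsync();
            }
        }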
        private MediaStreamSource CreateI420VideoStreamSource(
            uint width, uint height, int framerate)
        {
            if (width == 0)
            {
                throw new ArgumentException("Invalid zero width for video.", "width");
            }
            if (height == 0)
            {
                throw new ArgumentException("Invalid zero height for video.", "height");
            }
            // Note: IYUV and I420 have same memory layout (though different FOURCC)
            // https://docs.microsoft.com/en-us/windows/desktop/medfound/video-subtype-guids
            var videoProperties = VideoEncodingProperties.CreateUncompressed(
                MediaEncodingSubtypes.Iyuv, width, height);
            var videoStreamDesc = new VideoStreamDescriptor(videoProperties);

            videoStreamDesc.EncodingProperties.FrameRate.Numerator   = (uint)framerate;
            videoStreamDesc.EncodingProperties.FrameRate.Denominator = 1;
            // Bitrate in bits per second : framerate * frame pixel size * I420=12bpp
            videoStreamDesc.EncodingProperties.Bitrate = ((uint)framerate * width * height * 12);
            var videoStreamSource = new MediaStreamSource(videoStreamDesc);

            videoStreamSource.BufferTime       = TimeSpan.Zero;
            videoStreamSource.SampleRequested += OnMediaStreamSourceRequested;
            videoStreamSource.IsLive           = true;  // Enables optimizations for live sources
            videoStreamSource.CanSeek          = false; // Cannot seek live WebRTC video stream
            return(videoStreamSource);
        }
        public void InitialiseMp4FileMedia(string path)
        {
            try
            {
                VideoEncodingProperties videoProperties = VideoEncodingProperties.CreateH264();
                _videoDesc = new VideoStreamDescriptor(videoProperties);
                _videoDesc.EncodingProperties.FrameRate.Numerator   = FRAME_RATE;
                _videoDesc.EncodingProperties.FrameRate.Denominator = 1;
                //_videoDesc.EncodingProperties.Bitrate = (uint)(1 * FRAME_RATE * MP4_WIDTH * MP4_HEIGHT * 4);

                _mss = new Windows.Media.Core.MediaStreamSource(_videoDesc);
                TimeSpan spanBuffer = new TimeSpan(0);
                _mss.BufferTime       = spanBuffer;
                _mss.Starting        += mp4_Starting;
                _mss.SampleRequested += mp4_SampleRequested;

                _mp4Sampler = new SurfaceGenerator.Mp4Sampler();

                _remoteVideo.MediaFailed += _remoteVideo_MediaFailed;
                _remoteVideo.SetMediaStreamSource(_mss);
                _remoteVideo.Play();
            }
            catch (Exception excp)
            {
                Debug.WriteLine("Exception InitialiseMp4FileMedia. " + excp);
            }
        }
        /// <summary>
        /// Create a new I420-encoded video source for the specified video size and frame rate.
        /// </summary>
        /// <param name="width">The width of the video in pixels.</param>
        /// <param name="height">The height of the video in pixels.</param>
        /// <param name="framerate">The frame rate of the video, in frames per second.</param>
        /// <returns>The newly created video source.</returns>
        private MediaStreamSource CreateVideoStreamSource(uint width, uint height, uint framerate)
        {
            if (width == 0)
            {
                throw new ArgumentException("Invalid zero width for video stream source.", "width");
            }
            if (height == 0)
            {
                throw new ArgumentException("Invalid zero height for video stream source.", "height");
            }

            // Note: IYUV and I420 have same memory layout (though different FOURCC)
            // https://docs.microsoft.com/en-us/windows/desktop/medfound/video-subtype-guids
            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Iyuv, width, height);
            var videoStreamDesc = new VideoStreamDescriptor(videoProperties);

            videoStreamDesc.EncodingProperties.FrameRate.Numerator   = framerate;
            videoStreamDesc.EncodingProperties.FrameRate.Denominator = 1;
            videoStreamDesc.EncodingProperties.Bitrate = (framerate * width * height * 12); // I420 = 12 bits per pixel
            var videoStreamSource = new MediaStreamSource(videoStreamDesc);

            videoStreamSource.BufferTime       = TimeSpan.Zero; // TODO : playback breaks if buffering, need to investigate
            videoStreamSource.Starting        += OnMediaStreamSourceStarting;
            videoStreamSource.Closed          += OnMediaStreamSourceClosed;
            videoStreamSource.Paused          += OnMediaStreamSourcePaused;
            videoStreamSource.SampleRequested += OnMediaStreamSourceRequested;
            videoStreamSource.IsLive           = true;  // Enables optimizations for live sources
            videoStreamSource.CanSeek          = false; // Cannot seek live WebRTC video stream
            return(videoStreamSource);
        }
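A source created by this method still has to be attached to a player. A hedged usage sketch using MediaPlayer follows; the 640x480 size and 30 fps are placeholder values.
        // Usage sketch: attach the live stream source to a MediaPlayer (placeholder size/rate).
        private void AttachVideoStreamSource()
        {
            MediaStreamSource source = CreateVideoStreamSource(640, 480, 30);
            var player = new Windows.Media.Playback.MediaPlayer
            {
                RealTimePlayback = true, // low-latency rendering for a live source
                AutoPlay         = true
            };
            player.Source = MediaSource.CreateFromMediaStreamSource(source);
        }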
Example #10
        async void Init()
        {
            uint iWidth  = 1280;
            uint iHeight = 638;

            VideoEncodingProperties videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Nv12, iWidth, iHeight);
            VideoStreamDescriptor   videoDesc       = new VideoStreamDescriptor(videoProperties);

            videoDesc.EncodingProperties.FrameRate.Numerator   = c_frameRateN;
            videoDesc.EncodingProperties.FrameRate.Denominator = c_frameRateD;
            // Bitrate hint in bits per second: fps (N/D) * pixels per frame * 12 bpp (NV12)
            videoDesc.EncodingProperties.Bitrate = (uint)((long)c_frameRateN * iWidth * iHeight * 12 / c_frameRateD);

            videoDesc.EncodingProperties.Width  = iWidth;
            videoDesc.EncodingProperties.Height = iHeight;

            var      mss        = new MediaStreamSource(videoDesc);
            TimeSpan spanBuffer = new TimeSpan(0, 0, 0, 0, 250);

            mss.BufferTime       = spanBuffer;
            mss.Starting        += mss_Starting;
            mss.SampleRequested += mss_SampleRequested;

            MediaElement.SetMediaStreamSource(mss);
            MediaElement.Play();
        }
Example #11
        private void OnVideoStarted(object sender, NetStreamVideoStartedEventArgs args)
        {
            if (_IsClosed)
            {
                throw new InvalidOperationException("The RTMP client has already been closed.");
            }
            if (_Connection == null)
            {
                Debug.WriteLine("すでに閉じられたRTMP接続です");
                return;
            }
            if (isAlreadHaveVideo)
            {
                Debug.WriteLine("すでにビデオプロパティは初期化済み");
                return;
            }


            var info = args.Info;
            VideoEncodingProperties prop = null;

            if (info.Format == Mntone.Rtmp.Media.VideoFormat.Avc)
            {
                prop           = VideoEncodingProperties.CreateH264();
                prop.ProfileId = (int)info.ProfileIndication;
            }
            else
            {
                if (_MediaStreamSource != null)
                {
                    Started?.Invoke(new NicovideoRtmpClientStartedEventArgs(_MediaStreamSource));
                }
                // prop is still null for non-AVC formats, so bail out before the
                // property assignments below dereference it.
                return;
            }

            prop.Bitrate = info.Bitrate;
            prop.Height  = info.Height;
            prop.Width   = info.Width;

            var desc = new VideoStreamDescriptor(prop);

            if (_MediaStreamSource != null)
            {
                _MediaStreamSource.AddStreamDescriptor(desc);
                Started?.Invoke(new NicovideoRtmpClientStartedEventArgs(_MediaStreamSource));
            }
            else
            {
                CreateMediaStream(desc);
                if (args.VideoOnly)
                {
                    Started?.Invoke(new NicovideoRtmpClientStartedEventArgs(_MediaStreamSource));
                }
            }

            isAlreadHaveVideo = true;

            Debug.WriteLine($"{nameof(NicovideoRtmpClient)}: video : id:{ClientId}");
        }
        public async Task InitStartTranscoder()
        {
            if (parent != null)
            {
                parent.StartWritingOutput("Initialize Transcoder", 1);
            }


            tempFile = await GetTempOutputFile();

            if (parent != null)
            {
                parent.StartWritingOutputExtended("Temporary Output : " + tempFile.Path, 0);
            }

            IRandomAccessStream destStream = await tempFile.OpenAsync(FileAccessMode.ReadWrite);

            int width  = 320;
            int height = 200;

            if (gcitem != null)
            {
                width  = gcitem.Size.Width;
                height = gcitem.Size.Height;
            }

            frameCounter = 0;
            Timestamp    = TimeSpan.Zero;

            VideoEncodingProperties videoSourceProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Rgb32, (uint)width, (uint)height);
            VideoStreamDescriptor   videoSourceDescriptor = new VideoStreamDescriptor(videoSourceProperties);

            MediaStreamSource mediaStreamSource = new MediaStreamSource(videoSourceDescriptor);

            mediaStreamSource.BufferTime       = TimeSpan.FromSeconds(0);
            mediaStreamSource.Starting        += OnMSSStarting;
            mediaStreamSource.SampleRequested += OnMSSSampleRequested;
            mediaStreamSource.SampleRendered  += OnMSSSampleRendered;
            //mediaStreamSource.CanSeek = false;

            MediaTranscoder mediaTranscoder = new MediaTranscoder();

            mediaTranscoder.HardwareAccelerationEnabled = true;

            ////////////////////
            //Start Transcoding
            MediaEncodingProfile   destProfile        = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD720p);
            PrepareTranscodeResult transcodeOperation = await mediaTranscoder.PrepareMediaStreamSourceTranscodeAsync(mediaStreamSource, destStream, destProfile);

            //await transcode.TranscodeAsync();
            var rendering = transcodeOperation.TranscodeAsync();

            rendering.Progress  += progressHandler;
            rendering.Completed += completedHandler;
        }
Example #13
        public override Windows.Media.Core.IMediaStreamDescriptor CreateMediaStreamDescriptor()
        {
            var properties = VideoEncodingProperties.CreateH264();

            properties.Width  = (uint)TrackEntry.Video.PixelWidth;
            properties.Height = (uint)TrackEntry.Video.PixelHeight;

            var descriptor = new VideoStreamDescriptor(properties);

            return(descriptor);
        }
Example #14
        private static VideoStreamDescriptor GetStreamDescriptor(uint width, uint height)
        {
            var videoEncoding = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8,
                                                                           width, height);

            var videoStreamDescriptor = new VideoStreamDescriptor(videoEncoding)
            {
                Name  = "Desktop video stream",
                Label = "Desktop video stream"
            };

            return(videoStreamDescriptor);
        }
Example #15
        private void CreateMediaObjects()
        {
            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, 1920, 1080);

            _videoDescriptor = new VideoStreamDescriptor(videoProperties);

            _mediaStreamSource                  = new MediaStreamSource(_videoDescriptor);
            _mediaStreamSource.BufferTime       = TimeSpan.FromSeconds(0);
            _mediaStreamSource.Starting        += OnMediaStreamSourceStarting;
            _mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;

            _transcoder = new MediaTranscoder();
            _transcoder.HardwareAccelerationEnabled = IsHardwareAcc;
        }
Example #16
        IMediaStreamDescriptor GetFlvVideoDescriptor(List<FlvTag> scriptFlvTagList)
        {
            var key   = scriptFlvTagList.FirstOrDefault().ScriptData.Values[1].Key;
            var value = scriptFlvTagList.FirstOrDefault().ScriptData.Values[1].Value;

            uint iWidth  = UInt32.Parse((value as ScriptObject)["width"].ToString());
            uint iHeight = UInt32.Parse((value as ScriptObject)["height"].ToString());

            VideoEncodingProperties videoEncodingProperties = VideoEncodingProperties.CreateH264();
            VideoStreamDescriptor   descriptor = new VideoStreamDescriptor(videoEncodingProperties);

            descriptor.EncodingProperties.Width  = iWidth;
            descriptor.EncodingProperties.Height = iHeight;

            return(descriptor);
        }
Example #17
        void InitializeMediaPlayer()
        {
            m_hasSetMediaSource = false;

            if (CurrentEffect == null || CurrentVideo == null)
            {
                return;
            }

            // Initialize Transition
            SecondVideo = VideoList.IndexOf(CurrentVideo) == VideoList.Count - 1 ? VideoList[0] : VideoList[VideoList.IndexOf(CurrentVideo) + 1];

            ThirdVideo = VideoList.IndexOf(SecondVideo) == VideoList.Count - 1 ? VideoList[0] : VideoList[VideoList.IndexOf(SecondVideo) + 1];

            advanced_media_source.ResetTimeline();
            advanced_media_source.AddVideo(CurrentVideo);
            advanced_media_source.AddTransitionEffect(CurrentEffect.EffectType, 1);
            advanced_media_source.AddVideo(SecondVideo);
            advanced_media_source.AddTransitionEffect(CurrentEffect.EffectType, 1);
            advanced_media_source.AddVideo(ThirdVideo);

            // Initialize MediaStreamSource
            VideoEncodingProperties videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, c_frameWidth, c_frameHeight);

            videoDesc = new VideoStreamDescriptor(videoProperties);
            videoDesc.EncodingProperties.FrameRate.Numerator   = c_frameRateN;
            videoDesc.EncodingProperties.FrameRate.Denominator = c_frameRateD;
            // Bitrate hint in bits per second: fps (N/D) * pixels per frame * 32 bpp (BGRA8)
            videoDesc.EncodingProperties.Bitrate = (uint)((long)c_frameRateN * c_frameWidth * c_frameHeight * 32 / c_frameRateD);

            AudioEncodingProperties audioProperties = AudioEncodingProperties.CreatePcm(c_sampleRate, c_channelCount, c_bitsPerSample);

            audioDesc = new AudioStreamDescriptor(audioProperties);

            media_stream_source = new Windows.Media.Core.MediaStreamSource(videoDesc, audioDesc);

            TimeSpan spanBuffer = new TimeSpan(0, 0, 0, 0, 0);

            media_stream_source.BufferTime       = spanBuffer;
            media_stream_source.Starting        += MSS_Starting;
            media_stream_source.Closed          += MSS_Closed;
            media_stream_source.SampleRequested += MSS_SampleRequested;

            Video.SetMediaStreamSource(media_stream_source);
            m_hasSetMediaSource = true;
        }
Example #18
        private void CreateMediaObjects()
        {
            // Create our encoding profile based on the size of the item
            int width  = _captureItem.Size.Width;
            int height = _captureItem.Size.Height;

            // Describe our input: uncompressed BGRA8 buffers
            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)width, (uint)height);

            _videoDescriptor = new VideoStreamDescriptor(videoProperties);

            // Create our MediaStreamSource
            _mediaStreamSource                  = new MediaStreamSource(_videoDescriptor);
            _mediaStreamSource.BufferTime       = TimeSpan.FromSeconds(0);
            _mediaStreamSource.Starting        += OnMediaStreamSourceStarting;
            _mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;

            // Create our transcoder
            _transcoder = new MediaTranscoder();
            _transcoder.HardwareAccelerationEnabled = true;
        }
        public MainPage()
        {
            this.InitializeComponent();

            var videoProperties = VideoEncodingProperties.CreateH264();

            videoDesc = new VideoStreamDescriptor(videoProperties);
            videoDesc.EncodingProperties.FrameRate.Numerator   = 29970;
            videoDesc.EncodingProperties.FrameRate.Denominator = 1000;
            videoDesc.EncodingProperties.Width  = 720;
            videoDesc.EncodingProperties.Height = 480;

            mss         = new MediaStreamSource(videoDesc);
            mss.CanSeek = false;
            //mss.BufferTime = new TimeSpan(0, 0, 0, 0, 250);
            mss.Starting        += mss_Starting;
            mss.SampleRequested += Mss_SampleRequested;
            mss.SampleRendered  += Mss_SampleRendered;

            //initialize some buffers
            buff    = new Windows.Storage.Streams.Buffer(1024 * 4);
            bStream = buff.AsStream();

            //this seems needed for start-up
            threadSync = new System.Threading.AutoResetEvent(false);

            //get the frame time in ms
            double ms = 1000.0 * videoDesc.EncodingProperties.FrameRate.Denominator / videoDesc.EncodingProperties.FrameRate.Numerator;

            //get the frame time in ticks
            T0 = System.TimeSpan.FromTicks((long)(ms * System.TimeSpan.TicksPerMillisecond));

            //our demuxer
            extractor = new MpegTS.BufferExtractor();
            running   = true;

            //give the file IO a head start
            Task.Run(() => RunreadFromFile());
        }
        public void InitialiseRovingSquareSampleMedia()
        {
            VideoEncodingProperties videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)WIDTH, (uint)HEIGHT);

            _videoDesc = new VideoStreamDescriptor(videoProperties);
            _videoDesc.EncodingProperties.FrameRate.Numerator   = FRAME_RATE;
            _videoDesc.EncodingProperties.FrameRate.Denominator = 1;
            _videoDesc.EncodingProperties.Bitrate = (uint)(FRAME_RATE * WIDTH * HEIGHT * 32); // fps * pixels * 32 bpp (BGRA8), in bits per second

            _mss = new Windows.Media.Core.MediaStreamSource(_videoDesc);
            TimeSpan spanBuffer = new TimeSpan(0);

            _mss.BufferTime       = spanBuffer;
            _mss.Starting        += mss_Starting;
            _mss.SampleRequested += mss_SampleRequested;

            _sampleMaker = new SurfaceGenerator.SampleMaker();

            _remoteVideo.MediaFailed += _remoteVideo_MediaFailed;
            _remoteVideo.SetMediaStreamSource(_mss);
            _remoteVideo.Play();
        }
Example #21
        /// <summary>
        /// Initialize the media element for playback
        /// </summary>
        /// <param name="streamConfig">Object containing stream configuration details</param>
        void InitializeMediaPlayer(LimelightStreamConfiguration streamConfig, AvStreamSource streamSource)
        {
            this._streamSource = streamSource;

            AudioEncodingProperties audioProperties = AudioEncodingProperties.CreatePcm(48000, 2, 16);

            VideoEncodingProperties videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.H264Es,
                                                                                                 (uint)streamConfig.GetWidth(), (uint)streamConfig.GetHeight());

            videoProperties.ProfileId = H264ProfileIds.High;

            _videoDesc = new VideoStreamDescriptor(videoProperties);
            _audioDesc = new AudioStreamDescriptor(audioProperties);

            _mss                  = new MediaStreamSource(_videoDesc, _audioDesc);
            _mss.BufferTime       = TimeSpan.Zero;
            _mss.CanSeek          = false;
            _mss.Duration         = TimeSpan.Zero;
            _mss.SampleRequested += _mss_SampleRequested;

            // Set for low latency playback
            StreamDisplay.RealTimePlayback = true;

            // Set the audio category to take advantage of hardware audio offload
            StreamDisplay.AudioCategory = AudioCategory.ForegroundOnlyMedia;

            // Render on the full window to avoid extra compositing
            StreamDisplay.IsFullWindow = true;

            // Disable built-in transport controls
            StreamDisplay.AreTransportControlsEnabled = false;

            // Start playing right away
            StreamDisplay.AutoPlay = true;

            StreamDisplay.SetMediaStreamSource(_mss);
        }
Example #22
        private async void CreateFile()
        {
            var videoProps      = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, 1024, 768);
            var videoDescriptor = new VideoStreamDescriptor(videoProps);

            //videoDescriptor.EncodingProperties.FrameRate.Numerator = frn;
            //videoDescriptor.EncodingProperties.FrameRate.Denominator = frd;
            //videoDescriptor.EncodingProperties.Bitrate = (frn / frd) * w * h * 4 * 8;
            var streamSource = new MediaStreamSource(videoDescriptor);


            var tc         = new MediaTranscoder();
            var prof       = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD720p);
            var tempFolder = ApplicationData.Current.TemporaryFolder;
            var file       = await tempFolder.CreateFileAsync("out2.mp4", CreationCollisionOption.ReplaceExisting);

            var outputStream = await file.OpenAsync(FileAccessMode.ReadWrite);

            try
            {
                var result = await tc.PrepareMediaStreamSourceTranscodeAsync(streamSource, outputStream, prof);

                if (result.CanTranscode)
                {
                    //Debug.Print($"encoding");
                    var op = result.TranscodeAsync();
                    //op.Progress +=
                    //    new AsyncActionProgressHandler<double>(TranscodeProgress);
                    //op.Completed +=
                    //    new AsyncActionWithProgressCompletedHandler<double>(TranscodeComplete);
                    //Debug.WriteLine($"encoded");
                }
            }
            catch (Exception)
            {
                // Ignore transcode failures in this sample.
            }
        }
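The commented-out progress wiring above can be restored along these lines; the handler bodies here are illustrative only, not part of the original example.
        // Illustrative bodies for the TranscodeProgress/TranscodeComplete handlers
        // referenced (commented out) above.
        private void TranscodeProgress(IAsyncActionWithProgress<double> asyncInfo, double percent)
        {
            Debug.WriteLine($"Transcoding... {percent:F1}%");
        }

        private void TranscodeComplete(IAsyncActionWithProgress<double> asyncInfo, AsyncStatus status)
        {
            Debug.WriteLine($"Transcode finished: {status}");
        }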
Example #23
        public CustomMediaStreamSource()
        {
            int iWidth  = (int)Window.Current.Bounds.Width;
            int iHeight = (int)Window.Current.Bounds.Height;

            // Even frame size with a 16:9 ratio
            iWidth  = Math.Min(iWidth, ((iHeight * 16 / 9) >> 1) * 2);
            iHeight = Math.Min(iHeight, ((iWidth * 9 / 16) >> 1) * 2);

            VideoEncodingProperties videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)iWidth, (uint)iHeight);

            _videoDesc = new VideoStreamDescriptor(videoProperties);
            _videoDesc.EncodingProperties.FrameRate.Numerator   = c_frameRateN;
            _videoDesc.EncodingProperties.FrameRate.Denominator = c_frameRateD;
            // Bitrate hint in bits per second: fps (N/D) * pixels per frame * 32 bpp (BGRA8)
            _videoDesc.EncodingProperties.Bitrate = (uint)((long)c_frameRateN * iWidth * iHeight * 32 / c_frameRateD);

            MediaStreamSource source = new MediaStreamSource(_videoDesc);

            TimeSpan spanBuffer = new TimeSpan(0, 0, 0, 0, 250);

            source.BufferTime       = spanBuffer;
            source.Starting        += Starting;
            source.SampleRequested += SampleRequested;
        }
Example #24
        private async void MultiRecord_Click(object sender, RoutedEventArgs e)
        {
            //<SnippetMultiRecordFindSensorGroups>
            var sensorGroups = await MediaFrameSourceGroup.FindAllAsync();

            var foundGroup = sensorGroups.Select(g => new
            {
                group  = g,
                color1 = g.SourceInfos.Where(info => info.SourceKind == MediaFrameSourceKind.Color && info.DeviceInformation.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front).FirstOrDefault(),
                color2 = g.SourceInfos.Where(info => info.SourceKind == MediaFrameSourceKind.Color && info.DeviceInformation.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Back).FirstOrDefault()
            }).Where(g => g.color1 != null && g.color2 != null).FirstOrDefault();

            if (foundGroup == null)
            {
                Debug.WriteLine("No groups found.");
                return;
            }
            //</SnippetMultiRecordFindSensorGroups>

            //<SnippetMultiRecordInitMediaCapture>
            var settings = new MediaCaptureInitializationSettings()
            {
                SourceGroup = foundGroup.group
            };

            mediaCapture = new MediaCapture();
            await mediaCapture.InitializeAsync(settings);

            //</SnippetMultiRecordInitMediaCapture>


            //<SnippetMultiRecordMediaEncodingProfile>
            var profile = new MediaEncodingProfile();

            profile.Container         = new ContainerEncodingProperties();
            profile.Container.Subtype = MediaEncodingSubtypes.Mpeg4;

            List<VideoStreamDescriptor> streams = new List<VideoStreamDescriptor>();

            var encodeProps = VideoEncodingProperties.CreateH264();

            encodeProps.Subtype = MediaEncodingSubtypes.H264;
            var stream1Desc = new VideoStreamDescriptor(encodeProps);

            stream1Desc.Label = foundGroup.color1.Id;
            streams.Add(stream1Desc);

            var encodeProps2 = VideoEncodingProperties.CreateH264();

            encodeProps2.Subtype = MediaEncodingSubtypes.H264;
            var stream2Desc = new VideoStreamDescriptor(encodeProps2);

            stream2Desc.Label = foundGroup.color2.Id;
            streams.Add(stream2Desc);

            profile.SetVideoTracks(streams);
            profile.Audio = null;
            //</SnippetMultiRecordMediaEncodingProfile>


            Debug.WriteLine("started");
            //<SnippetMultiRecordToFile>
            var recordFile = await Windows.Storage.KnownFolders.CameraRoll.CreateFileAsync("record.mp4", Windows.Storage.CreationCollisionOption.GenerateUniqueName);

            await mediaCapture.StartRecordToStorageFileAsync(profile, recordFile);

            await Task.Delay(8000);

            await mediaCapture.StopRecordAsync();

            //</SnippetMultiRecordToFile>
            Debug.WriteLine("done");
        }
        private async Task SetupEncoding()
        {
            if (!GraphicsCaptureSession.IsSupported())
            {
                // Show message to user that screen capture is unsupported
                return;
            }

            // Create the D3D device and SharpDX device
            if (_device == null)
            {
                _device = Direct3D11Helpers.CreateD3DDevice();
            }
            if (_sharpDxD3dDevice == null)
            {
                _sharpDxD3dDevice = Direct3D11Helpers.CreateSharpDXDevice(_device);
            }



            try
            {
                // Let the user pick an item to capture
                var picker = new GraphicsCapturePicker();
                _captureItem = await picker.PickSingleItemAsync();

                if (_captureItem == null)
                {
                    return;
                }

                // Initialize a blank texture and render target view for copying frames, using the same size as the capture item
                _composeTexture          = Direct3D11Helpers.InitializeComposeTexture(_sharpDxD3dDevice, _captureItem.Size);
                _composeRenderTargetView = new SharpDX.Direct3D11.RenderTargetView(_sharpDxD3dDevice, _composeTexture);

                // This example encodes video using the item's actual size.
                var width  = (uint)_captureItem.Size.Width;
                var height = (uint)_captureItem.Size.Height;

                // Make sure the dimensions are even; required by some encoders.
                width  = (width % 2 == 0) ? width : width + 1;
                height = (height % 2 == 0) ? height : height + 1;


                var  temp      = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD1080p);
                var  bitrate   = temp.Video.Bitrate;
                uint framerate = 30;

                _encodingProfile = new MediaEncodingProfile();
                _encodingProfile.Container.Subtype                  = "MPEG4";
                _encodingProfile.Video.Subtype                      = "H264";
                _encodingProfile.Video.Width                        = width;
                _encodingProfile.Video.Height                       = height;
                _encodingProfile.Video.Bitrate                      = bitrate;
                _encodingProfile.Video.FrameRate.Numerator          = framerate;
                _encodingProfile.Video.FrameRate.Denominator        = 1;
                _encodingProfile.Video.PixelAspectRatio.Numerator   = 1;
                _encodingProfile.Video.PixelAspectRatio.Denominator = 1;

                var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, width, height);
                _videoDescriptor = new VideoStreamDescriptor(videoProperties);

                // Create our MediaStreamSource
                _mediaStreamSource                  = new MediaStreamSource(_videoDescriptor);
                _mediaStreamSource.BufferTime       = TimeSpan.FromSeconds(0);
                _mediaStreamSource.Starting        += OnMediaStreamSourceStarting;
                _mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;

                // Create our transcoder
                _transcoder = new MediaTranscoder();
                _transcoder.HardwareAccelerationEnabled = true;

                using (var stream = new InMemoryRandomAccessStream())
                    await EncodeAsync(stream);
            }
            catch (Exception)
            {
                // Swallow capture/encoding setup errors in this sample.
                return;
            }
        }
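The SampleRequested handler that feeds this capture pipeline is not shown above. A hedged sketch follows, assuming a hypothetical _frameGenerator that blocks until the next captured Direct3D surface is available:
        // Sketch of the sample-requested handler used above; _frameGenerator and its
        // WaitForNewFrame/Surface/SystemRelativeTime members are hypothetical stand-ins
        // for whatever produces captured Direct3D surfaces.
        private void OnMediaStreamSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
        {
            using (var frame = _frameGenerator.WaitForNewFrame())
            {
                if (frame == null)
                {
                    // No more frames: a null sample signals end of stream.
                    args.Request.Sample = null;
                    return;
                }
                args.Request.Sample = MediaStreamSample.CreateFromDirect3D11Surface(frame.Surface, frame.SystemRelativeTime);
            }
        }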