private void InitializeVideo()
        {
            if (!this.videoInitilized)
            {
                this.videoInitilized = true;

                var videoEncodingProperties = VideoEncodingProperties.CreateH264();
                videoEncodingProperties.Height = 720;
                videoEncodingProperties.Width  = 960;

                var mediaStreamSource = new MediaStreamSource(new VideoStreamDescriptor(videoEncodingProperties))
                {
                    // Never turn IsLive on: it tries to skip frames, which breaks the H.264 decoding.
                    // IsLive = true,
                    BufferTime = TimeSpan.FromSeconds(0.0),
                };

                mediaStreamSource.SampleRequested += this.MediaStreamSource_SampleRequested;

                this.VideoElement.SetMediaStreamSource(mediaStreamSource);

                // never turn real time playback on
                // _mediaElement.RealTimePlayback = true;
            }
        }
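The SampleRequested handler wired up above is not part of this snippet. As a rough sketch, assuming the decoder or network thread pushes complete H.264 access units into a hypothetical BlockingCollection named pendingFrames, the request side could look like this:

        // Sketch only: the original handler body is not shown in this example.
        // Assumes: using Windows.Media.Core; using Windows.Storage.Streams;
        //          using System.Collections.Concurrent;
        // pendingFrames is a hypothetical BlockingCollection<(IBuffer Data, TimeSpan Pts)>
        // that the producer fills with encoded H.264 frames.
        private void MediaStreamSource_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
        {
            var request  = args.Request;
            var deferral = request.GetDeferral();   // keep the request open while waiting for data

            // Block until the producer hands over the next encoded frame.
            var frame = this.pendingFrames.Take();

            // Wrap the encoded frame in a sample; the MediaStreamSource passes it to the H.264 decoder.
            request.Sample = MediaStreamSample.CreateFromBuffer(frame.Data, frame.Pts);

            deferral.Complete();
        }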
        /// <summary>
        /// Initialize the media element for playback
        /// </summary>
        /// <param name="streamConfig">Object containing stream configuration details</param>
        void InitializeMediaPlayer(MoonlightStreamConfiguration streamConfig, AvStreamSource streamSource)
        {
            this._streamSource = streamSource;

            // This code is based upon the MS FFmpegInterop project on GitHub
            VideoEncodingProperties videoProps = VideoEncodingProperties.CreateH264();

            videoProps.ProfileId = H264ProfileIds.High;
            videoProps.Width     = (uint)streamConfig.GetWidth();
            videoProps.Height    = (uint)streamConfig.GetHeight();
            videoProps.Bitrate   = (uint)streamConfig.GetBitrate();

            _videoMss                  = new MediaStreamSource(new VideoStreamDescriptor(videoProps));
            _videoMss.BufferTime       = TimeSpan.Zero;
            _videoMss.CanSeek          = false;
            _videoMss.Duration         = TimeSpan.Zero;
            _videoMss.SampleRequested += _videoMss_SampleRequested;

            XAudio2        xaudio         = new XAudio2();
            MasteringVoice masteringVoice = new MasteringVoice(xaudio, 2, 48000);
            WaveFormat     format         = new WaveFormat(48000, 16, 2);

            // Set for low latency playback
            StreamDisplay.RealTimePlayback = true;

            // Render on the full window to avoid extra compositing
            StreamDisplay.IsFullWindow = true;

            // Disable built-in transport controls
            StreamDisplay.AreTransportControlsEnabled = false;

            StreamDisplay.SetMediaStreamSource(_videoMss);
            AvStream.SetSourceVoice(new SourceVoice(xaudio, format));
        }
Example #3
        private void InitializeVideo()
        {
            if (!_videoInitialized)
            {
                _videoInitialized = true;

                var vep = VideoEncodingProperties.CreateH264();
                //vep.Bitrate = 3750000;
                vep.Height = 720;
                vep.Width  = 960;

                var mss = new MediaStreamSource(new VideoStreamDescriptor(vep))
                {
                    // never turn live on
                    //IsLive = true,
                    BufferTime = TimeSpan.FromSeconds(0.0)
                };

                mss.SampleRequested += Mss_SampleRequested;
                mss.Starting        += Mss_Starting;
                mss.Closed          += Mss_Closed;
                //mss.SampleRendered += Mss_SampleRendered;
                //mss.SwitchStreamsRequested += Mss_SwitchStreamsRequested;

                _mediaElement.SetMediaStreamSource(mss);
                //_mediaElement.BufferingProgressChanged += _mediaElement_BufferingProgressChanged;
                // never turn real time playback on
                //_mediaElement.RealTimePlayback = true;

                Debug.WriteLine("media element initialized");
            }
        }
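The Starting and Closed handlers subscribed above are not included in the snippet. A minimal sketch, assuming a live feed that always starts at zero, might be:

        // Sketch only: handler bodies assumed, not taken from the original example.
        private void Mss_Starting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
        {
            // A live H.264 feed has nothing to seek to, so report a fixed start position.
            args.Request.SetActualStartPosition(TimeSpan.Zero);
        }

        private void Mss_Closed(MediaStreamSource sender, MediaStreamSourceClosedEventArgs args)
        {
            Debug.WriteLine("media stream source closed: " + args.Request.Reason);
        }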
        public void InitialiseMp4FileMedia(string path)
        {
            try
            {
                VideoEncodingProperties videoProperties = VideoEncodingProperties.CreateH264();
                _videoDesc = new VideoStreamDescriptor(videoProperties);
                _videoDesc.EncodingProperties.FrameRate.Numerator   = FRAME_RATE;
                _videoDesc.EncodingProperties.FrameRate.Denominator = 1;
                //_videoDesc.EncodingProperties.Bitrate = (uint)(1 * FRAME_RATE * MP4_WIDTH * MP4_HEIGHT * 4);

                _mss = new MediaStreamSource(_videoDesc);
                _mss.BufferTime       = TimeSpan.Zero;
                _mss.Starting        += mp4_Starting;
                _mss.SampleRequested += mp4_SampleRequested;

                _mp4Sampler = new SurfaceGenerator.Mp4Sampler();

                _remoteVideo.MediaFailed += _remoteVideo_MediaFailed;
                _remoteVideo.SetMediaStreamSource(_mss);
                _remoteVideo.Play();
            }
            catch (Exception excp)
            {
                Debug.WriteLine("Exception InitialiseMp4FileMedia. " + excp);
            }
        }
Example #5
        private void OnVideoStarted(object sender, NetStreamVideoStartedEventArgs args)
        {
            if (_IsClosed)
            {
                throw new Exception();
            }
            if (_Connection == null)
            {
                Debug.WriteLine("すでに閉じられたRTMP接続です");
                return;
            }
            if (isAlreadHaveVideo)
            {
                Debug.WriteLine("すでにビデオプロパティは初期化済み");
                return;
            }


            var info = args.Info;
            VideoEncodingProperties prop = null;

            if (info.Format == Mntone.Rtmp.Media.VideoFormat.Avc)
            {
                prop           = VideoEncodingProperties.CreateH264();
                prop.ProfileId = (int)info.ProfileIndication;
            }
            else
            {
                // Non-AVC formats are not handled here; report the existing stream (if any)
                // and return before the H.264-only property setup below dereferences a null prop.
                if (_MediaStreamSource != null)
                {
                    Started?.Invoke(new NicovideoRtmpClientStartedEventArgs(_MediaStreamSource));
                }
                return;
            }

            prop.Bitrate = info.Bitrate;
            prop.Height  = info.Height;
            prop.Width   = info.Width;

            var desc = new VideoStreamDescriptor(prop);

            if (_MediaStreamSource != null)
            {
                _MediaStreamSource.AddStreamDescriptor(desc);
                Started?.Invoke(new NicovideoRtmpClientStartedEventArgs(_MediaStreamSource));
            }
            else
            {
                CreateMediaStream(desc);
                if (args.VideoOnly)
                {
                    Started?.Invoke(new NicovideoRtmpClientStartedEventArgs(_MediaStreamSource));
                }
            }

            isAlreadHaveVideo = true;

            Debug.WriteLine($"{nameof(NicovideoRtmpClient)}: video : id:{ClientId}");
        }
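CreateMediaStream(desc) is not shown in this example. Assuming it only wraps the first descriptor in a low-latency MediaStreamSource, a sketch could be:

        // Hypothetical sketch of CreateMediaStream; the real implementation is not part of this
        // snippet, and the handler names below are assumed.
        private void CreateMediaStream(IMediaStreamDescriptor desc)
        {
            _MediaStreamSource = new MediaStreamSource(desc)
            {
                BufferTime = TimeSpan.Zero   // favor latency over buffering for a live RTMP feed
            };
            _MediaStreamSource.SampleRequested += OnSampleRequested;
            _MediaStreamSource.Closed          += OnClosed;
        }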
Example #6
        public override Windows.Media.Core.IMediaStreamDescriptor CreateMediaStreamDescriptor()
        {
            var properties = VideoEncodingProperties.CreateH264();

            properties.Width  = (uint)TrackEntry.Video.PixelWidth;
            properties.Height = (uint)TrackEntry.Video.PixelHeight;

            var descriptor = new VideoStreamDescriptor(properties);

            return(descriptor);
        }
Example #7
        public static VideoStreamDescriptor CreateVideoDesc(this VideoTag tag)
        {
            VideoEncodingProperties encode = null;

            // Both FLV video codec IDs handled here map to an H.264 descriptor.
            if (tag.Codec == CodecID.H263 || tag.Codec == CodecID.AVC)
            {
                encode = VideoEncodingProperties.CreateH264();
            }
            Debug.WriteLine("VideoDesc ## " + tag.Codec + "  " + tag.Type);
            return(new VideoStreamDescriptor(encode));
        }
Example #8
        IMediaStreamDescriptor GetFlvVideoDescriptor(List<FlvTag> scriptFlvTagList)
        {
            // The second entry of the FLV script tag carries the stream properties (width/height).
            var value = scriptFlvTagList.First().ScriptData.Values[1].Value;

            uint iWidth  = UInt32.Parse((value as ScriptObject)["width"].ToString());
            uint iHeight = UInt32.Parse((value as ScriptObject)["height"].ToString());

            VideoEncodingProperties videoEncodingProperties = VideoEncodingProperties.CreateH264();
            VideoStreamDescriptor   descriptor = new VideoStreamDescriptor(videoEncodingProperties);

            descriptor.EncodingProperties.Width  = iWidth;
            descriptor.EncodingProperties.Height = iHeight;

            return(descriptor);
        }
        public MainPage()
        {
            this.InitializeComponent();

            var videoProperties = VideoEncodingProperties.CreateH264();//.CreateUncompressed(MediaEncodingSubtypes.H264, 720, 480);
            var vd = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.H264, 720, 480);

            videoDesc = new VideoStreamDescriptor(videoProperties);
            videoDesc.EncodingProperties.FrameRate.Numerator   = 29970;
            videoDesc.EncodingProperties.FrameRate.Denominator = 1000;
            videoDesc.EncodingProperties.Width  = 720;
            videoDesc.EncodingProperties.Height = 480;

            mss         = new MediaStreamSource(videoDesc);
            mss.CanSeek = false;
            //mss.BufferTime = new TimeSpan(0, 0, 0, 0, 250);
            mss.Starting        += mss_Starting;
            mss.SampleRequested += Mss_SampleRequested;
            mss.SampleRendered  += Mss_SampleRendered;

            //initialize some buffers
            buff    = new Windows.Storage.Streams.Buffer(1024 * 4);
            bStream = buff.AsStream();

            //this seems needed for start-up
            threadSync = new System.Threading.AutoResetEvent(false);

            //get the frame time in ms
            double ms = 1000.0 * videoDesc.EncodingProperties.FrameRate.Denominator / videoDesc.EncodingProperties.FrameRate.Numerator;

            //get the frame time in ticks
            T0 = System.TimeSpan.FromTicks((long)(ms * System.TimeSpan.TicksPerMillisecond));

            //our demuxer
            extractor = new MpegTS.BufferExtractor();
            running   = true;

            //give the file IO a head start
            Task.Run(() => RunreadFromFile());
        }
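The constructor also subscribes to SampleRendered, whose handler is not shown. A minimal sketch that only reports how far behind the renderer is running (useful when tuning the commented-out BufferTime) might be:

        // Sketch only: the original handler body is not included above.
        private void Mss_SampleRendered(MediaStreamSource sender, MediaStreamSourceSampleRenderedEventArgs args)
        {
            // SampleLag is how late the last sample was rendered relative to its presentation time.
            System.Diagnostics.Debug.WriteLine("sample lag: " + args.SampleLag);
        }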
        /// <summary>
        /// Processes a NAL.
        /// </summary>
        /// <param name="nal">The NAL to be processed.</param>
        /// <returns>Type of NAL.</returns>
        private int ProcessNal(Nal nal)
        {
            // get the NAL type
            int nalType = -1;

            if (nal.Buffer.Length > 4)
            {
                byte[] header = new byte[5];
                nal.Buffer.CopyTo(0, header, 0, 5);
                nalType = (header[0] == 0 && header[1] == 0 && header[2] == 0 && header[3] == 1) ? (header[4] & 0x1F) : -1;
            }
            //Log.Verbose("NAL: type = {0}, len = {1}", nalType, nal.Buffer.Length);

            // process the first SPS record we encounter
            if (nalType == 7 && !isDecoding)
            {
                byte[] sps = new byte[nal.Buffer.Length];
                nal.Buffer.CopyTo(sps);
                SpsParser parser = new SpsParser(sps, (int)nal.Buffer.Length);
                //Log.Verbose("SPS: {0}x{1} @ {2}", parser.width, parser.height, parser.fps);

                VideoEncodingProperties properties = VideoEncodingProperties.CreateH264();
                properties.ProfileId = H264ProfileIds.High;
                properties.Width     = (uint)parser.width;
                properties.Height    = (uint)parser.height;

                streamSource                  = new MediaStreamSource(new VideoStreamDescriptor(properties));
                streamSource.BufferTime       = TimeSpan.Zero;
                streamSource.CanSeek          = false;
                streamSource.Duration         = TimeSpan.Zero;
                streamSource.SampleRequested += HandleSampleRequested;

                var action = Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.High, () =>
                {
                    statusTextBlock.Visibility = Visibility.Collapsed;
                    media.SetMediaStreamSource(streamSource);
                    media.Play();
                    storyboard.Begin();
                });
                isDecoding = true;
            }

            // queue the frame
            if (nalType > 0 && isDecoding)
            {
                if (deferral != null)
                {
                    request.Sample = MediaStreamSample.CreateFromBuffer(nal.Buffer, new TimeSpan(0));
                    lock (availableNals)
                    {
                        //Log.Verbose("availableNals.Enqueue");
                        availableNals.Enqueue(nal);
                    }
                    deferral.Complete();
                    deferral = null;
                    request  = null;
                    //Log.Verbose("Deferral Complete");
                }
                else
                {
                    //Log.Verbose("usedNals.Enqueue");
                    lock (usedNals)
                    {
                        usedNals.Enqueue(nal);
                    }
                }
            }

            // return the NAL type
            return(isDecoding ? nalType : -1);
        }
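ProcessNal completes a parked deferral when one exists and otherwise queues the frame in usedNals, but the matching HandleSampleRequested is not shown. A sketch consistent with that pattern (field names taken from the code above, handler body assumed) could be:

        // Sketch of the request-side half of the deferral pattern used in ProcessNal above.
        // The body is assumed; usedNals, availableNals, deferral, and request are the fields
        // the snippet already relies on.
        private void HandleSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
        {
            lock (usedNals)
            {
                if (usedNals.Count > 0)
                {
                    // A frame is already queued: hand it over immediately and recycle the NAL.
                    Nal nal = usedNals.Dequeue();
                    args.Request.Sample = MediaStreamSample.CreateFromBuffer(nal.Buffer, new TimeSpan(0));
                    lock (availableNals)
                    {
                        availableNals.Enqueue(nal);
                    }
                }
                else
                {
                    // Nothing queued yet: park the request so ProcessNal can complete it later.
                    request  = args.Request;
                    deferral = args.Request.GetDeferral();
                }
            }
        }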
        private async void MultiRecord_Click(object sender, RoutedEventArgs e)
        {
            //<SnippetMultiRecordFindSensorGroups>
            var sensorGroups = await MediaFrameSourceGroup.FindAllAsync();

            var foundGroup = sensorGroups.Select(g => new
            {
                group  = g,
                color1 = g.SourceInfos.Where(info => info.SourceKind == MediaFrameSourceKind.Color && info.DeviceInformation.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Front).FirstOrDefault(),
                color2 = g.SourceInfos.Where(info => info.SourceKind == MediaFrameSourceKind.Color && info.DeviceInformation.EnclosureLocation.Panel == Windows.Devices.Enumeration.Panel.Back).FirstOrDefault()
            }).Where(g => g.color1 != null && g.color2 != null).FirstOrDefault();

            if (foundGroup == null)
            {
                Debug.WriteLine("No groups found.");
                return;
            }
            //</SnippetMultiRecordFindSensorGroups>

            //<SnippetMultiRecordInitMediaCapture>
            var settings = new MediaCaptureInitializationSettings()
            {
                SourceGroup = foundGroup.group
            };

            mediaCapture = new MediaCapture();
            await mediaCapture.InitializeAsync(settings);

            //</SnippetMultiRecordInitMediaCapture>


            //<SnippetMultiRecordMediaEncodingProfile>
            var profile = new MediaEncodingProfile();

            profile.Container         = new ContainerEncodingProperties();
            profile.Container.Subtype = MediaEncodingSubtypes.Mpeg4;

            List<VideoStreamDescriptor> streams = new List<VideoStreamDescriptor>();

            var encodeProps = VideoEncodingProperties.CreateH264();

            encodeProps.Subtype = MediaEncodingSubtypes.H264;
            var stream1Desc = new VideoStreamDescriptor(encodeProps);

            stream1Desc.Label = foundGroup.color1.Id;
            streams.Add(stream1Desc);

            var encodeProps2 = VideoEncodingProperties.CreateH264();

            encodeProps2.Subtype = MediaEncodingSubtypes.H264;
            var stream2Desc = new VideoStreamDescriptor(encodeProps2);

            stream2Desc.Label = foundGroup.color2.Id;
            streams.Add(stream2Desc);

            profile.SetVideoTracks(streams);
            profile.Audio = null;
            //</SnippetMultiRecordMediaEncodingProfile>


            Debug.WriteLine("started");
            //<SnippetMultiRecordToFile>
            var recordFile = await Windows.Storage.KnownFolders.CameraRoll.CreateFileAsync("record.mp4", Windows.Storage.CreationCollisionOption.GenerateUniqueName);

            await mediaCapture.StartRecordToStorageFileAsync(profile, recordFile);

            await Task.Delay(8000);

            await mediaCapture.StopRecordAsync();

            //</SnippetMultiRecordToFile>
            Debug.WriteLine("done");
        }