Example 1
        public RawVideoTrackInfo(IVideoTrack video)
        {
            Hints = video.ParentStream.Hints;
            string pcd = video.Codec.PrivateCodecData;

            this.CodecPrivateData = pcd;
            this.HandlerType      = "Video";
            this.PayloadType      = video.PayloadType;
            this.Height           = video.FrameSize.Height;
            this.Width            = video.FrameSize.Width;
            this.SourceTrackID    = video.TrackID; // set source track ID here
            if (video.TrackFormat != null)
            {
                this.DurationIn100NanoSecs = video.TrackFormat.DurationIn100NanoSecs;
                this.TimeScale             = video.TrackFormat.TimeScale;
            }
            else
            {
                this.DurationIn100NanoSecs = video.TrackDurationIn100NanoSecs;
                this.TimeScale             = (uint)TimeSpan.TicksPerSecond;
            }

            if (video.IsAnamorphic)
            {
                AspectRatioX = 4;
                AspectRatioY = 3;
            }
            else
            {
                AspectRatioY = AspectRatioX = 1;
            }
        }
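A minimal usage sketch (not from the original project), assuming an IVideoTrack instance named track is available: the fallback TimeScale above is TimeSpan.TicksPerSecond (10,000,000 ticks of 100 ns each), so DurationIn100NanoSecs converts to seconds by dividing by that constant.

        // Hypothetical call site; "track" stands in for any available IVideoTrack.
        var info = new RawVideoTrackInfo(track);
        double seconds = info.DurationIn100NanoSecs / (double)TimeSpan.TicksPerSecond;
        Console.WriteLine($"{info.Width}x{info.Height}, {seconds:F2} s");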
Example 2
        // The video track is the track with which all other tracks should sync.
        // SyncPoints are instants in time at which:
        // 1. the current video slice is an IFrame; or
        // 2. the current video slice is the beginning of a new block (in case the video track is all IFrames or no IFrames).
        // NOTE: This will only work if GenericMediaStream.CachingEnabled is true.
        public IEnumerable<ulong> EnumerateSyncPoints(IVideoTrack vTrack)
        {
            IMediaTrackSliceEnumerator sliceEnum = (IMediaTrackSliceEnumerator)vTrack.GetEnumerator();
            ulong lastTimeStamp = 0;

            while (sliceEnum.MoveNext())
            {
                Slice slice = sliceEnum.Current;
                if (slice == null)
                {
                    break;
                }
                if (slice.SliceType == SliceType.IFrame)
                {
                    var timeStamp = sliceEnum.CurrentTimeStampNew.Value;         // guaranteed to have a value, since this is an IFrame
                    //Common.Logger.Instance.Info("[GenericRecodeWRC::SyncPoints] timeStamp [" + timeStamp + "]");
                    yield return timeStamp;
                }

                if (sliceEnum.CurrentTimeStampNew.HasValue && sliceEnum.CurrentTimeStampNew.Value > 0)
                {
                    lastTimeStamp = sliceEnum.CurrentTimeStampNew.Value;
                }
            }

            //Common.Logger.Instance.Info("[GenericRecodeWRC::SyncPoints] timeStamp [" + lastTimeStamp + "]");
            yield return lastTimeStamp;             // the last slice may not be an IFrame, but its timestamp is a sync point nevertheless
        }
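A minimal usage sketch (not from the original project), assuming stream is a GenericMediaStream with CachingEnabled set and at least one video track; the indexer form stream[CodecTypes.Video, 0] mirrors the lookup used in the Recode example further below.

        // Hypothetical driver: print each sync point in seconds (timestamps are in 100-ns units).
        IVideoTrack video = (IVideoTrack)stream[CodecTypes.Video, 0];
        foreach (ulong syncPoint in EnumerateSyncPoints(video))
        {
            Console.WriteLine("sync point at {0:F3} s", syncPoint / 1e7);
        }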
Example 3
        private void CreatePeerConnectionInternal()
        {
            if (_factory == null || _isError)
            {
                _logger.Error(TAG, "Peerconnection factory is not created");
                return;
            }

            _logger.Debug(TAG, "Create peer connection.");
            _queuedRemoteCandidates = new List<IceCandidate>();

            var rtcConfig = new RTCConfiguration();

            // TCP candidates are only useful when connecting to a server that supports
            // ICE-TCP.
            rtcConfig.IceServers               = _parameters.IceServers;
            rtcConfig.TcpCandidatePolicy       = TcpCandidatePolicy.Disabled;
            rtcConfig.BundlePolicy             = BundlePolicy.MaxBundle;
            rtcConfig.RtcpMuxPolicy            = RtcpMuxPolicy.Require;
            rtcConfig.ContinualGatheringPolicy = ContinualGatheringPolicy.Continually;
            // Use ECDSA encryption.
            rtcConfig.KeyType = EncryptionKeyType.Ecdsa;
            // Enable DTLS for normal calls and disable for loopback calls.
            rtcConfig.EnableDtlsSrtp = !_parameters.Loopback;
            rtcConfig.SdpSemantics   = SdpSemantics.UnifiedPlan;

            _peerConnection = _factory.PeerConnectionWithConfiguration(rtcConfig, _sdpMediaConstraints, _peerConnectionListener);

            var mediaStreamLabels = new[] { "ARDAMS" };

            if (IsVideoCallEnabled)
            {
                _peerConnection.AddTrack(CreateVideoTrack(), mediaStreamLabels);

                // We can add the renderers right away because we don't need to wait for an
                // answer to get the remote track.
                _remoteVideoTrack           = GetRemoteVideoTrack();
                _remoteVideoTrack.IsEnabled = _renderVideo;
                _remoteVideoTrack.AddRenderer(_remoteRenderer);
            }

            _peerConnection.AddTrack(CreateAudioTrack(), mediaStreamLabels);

            if (IsVideoCallEnabled)
            {
                FindVideoSender();
            }

            if (_parameters.AecDump)
            {
                var result = _factory.StartAecDumpWithFilePath(_parameters.AecDumpFile, -1);
                if (!result)
                {
                    _logger.Error(TAG, "Can not open aecdump file");
                }
            }

            _logger.Debug(TAG, "Peer connection created.");
        }
Example 4
        public async Task Info_EncodedVideoProperties_True()
        {
            IMediaInfo info = await MediaInfo.Get(Resources.MKV_Encoded_MultipleTracks);

            IVideoTrack videoTrack = info.VideoTracks.FirstOrDefault();

            Assert.NotNull(videoTrack);
        }
Example 5
        private IVideoTrack CreateVideoTrack()
        {
            _videoSource   = _factory.CreateVideoSource(_parameters.IsScreencast);
            _videoCapturer = _peerConnectionEvents.CreateVideoCapturer(_factory, _videoSource);

            _videoCapturer.StartCapture(_videoWidth, _videoHeight, _fps);
            _localVideoTrack = _factory.CreateVideoTrack(VideoTrackId, _videoSource);

            _localVideoTrack.IsEnabled = _renderVideo;
            _localVideoTrack.AddRenderer(_localRenderer);
            return _localVideoTrack;
        }
Example 6
        private IVideoTrack[] GetVideoTracks()
        {
            var items = _mediaStream.VideoTracks;
            var arr   = new IVideoTrack[items.Count];

            for (int i = 0; i < items.Count; i++)
            {
                arr[i] = new VideoTrackNative((VideoTrack)items[i]);
            }

            return arr;
        }
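A minimal usage sketch (not from the original project): since each element implements IVideoTrack, the returned array can be used to toggle rendering in bulk, via the IsEnabled property seen in the other examples.

        // Hypothetical usage: disable rendering for every video track in the stream.
        foreach (IVideoTrack track in GetVideoTracks())
        {
            track.IsEnabled = false;
        }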
Example 7
        protected void CreateTracks<T1, T0, T2>() where T1 : GenericAudioTrack, new() where T0 : GenericVideoTrack, new() where T2 : MP4TrackFormat, new()
        {
            // flag used to raise the MediaTrackLogicalBreak event only once
            bool breakCalled       = false;
            GenericMediaTrack trak = null;
            T2 trackFormat;

            foreach (TrackBox trackb in mmb.TrackBoxes)
            {
                trackFormat          = new T2();
                trackFormat.TrackBox = trackb;
                switch (trackb.PayloadType)
                {
                case "samr": // 3gpp audio
                case "wma ":
                case "mp4a":
                    trak          = new T1();
                    Hints.object1 = trackb.EdtsBox;
                    break;

                case "mp4v": // 3gpp video
                case "vc-1":
                case "avc1":
                    trak = new T0();
                    IVideoTrack    vt        = trak as IVideoTrack;
                    MP4TrackFormat mp4format = trackFormat as MP4TrackFormat;
                    vt.IsAnamorphic = mp4format.IsAnamorphic;
                    Hints.object2   = trackb.EdtsBox;
                    break;

                case "mp4s":
                    break; // ignore - in the case of vc-1 and wma, these are processed in ISMVStreamReader

                default:
                    throw new Exception(string.Format("Unknown track type: {0}", trackb.PayloadType));
                }
                if (trak != null)
                {
                    trak.TrackFormat  = trackFormat;
                    trak.ParentStream = this;
                    AddTrack(trak);

                    if (!breakCalled)
                    {
                        TriggerLogicalBreak(trak);
                        breakCalled = true;
                    }
                }
            }
        }
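A minimal call-site sketch (not from the original project): the constraints name GenericAudioTrack, GenericVideoTrack, and MP4TrackFormat as base types, so the simplest instantiation, presumably from a derived stream class after the MP4 box tree (mmb.TrackBoxes) has been populated, passes those types directly.

        // Hypothetical instantiation with the default base types, which satisfy the new() constraints.
        CreateTracks<GenericAudioTrack, GenericVideoTrack, MP4TrackFormat>();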
Example 8
        // The video track is the track with which all other tracks should sync.
        // SyncPoints are instants in time at which:
        // 1. the current video slice is an IFrame; or
        // 2. the current video slice is the beginning of a new block (in case the video track is all IFrames or no IFrames).
        // NOTE: This will only work if GenericMediaStream.CachingEnabled is true.
        public IEnumerable<ulong> EnumerateSyncPoints(IVideoTrack vTrack)
        {
            IMediaTrackSliceEnumerator sliceEnum = (IMediaTrackSliceEnumerator)vTrack.GetEnumerator();
            ulong lastTimeStamp = 0;

            while (sliceEnum.MoveNext())
            {
                Slice slice = sliceEnum.Current;
                if (slice == null)
                {
                    break;
                }
                if (slice.SliceType == SliceType.IFrame)
                {
                    var timeStamp = sliceEnum.CurrentTimeStampNew.Value; // guaranteed to have a value, since this is an IFrame
                    //Common.Logger.Instance.Info("[GenericRecodeWRC::SyncPoints] timeStamp [" + timeStamp + "]");
                    yield return timeStamp;
                }

                if (sliceEnum.CurrentTimeStampNew.HasValue && sliceEnum.CurrentTimeStampNew.Value > 0)
                {
                    lastTimeStamp = sliceEnum.CurrentTimeStampNew.Value;
                }
            }

            //Common.Logger.Instance.Info("[GenericRecodeWRC::SyncPoints] timeStamp [" + lastTimeStamp + "]");
            yield return lastTimeStamp; // the last slice may not be an IFrame, but its timestamp is a sync point nevertheless
        }
Example 9
        public async Task Info_VideoProperties_True()
        {
            IMediaInfo info = await MediaInfo.Get(Resources.MKV_Encoded_MultipleTracks);

            IVideoTrack videoTrack = info.VideoTracks.FirstOrDefault();

            Assert.NotNull(videoTrack);
            Assert.Equal(8, videoTrack.BitDepth);
            Assert.Equal(28656344, videoTrack.Bitrate);
            Assert.Equal("4:2:0", videoTrack.ChromaSubSampling);
            Assert.Equal("yuv", videoTrack.ColorSpace.ToLower());
            Assert.Equal(1.778, videoTrack.DisplayAspectRatio);
            Assert.Equal(60.022, videoTrack.Duration);
            Assert.Equal("avc", videoTrack.Format.ToLower());
            Assert.Equal("4", videoTrack.FormatLevel);
            Assert.Equal("high", videoTrack.FormatProfile.ToLower());
            Assert.Equal(3276.232, videoTrack.FrameRate);
            Assert.Equal("vfr", videoTrack.FrameRateMode.ToLower());
            Assert.Equal("cabac=1 / ref=3 / deblock=1:0:0 / analyse=0x3:0x113 / me=hex / subme=7 / psy=1 / psy_rd=1.00:0.00 / mixed_ref=1 / me_range=16 / chroma_me=1 / trellis=1 / 8x8dct=1 / cqm=0 / deadzone=21,11 / fast_pskip=1 / chroma_qp_offset=-2 / threads=18 / lookahead_threads=3 / sliced_threads=0 / nr=0 / decimate=1 / interlaced=0 / bluray_compat=0 / constrained_intra=0 / bframes=3 / b_pyramid=2 / b_adapt=1 / b_bias=0 / direct=1 / weightb=1 / open_gop=0 / weightp=2 / keyint=250 / keyint_min=23 / scenecut=40 / intra_refresh=0 / rc_lookahead=40 / rc=crf / mbtree=1 / crf=23.0 / qcomp=0.60 / qpmin=0 / qpmax=69 / qpstep=4 / ip_ratio=1.40 / aq=1:1.00", videoTrack.EncodedLibrarySetteings);
            Assert.Equal(1920, videoTrack.Width);
            Assert.Equal(1080, videoTrack.Height);
            Assert.Equal(1, videoTrack.PixelAspectRatio);
            Assert.Equal(0, videoTrack.StreamOrder);
        }
Example 10
        private void SwitchMediaPlayerSource(IAudioTrack audioTrack, IVideoTrack videoTrack)
        {
            //lock (_mediaPlaybackLock)
            //{
            //    if (videoTrack != _playbackVideoTrack)
            //    {
            //        // Notify media player that source changed
            //        if (_isVideoPlaying)
            //        {
            //            _videoStreamSource.NotifyError(MediaStreamSourceErrorStatus.ConnectionToServerLost);
            //            _videoPlayer.Pause();
            //            _isVideoPlaying = false;
            //        }

            //        // Detach old source
            //        if (_playbackVideoTrack != null)
            //        {
            //            _playbackVideoTrack.I420AVideoFrameReady -= VideoTrack_I420AFrameReady;
            //            _videoWidth = 0;
            //            _videoHeight = 0;
            //            ClearVideoStats();
            //        }

            //        _playbackVideoTrack = videoTrack;

            //        // Attach new source
            //        if (_playbackVideoTrack != null)
            //        {
            //            _playbackVideoTrack.I420AVideoFrameReady += VideoTrack_I420AFrameReady;
            //        }

            //        LogMessage($"Changed video playback track.");
            //    }

            //    if (audioTrack != _playbackAudioTrack)
            //    {
            //        // Detach old source
            //        if (_playbackAudioTrack != null)
            //        {
            //            _playbackAudioTrack.AudioFrameReady -= AudioTrack_FrameReady;
            //            _audioSampleRate = 0;
            //            _audioChannelCount = 0;
            //            ClearRemoteAudioStats();
            //        }

            //        _playbackAudioTrack = audioTrack;

            //        // Attach new source
            //        if (_playbackAudioTrack != null)
            //        {
            //            _playbackAudioTrack.AudioFrameReady += AudioTrack_FrameReady;
            //        }

            //        LogMessage($"Changed audio playback track.");
            //    }
            //}

            //// Update local media overlay panel
            //bool hasLocalMedia = ((_playbackVideoTrack != null) || (_playbackAudioTrack != null));
            //localMediaPanel.Visibility = (hasLocalMedia ? Visibility.Visible : Visibility.Collapsed);
            //muteLocalAudio.IsEnabled = (_playbackAudioTrack != null);
            //muteLocalVideo.IsEnabled = (_playbackVideoTrack != null);
        }
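The body above is entirely commented out in the source. Its core attach/detach pattern for the video side, reduced to a minimal sketch that reuses the member and event names appearing in the comments (an illustration, not the original implementation):

        // Hypothetical reduced swap for the video side only.
        private void SwapVideoPlaybackTrack(IVideoTrack newTrack)
        {
            if (_playbackVideoTrack != null)
            {
                // Detach the old source.
                _playbackVideoTrack.I420AVideoFrameReady -= VideoTrack_I420AFrameReady;
            }

            _playbackVideoTrack = newTrack;

            if (_playbackVideoTrack != null)
            {
                // Attach the new source.
                _playbackVideoTrack.I420AVideoFrameReady += VideoTrack_I420AFrameReady;
            }
        }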
Example 11
 public void RemoveTrack(IVideoTrack videoTrack)
 {
     _mediaStream.RemoveTrack(videoTrack.ToNative<VideoTrack>());
 }
Example 12
 public void AddTrack(IVideoTrack videoTrack)
 {
     _mediaStream.AddTrack(videoTrack.ToNative<VideoTrack>());
 }
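A minimal round-trip sketch (not from the original project), assuming mediaStream exposes the AddTrack/RemoveTrack pair above and track is an IVideoTrack obtained elsewhere (for example, from GetVideoTracks in Example 6):

        // Hypothetical lifecycle: attach a track for the duration of a call, detach it afterwards.
        mediaStream.AddTrack(track);
        // ... call in progress ...
        mediaStream.RemoveTrack(track);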
Example 13
        public override void Recode(ulong startTime100NanoSec, ulong endTime100NanoSec, ushort videoTrackID)
        {
            var vidTracks     = DestStream.MediaTracks.Where(t => t is GenericVideoTrack);
            int vidTrackCount = (vidTracks == null) ? 0 : vidTracks.Count();

            if (endTime100NanoSec == 0)
            {      // special case when endTime == 0
                // using duration here is ok as it is about the total time of the source
                endTime100NanoSec = SourceStream.DurationIn100NanoSecs;
            }

            if (endTime100NanoSec - startTime100NanoSec < MaxIterateDuration)
            {
                throw new Exception("Desired time interval for output stream too short");
            }

            int outTracks = DestStream.MediaTracks.Count;

            RecodeSet[] trackEnumerators = new RecodeSet[outTracks];
            int         k = 0;
            int         n = 0;

            foreach (IMediaTrack track in SourceStream.MediaTracks)
            {
                if (((track.Codec.CodecType == CodecTypes.Audio) && (audioOrVideoOrBoth != TracksIncluded.Video)) ||
                    ((track.Codec.CodecType == CodecTypes.Video) && ((videoTrackID == 0) || (track.TrackID == videoTrackID)) &&
                     (audioOrVideoOrBoth != TracksIncluded.Audio)))
                {
                    RecodeSet recodeSet = new RecodeSet();
                    recodeSet.sourceTrack = (IMediaTrackSliceEnumerator)track.GetEnumerator();
                    recodeSet.sourceTrack.Reset();
                    recodeSet.pendingChunkSlices = new List <Slice>();

                    IMediaTrack destination = DestStream[recodeSet.sourceTrack.CodecType, 0];

                    if ((track.Codec.CodecType != CodecTypes.Video) || (vidTrackCount == 1))
                    {
                        destination = DestStream[recodeSet.sourceTrack.CodecType, 0];
                    }
                    else if (vidTrackCount > 1)
                    {
                        destination = vidTracks.ElementAt(n);
                        n++;
                    }

                    if (destination == null)
                    {
                        throw new Exception(string.Format("No {0} destination track. Try vo or so option.", recodeSet.sourceTrack.CodecType));
                    }

                    // normally the destination TrackDurationIn100NanoSecs is set to source duration;
                    // here we reset its value back to zero because it may be smaller than source duration
                    // (for example, if the start time is more than zero).
                    destination.TrackDurationIn100NanoSecs = 0UL;
                    recodeSet.destination      = destination;
                    recodeSet.destinationTrack = (IMediaTrackSliceEnumerator)destination.GetEnumerator();
                    recodeSet.destinationTrack.Reset();

                    trackEnumerators[k++] = recodeSet;
                }
            }

            RaiseRecodeProgressUpdate(0.01f, true, null);      // Indicate we have completed a portion of the work.

            // Need to call MoveNext() first for all source track enumerators
            foreach (RecodeSet recodeSet in trackEnumerators)
            {
                while (recodeSet.sourceTrack.MoveNext())
                {
                    if (recodeSet.sourceTrack.Current != null)
                    {
                        break;
                    }
                }
            }

            IVideoTrack videoTrack           = (IVideoTrack)SourceStream[CodecTypes.Video, 0];
            ulong       prevSyncTime         = 0UL;
            bool        validSyncPointsFound = false;

            foreach (ulong syncTime in EnumerateSyncPoints(videoTrack))
            {     // Cycle through all of the sync points in the video...
                Logger.Instance.Info("[GenericRecodeWRC::Recode] [merge] iterating at syncTime [" + syncTime + "].");

                if ((syncTime > endTime100NanoSec) && (prevSyncTime > endTime100NanoSec))
                {
                    break;              // If we are past the requested end time, stop doing work
                }
                // Each source and destination track has its own, independent counter (enumerator).
                // The slices are synced with respect to time, and NOT with respect to index.
                // The outer for loop below iterates through each track being recoded;
                // the inner while loop iterates through each slice skipped.
                // .timeStart == time relative to source track at which recoding starts (should be first slice NOT skipped);
                // .indexStart == index of first slice NOT skipped.
                if (startTime100NanoSec > prevSyncTime)
                {          // Skip a portion of slices.
                    for (int i = 0; i < trackEnumerators.Length; i++)
                    {
                        if (trackEnumerators[i].sourceTrack.CurrentTimeStampNew.HasValue == false)
                        {
                            continue;                      // b-frame and we can't use it to compare...
                        }
                        while (trackEnumerators[i].sourceTrack.CurrentTimeStampNew.Value < syncTime)
                        {
                            Slice slice = trackEnumerators[i].sourceTrack.Current;
                            if (slice == null)
                            {
                                break;
                            }
                            if (slice.TimeStampNew.HasValue == false)
                            {
                                continue;                                                            // it's a b-frame, thus no time is available
                            }
                            trackEnumerators[i].timeStart  = slice.TimeStampNew.Value;               // at this point it's guaranteed to have a value...
                            trackEnumerators[i].indexStart = slice.index;

                            // Find the next valid CurrentTimeStampNew value.
                            bool tmpEnd = false;
                            while (true)
                            {
                                if (!trackEnumerators[i].sourceTrack.MoveNext())
                                {
                                    tmpEnd = true;
                                    break;                              // Ended.
                                }

                                if (trackEnumerators[i].sourceTrack.CurrentTimeStampNew.HasValue == true)
                                {
                                    break;                              // Found it.
                                }
                            }

                            if (tmpEnd == true)
                            {
                                break;
                            }
                        }
                    }

                    prevSyncTime = syncTime;
                    continue;
                }

                // If we never hit this condition, nothing was actually selected for processing, and that causes an exception down the road.
                validSyncPointsFound = true;

                // Each source and destination track has its own, independent counter (enumerator).
                // The slices are synced with respect to time, and NOT with respect to index.
                // The outer foreach loop below iterates through each track being recoded;
                // the inner while loop iterates through each slice.
                // recodeSet.sourceTrack ==> source track enumerator
                // recodeSet.destinationTrack ==> destination track enumerator
                ulong timeStamp100NanoSec = ulong.MaxValue;

                foreach (RecodeSet recodeSet in trackEnumerators)
                {
                    recodeSet.pendingChunkSlices.Clear();

                    // Start writing the actual data.
                    while (recodeSet.sourceTrack.CurrentTimeStampNew.HasValue == false ||
                           recodeSet.sourceTrack.CurrentTimeStampNew.Value <= syncTime)
                    {
                        Slice slice = recodeSet.sourceTrack.Current;
                        if (slice == null)
                        {
                            break;
                        }

                        //Logger.Instance.Info("[GenericRecodeWRC::Recode] dumping slice [" + slice.TimeStampNew + ", dur " + (int)slice.SliceDuration + "], track type [" + recodeSet.sourceTrack.CodecType + "].");

                        // Prepare the slice; apply position and time compensation, to base it to the start of the extract.
                        slice.index -= recodeSet.indexStart;
                        if (slice.TimeStampNew.HasValue)
                        {                 // TimeStampNew is null for b-frames, so a b-frame never reaches this branch...
                            if (slice.TimeStampNew.Value < recodeSet.timeStart)
                            {
                                throw new Exception("GenericRecodeWRC.Recode: Offset time stamping error");
                            }

                            // adjust time-stamp and index (offset from time start)
                            slice.TimeStampNew -= recodeSet.timeStart;

                            if (timeStamp100NanoSec == ulong.MaxValue || slice.TimeStampNew.Value > timeStamp100NanoSec)
                            {
                                timeStamp100NanoSec = slice.TimeStampNew.Value;                                 // Take the value for the progress report.
                            }
                        }

                        // Put the slices in the pending Chunk buffer for overview and confirmation.
                        recodeSet.pendingChunkSlices.Add(slice);

                        // position to next output slice
                        recodeSet.destinationTrack.MoveNext();

                        // put slice in destination track
                        recodeSet.destinationTrack.SetCurrent(slice);

                        recodeSet.destination.TrackDurationIn100NanoSecs += (ulong)slice.SliceDuration;

                        // move to next input slice, exit if done
                        if (!recodeSet.sourceTrack.MoveNext())
                        {
                            break;
                        }
                    }
                }

                // Report progress.
                if (timeStamp100NanoSec != ulong.MaxValue)
                {
                    float progress = (float)(((double)timeStamp100NanoSec - (double)startTime100NanoSec) / ((double)endTime100NanoSec - (double)startTime100NanoSec));
                    if (progress > 1)
                    {
                        Common.Logger.Instance.Error("[GenericRecodeWRC::Recode] Progress value [" + progress + "] mis-calculated, progress report skipped.");
                    }
                    else
                    {
                        RaiseRecodeProgressUpdate(progress, true, null);
                    }
                }

                prevSyncTime = syncTime;
            }

            if (validSyncPointsFound == false)
            {     // Nothing meaningful found to process, end now.
                // Do not DestStream.FinalizeStream() as this will try to write and cause an exception.
                RaiseRecodeProgressUpdate(1, false, null);
                RaiseRecodeProgressUpdate(2, false, null);
                return;
            }

            RaiseRecodeProgressUpdate(1, true, null);      // All the work is done, but there may be some finalizers left.

            // Assemble all stbl or moof boxes.
            // Write out the mdat slice in the case of MP4 output;
            // in the case of fragmented files (ISMV output), all moof and mdat boxes have already been written out at this point, and we
            // only need to write out the mfra slice, if it is needed.
            DestStream.FinalizeStream();

            RaiseRecodeProgressUpdate(2, true, null);      // Everything is completed.
        }
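A minimal call sketch (not from the original project): all times are in 100-ns units, and per the track-selection logic above a videoTrackID of 0 matches every video track. Extracting seconds 10 through 40 might look like this, assuming recoder is a configured instance with source and destination streams attached:

        // Hypothetical call: one second == 10,000,000 ticks of 100 ns.
        const ulong OneSecond = 10000000UL;
        recoder.Recode(10 * OneSecond, 40 * OneSecond, 0); // videoTrackID == 0 selects all video tracks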
Example 14
        /// <summary>
        /// Look into a slice just to get width, height, and aspect ratio.
        /// NOTE: This is no longer used.
        /// </summary>
        private void GetScreenDimensions(IVideoTrack video)
        {
            if ((video.PayloadType == VideoPayloadType.unknown) || (video.PayloadType == VideoPayloadType.jpeg) || (video.PayloadType == VideoPayloadType.mjpeg))
            {
                return;
            }

            // this will only work for H.264 video source

            IMediaTrackSliceEnumerator slices = (IMediaTrackSliceEnumerator)video.GetEnumerator();

            slices.MoveNext();
            Slice slice       = slices.Current;
            int   countToZero = slice.SliceSize;
            ulong totalSize   = 0UL;

            BinaryReader reader = new BinaryReader(new MemoryStream(slice.SliceBytes));

            while (countToZero > 4)
            {
                ulong naluLen = BE32(reader.ReadUInt32());
                long  nextPos = reader.BaseStream.Position + (long)naluLen;
                uint  typ     = reader.ReadByte();
                if ((naluLen > (ulong)countToZero) || (naluLen < 2))
                {
                    throw new Exception("Invalid video payload");
                }

                // access unit delimiter (aud) always comes first and its size is not added to the total size
                // because it is added back to the payload.
                if ((typ & 0x1Fu) == 9u)
                {
                    if (naluLen != 2)
                    {
                        throw new Exception("Wrong nalu delimiter length");
                    }
                    reader.ReadByte(); // discard (we don't need it here)
                }

                // if nalu type is Sequence Param Set, pick up width and height
                // also, build private codec data from this SPS
                // NOTE: it matters which video track this qbox belongs to!
                if ((typ & 0x1Fu) == 7u)
                {
                    byte[] buf = new byte[naluLen];
                    reader.Read(buf, 1, (int)naluLen - 1);
                    totalSize += (4 + naluLen);
                    // parse the SPS bit stream, just to get the correct width and height of video.
                    BitReader            bitReader = new BitReader(new MemoryStream(buf));
                    SequenceParameterSet sps       = new SequenceParameterSet((uint)naluLen);
                    sps.Read(bitReader);
                    Width  = (int)sps.Width;
                    Height = (int)sps.Height;
                    if (sps.VUIParametersPresent)
                    {
                        AspectRatioX = sps.vuiParams.AspectRatioX;
                        AspectRatioY = sps.vuiParams.AspectRatioY;
                    }
                }

                countToZero -= ((int)naluLen + 4);
                reader.BaseStream.Position = nextPos;
            }
        }
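BE32 is not shown in these examples; from its use above it presumably converts the big-endian 32-bit NALU length prefix to host order. A minimal sketch of such a helper (an assumption, not the original implementation):

        // Hypothetical helper: byte-swap a big-endian 32-bit value for a little-endian host.
        // NALU length prefixes in MP4-style payloads are stored big-endian.
        private static uint BE32(uint value)
        {
            return ((value & 0x000000FFu) << 24) |
                   ((value & 0x0000FF00u) << 8)  |
                   ((value & 0x00FF0000u) >> 8)  |
                   ((value & 0xFF000000u) >> 24);
        }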
Example 15
        /// <summary>
        /// Look into a slice just to get width, height, and aspect ratio.
        /// NOTE: This is no longer used.
        /// </summary>
        private void GetScreenDimensions(IVideoTrack video)
        {
            if ((video.PayloadType == VideoPayloadType.unknown) || (video.PayloadType == VideoPayloadType.jpeg) || (video.PayloadType == VideoPayloadType.mjpeg))
            {
                return;
            }

            // this will only work for H.264 video source

            IMediaTrackSliceEnumerator slices = (IMediaTrackSliceEnumerator)video.GetEnumerator();

            slices.MoveNext();
            Slice slice       = slices.Current;
            int   countToZero = slice.SliceSize;
            ulong totalSize   = 0UL;

            BinaryReader reader = new BinaryReader(new MemoryStream(slice.SliceBytes));

            while (countToZero > 4)
            {
                ulong naluLen = BE32(reader.ReadUInt32());
                long  nextPos = reader.BaseStream.Position + (long)naluLen;
                uint  typ     = reader.ReadByte();
                if ((naluLen > (ulong)countToZero) || (naluLen < 2))
                {
                    throw new Exception("Invalid video payload");
                }

                // access unit delimiter (aud) always comes first and its size is not added to the total size
                // because it is added back to the payload.
                if ((typ & 0x1Fu) == 9u)
                {
                    if (naluLen != 2)
                    {
                        throw new Exception("Wrong nalu delimiter length");
                    }
                    reader.ReadByte(); // discard (we don't need it here)
                }

                // if nalu type is Sequence Param Set, pick up width and height
                // also, build private codec data from this SPS
                // NOTE: it matters which video track this qbox belongs to!
                if ((typ & 0x1Fu) == 7u)
                {
                    byte[] buf = new byte[naluLen];
                    reader.Read(buf, 1, (int)naluLen - 1);
                    totalSize += (4 + naluLen);
                    // parse the SPS bit stream, just to get the correct width and height of video.
                    BitReader            bitReader = new BitReader(new MemoryStream(buf));
                    SequenceParameterSet sps       = new SequenceParameterSet((uint)naluLen);
                    sps.Read(bitReader);
                    Width  = (int)sps.Width;
                    Height = (int)sps.Height;
                    if (sps.VUIParametersPresent)
                    {
                        AspectRatioX = sps.vuiParams.AspectRatioX;
                        AspectRatioY = sps.vuiParams.AspectRatioY;
                    }
                }

                countToZero -= ((int)naluLen + 4);
                reader.BaseStream.Position = nextPos;
            }
        }
Example 16
        public RawVideoTrackInfo(IVideoTrack video)
        {
            Hints = video.ParentStream.Hints;
            string pcd = video.Codec.PrivateCodecData;

            this.CodecPrivateData = pcd;
            this.HandlerType      = "Video";
            this.PayloadType      = video.PayloadType;
            this.Height           = video.FrameSize.Height;
            this.Width            = video.FrameSize.Width;
            this.SourceTrackID    = video.TrackID; // set source track ID here
            if (video.TrackFormat != null)
            {
                this.DurationIn100NanoSecs = video.TrackFormat.DurationIn100NanoSecs;
                this.TimeScale             = video.TrackFormat.TimeScale;
            }
            else
            {
                this.DurationIn100NanoSecs = video.TrackDurationIn100NanoSecs;
                this.TimeScale             = (uint)TimeSpan.TicksPerSecond;
            }

            if (video.IsAnamorphic)
            {
                AspectRatioX = 4;
                AspectRatioY = 3;
            }
            else
            {
                AspectRatioY = AspectRatioX = 1;
            }
        }
Example 17
 public void OnVideoTrackRemoved(IConversation conversation, IParticipant participant, IVideoTrack videoTrack)
 {
     VideoTrackRemovedHandler?.Invoke(conversation, participant, videoTrack);
 }
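A minimal subscription sketch (not from the original project), assuming VideoTrackRemovedHandler is a public delegate-typed member that callers can assign to, and conversationEvents is a hypothetical instance of the class above:

        // Hypothetical subscriber: log whenever a participant's video track is removed.
        conversationEvents.VideoTrackRemovedHandler += (conversation, participant, videoTrack) =>
        {
            Console.WriteLine("Video track removed for participant " + participant);
        };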