public void AllH264ResolutionsTest()
        {
            // Tracks every media configuration change so it can be rolled back afterwards.
            MediaConfigurationChangeLog changeLog = new MediaConfigurationChangeLog();

            RunTest(
                () =>
            {
                // Verify the actual H264 stream resolution during the test.
                _videoForm.CheckActualResolution = true;
                _videoForm.VideoCodecName        = "H264";

                GetResolutionsAvailable getResolutionsAvailable =
                    (options) => options.H264.ResolutionsAvailable;

                AllResolutionsTest(changeLog, VideoEncoding.H264, getResolutionsAvailable);
            },
                () =>
            {
                try
                {
                    _videoForm.CheckActualResolution = false;
                    VideoCleanup2();
                }
                finally
                {
                    // Restore the original configuration even if cleanup throws.
                    RestoreMediaConfiguration(changeLog);
                }
            }
                );
        }
Beispiel #2
0
        public void VideoEncoderConfigurationsJPEGResolutionTest()
        {
            // Tracks every media configuration change so the cleanup step can roll it back.
            MediaConfigurationChangeLog changeLog = new MediaConfigurationChangeLog();

            RunTest(
                () =>
            {
                // Verify the actual JPEG stream resolution during the test.
                _videoForm.CheckActualResolution = true;
                _videoForm.VideoCodecName        = "JPEG";

                VideoEncoding encodingUnderTest = VideoEncoding.JPEG;
                GetResolutionsAvailable getResolutionsAvailable = (options) =>
                {
                    return(options.JPEG.ResolutionsAvailable);
                };

                // Pass the delegate declared above; the original left it unused and
                // passed a second, byte-identical inline lambda instead.
                ResolutionTest(changeLog, encodingUnderTest, getResolutionsAvailable);
            },
                () =>
            {
                try
                {
                    _videoForm.CheckActualResolution = false;
                    VideoCleanup();
                }
                finally
                {
                    // Restore the original configuration even if cleanup throws.
                    RestoreMediaConfiguration(changeLog);
                }
            }
                );
        }
Beispiel #3
0
        private string ProcessVideoLog(string log, VideoFrame frame, VideoEncoding encoding, ConnectionInfo remoteConnectionInfo)
        {
            // Buffers supplying the frame's dimensions (raw) and footprint (compressed).
            var rawBuffer        = GetRawVideoBuffer(frame);
            var compressedBuffer = GetCompressedVideoBuffer(frame);

            // Substitute each placeholder token one statement at a time; the token
            // set and replacement values are identical to the original chain.
            var result = log;

            result = result.Replace("{footprint}", compressedBuffer.Footprint.ToString());
            result = result.Replace("{width}", rawBuffer.Width.ToString());
            result = result.Replace("{height}", rawBuffer.Height.ToString());
            result = result.Replace("{mediaStreamId}", frame.Mid);
            result = result.Replace("{rtpStreamId}", frame.RtpStreamId);
            result = result.Replace("{sequenceNumber}", frame.SequenceNumber.ToString());
            result = result.Replace("{synchronizationSource}", frame.SynchronizationSource.ToString());
            result = result.Replace("{systemTimestamp}", frame.SystemTimestamp.ToString());
            result = result.Replace("{timestamp}", frame.Timestamp.ToString());
            result = result.Replace("{encoding}", encoding.ToString());
            result = result.Replace("{applicationId}", Options.ApplicationId);
            result = result.Replace("{channelId}", Options.ChannelId);
            result = result.Replace("{userId}", remoteConnectionInfo.UserId);
            result = result.Replace("{userAlias}", remoteConnectionInfo.UserAlias);
            result = result.Replace("{deviceId}", remoteConnectionInfo.DeviceId);
            result = result.Replace("{deviceAlias}", remoteConnectionInfo.DeviceAlias);
            result = result.Replace("{clientId}", remoteConnectionInfo.ClientId);
            result = result.Replace("{clientTag}", remoteConnectionInfo.ClientTag);
            result = result.Replace("{connectionId}", remoteConnectionInfo.Id);
            result = result.Replace("{connectionTag}", remoteConnectionInfo.Tag);
            result = result.Replace("{mediaId}", remoteConnectionInfo.MediaId);

            return result;
        }
Beispiel #4
0
        public static string Command(this VideoEncoding value)
        {
            // Reflect over the enum member's field to find its [Command] attribute.
            FieldInfo field = value.GetType().GetField(value.ToString());
            var attribute = Attribute.GetCustomAttribute(field, typeof(CommandAttribute)) as CommandAttribute;

            // Fall back to the enum member's own name when no attribute is applied.
            if (attribute != null)
            {
                return attribute.Command;
            }

            return value.ToString();
        }
Beispiel #5
0
        public static string LongName(this VideoEncoding value)
        {
            // Reflect over the enum member's field to find its [LongName] attribute;
            // absent that, the enum member's own name is used.
            FieldInfo field = value.GetType().GetField(value.ToString());
            var attribute = Attribute.GetCustomAttribute(field, typeof(LongNameAttribute)) as LongNameAttribute;

            return attribute != null ? attribute.LongName : value.ToString();
        }
Beispiel #6
0
 /// <summary>
 /// Raises the <c>VideoEncoding</c> event for the given video.
 /// </summary>
 /// <param name="video">The video being encoded, passed to subscribers via <c>VideoEventArgs</c>.</param>
 public void OnVideoEncoding(Video video)
 {
     // Null-conditional invoke snapshots the delegate, avoiding the race where
     // the last subscriber detaches between a null check and Invoke.
     VideoEncoding?.Invoke(this, new VideoEventArgs {
         Video = video
     });
 }
Beispiel #7
0
 /// <summary>
 /// Convenience overload without multicast: delegates to the full overload,
 /// passing null for the multicast address type.
 /// </summary>
 MediaUri GetVideoMediaUri(TestVideoEncoderConfigurationOptions test,
                           CopyVideoEncoderConfiguration copyMethod,
                           string profileRequirementsDescription,
                           VideoEncoding encoding,
                           TransportProtocol protocol,
                           StreamType streamType)
 {
     return(GetVideoMediaUri(test, copyMethod, profileRequirementsDescription, encoding, protocol, streamType, null));
 }
        /// <summary>
        /// Start recording a new video.
        /// </summary>
        /// <param name="filename">The file path (an extension matching the encoding is appended).</param>
        /// <param name="encoding">The video encoding. Mp4 by default.</param>
        /// <returns>The <see cref="VideoCaptureResult"/> struct.</returns>
        public async Task <VideoCaptureResult> StartRecordingAsync(string filename, VideoEncoding encoding)
        {
            if (string.IsNullOrEmpty(filename))
            {
                throw new ArgumentException("The filename cannot be empty or null.");
            }

            if (!this.isInitialized)
            {
                Debug.WriteLine("First you need to initialize the videocapturemanager.");
                return(new VideoCaptureResult());
            }

            if (this.IsRecording)
            {
                Debug.WriteLine("VideoCapture is already recording. Call Stop before Start again.");
                return(new VideoCaptureResult(false, string.Empty));
            }

            try
            {
                Debug.WriteLine("Recording video...");

                // Choose the container profile AND the matching file extension from
                // the requested encoding. (The original always appended ".mp4",
                // mislabeling AVI and WMV recordings.)
                MediaEncodingProfile encodingProfile;
                string extension;

                switch (encoding)
                {
                case VideoEncoding.AVI:
                    encodingProfile = MediaEncodingProfile.CreateAvi(VideoEncodingQuality.Auto);
                    extension       = ".avi";
                    break;

                case VideoEncoding.WMV:
                    encodingProfile = MediaEncodingProfile.CreateWmv(VideoEncodingQuality.Auto);
                    extension       = ".wmv";
                    break;

                default:
                case VideoEncoding.MP4:
                    encodingProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto);
                    extension       = ".mp4";
                    break;
                }

                var name = filename + extension;
                var file = await this.captureFolder.CreateFileAsync(name, CreationCollisionOption.GenerateUniqueName);

                await this.mediaCapture.StartRecordToStorageFileAsync(encodingProfile, file);

                this.isRecording = true;

                Debug.WriteLine("Video saved.");
                return(new VideoCaptureResult(true, file.Path));
            }
            catch (Exception ex)
            {
                // Message corrected: this method records video, it does not take photos.
                Debug.WriteLine("Exception when recording a video: " + ex.ToString());
                return(new VideoCaptureResult(false, string.Empty));
            }
        }
 /// <summary>
 /// Discards pending edits by copying every field of the pristine "origin"
 /// snapshot back onto the editable "current" settings.
 /// </summary>
 public void RevertChanges()
 {
     // Field assignments are independent, so order is irrelevant.
     this.current.encoder          = this.origin.encoder;
     this.current.govLength        = this.origin.govLength;
     this.current.bitrate          = this.origin.bitrate;
     this.current.quality          = this.origin.quality;
     this.current.encodingInterval = this.origin.encodingInterval;
     this.current.frameRate        = this.origin.frameRate;
     this.current.resolution       = this.origin.resolution;
 }
        /// <summary>
        /// Start recording a new video.
        /// </summary>
        /// <param name="filename">The file path.</param>
        /// <param name="encoding">The video encoding. Mp4 by default.</param>
        /// <returns>The <see cref="VideoCaptureResult"/> struct.</returns>
        public async Task <VideoCaptureResult> StartRecordingAsync(string filename, VideoEncoding encoding = VideoEncoding.MP4)
        {
            // Without an underlying capture manager there is nothing to record;
            // report the default (empty) result, as before.
            if (this.videoCaptureManager == null)
            {
                return(default(VideoCaptureResult));
            }

            return(await this.videoCaptureManager.StartRecordingAsync(filename, encoding));
        }
Beispiel #11
0
 /// <summary>
 /// Initializes an instance of <see cref="MuxedStreamInfo"/>, storing the
 /// audio/video metadata on top of the base stream info.
 /// </summary>
 public MuxedStreamInfo(int itag, string url, Container container, long size, AudioEncoding audioEncoding,
                        VideoEncoding videoEncoding, string videoQualityLabel, VideoQuality videoQuality,
                        VideoResolution resolution)
     : base(itag, url, container, size)
 {
     // Assignments are independent; grouped video-first for readability.
     Resolution        = resolution;
     VideoQuality      = videoQuality;
     VideoQualityLabel = videoQualityLabel;
     VideoEncoding     = videoEncoding;
     AudioEncoding     = audioEncoding;
 }
        /// <summary>
        /// Start recording a new video and save it as stream.
        /// </summary>
        /// <param name="encoding">The video encoding. Mp4 by default.</param>
        /// <returns>The <see cref="VideoCaptureStreamResult"/> struct.</returns>
        public async Task <VideoCaptureStreamResult> StartRecordingInStreamAsync(VideoEncoding encoding = VideoEncoding.MP4)
        {
            // Without an underlying capture manager there is nothing to record;
            // report the default (empty) result, as before.
            if (this.videoCaptureManager == null)
            {
                return(default(VideoCaptureStreamResult));
            }

            return(await this.videoCaptureManager.StartRecordingToStreamAsync(encoding));
        }
Beispiel #13
0
 /// <summary>
 /// Convenience overload without multicast: delegates to the full overload,
 /// passing null for the multicast address type.
 /// </summary>
 MediaUri GetAudioVideoMediaUri(TestVideoEncoderConfigurationOptions videoTest,
                                string videoCodec,
                                VideoEncoding encoding,
                                TestAudioEncoderConfigurationOptions audioTest,
                                string audioCodec,
                                AudioEncoding audioEncoding,
                                StreamType streamType,
                                TransportProtocol protocol)
 {
     return(GetAudioVideoMediaUri(
                videoTest, videoCodec, encoding, audioTest, audioCodec, audioEncoding, streamType, protocol, null));
 }
 /// <summary>
 /// Initializes an instance of <see cref="VideoStreamInfo"/>; the numeric
 /// arguments are validated as non-negative and the label as non-null.
 /// </summary>
 public VideoStreamInfo(int itag, string url, Container container, long size, long bitrate,
                        VideoEncoding videoEncoding, string videoQualityLabel, VideoQuality videoQuality,
                        VideoResolution resolution, int framerate)
     : base(itag, url, container, size)
 {
     // Validate first (same guard order as before, so the same argument
     // fails first), then store everything.
     var checkedBitrate   = bitrate.GuardNotNegative(nameof(bitrate));
     var checkedLabel     = videoQualityLabel.GuardNotNull(nameof(videoQualityLabel));
     var checkedFramerate = framerate.GuardNotNegative(nameof(framerate));

     Bitrate           = checkedBitrate;
     VideoEncoding     = videoEncoding;
     VideoQualityLabel = checkedLabel;
     VideoQuality      = videoQuality;
     Resolution        = resolution;
     Framerate         = checkedFramerate;
 }
Beispiel #15
0
 /// <summary>
 /// Initializes an instance of <see cref="VideoStreamInfo"/>, storing the
 /// video metadata on top of the base stream info (no argument validation).
 /// </summary>
 public VideoStreamInfo(int itag, string url, Container container, long size, long bitrate,
                        VideoEncoding videoEncoding, string videoQualityLabel, VideoQuality videoQuality,
                        VideoResolution resolution, int framerate)
     : base(itag, url, container, size)
 {
     // Assignments are independent; grouped by theme for readability.
     Framerate         = framerate;
     Resolution        = resolution;
     VideoQuality      = videoQuality;
     VideoQualityLabel = videoQualityLabel;
     VideoEncoding     = videoEncoding;
     Bitrate           = bitrate;
 }
        /// <summary>
        /// Start recording a new video and save it as stream.
        /// </summary>
        /// <param name="encoding">The video encoding. Mp4 by default.</param>
        /// <returns>The <see cref="VideoCaptureStreamResult"/> struct.</returns>
        public async Task <VideoCaptureStreamResult> StartRecordingToStreamAsync(VideoEncoding encoding)
        {
            if (!this.isInitialized)
            {
                Debug.WriteLine("First you need to initialize the videocapturemanager.");
                return(new VideoCaptureStreamResult());
            }

            if (this.IsRecording)
            {
                Debug.WriteLine("VideoCapture is already recording. Call Stop before Start again.");
                return(new VideoCaptureStreamResult(false, Stream.Null));
            }

            try
            {
                Debug.WriteLine("Recording video...");
                MediaEncodingProfile encodingProfile;

                // BUG FIX: the AVI and MP4/default cases were swapped (AVI built an
                // MP4 profile and MP4 built an AVI profile). Each encoding now maps
                // to its own container, matching the StartRecordingAsync file variant.
                switch (encoding)
                {
                case VideoEncoding.AVI:
                    encodingProfile = MediaEncodingProfile.CreateAvi(VideoEncodingQuality.Auto);
                    break;

                case VideoEncoding.WMV:
                    encodingProfile = MediaEncodingProfile.CreateWmv(VideoEncodingQuality.Auto);
                    break;

                default:
                case VideoEncoding.MP4:
                    encodingProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto);
                    break;
                }

                var stream = new InMemoryRandomAccessStream();
                await this.mediaCapture.StartRecordToStreamAsync(encodingProfile, stream);

                this.isRecording = true;

                return(new VideoCaptureStreamResult(true, stream.AsStream()));
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Exception when recording a video: " + ex.ToString());
                return(new VideoCaptureStreamResult(false, Stream.Null));
            }
        }
Beispiel #17
0
        /// <summary>
        /// Renders the command line: each "&lt;parameter&gt; &lt;value&gt;" pair followed by
        /// the video and audio encoder commands, all space-separated.
        /// </summary>
        public override string ToString()
        {
            var sb = new StringBuilder();

            foreach (var action in _actions)
            {
                // MISC_FILTER_COMPLEX uses the quoted filter graph instead of its stored value.
                var value = action.Item1 == Parameter.MISC_FILTER_COMPLEX
                    ? FilterComplex.ToString().EncloseQMarks()
                    : action.Item2;

                sb.AppendFormat("{0} {1} ", action.Item1.Command(), value);
            }

            sb.Append(VideoEncoding.Command()).Append(" ");
            sb.Append(AudioEncoding.Command()).Append(" ");

            return sb.ToString();
        }
            /// <summary>
            /// Factory: builds a <see cref="Model"/> holding the encoder option limits
            /// plus an "origin" snapshot of the device's current settings, then copies
            /// that snapshot onto the editable settings via RevertChanges().
            /// </summary>
            public static Model Create(
                string profToken,
                VideoResolution resolution,
                int maxFrameRate,
                int minFrameRate,
                double frameRate,
                int maxEncodingInterval,
                int minEncodingInterval,
                int encodingInterval,
                int maxQuality,
                int minQuality,
                float quality,
                int maxBitrate,
                int minBitrate,
                double bitrate,
                int maxGovLength,
                int minGovLength,
                int govLength,
                VideoEncoding encoder,
                VideoEncoderConfigurationOptions encoderOptions
                )
            {
                var model = new Model();

                // Profile identity and the device-reported option limits.
                model.profToken           = profToken;
                model.maxFrameRate        = maxFrameRate;
                model.minFrameRate        = minFrameRate;
                model.maxEncodingInterval = maxEncodingInterval;
                model.minEncodingInterval = minEncodingInterval;
                model.maxQuality          = maxQuality;
                model.minQuality          = minQuality;
                model.maxBitrate          = maxBitrate;
                model.minBitrate          = minBitrate;
                model.maxGovLength        = maxGovLength;
                model.minGovLength        = minGovLength;
                model.encoderOptions      = encoderOptions;

                // Snapshot of the settings as they currently are on the device.
                model.origin.resolution       = resolution;
                model.origin.frameRate        = frameRate;
                model.origin.encodingInterval = encodingInterval;
                model.origin.quality          = quality;
                model.origin.bitrate          = bitrate;
                model.origin.govLength        = govLength;
                model.origin.encoder          = encoder;

                // Initialize the editable settings to match the snapshot.
                model.RevertChanges();

                return model;
            }
        /// <summary>
        /// Maps a <see cref="VideoEncoding"/> to its decoder implementation.
        /// Throws for encodings without a decoder.
        /// </summary>
        public static VideoDecoder CreateDecoder(this VideoEncoding encoding)
        {
            if (encoding == VideoEncoding.VP8)
            {
                return new Vp8.Decoder();
            }

            if (encoding == VideoEncoding.VP9)
            {
                return new Vp9.Decoder();
            }

            if (encoding == VideoEncoding.H264)
            {
                return new OpenH264.Decoder();
            }

            throw new InvalidOperationException($"Unexpected video encoding '{encoding}'.");
        }
        /// <summary>
        /// Maps a <see cref="VideoEncoding"/> to the corresponding <see cref="VideoCodec"/>.
        /// Throws for encodings without a codec mapping.
        /// </summary>
        public static VideoCodec ToCodec(this VideoEncoding encoding)
        {
            if (encoding == VideoEncoding.VP8)
            {
                return VideoCodec.VP8;
            }

            if (encoding == VideoEncoding.VP9)
            {
                return VideoCodec.VP9;
            }

            if (encoding == VideoEncoding.H264)
            {
                return VideoCodec.H264;
            }

            throw new InvalidOperationException($"Unexpected video encoding '{encoding}'.");
        }
        /// <summary>
        /// Maps a <see cref="VideoEncoding"/> to its RTP depacketizer pipe.
        /// Throws for encodings without a depacketizer.
        /// </summary>
        public static VideoPipe CreateDepacketizer(this VideoEncoding encoding)
        {
            if (encoding == VideoEncoding.VP8)
            {
                return new Vp8.Depacketizer();
            }

            if (encoding == VideoEncoding.VP9)
            {
                return new Vp9.Depacketizer();
            }

            if (encoding == VideoEncoding.H264)
            {
                // H264 additionally requires a packetization mode.
                return new H264.Depacketizer(H264.PacketizationMode.Default);
            }

            throw new InvalidOperationException($"Unexpected video encoding '{encoding}'.");
        }
Beispiel #22
0
        // Annex A.14
        /// <summary>
        /// Finds the first profile whose video encoder configuration options satisfy
        /// <paramref name="testVideo"/>, sets that profile's encoding to
        /// <paramref name="videoEncoding"/>, and adjusts the encoder configuration
        /// against the device-reported options. Test-asserts if no profile qualifies.
        /// </summary>
        /// <param name="videoEncoding">Encoding to apply to the selected profile.</param>
        /// <param name="profileRequirementsDescription">Requirement text used in log and assert messages.</param>
        /// <param name="testVideo">Predicate over each candidate profile's encoder configuration options.</param>
        /// <param name="videoOptions">Receives the options fetched for the matching profile.</param>
        protected Profile SelectVideoProfile(
            VideoEncoding videoEncoding,
            string profileRequirementsDescription,
            TestVideoEncoderConfigurationOptions testVideo,
            ref VideoEncoderConfigurationOptions videoOptions)
        {
            Profile[] profiles = GetProfiles();
            Profile   profile  = null;
            VideoEncoderConfigurationOptions videoOptionsTmp = null;

            RunStep(() =>
            {
                // Scan profiles in device order; the first match wins.
                foreach (Profile p in profiles)
                {
                    LogStepEvent(string.Format("Check if {0} profile supports {1} Video encoder configuration",
                                               p.Name, profileRequirementsDescription));
                    if (p.VideoEncoderConfiguration != null)
                    {
                        videoOptionsTmp =
                            Client.GetVideoEncoderConfigurationOptions(p.VideoEncoderConfiguration.token, p.token);

                        // Throttle successive requests to the device.
                        DoRequestDelay();

                        if (testVideo(videoOptionsTmp))
                        {
                            profile = p;
                            break;
                        }
                    }
                }
            },
                    string.Format("Select profile with {0} Video encoder configuration", profileRequirementsDescription));

            Assert(profile != null,
                   string.Format("Profile with {0} Video encoder configuration not found", profileRequirementsDescription),
                   "Check if required profile found");

            profile.VideoEncoderConfiguration.Encoding = videoEncoding;
            // videoOptionsTmp still holds the options fetched for the matched profile
            // (the loop breaks immediately after the match).
            videoOptions = videoOptionsTmp;

            AdjustVideoEncoderConfiguration(videoEncoding, profile.VideoEncoderConfiguration, videoOptions);

            return(profile);
        }
        /// <summary>
        /// Builds the <see cref="VideoFormat"/> matching the encoding, with the given
        /// packetization flag. Throws for encodings without a format.
        /// </summary>
        public static VideoFormat CreateFormat(this VideoEncoding encoding, bool isPacketized = false)
        {
            if (encoding == VideoEncoding.VP8)
            {
                return new Vp8.Format {
                    IsPacketized = isPacketized
                };
            }

            if (encoding == VideoEncoding.VP9)
            {
                return new Vp9.Format {
                    IsPacketized = isPacketized
                };
            }

            if (encoding == VideoEncoding.H264)
            {
                // H264 additionally carries a profile-level id and packetization mode.
                return new H264.Format(H264.ProfileLevelId.Default, H264.PacketizationMode.Default)
                {
                    IsPacketized = isPacketized
                };
            }

            throw new InvalidOperationException($"Unexpected video encoding '{encoding}'.");
        }
Beispiel #24
0
        /// <summary>
        /// Selects a profile whose video encoder configuration options satisfy
        /// <paramref name="test"/>, applies <paramref name="encoding"/> to it
        /// (clearing the other codec sections and clamping bitrate into any
        /// extension-reported range), optionally configures multicast, then resolves
        /// the stream URI and validates the resulting video.
        /// </summary>
        /// <param name="test">Predicate over candidate encoder configuration options.</param>
        /// <param name="copyMethod">Callback that copies settings from the options onto the profile.</param>
        /// <param name="profileRequirementsDescription">Requirement text used in log and assert messages.</param>
        /// <param name="encoding">Video encoding to apply to the selected profile.</param>
        /// <param name="protocol">Transport protocol for the stream setup.</param>
        /// <param name="streamType">Requested stream type.</param>
        /// <param name="multicastAddressType">When non-null, multicast is configured with this address type.</param>
        MediaUri GetVideoMediaUri(TestVideoEncoderConfigurationOptions test,
                                  CopyVideoEncoderConfiguration copyMethod,
                                  string profileRequirementsDescription,
                                  VideoEncoding encoding,
                                  TransportProtocol protocol,
                                  StreamType streamType,
                                  IPType?multicastAddressType)
        {
            Profile profile = null;
            VideoEncoderConfigurationOptions options = null;

            Profile[] profiles = GetProfiles();

            RunStep(() =>
            {
                // Scan profiles in device order; the first match wins.
                foreach (Profile p in profiles)
                {
                    LogStepEvent(string.Format("Check if {0} profile supports {1} Video encoder configuration",
                                               p.Name, profileRequirementsDescription));
                    if (p.VideoEncoderConfiguration != null)
                    {
                        options =
                            Client.GetVideoEncoderConfigurationOptions(p.VideoEncoderConfiguration.token, p.token);

                        // Throttle successive requests to the device.
                        DoRequestDelay();

                        if (test(options))
                        {
                            profile = p;
                            break;
                        }
                    }
                }
            },
                    string.Format("Select profile with {0} Video encoder configuration", profileRequirementsDescription));

            Assert(profile != null,
                   string.Format("Profile with {0} Video encoder configuration not found", profileRequirementsDescription),
                   "Check if required profile found");

            profile.VideoEncoderConfiguration.Encoding = encoding;
            copyMethod(profile, options);
            // fix for Panasonic
            if (encoding == VideoEncoding.JPEG)
            {
                // Clear the other codec sections when JPEG is selected.
                profile.VideoEncoderConfiguration.MPEG4 = null;
                profile.VideoEncoderConfiguration.H264  = null;

                // support for extensions (bitrate limits): clamp the configured
                // bitrate into the device-reported [Min, Max] range, if present.
                if (options.Extension != null)
                {
                    if (options.Extension.JPEG != null)
                    {
                        if (options.Extension.JPEG.BitrateRange != null)
                        {
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit < options.Extension.JPEG.BitrateRange.Min)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.JPEG.BitrateRange.Min;
                            }
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit > options.Extension.JPEG.BitrateRange.Max)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.JPEG.BitrateRange.Max;
                            }
                        }
                    }
                }
            }
            if (encoding == VideoEncoding.MPEG4)
            {
                // Clear the H264 section when MPEG4 is selected.
                profile.VideoEncoderConfiguration.H264 = null;

                // support for extensions (bitrate limits): same clamping as above.
                if (options.Extension != null)
                {
                    if (options.Extension.MPEG4 != null)
                    {
                        if (options.Extension.MPEG4.BitrateRange != null)
                        {
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit < options.Extension.MPEG4.BitrateRange.Min)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.MPEG4.BitrateRange.Min;
                            }
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit > options.Extension.MPEG4.BitrateRange.Max)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.MPEG4.BitrateRange.Max;
                            }
                        }
                    }
                }
            }
            if (encoding == VideoEncoding.H264)
            {
                // Clear the MPEG4 section when H264 is selected.
                profile.VideoEncoderConfiguration.MPEG4 = null;

                // support for extensions (bitrate limits): same clamping as above.
                if (options.Extension != null)
                {
                    if (options.Extension.H264 != null)
                    {
                        if (options.Extension.H264.BitrateRange != null)
                        {
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit < options.Extension.H264.BitrateRange.Min)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.H264.BitrateRange.Min;
                            }
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit > options.Extension.H264.BitrateRange.Max)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.H264.BitrateRange.Max;
                            }
                        }
                    }
                }
            }

            if (multicastAddressType.HasValue)
            {
                SetMulticastSettings(profile, true, false, multicastAddressType.Value);
            }

            SetVideoEncoderConfiguration(profile.VideoEncoderConfiguration, false, multicastAddressType.HasValue);

            StreamSetup streamSetup = new StreamSetup();

            streamSetup.Transport          = new Transport();
            streamSetup.Transport.Protocol = protocol;
            streamSetup.Stream             = streamType;

            // Remember the selected profile and resolve the URI for it.
            UsedProfileToken = profile.token;
            MediaUri streamUri = GetStreamUri(streamSetup, profile.token);

            AdjustVideo(protocol, streamType, streamUri, profile.VideoEncoderConfiguration);

            return(streamUri);
        }
Beispiel #25
0
        MediaUri GetAudioVideoMediaUri(TestVideoEncoderConfigurationOptions videoTest,
                                       string videoCodec,
                                       VideoEncoding encoding,
                                       TestAudioEncoderConfigurationOptions audioTest,
                                       string audioCodec,
                                       AudioEncoding audioEncoding,
                                       StreamType streamType,
                                       TransportProtocol protocol,
                                       IPType?multicastAddressType)
        {
            Profile[] profiles = GetProfiles();

            Profile profile = null;
            VideoEncoderConfigurationOptions options = null;
            int bitrate    = 0;
            int sampleRate = 0;

            RunStep(() =>
            {
                foreach (Profile p in profiles)
                {
                    LogStepEvent(string.Format("Check if {0} profile satisfies current needs", p.Name));

                    if (p.VideoEncoderConfiguration != null)
                    {
                        LogStepEvent("GetVideoEncoderConfigurationOptions");
                        VideoEncoderConfigurationOptions videoOptions =
                            Client.GetVideoEncoderConfigurationOptions(p.VideoEncoderConfiguration.token, p.token);
                        DoRequestDelay();

                        if (videoTest(videoOptions))
                        {
                            // Video configuration OK  - configure Audio, if needed.
                            options = videoOptions;

                            if (p.AudioEncoderConfiguration != null && p.AudioSourceConfiguration != null)
                            {
                                LogStepEvent("GetAudioEncoderConfigurationOptions");
                                AudioEncoderConfigurationOptions audioOptions =
                                    Client.GetAudioEncoderConfigurationOptions(p.AudioEncoderConfiguration.token,
                                                                               p.token);

                                DoRequestDelay();

                                if (audioTest(audioOptions))
                                {
                                    profile = p;
                                    LogStepEvent("OK - profile found");

                                    // find nearest bitrate and samplerate
                                    bitrate = FindNearestAudioBitrate(p.AudioEncoderConfiguration.Bitrate, audioEncoding,
                                                                      audioOptions);
                                    sampleRate = FindNearestAudioSamplerate(p.AudioEncoderConfiguration.SampleRate,
                                                                            audioEncoding, audioOptions);
                                    break;
                                }
                            }
                            else
                            {
                                LogStepEvent("GetAudioEncoderConfigurations");
                                AudioEncoderConfiguration[] audioEncoderConfigurations =
                                    Client.GetAudioEncoderConfigurations();

                                DoRequestDelay();

                                bool audioEncoderConfigurationFound = false;

                                foreach (AudioEncoderConfiguration configuration in audioEncoderConfigurations)
                                {
                                    LogStepEvent("GetAudioEncoderConfigurationOptions");
                                    AudioEncoderConfigurationOptions audioOptions =
                                        Client.GetAudioEncoderConfigurationOptions(configuration.token, p.token);

                                    DoRequestDelay();

                                    if (audioTest(audioOptions))
                                    {
                                        if (p.AudioSourceConfiguration == null)
                                        {
                                            AudioSourceConfiguration[] audioSourceConfigurations = Client.GetAudioSourceConfigurations();

                                            DoRequestDelay();

                                            if (audioSourceConfigurations.Length > 0)
                                            {
                                                LogStepEvent("AddAudioSourceConfiguration");
                                                Client.AddAudioSourceConfiguration(p.token, audioSourceConfigurations[0].token);
                                                DoRequestDelay();
                                            }
                                            else
                                            {
                                                throw new DutPropertiesException("Audio Source Configurations not found");
                                            }
                                        }

                                        bitrate = FindNearestAudioBitrate(configuration.Bitrate, audioEncoding,
                                                                          audioOptions);
                                        sampleRate = FindNearestAudioSamplerate(configuration.SampleRate, audioEncoding,
                                                                                audioOptions);

                                        LogStepEvent("AddAudioEncoderConfiguration");
                                        Client.AddAudioEncoderConfiguration(p.token, configuration.token);
                                        DoRequestDelay();

                                        p.AudioEncoderConfiguration = configuration;

                                        profile = p;

                                        LogStepEvent(string.Format("Add Audio configuration to the {0} profile - OK", profile.Name));

                                        audioEncoderConfigurationFound = true;
                                        break;
                                    }
                                }

                                if (!audioEncoderConfigurationFound)
                                {
                                    throw new DutPropertiesException("Audio Encoder Configuration with required properties not found");
                                }
                            }
                        }
                    }
                }

                if (profile == null)
                {
                    throw new DutPropertiesException("Respective profile cannot be found or created");
                }
            },
                    string.Format("Select or create profile with {0} Video encoder configuration and {1} Audio encoder configuration",
                                  videoCodec,
                                  audioCodec));

            // profile found

            profile.VideoEncoderConfiguration.Encoding = encoding;

            // support for extensions (bitrate limits)

            // fix for Panasonic
            if (encoding == VideoEncoding.JPEG)
            {
                profile.VideoEncoderConfiguration.MPEG4 = null;
                profile.VideoEncoderConfiguration.H264  = null;

                // support for extensions (bitrate limits)
                if (options.Extension != null)
                {
                    if (options.Extension.JPEG != null)
                    {
                        if (options.Extension.JPEG.BitrateRange != null)
                        {
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit < options.Extension.JPEG.BitrateRange.Min)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.JPEG.BitrateRange.Min;
                            }
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit > options.Extension.JPEG.BitrateRange.Max)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.JPEG.BitrateRange.Max;
                            }
                        }
                    }
                }
            }
            if (encoding == VideoEncoding.MPEG4)
            {
                profile.VideoEncoderConfiguration.H264 = null;

                // support for extensions (bitrate limits)
                if (options.Extension != null)
                {
                    if (options.Extension.MPEG4 != null)
                    {
                        if (options.Extension.MPEG4.BitrateRange != null)
                        {
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit < options.Extension.MPEG4.BitrateRange.Min)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.MPEG4.BitrateRange.Min;
                            }
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit > options.Extension.MPEG4.BitrateRange.Max)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.MPEG4.BitrateRange.Max;
                            }
                        }
                    }
                }
            }
            if (encoding == VideoEncoding.H264)
            {
                profile.VideoEncoderConfiguration.MPEG4 = null;

                // support for extensions (bitrate limits)
                if (options.Extension != null)
                {
                    if (options.Extension.H264 != null)
                    {
                        if (options.Extension.H264.BitrateRange != null)
                        {
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit < options.Extension.H264.BitrateRange.Min)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.H264.BitrateRange.Min;
                            }
                            if (profile.VideoEncoderConfiguration.RateControl.BitrateLimit > options.Extension.H264.BitrateRange.Max)
                            {
                                profile.VideoEncoderConfiguration.RateControl.BitrateLimit = options.Extension.H264.BitrateRange.Max;
                            }
                        }
                    }
                }
            }

            if (multicastAddressType.HasValue)
            {
                SetMulticastSettings(profile, true, true, multicastAddressType.Value);
            }

            SetVideoEncoderConfiguration(profile.VideoEncoderConfiguration, false, multicastAddressType.HasValue);

            profile.AudioEncoderConfiguration.Encoding   = audioEncoding;
            profile.AudioEncoderConfiguration.Bitrate    = bitrate;
            profile.AudioEncoderConfiguration.SampleRate = sampleRate;

            SetAudioEncoderConfiguration(profile.AudioEncoderConfiguration, false, multicastAddressType.HasValue);

            StreamSetup streamSetup = new StreamSetup();

            streamSetup.Transport          = new Transport();
            streamSetup.Transport.Protocol = protocol;
            streamSetup.Stream             = streamType;

            UsedProfileToken = profile.token;
            MediaUri streamUri = GetStreamUri(streamSetup, profile.token);

            AdjustVideo(protocol, streamType, streamUri, profile.VideoEncoderConfiguration);

            return(streamUri);
        }
        /// <summary>
        /// Start recording a new video and save it as stream.
        /// </summary>
        /// <param name="encoding">The video encoding. Mp4 by default.</param>
        /// <returns>The <see cref="VideoCaptureStreamResult"/> struct.</returns>
        public async Task<VideoCaptureStreamResult> StartRecordingToStreamAsync(VideoEncoding encoding)
        {
            if (!this.isInitialized)
            {
                Debug.WriteLine("First you need to initialize the videocapturemanager.");
                // Return the same explicit failure shape as the other guard below
                // (the parameterless ctor left Stream unset).
                return new VideoCaptureStreamResult(false, Stream.Null);
            }

            if (this.IsRecording)
            {
                Debug.WriteLine("VideoCapture is already recording. Call Stop before Start again.");
                return new VideoCaptureStreamResult(false, Stream.Null);
            }

            try
            {
                Debug.WriteLine("Recording video...");

                // Map the requested encoding to its matching container profile.
                // BUG FIX: the AVI and MP4 cases were swapped — AVI produced an MP4
                // profile and MP4/default produced an AVI profile.
                MediaEncodingProfile encodingProfile;
                switch (encoding)
                {
                    case VideoEncoding.AVI:
                        encodingProfile = MediaEncodingProfile.CreateAvi(VideoEncodingQuality.Auto);
                        break;
                    case VideoEncoding.WMV:
                        encodingProfile = MediaEncodingProfile.CreateWmv(VideoEncodingQuality.Auto);
                        break;
                    default:
                    case VideoEncoding.MP4:
                        encodingProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto);
                        break;
                }

                // Record into an in-memory stream; the caller owns the returned stream.
                var stream = new InMemoryRandomAccessStream();
                await this.mediaCapture.StartRecordToStreamAsync(encodingProfile, stream);
                this.isRecording = true;

                return new VideoCaptureStreamResult(true, stream.AsStream());
            }
            catch (Exception ex)
            {
                Debug.WriteLine("Exception when recording a video: " + ex.ToString());
                return new VideoCaptureStreamResult(false, Stream.Null);
            }
        }
        /// <summary>
        /// Start recording a new video.
        /// </summary>
        /// <param name="filename">The file path.</param>
        /// <param name="encoding">The video encoding. Mp4 by default.</param>
        /// <returns>The <see cref="VideoCaptureResult"/> struct.</returns>
        public async Task<VideoCaptureResult> StartRecordingAsync(string filename, VideoEncoding encoding)
        {
            if (string.IsNullOrEmpty(filename))
            {
                throw new ArgumentException("The filename cannot be empty or null.");
            }

            if (!this.isInitialized)
            {
                Debug.WriteLine("First you need to initialize the videocapturemanager.");
                // Explicit failure shape, consistent with the guard below
                // (the parameterless ctor left the path unset).
                return new VideoCaptureResult(false, string.Empty);
            }

            if (this.IsRecording)
            {
                Debug.WriteLine("VideoCapture is already recording. Call Stop before Start again.");
                return new VideoCaptureResult(false, string.Empty);
            }

            try
            {
                Debug.WriteLine("Recording video...");

                // Map the requested encoding to its container profile AND the matching
                // file extension.
                // BUG FIX: the AVI and MP4 cases were swapped (AVI produced an MP4
                // profile and vice versa), and the extension was hard-coded to ".mp4"
                // even for WMV/AVI output.
                MediaEncodingProfile encodingProfile;
                string extension;

                switch (encoding)
                {
                    case VideoEncoding.AVI:
                        encodingProfile = MediaEncodingProfile.CreateAvi(VideoEncodingQuality.Auto);
                        extension = ".avi";
                        break;
                    case VideoEncoding.WMV:
                        encodingProfile = MediaEncodingProfile.CreateWmv(VideoEncodingQuality.Auto);
                        extension = ".wmv";
                        break;
                    default:
                    case VideoEncoding.MP4:
                        encodingProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto);
                        extension = ".mp4";
                        break;
                }

                var name = filename + extension;
                var file = await this.captureFolder.CreateFileAsync(name, CreationCollisionOption.GenerateUniqueName);

                await this.mediaCapture.StartRecordToStorageFileAsync(encodingProfile, file);
                this.isRecording = true;

                // NOTE(review): StartRecordToStorageFileAsync completes when recording
                // *starts*; the file is finalized on Stop. The log kept for parity.
                Debug.WriteLine("Video saved.");
                return new VideoCaptureResult(true, file.Path);
            }
            catch (Exception ex)
            {
                // BUG FIX: message said "taking a photo" in a video-recording method.
                Debug.WriteLine("Exception when recording a video: " + ex.ToString());
                return new VideoCaptureResult(false, string.Empty);
            }
        }
Beispiel #28
0
 /// <summary>
 /// Raises the VideoEncoding event.
 /// </summary>
 protected virtual void OnVideoEncoding()
 {
     // Snapshot the delegate before invoking: identical semantics to the
     // null-conditional ?.Invoke (safe against concurrent unsubscription).
     var handler = VideoEncoding;
     if (handler != null)
     {
         handler(this, EventArgs.Empty);
     }
 }
        /// <summary>
        /// ONVIF conformance sequence: for every video encoder configuration on the DUT,
        /// finds (or creates) a media profile compatible with <paramref name="encodingUnderTest"/>,
        /// then applies and streams every resolution the configuration options report as
        /// available, verifying each one is accepted and playable.
        /// </summary>
        /// <param name="changeLog">Collects the pristine configurations so the caller can restore them after the test.</param>
        /// <param name="encodingUnderTest">Video encoding (JPEG/MPEG4/H264) being exercised.</param>
        /// <param name="getResolutionsAvailable">Delegate extracting the codec-specific resolution list from the options.</param>
        protected void AllResolutionsTest(MediaConfigurationChangeLog changeLog,
                                          VideoEncoding encodingUnderTest,
                                          GetResolutionsAvailable getResolutionsAvailable)
        {
            //3.	ONVIF Client invokes GetVideoEncoderConfigurationsRequest message to retrieve
            // video configuration list.
            //4.	Verify the GetVideoEncoderConfigurationsResponse message.

            VideoEncoderConfiguration[] encoderConfigurations = GetVideoEncoderConfigurations();
            CheckVideoEncoderConfigurationsList(encoderConfigurations);
            // Tracks whether at least one configuration yielded a usable profile;
            // if none did, the test fails at the end with a diagnostic step.
            bool NoProfilesForEncoding = true;

            foreach (VideoEncoderConfiguration encoderConfig in encoderConfigurations)
            {
                //5.	Find or create media profile with Video Source Configuration and
                // Video Encoder Configuration with token VECToken1 and supporting of JPEG encoding,
                // where VECToken1 is first video encoder configuration token from
                // GetVideoEncoderConfigurationsResponse message (see Annex A.14). If it is not possible
                // skip steps 6-26 and go to the step 27.
                Profile profile = GetProfileForSpecificConfigurationAndCodec(
                    encoderConfig.token, encodingUnderTest, changeLog);

                if (profile == null)
                {
                    continue;
                }
                if (NoProfilesForEncoding)
                {
                    NoProfilesForEncoding = false;
                }

                //6.	ONVIF Client invokes GetVideoEncoderConfigurationOptionsRequest message
                // (ProfileToken = “Profile1”, where “Profile1” is profile token from the step 5)
                // to get video encoder configuration options.
                //7.	Verify the GetVideoEncoderConfigurationOptionsResponse message from the DUT.
                VideoEncoderConfigurationOptions options = GetVideoEncoderConfigurationOptions(
                    null, profile.token);

                Assert(CheckVideoSupport(options, encodingUnderTest),
                       string.Format("{0} encoding is not compatible with current configuration",
                                     GetVideoCodecName(encodingUnderTest)),
                       string.Format("Validate {0} options", GetVideoCodecName(encodingUnderTest)));

                VideoResolution[] ResolutionsAvailable = getResolutionsAvailable(options);

                // Snapshot the untouched configuration once, so it can be restored
                // via the change log after the test modifies it below.
                VideoEncoderConfiguration unchanged = Utils.CopyMaker.CreateCopy(encoderConfig);
                // The snapshot is registered only after the first successful
                // SetVideoEncoderConfiguration — i.e. only if something actually changed.
                bool addToChangeLog = true;

                foreach (VideoResolution resolution in ResolutionsAvailable)
                {
                    //8.	ONVIF Client invokes SetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1, Resolution = [Width1, Height1], Encoding = JPEG,
                    // where [Width1, Height1] is the first resolution
                    // from the Options.JPEG.ResolutionsAvailable) to change video encoder configuration.
                    //9.	Verify the SetVideoEncoderConfigurationResponse message from the DUT.

                    encoderConfig.Encoding   = encodingUnderTest;
                    encoderConfig.Resolution = resolution;

                    // Clamp/normalize codec-specific parameters (bitrate, gov length, …)
                    // to what the reported options allow before pushing the config.
                    AdjustVideoEncoderConfiguration(encodingUnderTest, encoderConfig, options);
                    switch (encodingUnderTest)
                    {
                    case VideoEncoding.MPEG4:
                        AdjustMpeg4VideoEncoderConfiguration(encoderConfig, options, false);
                        break;

                    case VideoEncoding.H264:
                        AdjustH264VideoEncoderConfiguration(encoderConfig, options, false);
                        break;
                    }

                    SetVideoEncoderConfiguration(encoderConfig, false);

                    if (addToChangeLog)
                    {
                        changeLog.ModifiedVideoEncoderConfigurations.Add(unchanged);
                        addToChangeLog = false;
                    }


                    //10.	ONVIF Client invokes GetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1) to get video encoder configuration.
                    //11.	Verify the GetVideoEncoderConfigurationResponse message
                    //(ConfigurationToken = VECToken1, Resolution = [Width1, Height1], Encoding = JPEG)
                    // from the DUT. Check that new setting for Resolution and Encoding was applied.

                    VideoEncoderConfiguration actual = GetVideoEncoderConfiguration(unchanged.token);

                    string reason = string.Empty;
                    // check encoding and resolutions
                    bool ok = ConfigurationValid(actual, encodingUnderTest, resolution, out reason);
                    Assert(ok, reason, "Check that the DUT accepted values passed");

                    //12.	ONVIF Client invokes GetStreamUriRequest message (Profile Token, RTP-Unicast,
                    // UDP transport) to retrieve media stream URI for the selected media profile.
                    //13.	DUT sends RTSP URI and parameters defining the lifetime of the URI like
                    // ValidUntilConnect, ValidUntilReboot and Timeout in the GetStreamUriResponse message.

                    StreamSetup streamSetup = new StreamSetup();
                    streamSetup.Transport          = new Transport();
                    streamSetup.Transport.Protocol = TransportProtocol.UDP;
                    streamSetup.Stream             = StreamType.RTPUnicast;

                    MediaUri streamUri = GetStreamUri(streamSetup, profile.token);

                    //14.	ONVIF Client verifies the RTSP media stream URI provided by the DUT.
                    //15.	ONVIF Client invokes RTSP DESCRIBE request.
                    //16.	DUT sends 200 OK message and SDP information.
                    //17.	 ONVIF Client invokes RTSP SETUP request with transport parameter as RTP/UDP.
                    //18.	DUT sends 200 OK message and the media stream information.
                    //19.	ONVIF Client invokes RTSP PLAY request.
                    //20.	DUT sends 200 OK message and starts media streaming.
                    //21.	DUT sends JPEG RTP media stream to ONVIF Client over UDP. Verify that stream has JPEG encoding and [Width1, Height1] resolution.
                    //22.	DUT sends RTCP sender report to ONVIF Client.
                    //23.	DUT validates the received RTP and RTCP packets, decodes and renders them.
                    //24.	ONVIF Client invokes RTSP TEARDOWN control request at the end of media streaming to terminate the RTSP session.
                    //25.	DUT sends 200 OK Response and terminates the RTSP Session.
                    //TestTool.Tests.Common.Media.VideoUtils.AdjustVideo(_videoForm, _username,
                    //    _password, _messageTimeout, streamSetup.Transport.Protocol, streamSetup.Stream,
                    //    streamUri, encoderConfig);
                    //ValidateStreamSequence(false, true);
                    DoSequence(encoderConfig, streamUri);
                }
            }

            if (NoProfilesForEncoding)
            {
                // No configuration could be paired with a profile for this codec:
                // report it as a failing step rather than silently passing.
                RunStep(() =>
                {
                    throw new Exception(string.Format("No profiles for {0}", GetVideoCodecName(encodingUnderTest)));
                }, string.Format("Check if at least one profile were found ({0} not supported?)", GetVideoCodecName(encodingUnderTest)));
            }
        }
        protected void ResolutionTest(
            MediaConfigurationChangeLog changeLog,
            VideoEncoding encodingUnderTest,
            GetResolutionsAvailable getResolutionsAvailable)
        {
            //3.	ONVIF Client invokes GetVideoEncoderConfigurationsRequest message to retrieve
            // video configuration list.
            //4.	Verify the GetVideoEncoderConfigurationsResponse message.

            VideoEncoderConfiguration[] encoderConfigurations = GetVideoEncoderConfigurations();
            CheckVideoEncoderConfigurationsList(encoderConfigurations);
            bool NoProfilesForEncoding = true;

            List <VideoEncoderConfiguration> selectedConfigs = null;
            var WhereRes = encoderConfigurations.Where(C => C.Encoding == encodingUnderTest);

            if (WhereRes != null)
            {
                selectedConfigs = WhereRes.ToList();
            }
            if (selectedConfigs == null || selectedConfigs.Count == 0)
            {
                LogTestEvent("There are no VideoEncoderConfiguration ready for selected encoder type - will try to reconfigure (if this may fail - please pre-configure before making tests)." + Environment.NewLine);
                selectedConfigs = encoderConfigurations.ToList();
            }
            selectedConfigs = MediaTestUtils.SelectConfigurations(selectedConfigs);

            var configGroups = encoderConfigurations.Where(e => !selectedConfigs.Contains(e)).Select(e => new List <VideoEncoderConfiguration>()
            {
                e
            }).ToList();

            configGroups.Insert(0, selectedConfigs);

            //Try to perform steps for selected profiles.
            //In case of fail for all selected profiles try to perform steps for each another profile until first success.
            foreach (var configGroup in configGroups)
            {
                foreach (VideoEncoderConfiguration encoderConfig in configGroup)
                {
                    //5.	Find or create media profile with Video Source Configuration and Video Encoder
                    // Configuration with token VECToken1 and supporting of JPEG encoding, where VECToken1
                    // is first video encoder configuration token from GetVideoEncoderConfigurationsResponse
                    // message (see Annex A.8). If it is not possible skip steps 6-61 and go to the step 62.

                    Profile profile = GetProfileForSpecificConfigurationAndCodec(encoderConfig.token, encodingUnderTest, changeLog);

                    if (profile == null)
                    {
                        continue;
                    }
                    NoProfilesForEncoding = false;

                    //6.	ONVIF Client invokes GetVideoEncoderConfigurationOptionsRequest message
                    // (ProfileToken = “Profile1”, where “Profile1” is profile token from the step 5)
                    // to get video encoder configuration options.
                    //7.	Verify the GetVideoEncoderConfigurationOptionsResponse message from the DUT.

                    VideoEncoderConfigurationOptions options = GetVideoEncoderConfigurationOptions(null, profile.token);

                    Assert(CheckVideoSupport(options, encodingUnderTest),
                           string.Format("{0} encoding is not compatible with current configurations",
                                         GetVideoCodecName(encodingUnderTest)),
                           string.Format("Validate {0} options",
                                         GetVideoCodecName(encodingUnderTest)));

                    VideoResolution highest = null;
                    VideoResolution lowest  = null;
                    VideoResolution median  = null;

                    FindResolutions(getResolutionsAvailable(options), out highest, out lowest, out median);

                    //8.	ONVIF Client invokes SetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1, Resolution = [Width1, Height1], Encoding = JPEG,
                    // where [Width1, Height1] is maximum resolution from the Options.JPEG.ResolutionsAvailable)
                    // to change video encoder configuration.

                    VideoEncoderConfiguration unchanged = Utils.CopyMaker.CreateCopy(encoderConfig);

                    encoderConfig.Encoding   = encodingUnderTest;
                    encoderConfig.Resolution = highest;

                    AdjustVideoEncoderConfiguration(encodingUnderTest, encoderConfig, options);
                    switch (encodingUnderTest)
                    {
                    case VideoEncoding.MPEG4:
                        AdjustMpeg4VideoEncoderConfiguration(encoderConfig, options, false);
                        break;

                    case VideoEncoding.H264:
                        AdjustH264VideoEncoderConfiguration(encoderConfig, options, false);
                        break;
                    }

                    SetVideoEncoderConfiguration(encoderConfig, false);

                    //9.	Verify the SetVideoEncoderConfigurationResponse message from the DUT.

                    changeLog.ModifiedVideoEncoderConfigurations.Add(unchanged);

                    //10.	ONVIF Client invokes GetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1) to get video encoder configuration.
                    //11.	Verify the GetVideoEncoderConfigurationResponse message (ConfigurationToken =
                    // VECToken1, Resolution = [Width1, Height1], Encoding = JPEG) from the DUT. Check
                    // that new setting for Resolution and Encoding was applied.

                    VideoEncoderConfiguration actual = GetVideoEncoderConfiguration(unchanged.token);

                    string reason = string.Empty;
                    // check encoding and resolutions
                    bool ok = ConfigurationValid(actual, encodingUnderTest, highest, out reason);
                    Assert(ok, reason, "Check that the DUT accepted values passed");

                    //12.	ONVIF Client invokes GetStreamUriRequest message (Profile Token, RTP-Unicast,
                    // UDP transport) to retrieve media stream URI for the selected media profile.
                    //13.	DUT sends RTSP URI and parameters defining the lifetime of the URI like
                    // ValidUntilConnect, ValidUntilReboot and Timeout in the GetStreamUriResponse message.
                    StreamSetup streamSetup = new StreamSetup();
                    streamSetup.Transport          = new Transport();
                    streamSetup.Transport.Protocol = TransportProtocol.UDP;
                    streamSetup.Stream             = StreamType.RTPUnicast;

                    MediaUri streamUri = GetStreamUri(streamSetup, profile.token);

                    //14.	ONVIF Client verifies the RTSP media stream URI provided by the DUT.
                    //15.	ONVIF Client invokes RTSP DESCRIBE request.
                    //16.	DUT sends 200 OK message and SDP information.
                    //17.	 ONVIF Client invokes RTSP SETUP request with transport parameter as RTP/UDP.
                    //18.	DUT sends 200 OK message and the media stream information.
                    //19.	ONVIF Client invokes RTSP PLAY request.
                    //20.	DUT sends 200 OK message and starts media streaming.
                    //21.	DUT sends JPEG RTP media stream to ONVIF Client over UDP. Verify that stream
                    // has JPEG encoding and [Width1, Height1] resolution.
                    //22.	DUT sends RTCP sender report to ONVIF Client.
                    //23.	DUT validates the received RTP and RTCP packets, decodes and renders them.
                    //24.	ONVIF Client invokes RTSP TEARDOWN control request at the end of media
                    // streaming to terminate the RTSP session.
                    //25.	DUT sends 200 OK Response and terminates the RTSP Session.
                    //TestTool.Tests.Common.Media.VideoUtils.AdjustVideo(_videoForm, _username, _password, _messageTimeout,
                    //                                                   streamSetup.Transport.Protocol,
                    //                                                   streamSetup.Stream, streamUri, encoderConfig);
                    //ValidateStreamSequence(false, true);
                    DoSequence(encoderConfig, streamUri);

                    //26.	ONVIF Client invokes SetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1, Resolution = [Width1, Height1], Encoding = JPEG,
                    // where [Width2, Height2] is minimum resolution from the Options.JPEG.ResolutionsAvailable) to change video encoder configuration.
                    //27.	Verify the SetVideoEncoderConfigurationResponse message from the DUT.
                    encoderConfig.Encoding   = encodingUnderTest;
                    encoderConfig.Resolution = lowest;

                    AdjustVideoEncoderConfiguration(encodingUnderTest, encoderConfig, options);

                    SetVideoEncoderConfiguration(encoderConfig, false);

                    //28.	ONVIF Client invokes GetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1) to get video encoder configuration.
                    //29.	Verify the GetVideoEncoderConfigurationResponse message (ConfigurationToken =
                    // VECToken1, Resolution = [Width2, Height2], Encoding = JPEG, where [Width2, Height2]) from the DUT. Check that new setting for Resolution and Encoding was applied.
                    actual = GetVideoEncoderConfiguration(unchanged.token);
                    ok     = ConfigurationValid(actual, encodingUnderTest, lowest, out reason);
                    Assert(ok, reason, "Check that the DUT accepted values passed");

                    //30.	ONVIF Client invokes GetStreamUriRequest message (Profile Token, RTP-Unicast,
                    // UDP transport) to retrieve media stream URI for the selected media profile.
                    //31.	DUT sends RTSP URI and parameters defining the lifetime of the URI like ValidUntilConnect, ValidUntilReboot and Timeout in the GetStreamUriResponse message.

                    streamUri = GetStreamUri(streamSetup, profile.token);

                    //32.	ONVIF Client verifies the RTSP media stream URI provided by the DUT.
                    //33.	ONVIF Client invokes RTSP DESCRIBE request.
                    //34.	DUT sends 200 OK message and SDP information.
                    //35.	 ONVIF Client invokes RTSP SETUP request with transport parameter as RTP/UDP.
                    //36.	DUT sends 200 OK message and the media stream information.
                    //37.	ONVIF Client invokes RTSP PLAY request.
                    //38.	DUT sends 200 OK message and starts media streaming.
                    //39.	DUT sends JPEG RTP media stream to ONVIF Client over UDP. Verify that stream has JPEG encoding and [Width2, Height2] resolution.
                    //40.	DUT sends RTCP sender report to ONVIF Client.
                    //41.	DUT validates the received RTP and RTCP packets, decodes and renders them.
                    //42.	ONVIF Client invokes RTSP TEARDOWN control request at the end of media streaming to terminate the RTSP session.
                    //43.	DUT sends 200 OK Response and terminates the RTSP Session.
                    //TestTool.Tests.Common.Media.VideoUtils.AdjustVideo(_videoForm, _username, _password, _messageTimeout,
                    //                                                   streamSetup.Transport.Protocol,
                    //                                                   streamSetup.Stream, streamUri, encoderConfig);
                    //ValidateStreamSequence(false, true);
                    DoSequence(encoderConfig, streamUri);

                    //44.	ONVIF Client invokes SetVideoEncoderConfigurationRequest message (ConfigurationToken = VECToken1, Resolution = [Width1, Height1], Encoding = JPEG, where [Width3, Height3] is middle resolution from the Options.JPEG.ResolutionsAvailable) to change video encoder configuration.
                    //45.	Verify the SetVideoEncoderConfigurationResponse message from the DUT.
                    encoderConfig.Encoding   = encodingUnderTest;
                    encoderConfig.Resolution = median;

                    AdjustVideoEncoderConfiguration(encodingUnderTest, encoderConfig, options);

                    SetVideoEncoderConfiguration(encoderConfig, false);

                    //46.	ONVIF Client invokes GetVideoEncoderConfigurationRequest message (ConfigurationToken = VECToken1) to get video encoder configuration.
                    //47.	Verify the GetVideoEncoderConfigurationResponse message (ConfigurationToken = VECToken1, Resolution = [Width3, Height3], Encoding = JPEG) from the DUT. Check that new setting for Resolution and Encoding was applied.

                    actual = GetVideoEncoderConfiguration(unchanged.token);
                    ok     = ConfigurationValid(actual, encodingUnderTest, median, out reason);
                    Assert(ok, reason, "Check that the DUT accepted values passed");

                    //48.	ONVIF Client invokes GetStreamUriRequest message (Profile Token, RTP-Unicast, UDP transport) to retrieve media stream URI for the selected media profile.
                    //49.	DUT sends RTSP URI and parameters defining the lifetime of the URI like ValidUntilConnect, ValidUntilReboot and Timeout in the GetStreamUriResponse message.
                    streamUri = GetStreamUri(streamSetup, profile.token);

                    //50.	ONVIF Client verifies the RTSP media stream URI provided by the DUT.
                    //51.	ONVIF Client invokes RTSP DESCRIBE request.
                    //52.	DUT sends 200 OK message and SDP information.
                    //53.	 ONVIF Client invokes RTSP SETUP request with transport parameter as RTP/UDP.
                    //54.	DUT sends 200 OK message and the media stream information.
                    //55.	ONVIF Client invokes RTSP PLAY request.
                    //56.	DUT sends 200 OK message and starts media streaming.
                    //57.	DUT sends JPEG RTP media stream to ONVIF Client over UDP. Verify that stream has JPEG encoding and [Width3, Height3] resolution.
                    //58.	DUT sends RTCP sender report to ONVIF Client.
                    //59.	DUT validates the received RTP and RTCP packets, decodes and renders them.
                    //60.	ONVIF Client invokes RTSP TEARDOWN control request at the end of media streaming to terminate the RTSP session.
                    //61.	DUT sends 200 OK Response and terminates the RTSP Session.
                    //TestTool.Tests.Common.Media.VideoUtils.AdjustVideo(_videoForm, _username, _password, _messageTimeout,
                    //                                                   streamSetup.Transport.Protocol,
                    //                                                   streamSetup.Stream, streamUri, encoderConfig);
                    //ValidateStreamSequence(false, true);
                    DoSequence(encoderConfig, streamUri);

                    //62.	Repeat steps 5-62 for the rest Video Encoder configurations supported by the DUT with using different multicast ports and the same multicast addresses for Video Encoder Configurations.
                }

                if (!NoProfilesForEncoding)
                {
                    break;
                }
            }

            if (NoProfilesForEncoding)
            {
                RunStep(() =>
                {
                    throw new Exception(string.Format("No profiles for {0}", GetVideoCodecName(encodingUnderTest)));
                }, string.Format("Check if at least one profile were found ({0} not supported?)", GetVideoCodecName(encodingUnderTest)));
            }
        }
        /// <summary>
        /// Validates that a recording reported by the DUT matches the configuration it was created with.
        /// Compares MaximumRetentionTime, Content and all Source fields, then verifies that the recording
        /// exposes at least one track for every track type the DUT is expected to support
        /// (audio/video when the capabilities advertise a parsable encoding, metadata when the
        /// MetadataRecording feature is present). All mismatches are collected and reported in a
        /// single Assert at the end.
        /// </summary>
        /// <param name="capabilities">Recording service capabilities; the Encoding list decides whether audio/video tracks are required.</param>
        /// <param name="recording">Recording item returned by the DUT (actual values).</param>
        /// <param name="conf">Configuration the recording was created with (expected values).</param>
        protected void CheckRecording(RecordingServiceCapabilities capabilities, GetRecordingsResponseItem recording, RecordingConfiguration conf)
        {
            bool          ok     = true;
            StringBuilder logger = new StringBuilder();

            if (recording.Configuration.MaximumRetentionTime != conf.MaximumRetentionTime)
            {
                ok = false;
                // Include both values so this log entry is as informative as the other field checks.
                logger.Append(string.Format("MaximumRetentionTime is '{0}' but must be '{1}'{2}",
                                            recording.Configuration.MaximumRetentionTime, conf.MaximumRetentionTime,
                                            Environment.NewLine));
            }
            if (recording.Configuration.Content != conf.Content)
            {
                ok = false;
                logger.Append(string.Format("Content is '{0}' but must be '{1}'{2}",
                                            recording.Configuration.Content, conf.Content, Environment.NewLine));
            }

            if (recording.Configuration.Source != null)
            {
                // Each helper call logs a mismatch (if any) and returns false on mismatch.
                ok &= SourceFieldMatches(logger, "Address", recording.Configuration.Source.Address, conf.Source.Address);
                ok &= SourceFieldMatches(logger, "Description", recording.Configuration.Source.Description, conf.Source.Description);
                ok &= SourceFieldMatches(logger, "Location", recording.Configuration.Source.Location, conf.Source.Location);
                ok &= SourceFieldMatches(logger, "Name", recording.Configuration.Source.Name, conf.Source.Name);
                ok &= SourceFieldMatches(logger, "SourceId", recording.Configuration.Source.SourceId, conf.Source.SourceId);
            }
            else
            {
                ok = false;
                logger.Append(string.Format("recording doesn't contain Source{0}", Environment.NewLine));
            }

            if (recording.Tracks == null || recording.Tracks.Track == null)
            {
                ok = false;
                logger.Append(string.Format("Track list of recording '{0}' is empty", recording.RecordingToken));
            }
            else
            {
                foreach (TrackType type in Enum.GetValues(typeof(TrackType)))
                {
                    if (type == TrackType.Extended)
                    {
                        // Extended is a schema extension placeholder, not a concrete track type.
                        continue;
                    }

                    // A track of this type is only required when the DUT claims support for it.
                    bool required;
                    switch (type)
                    {
                    case TrackType.Video:
                        required = CountParsableEncodings<VideoEncoding>(capabilities.Encoding) != 0;
                        break;
                    case TrackType.Audio:
                        required = CountParsableEncodings<AudioEncoding>(capabilities.Encoding) != 0;
                        break;
                    case TrackType.Metadata:
                        required = Features.ContainsFeature(Feature.MetadataRecording);
                        break;
                    default:
                        required = false;
                        break;
                    }

                    if (required &&
                        recording.Tracks.Track.Count(t => t.Configuration.TrackType == type) < 1)
                    {
                        logger.AppendLine(string.Format("There are no tracks of type: '{0}'.", type));
                        ok = false;
                    }
                }
            }

            Assert(ok, logger.ToStringTrimNewLine(), "Check that configuration parameters of new recording are valid");
        }

        /// <summary>
        /// Compares one Source field of a recording against its expected value.
        /// </summary>
        /// <param name="logger">Log accumulator; a mismatch description is appended on failure.</param>
        /// <param name="fieldName">Field name used in the log message (e.g. "Address").</param>
        /// <param name="actual">Value reported by the DUT.</param>
        /// <param name="expected">Value the recording was configured with.</param>
        /// <returns>true when the values are equal; false otherwise.</returns>
        private static bool SourceFieldMatches(StringBuilder logger, string fieldName, string actual, string expected)
        {
            if (actual == expected)
            {
                return true;
            }
            logger.Append(string.Format("Source {0} is '{1}' but must be '{2}'{3}",
                                        fieldName, actual, expected, Environment.NewLine));
            return false;
        }

        /// <summary>
        /// Counts how many capability encoding strings parse as members of <typeparamref name="TEnum"/>.
        /// Replaces the duplicated AudioEncoding/VideoEncoding TryParse lambdas.
        /// </summary>
        private static int CountParsableEncodings<TEnum>(IEnumerable<string> encodings) where TEnum : struct
        {
            return encodings.Count(e =>
            {
                TEnum parsed;
                return Enum.TryParse(e, out parsed);
            });
        }
 /// <summary>
 /// Creates a video track source that emits frames in the supplied encoding.
 /// </summary>
 /// <param name="frameEncoding">Encoding of the frames this source produces.</param>
 public VideoTrackSource(VideoEncoding frameEncoding) => FrameEncoding = frameEncoding;
		/// <summary>
		/// Selects the video codec to use and returns this builder for fluent chaining.
		/// </summary>
		/// <param name="codec">Video encoding to pass on the command line.</param>
		/// <returns>This builder instance.</returns>
		public CommandLineBuilder VideoCodec(VideoEncoding codec)
		{
			this.VideoEncoding = codec;
			return this;
		}