Example No. 1
    protected override void PrepareToJoin()
    {
        base.PrepareToJoin();
        VideoEncoderConfiguration configuration = new VideoEncoderConfiguration
        {
            dimensions = new VideoDimensions {
                width = HOSTVIEW_WIDTH * 2, height = HOSTVIEW_HEIGHT
            },
            frameRate = FRAME_RATE.FRAME_RATE_FPS_24
            // To enable mirror mode, add a comma after frameRate above and uncomment:
            // mirrorMode = VIDEO_MIRROR_MODE_TYPE.VIDEO_MIRROR_MODE_ENABLED
        };

        mRtcEngine.SetVideoEncoderConfiguration(configuration);

        mRtcEngine.OnFirstLocalVideoFrame = delegate(int width, int height, int elapsed)
        {
            Debug.LogFormat("OnFirstLocalVideoFrame => width:{0} height:{1} elapsed:{2}", width, height, elapsed);
        };
        mRtcEngine.OnFirstRemoteVideoFrame = delegate(uint uid, int width, int height, int elapsed)
        {
            Debug.LogFormat("OnFirstRemoteVideoFrame => width:{0} height:{1} elapsed:{2} uid:{3}", width, height, elapsed, uid);
        };

        mRtcEngine.OnStreamPublished = OnStreamPublished;
    }
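The handler assigned to OnStreamPublished above is not shown. A minimal sketch of one, assuming the Agora Unity callback signature of (string url, int error); the handler name and log text are illustrative:

    // Hypothetical handler for the OnStreamPublished callback wired up above.
    void OnStreamPublished(string url, int error)
    {
        // error == 0 is expected to indicate that CDN publishing succeeded.
        Debug.LogFormat("OnStreamPublished => url:{0} error:{1}", url, error);
    }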
    void SetEngineConfiguration()
    {
        // Create a VideoEncoderConfiguration instance. See the descriptions of the parameters in API Reference.
        VideoEncoderConfiguration config = new VideoEncoderConfiguration();

        // Sets the video resolution.
#if UNITY_STANDALONE
        config.dimensions.width  = 1920;
        config.dimensions.height = 1080;
#endif

#if UNITY_ANDROID
        // Portrait resolution for Android builds.
        config.dimensions.width  = 1080;
        config.dimensions.height = 1920;
#endif

        // Sets the video frame rate.
        config.frameRate = FRAME_RATE.FRAME_RATE_FPS_24;
        // Sets the video encoding bitrate (Kbps).
        config.bitrate = 800;
        // Sets the adaptive orientation mode. See the description in API Reference.
        config.orientationMode = ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE;
        // Sets the video encoding degradation preference under limited bandwidth.
        // MAINTAIN_QUALITY degrades the frame rate to maintain video quality;
        // MAINTAIN_BALANCED balances frame rate and quality.
        //config.degradationPreference = DEGRADATION_PREFERENCE.MAINTAIN_QUALITY;
        config.degradationPreference = DEGRADATION_PREFERENCE.MAINTAIN_BALANCED;
        // Sets the video encoder configuration.
        mRtcEngine.SetVideoEncoderConfiguration(config);
    }
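A minimal usage sketch for SetEngineConfiguration above, assuming mRtcEngine was already created with IRtcEngine.GetEngine(appID); the surrounding method and channel name are placeholders:

    void JoinDemoChannel()
    {
        SetEngineConfiguration();                         // apply the encoder settings shown above
        mRtcEngine.EnableVideo();                         // video must be enabled for the settings to take effect
        mRtcEngine.JoinChannel("demo-channel", null, 0);  // uid 0 lets the engine assign one
    }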
Example No. 3
        public async Task <OnvifMedia10.VideoEncoderConfiguration> getVideoEncodingsAsync(string targetProfileToken)
        {
            VideoEncoderConfiguration configuration = null;

            try
            {
                if (m_mediaClient == null)
                {
                    await this.InitalizeAsync();

                    // Initialization failed - there is still no media client to query.
                    if (m_mediaClient == null)
                    {
                        return(null);
                    }
                }

                configuration = await m_mediaClient.GetVideoEncoderConfigurationAsync(targetProfileToken);
            }
            catch (Exception ex)
            {
                m_ErrorMessage = ex.Message;
                throw new OnVifException("OnVif1Media.getVideoEncodingsAsync", ex);
                //               return null;
            }

            return(configuration);
        }
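A sketch of calling getVideoEncodingsAsync from the same class, assuming the returned ONVIF configuration exposes the usual Encoding and Resolution members; the method name and log output are illustrative:

        public async Task LogCurrentEncodingAsync(string profileToken)
        {
            // getVideoEncodingsAsync returns null when the media client could not be initialized.
            var configuration = await getVideoEncodingsAsync(profileToken);
            if (configuration != null)
            {
                Console.WriteLine("Encoding: {0}, Resolution: {1}x{2}",
                                  configuration.Encoding,
                                  configuration.Resolution.Width,
                                  configuration.Resolution.Height);
            }
        }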
Example No. 4
        public void GetMediaUri(
            Profile profile,
            VideoSourceConfiguration videoSourceConfig,
            VideoEncoderConfiguration videoEncoderConfig,
            AudioSourceConfiguration audioSourceConfig,
            AudioEncoderConfiguration audioEncoderConfig,
            TransportProtocol protocol)
        {
            RunInBackground(new Action(() =>
            {
                if (profile == null)
                {
                    profile = CreateProfile(TestMediaProfileName);
                }
                ConfigureProfile(profile, videoSourceConfig, videoEncoderConfig, audioSourceConfig, audioEncoderConfig);

                StreamSetup streamSetup        = new StreamSetup();
                streamSetup.Transport          = new Transport();
                streamSetup.Transport.Protocol = protocol;
                streamSetup.Stream             = StreamType.RTPUnicast;

                MediaUri streamUri = Client.GetStreamUri(streamSetup, profile.token);
                if (OnMediaUriReceived != null)
                {
                    OnMediaUriReceived(streamUri, videoEncoderConfig, audioEncoderConfig);
                }
            }));
        }
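GetMediaUri reports its result through the OnMediaUriReceived callback invoked at the end of the background action. A sketch of a matching handler, with the parameter list inferred from that invocation (handler name and logging are illustrative):

        void HandleMediaUriReceived(MediaUri streamUri,
                                    VideoEncoderConfiguration videoEncoderConfig,
                                    AudioEncoderConfiguration audioEncoderConfig)
        {
            // streamUri.Uri carries the address returned by Client.GetStreamUri.
            Console.WriteLine("Stream URI: {0} ({1}x{2})",
                              streamUri.Uri,
                              videoEncoderConfig.Resolution.Width,
                              videoEncoderConfig.Resolution.Height);
        }

        // Subscription, assuming OnMediaUriReceived is a compatible delegate or event:
        // OnMediaUriReceived += HandleMediaUriReceived;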
Example No. 5
 // TODO: remove the protected modifier - temporary usage only, until GetURI works fully.
 protected void AdjustVideo(
     TransportProtocol protocol,
     StreamType streamType,
     MediaUri streamUri,
     VideoEncoderConfiguration conf)
 {
     VideoUtils.AdjustVideo(_videoForm, _username, _password, _messageTimeout, protocol, streamType, streamUri, conf);
 }
Example No. 6
        protected void ConfigureProfile(
            Profile profile,
            VideoSourceConfiguration videoSourceConfig,
            VideoEncoderConfiguration videoEncoderConfig,
            AudioSourceConfiguration audioSourceConfig,
            AudioEncoderConfiguration audioEncoderConfig)
        {
            if ((profile.VideoSourceConfiguration != null) && (profile.VideoSourceConfiguration.token != videoSourceConfig.token))
            {
                Client.RemoveVideoSourceConfiguration(profile.token);
                profile.VideoSourceConfiguration = null;
            }
            if (profile.VideoSourceConfiguration == null)
            {
                Client.AddVideoSourceConfiguration(profile.token, videoSourceConfig.token);
            }

            if ((profile.VideoEncoderConfiguration != null) && (profile.VideoEncoderConfiguration.token != videoEncoderConfig.token))
            {
                Client.RemoveVideoEncoderConfiguration(profile.token);
                profile.VideoEncoderConfiguration = null;
            }
            if (profile.VideoEncoderConfiguration == null)
            {
                Client.AddVideoEncoderConfiguration(profile.token, videoEncoderConfig.token);
            }
            //encoder configuration can be modified - update it
            Client.SetVideoEncoderConfiguration(videoEncoderConfig, false);

            if ((profile.AudioSourceConfiguration != null) &&
                ((audioSourceConfig == null) ||
                 (profile.AudioSourceConfiguration.token != audioSourceConfig.token)))
            {
                Client.RemoveAudioSourceConfiguration(profile.token);
                profile.AudioSourceConfiguration = null;
            }
            if ((profile.AudioSourceConfiguration == null) && (audioSourceConfig != null))
            {
                Client.AddAudioSourceConfiguration(profile.token, audioSourceConfig.token);
            }

            if ((profile.AudioEncoderConfiguration != null) &&
                ((audioEncoderConfig == null) ||
                 (profile.AudioEncoderConfiguration.token != audioEncoderConfig.token)))
            {
                Client.RemoveAudioEncoderConfiguration(profile.token);
                profile.AudioEncoderConfiguration = null;
            }
            if (audioEncoderConfig != null)
            {
                if (profile.AudioEncoderConfiguration == null) // audioEncoderConfig is already non-null here
                {
                    Client.AddAudioEncoderConfiguration(profile.token, audioEncoderConfig.token);
                }
                //encoder configuration can be modified - update it
                Client.SetAudioEncoderConfiguration(audioEncoderConfig, false);
            }
        }
        public void TrackModifiedConfiguration(VideoEncoderConfiguration config)
        {
            VideoEncoderConfiguration modified = _modifiedVideoEncoderConfigurations.FirstOrDefault(vec => vec.token == config.token);

            if (modified == null)
            {
                _modifiedVideoEncoderConfigurations.Add(config);
            }
        }
Example No. 8
        public static void AdjustVideo2(
            VideoContainer2 form,
            VideoEncoderConfiguration conf)
        {
            if ((conf != null) && (conf.Resolution != null))
            {
                string Encoding = null;
                switch (conf.Encoding)
                {
                case VideoEncoding.JPEG: Encoding = "JPEG"; break;

                case VideoEncoding.MPEG4: Encoding = "MP4V-ES"; break;

                case VideoEncoding.H264: Encoding = "H264"; break;
                }
                int FPS = 5;
                if (conf.RateControl != null)
                {
                    if (conf.RateControl.EncodingInterval > 0)
                    {
                        FPS = conf.RateControl.FrameRateLimit / conf.RateControl.EncodingInterval;
                    }
                    else
                    {
                        FPS = conf.RateControl.FrameRateLimit;
                    }
                    if (FPS < 1)
                    {
                        FPS = 1;
                    }
                    if (FPS > 120)
                    {
                        FPS = 120;
                    }
                }
                bool Done = false;
                if ((conf.Multicast != null) && (conf.Multicast.Address != null))
                {
                    if ((conf.Multicast.Address.Type == IPType.IPv4) && !string.IsNullOrEmpty(conf.Multicast.Address.IPv4Address))
                    {
                        form.ConfigureVideo(Encoding, conf.Resolution.Width, conf.Resolution.Height, FPS, conf.Multicast.Address.IPv4Address, conf.Multicast.Port, conf.Multicast.TTL);
                        Done = true;
                    }
                    if ((conf.Multicast.Address.Type == IPType.IPv6) && !string.IsNullOrEmpty(conf.Multicast.Address.IPv6Address))
                    {
                        form.ConfigureVideo(Encoding, conf.Resolution.Width, conf.Resolution.Height, FPS, conf.Multicast.Address.IPv6Address, conf.Multicast.Port, conf.Multicast.TTL);
                        Done = true;
                    }
                }
                if (!Done)
                {
                    form.ConfigureVideo(Encoding, conf.Resolution.Width, conf.Resolution.Height, FPS);
                }
            }
        }
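As a worked example of the frame-rate logic above: FrameRateLimit = 30 with EncodingInterval = 2 yields FPS = 15; with EncodingInterval = 0 the FrameRateLimit is used directly; the result is then clamped to the 1..120 range before being passed to ConfigureVideo.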
Example No. 9
            public static Model Create(
                VideoEncoderConfiguration vec
                )
            {
                var _this = new Model();

                _this.origin.vec = vec;
                _this.RevertChanges();

                return(_this);
            }
Example No. 10
        private void SetVideoEncoder()
        {
            VideoEncoderConfiguration.ORIENTATION_MODE orientationMode =
                VideoEncoderConfiguration.ORIENTATION_MODE.OrientationModeFixedPortrait;

            VideoEncoderConfiguration.VideoDimensions dimensions =
                new VideoEncoderConfiguration.VideoDimensions(360, 640);

            VideoEncoderConfiguration videoEncoderConfiguration = new VideoEncoderConfiguration(dimensions, VideoEncoderConfiguration.FRAME_RATE.FrameRateFps15, VideoEncoderConfiguration.StandardBitrate, orientationMode);

            _rtcEngine.SetVideoEncoderConfiguration(videoEncoderConfiguration);
        }
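A sketch of where SetVideoEncoder above might be called during setup, assuming _rtcEngine was created beforehand via the Xamarin Agora binding used in this example; the surrounding method is illustrative:

        private void StartLocalVideo()
        {
            // Assumes _rtcEngine has already been created (e.g. via RtcEngine.Create(...)).
            _rtcEngine.EnableVideo();
            SetVideoEncoder();       // apply the 360x640 fixed-portrait profile shown above
            _rtcEngine.StartPreview();
        }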
Example No. 11
 private void ConfigVideo()
 {
     try
     {
         VideoEncoderConfiguration configuration = new VideoEncoderConfiguration(Constants.VideoDimensions[Config().GetVideoDimenIndex()], VideoEncoderConfiguration.FRAME_RATE.FrameRateFps15,
                                                                                 VideoEncoderConfiguration.StandardBitrate, VideoEncoderConfiguration.ORIENTATION_MODE.OrientationModeFixedPortrait)
         {
             MirrorMode = Constants.VideoMirrorModes[Config().GetMirrorEncodeIndex()]
         };
         RtcEngine().SetVideoEncoderConfiguration(configuration);
     }
     catch (Exception e)
     {
         Methods.DisplayReportResultTrack(e);
     }
 }
        public void VideoEncoderConfiguration()
        {
            RunTest(() =>
            {
                Profile[] profiles = GetProfiles();
                string reason;
                Profile profile;
                Assert(ValidateProfiles(profiles, out reason, out profile), reason, Resources.StepValidatingProfiles_Title);

                VideoEncoderConfiguration[] configs = GetCompatibleVideoEncoderConfigurations(profile.token);
                Assert(ValidateVideoEncoderConfigs(configs, out reason), reason, Resources.StepValidatingVideoEncoderConfigs_Title);
                VideoEncoderConfiguration config = configs[0];

                configs = GetVideoEncoderConfigurations();
                Assert(ValidateVideoEncoderConfigs(configs, out reason), reason, Resources.StepValidatingVideoEncoderConfigs_Title);
            });
        }
Example No. 13
    void Start()
    {
        if (!photonView.isMine)
        {
            return;
        }


        playerVideoList = new List <GameObject>();

        // Setup Agora Engine and Callbacks.
        if (mRtcEngine != null)
        {
            IRtcEngine.Destroy();
        }

        originalChannel = channel;

        // -- These are all necessary steps to initialize the Agora engine -- //
        // Initialize Agora engine
        mRtcEngine = IRtcEngine.GetEngine(appID);

        // Setup square video profile
        VideoEncoderConfiguration config = new VideoEncoderConfiguration();

        config.dimensions.width      = 480;
        config.dimensions.height     = 480;
        config.frameRate             = FRAME_RATE.FRAME_RATE_FPS_15;
        config.bitrate               = 800;
        config.degradationPreference = DEGRADATION_PREFERENCE.MAINTAIN_QUALITY;
        mRtcEngine.SetVideoEncoderConfiguration(config);

        // Setup our callbacks (there are many other Agora callbacks, however these are the calls we need).
        mRtcEngine.OnJoinChannelSuccess = OnJoinChannelSuccessHandler;
        mRtcEngine.OnUserJoined         = OnUserJoinedHandler;
        mRtcEngine.OnLeaveChannel       = OnLeaveChannelHandler;
        mRtcEngine.OnUserOffline        = OnUserOfflineHandler;

        // Your video feed will not render if EnableVideo() isn't called.
        mRtcEngine.EnableVideo();
        mRtcEngine.EnableVideoObserver();

        // By setting our UID to "0" the Agora Engine creates a unique UID and returns it in the OnJoinChannelSuccess callback.
        mRtcEngine.JoinChannel(channel, null, 0);
    }
        public void VideoEncoderConfigurationConsistencyTest()
        {
            RunTest(() =>
            {
                VideoEncoderConfiguration[] configurations = GetVideoEncoderConfigurations();

                Assert(configurations != null,
                       "DUT did not return any configuration",
                       "Check if the DUT returned configurations");

                foreach (VideoEncoderConfiguration configuration in configurations)
                {
                    VideoEncoderConfiguration config = GetVideoEncoderConfiguration(configuration.token);

                    CompareConfigurations(configuration, config, true);
                }
            });
        }
Example No. 15
        /// <summary>
        /// Get stream uri with specified configurations
        /// </summary>
        /// <param name="profile"></param>
        /// <param name="videoSourceConfig">Video source configuration</param>
        /// <param name="videoEncoderConfig">Video encoder configuration</param>
        /// <param name="audioSourceConfig">Audio source configuration</param>
        /// <param name="audioEncoderConfig">Audio encoder configuration</param>
        /// <param name="protocol"></param>
        public void GetMediaUri(
            Proxies.Onvif.Profile profile,
            VideoSourceConfiguration videoSourceConfig,
            VideoEncoderConfiguration videoEncoderConfig,
            AudioSourceConfiguration audioSourceConfig,
            AudioEncoderConfiguration audioEncoderConfig,
            TransportProtocol protocol)
        {
            if ((videoEncoderConfig == null) || (videoSourceConfig == null))
            {
                throw new ArgumentNullException();
            }
            string address = View.MediaAddress;

            _mediaClientWorking = true;
            InitializeMediaClient(address);
            _mediaClient.GetMediaUri(profile, videoSourceConfig, videoEncoderConfig, audioSourceConfig, audioEncoderConfig, protocol);
        }
Example No. 16
    void SetVideoEncoderConfiguration()
    {
        VideoEncoderConfiguration config = new VideoEncoderConfiguration
        {
            dimensions = new VideoDimensions
            {
                width  = 640,
                height = 480
            },
            frameRate             = FRAME_RATE.FRAME_RATE_FPS_15,
            minFrameRate          = -1,
            bitrate               = 0,
            minBitrate            = 1,
            orientationMode       = ORIENTATION_MODE.ORIENTATION_MODE_ADAPTIVE,
            degradationPreference = DEGRADATION_PREFERENCE.MAINTAIN_FRAMERATE,
            mirrorMode            = VIDEO_MIRROR_MODE_TYPE.VIDEO_MIRROR_MODE_AUTO
        };

        mRtcEngine.SetVideoEncoderConfiguration(config);
    }
        private void DoSequence(VideoEncoderConfiguration config, MediaUri Uri)
        {
            if (NewGenVideo == null)
            {
                NewGenVideo = new VideoContainer2();
            }
            VideoUtils.AdjustGeneral2(NewGenVideo, _username, _password, MessageTimeout, TransportProtocol.UDP, StreamType.RTPUnicast, Uri);
            VideoUtils.AdjustVideo2(NewGenVideo, config);
            //ValidateStreamSequence22();
            NewGenVideo.EventSink = this;
            NewGenVideo.SetSequence(2, OperationDelay, false, true);
            NewGenVideo.Connect();
            NewGenVideo.SetupWindow();
            VideoIsOpened = true;

            NewGenVideo.Run();
            NewGenVideo.CloseWindow();
            VideoIsOpened         = false;
            NewGenVideo.EventSink = null;
        }
Example No. 18
        public async UniTask <bool> Join(AgoraJoinParameters joinParameters, int timeoutSeconds = 30)
        {
            if (_IsJoined)
            {
                return(true);
            }

            _RtcEngine.SetChannelProfile(joinParameters.ChannelProfile);
            _RtcEngine.SetClientRole(joinParameters.ClientRoleType);

            // Audio
            _RtcEngine.SetAudioProfile(AUDIO_PROFILE_TYPE.AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO, AUDIO_SCENARIO_TYPE.AUDIO_SCENARIO_SHOWROOM);
            _RtcEngine.SetEnableSpeakerphone(true);
            _RtcEngine.SetExternalAudioSource(joinParameters.UseExternalAudioSource, joinParameters.SampleRate, joinParameters.AudioChannels);
            _RtcEngine.SetExternalAudioSink(joinParameters.UseExternalAudioSink, joinParameters.SampleRate, joinParameters.AudioChannels);

            // Video
            var config = new VideoEncoderConfiguration()
            {
                dimensions = new VideoDimensions()
                {
                    width  = joinParameters.VideoWidth,
                    height = joinParameters.VideoHeight
                },
                frameRate = joinParameters.FrameRate,
            };

            _RtcEngine.SetVideoEncoderConfiguration(config);

            _RtcEngine.SetExternalVideoSource(joinParameters.UseExternalVideoSource);

            _RtcEngine.EnableVideoObserver();
            _RtcEngine.EnableVideo();
            _VideoDeviceManager.CreateAVideoDeviceManager();

            // Join channel
            _RtcEngine.JoinChannel(joinParameters.ChannelName, "", joinParameters.UserId);
            await UniTask.WaitUntil(() => _IsJoined).TimeoutWithoutException(TimeSpan.FromSeconds(timeoutSeconds));

            return(_IsJoined);
        }
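A usage sketch for the Join helper above. AgoraJoinParameters is only sketched with a few of the fields that Join actually reads; the remaining fields (channel profile, client role, external audio/video flags, sample rate) are assumed to carry sensible defaults:

        public async UniTask ConnectAsync()
        {
            var joinParameters = new AgoraJoinParameters
            {
                ChannelName = "demo-channel",
                UserId      = 0,                              // 0 lets the Agora engine assign a uid
                VideoWidth  = 640,
                VideoHeight = 360,
                FrameRate   = FRAME_RATE.FRAME_RATE_FPS_15
            };

            bool joined = await Join(joinParameters, timeoutSeconds: 30);
            Debug.Log(joined ? "Joined channel" : "Join timed out");
        }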
            public static Model Create(
                VideoEncoderConfiguration videoEncCfg,
                AudioEncoderConfiguration audioEncCfg,
                VideoAnalyticsConfiguration analyticsCfg,
                PTZConfiguration ptzCfg,
                MetadataConfiguration metaCfg,
                bool isVideoEncCfgEnabled,
                bool isAudioEncCfgEnabled,
                bool isAnalyticsCfgEnabled,
                bool isPtzCfgEnabled,
                bool isMetaCfgEnabled,
                VideoEncoderConfiguration[] videoEncCfgs,
                AudioEncoderConfiguration[] audioEncCfgs,
                PTZConfiguration[] ptzCfgs,
                VideoAnalyticsConfiguration[] analyticsCfgs,
                MetadataConfiguration[] metaCfgs
                )
            {
                var _this = new Model();

                _this.videoEncCfgs                 = videoEncCfgs;
                _this.audioEncCfgs                 = audioEncCfgs;
                _this.ptzCfgs                      = ptzCfgs;
                _this.analyticsCfgs                = analyticsCfgs;
                _this.metaCfgs                     = metaCfgs;
                _this.origin.videoEncCfg           = videoEncCfg;
                _this.origin.audioEncCfg           = audioEncCfg;
                _this.origin.analyticsCfg          = analyticsCfg;
                _this.origin.ptzCfg                = ptzCfg;
                _this.origin.metaCfg               = metaCfg;
                _this.origin.isVideoEncCfgEnabled  = isVideoEncCfgEnabled;
                _this.origin.isAudioEncCfgEnabled  = isAudioEncCfgEnabled;
                _this.origin.isAnalyticsCfgEnabled = isAnalyticsCfgEnabled;
                _this.origin.isPtzCfgEnabled       = isPtzCfgEnabled;
                _this.origin.isMetaCfgEnabled      = isMetaCfgEnabled;
                _this.RevertChanges();

                return(_this);
            }
        public void JpegVideoEncoderConfiguration()
        {
            RunTest(() =>
            {
                VideoEncoderConfiguration[] configs = GetVideoEncoderConfigurations();
                string reason;
                Assert(ValidateVideoEncoderConfigs(configs, out reason), reason, Resources.StepValidatingVideoEncoderConfigs_Title);

                VideoEncoderConfigurationOptions options = null;
                VideoEncoderConfiguration config         = GetVideoEncoderConfiguration(configs, VideoEncoding.JPEG, out options);

                config.Encoding   = VideoEncoding.JPEG;
                config.Resolution = options.JPEG.ResolutionsAvailable[0];
                config.Resolution.Height++;//invalid param
                config.Quality = options.QualityRange.Max;
                config.MPEG4   = null;
                config.H264    = null;
                //config.SessionTimeout = "PT600S";//send the same as received
                if (config.RateControl != null)
                {
                    config.RateControl.FrameRateLimit   = options.JPEG.FrameRateRange.Min;
                    config.RateControl.EncodingInterval = options.JPEG.EncodingIntervalRange.Min;
                }

                string details = string.Format("Setting invalid configuration (VideoEncoderConfiguration/Resolution/Height = '{0}')", config.Resolution.Height);
                SetInvalidVideoEncoderConfiguration(config, false, details);

                config.Resolution.Height--;
                SetVideoEncoderConfiguration(config, false);

                VideoEncoderConfiguration newConfig = GetVideoEncoderConfiguration(config.token);
                Assert(
                    EqualConfigurations(config, newConfig, out reason),
                    string.Format(Resources.ErrorVideoEncoderConfigNotEqual_Format, reason),
                    Resources.StepCompareVideoEncoderConfigs_Title);
            });
        }
        public void H264VideoEncoderConfiguration()
        {
            RunTest(() =>
            {
                VideoEncoderConfiguration[] configs = GetVideoEncoderConfigurations();
                string reason;
                Assert(ValidateVideoEncoderConfigs(configs, out reason), reason, Resources.StepValidatingVideoEncoderConfigs_Title);

                VideoEncoderConfigurationOptions options = null;
                VideoEncoderConfiguration config         = GetVideoEncoderConfiguration(configs, VideoEncoding.H264, out options);

                config.Encoding         = VideoEncoding.H264;
                config.Resolution       = options.H264.ResolutionsAvailable[0];
                config.Quality          = options.QualityRange.Max;
                config.H264             = new H264Configuration();
                config.H264.GovLength   = options.H264.GovLengthRange.Max + 1;//invalid param
                config.H264.H264Profile = H264Profile.Baseline;
                config.MPEG4            = null;
                //config.SessionTimeout = "PT600S";////send the same as received
                if (config.RateControl != null)
                {
                    config.RateControl.FrameRateLimit   = options.H264.FrameRateRange.Min;
                    config.RateControl.EncodingInterval = options.H264.EncodingIntervalRange.Min;
                }
                string details = string.Format("Setting invalid configuration (/VideoEncoderConfiguration/H264/GovLength = {0})", config.H264.GovLength);
                SetInvalidVideoEncoderConfiguration(config, false, details);

                config.H264.GovLength = options.H264.GovLengthRange.Max;
                SetVideoEncoderConfiguration(config, false);

                VideoEncoderConfiguration newConfig = GetVideoEncoderConfiguration(config.token);
                Assert(EqualConfigurations(config, newConfig, out reason),
                       string.Format(Resources.ErrorVideoEncoderConfigNotEqual_Format, reason),
                       Resources.StepCompareVideoEncoderConfigs_Title);
            });
        }
Example No. 22
        void SetResourcesUsageToMimimal(MediaConfigurationChangeLog changeLog, IEnumerable <Profile> profiles)
        {
            List <VideoEncoderConfiguration> modifiedConfigurations = new List <VideoEncoderConfiguration>();

            // set resolutions and FPS to minimal values
            foreach (Profile profile in profiles)
            {
                // We actually change configurations here,
                // so we must track the modified ones and avoid applying changes more than once.
                VideoEncoderConfiguration AlreadyModifiedVec = null;
                AlreadyModifiedVec = modifiedConfigurations.Find(vec => null != profile.VideoEncoderConfiguration && vec.token == profile.VideoEncoderConfiguration.token);
                if (AlreadyModifiedVec != null)
                {
                    profile.VideoEncoderConfiguration = AlreadyModifiedVec;
                    continue;
                }

                if (null != profile.VideoEncoderConfiguration)
                {
                    VideoEncoderConfigurationOptions options = GetVideoEncoderConfigurationOptions(profile.VideoEncoderConfiguration.token, null);
    #if true
                    if (OptimizeVEC(changeLog, profile.VideoEncoderConfiguration, options))
                    {
                        //SetVideoEncoderConfiguration(profile.VideoEncoderConfiguration, false);
                        SetVideoEncoderConfiguration(Utils.CopyMaker.CreateCopy(profile.VideoEncoderConfiguration), false);
                        modifiedConfigurations.Add(profile.VideoEncoderConfiguration);
                    }
    #else
                    VideoResolution[] resolutionsAvailable = null;
                    IntRange          fpsRange             = null;
                    switch (profile.VideoEncoderConfiguration.Encoding)
                    {
                    case VideoEncoding.JPEG:
                        if (options.JPEG != null)
                        {
                            resolutionsAvailable = options.JPEG.ResolutionsAvailable;
                            fpsRange             = options.JPEG.FrameRateRange;
                        }
                        break;

                    case VideoEncoding.H264:
                        if (options.H264 != null)
                        {
                            resolutionsAvailable = options.H264.ResolutionsAvailable;
                            fpsRange             = options.H264.FrameRateRange;
                        }
                        break;

                    case VideoEncoding.MPEG4:
                        if (options.MPEG4 != null)
                        {
                            resolutionsAvailable = options.MPEG4.ResolutionsAvailable;
                            fpsRange             = options.MPEG4.FrameRateRange;
                        }
                        break;
                    }

                    VideoResolution minimalResolution = null;
                    bool            updateResolution  = false;
                    if (resolutionsAvailable != null)
                    {
                        VideoResolution currentResolution = profile.VideoEncoderConfiguration.Resolution;
                        foreach (VideoResolution resolution in resolutionsAvailable)
                        {
                            if (minimalResolution == null)
                            {
                                minimalResolution = resolution;
                            }
                            else
                            {
                                if (minimalResolution.Height * minimalResolution.Width > resolution.Height * resolution.Width)
                                {
                                    minimalResolution = resolution;
                                }
                            }
                        }
                        updateResolution = (minimalResolution.Width * minimalResolution.Height < currentResolution.Width * currentResolution.Height);
                    }

                    bool updateFps = false;
                    if (fpsRange != null)
                    {
                        if (profile.VideoEncoderConfiguration.RateControl != null)
                        {
                            if (profile.VideoEncoderConfiguration.RateControl.FrameRateLimit > fpsRange.Min)
                            {
                                updateFps = true;
                            }
                        }
                        else
                        {
                            updateFps = true;
                        }
                    }

                    if (updateResolution || updateFps)
                    {
                        VideoEncoderConfiguration backup = Utils.CopyMaker.CreateCopy(profile.VideoEncoderConfiguration);
                        changeLog.TrackModifiedConfiguration(backup);
                        if (updateResolution)
                        {
                            profile.VideoEncoderConfiguration.Resolution = minimalResolution;
                        }
                        if (updateFps)
                        {
                            if (profile.VideoEncoderConfiguration.RateControl == null)
                            {
                                profile.VideoEncoderConfiguration.RateControl = new VideoRateControl();
                            }
                            profile.VideoEncoderConfiguration.RateControl.FrameRateLimit = fpsRange.Min;
                        }
                        SetVideoEncoderConfiguration(profile.VideoEncoderConfiguration, false);
                        modifiedConfigurations.Add(profile.VideoEncoderConfiguration);
                    }
    #endif
                }
            }
        }
Example No. 23
 public MediaEndpoint(MediaUri uri, VideoEncoderConfiguration config)
 {
     Uri    = uri;
     Width  = config.Resolution.Width;
     Height = config.Resolution.Height;
 }
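A sketch of constructing the endpoint above from values that appear in the other examples (a MediaUri from Client.GetStreamUri and the active VideoEncoderConfiguration); variable names are illustrative, and Uri/Width/Height are assumed to be readable members:

 // streamUri and videoEncoderConfig are obtained as in the earlier GetMediaUri examples.
 MediaEndpoint endpoint = new MediaEndpoint(streamUri, videoEncoderConfig);
 Console.WriteLine("Endpoint {0} at {1}x{2}", endpoint.Uri, endpoint.Width, endpoint.Height);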
Example No. 24
 public static void SetVideoEncoderConfiguration(BaseOnvifTest test, MediaClient client, VideoEncoderConfiguration configuration, bool persistency)
 {
     RunStep(test, () => { client.SetVideoEncoderConfiguration(configuration, persistency); },
             Resources.StepSetVideoEncoderConfig_Title);
     DoRequestDelay(test);
 }
Example No. 25
        public static void AdjustVideo(
            IVideoForm form,
            string username,
            string password,
            int messageTimeout,
            TransportProtocol protocol,
            StreamType streamType,
            MediaUri streamUri,
            VideoEncoderConfiguration conf)
        {
            if (!string.IsNullOrEmpty(username))
            {
                form.User = username;
            }
            else
            {
                form.User = null;
            }
            if (!string.IsNullOrEmpty(password))
            {
                form.Password = password;
            }
            else
            {
                form.Password = null;
            }

            string uri = (streamUri != null) ? streamUri.Uri : null;

            //uri = "http://[195.145.107.77]:90/rtsp_tunnel?h26x=0&line=1&enableaudio=1";
            switch (protocol)
            {
            case TransportProtocol.UDP:
                form.HTTPPort = 0;
                form.TCP      = false;
                break;

            case TransportProtocol.TCP:
                form.HTTPPort = 0;
                form.TCP      = true;
                break;

            case TransportProtocol.RTSP:
                form.HTTPPort = 0;
                form.TCP      = true;
                break;

            case TransportProtocol.HTTP:
            {
                form.HTTPPort = 80;
                try
                {
                    int pos = uri.IndexOf(']', 8);
                    if (pos > 0)         // IPv6
                    {
                        pos++;
                    }
                    else
                    {
                        pos = uri.IndexOf(':', 8);
                    }
                    if (uri.Substring(pos, 1) == ":")         // port specified
                    {
                        pos++;
                        int pos2 = uri.IndexOf('/', pos);
                        pos = int.Parse(uri.Substring(pos, pos2 - pos));
                        if (pos > 0)
                        {
                            form.HTTPPort = pos;
                        }
                    }
                }
                catch (System.Exception)
                {
                }
                form.TCP = true;
            }
            break;
            }

            form.Multicast = streamType == StreamType.RTPMulticast;
            form.Address   = uri;
            form.RTSP      = !string.IsNullOrEmpty(uri);
            form.Timeout   = messageTimeout;
            if ((conf != null) && (conf.Resolution != null))
            {
                form.VideoWidth  = conf.Resolution.Width;
                form.VideoHeight = conf.Resolution.Height;
            }
        }
Example No. 26
 public void RevertChanges()
 {
     this.current.vec = this.origin.vec;
 }
Example No. 27
        public void GuarenteedNumberOfVideoEncoderInstancesMixReduced()
        {
            MediaConfigurationChangeLog changeLog = new MediaConfigurationChangeLog();

            // 4 StreamSetup types:
            // - RTPUDP - must be supported
            // - RTPRTSPHTTP - must be supported
            // - RTPRTSPTCP - optional
            // - RTPMulticastUDP - optional

            List <StreamSetup> supportedSetups = new List <StreamSetup>();

            StreamSetup streamSetup1 = new StreamSetup();

            streamSetup1.Transport          = new Transport();
            streamSetup1.Transport.Protocol = TransportProtocol.UDP;
            streamSetup1.Stream             = StreamType.RTPUnicast;
            supportedSetups.Add(streamSetup1);

            StreamSetup streamSetup2 = new StreamSetup();

            streamSetup2.Transport          = new Transport();
            streamSetup2.Transport.Protocol = TransportProtocol.HTTP;
            streamSetup2.Stream             = StreamType.RTPUnicast;
            supportedSetups.Add(streamSetup2);

            bool        TCP          = Features.Contains(Feature.RTPRTSPTCP);
            StreamSetup streamSetup3 = new StreamSetup();

            streamSetup3.Transport = new Transport();
            if (TCP)
            {
                streamSetup3.Transport.Protocol = TransportProtocol.RTSP;
                streamSetup3.Stream             = StreamType.RTPUnicast;
                supportedSetups.Add(streamSetup3);
            }

            bool        Multicast    = Features.Contains(Feature.RTPMulticastUDP);
            StreamSetup streamSetup4 = new StreamSetup();

            streamSetup4.Transport = new Transport();
            if (Multicast)
            {
                streamSetup4.Transport.Protocol = TransportProtocol.UDP;
                streamSetup4.Stream             = StreamType.RTPMulticast;
                supportedSetups.Add(streamSetup4);
            }

            // list of used VEC tokens for RTP-Multicast streaming
            List <string> MulticastTokenList = new List <string>();

            // list of used VSC tokens
            List <string> VscTokenList = new List <string>();

            // lists of used ip addresses and ports for RTP-Multicast streaming
            List <int>    usedMulticastPorts = new List <int>();
            List <string> usedMulticastIPs   = new List <string>();

            RunTest(
                () =>
            {
                int step = 0;

                MultipleStreamTestReduced(changeLog, (ref Profile profile) =>
                {
                    if (!VscTokenList.Contains(profile.VideoSourceConfiguration.token))
                    {
                        VscTokenList.Add(profile.VideoSourceConfiguration.token);

                        // we should clear this for every new Video Source Configuration
                        if (0 != MulticastTokenList.Count)
                        {
                            MulticastTokenList.Clear();
                        }
                    }

                    int idx             = step % supportedSetups.Count;
                    StreamSetup current = supportedSetups[idx];

                    // setup multicast
                    if (current.Stream == StreamType.RTPMulticast)
                    {
                        if (MulticastTokenList.Contains(profile.VideoEncoderConfiguration.token))
                        {
                            idx     = (++step) % supportedSetups.Count;
                            current = supportedSetups[idx];
                        }
                        else
                        {
                            MulticastTokenList.Add(profile.VideoEncoderConfiguration.token);

                            string addressVideo = "";
                            int portVideo       = 0;

                            string addressAudio = "";
                            int portAudio       = 0;

                            string addressMetadata = "";
                            int portMetadata       = 0;

                            if (profile.VideoEncoderConfiguration != null)
                            {
                                addressVideo = GetMulticastAddress3(usedMulticastIPs);
                                usedMulticastIPs.Add(addressVideo);
                                portVideo = GetMulticastPort2(usedMulticastPorts);
                                usedMulticastPorts.Add(portVideo);

                                VideoEncoderConfiguration configCopy = Utils.CopyMaker.CreateCopy(profile.VideoEncoderConfiguration);
                                changeLog.TrackModifiedConfiguration(configCopy);
                                SetMulticast(profile.VideoEncoderConfiguration.Multicast, IPType.IPv4, addressVideo, portVideo);
                                SetVideoEncoderConfiguration(profile.VideoEncoderConfiguration, false, true);
                            }

                            if (profile.AudioEncoderConfiguration != null)
                            {
                                addressAudio = GetMulticastAddress3(usedMulticastIPs);
                                usedMulticastIPs.Add(addressAudio);
                                portAudio = GetMulticastPort2(usedMulticastPorts);
                                usedMulticastPorts.Add(portAudio);
                            }

                            if (profile.MetadataConfiguration != null)
                            {
                                addressMetadata = GetMulticastAddress3(usedMulticastIPs);
                                usedMulticastIPs.Add(addressMetadata);
                                portMetadata = GetMulticastPort2(usedMulticastPorts);
                                usedMulticastPorts.Add(portMetadata);
                            }

                            SetMulticastSettings(profile, IPType.IPv4, changeLog,
                                                 addressAudio, portAudio,
                                                 addressVideo, portVideo,
                                                 addressMetadata, portMetadata);
                        }
                    }

                    ++step;
                    return(current);
                });
            },
                () =>
            {
                RestoreMediaConfiguration(changeLog);
            }
                );
        }
        protected void ResolutionTest(
            MediaConfigurationChangeLog changeLog,
            VideoEncoding encodingUnderTest,
            GetResolutionsAvailable getResolutionsAvailable)
        {
            //3.	ONVIF Client invokes GetVideoEncoderConfigurationsRequest message to retrieve
            // video configuration list.
            //4.	Verify the GetVideoEncoderConfigurationsResponse message.

            VideoEncoderConfiguration[] encoderConfigurations = GetVideoEncoderConfigurations();
            CheckVideoEncoderConfigurationsList(encoderConfigurations);
            bool NoProfilesForEncoding = true;

            List <VideoEncoderConfiguration> selectedConfigs = null;
            var WhereRes = encoderConfigurations.Where(C => C.Encoding == encodingUnderTest);

            if (WhereRes != null)
            {
                selectedConfigs = WhereRes.ToList();
            }
            if (selectedConfigs == null || selectedConfigs.Count == 0)
            {
                LogTestEvent("There are no VideoEncoderConfiguration ready for selected encoder type - will try to reconfigure (if this may fail - please pre-configure before making tests)." + Environment.NewLine);
                selectedConfigs = encoderConfigurations.ToList();
            }
            selectedConfigs = MediaTestUtils.SelectConfigurations(selectedConfigs);

            var configGroups = encoderConfigurations.Where(e => !selectedConfigs.Contains(e)).Select(e => new List <VideoEncoderConfiguration>()
            {
                e
            }).ToList();

            configGroups.Insert(0, selectedConfigs);

            //Try to perform steps for selected profiles.
            //In case of fail for all selected profiles try to perform steps for each another profile until first success.
            foreach (var configGroup in configGroups)
            {
                foreach (VideoEncoderConfiguration encoderConfig in configGroup)
                {
                    //5.	Find or create media profile with Video Source Configuration and Video Encoder
                    // Configuration with token VECToken1 and supporting of JPEG encoding, where VECToken1
                    // is first video encoder configuration token from GetVideoEncoderConfigurationsResponse
                    // message (see Annex A.8). If it is not possible skip steps 6-61 and go to the step 62.

                    Profile profile = GetProfileForSpecificConfigurationAndCodec(encoderConfig.token, encodingUnderTest, changeLog);

                    if (profile == null)
                    {
                        continue;
                    }
                    NoProfilesForEncoding = false;

                    //6.	ONVIF Client invokes GetVideoEncoderConfigurationOptionsRequest message
                    // (ProfileToken = “Profile1”, where “Profile1” is profile token from the step 5)
                    // to get video encoder configuration options.
                    //7.	Verify the GetVideoEncoderConfigurationOptionsResponse message from the DUT.

                    VideoEncoderConfigurationOptions options = GetVideoEncoderConfigurationOptions(null, profile.token);

                    Assert(CheckVideoSupport(options, encodingUnderTest),
                           string.Format("{0} encoding is not compatible with current configurations",
                                         GetVideoCodecName(encodingUnderTest)),
                           string.Format("Validate {0} options",
                                         GetVideoCodecName(encodingUnderTest)));

                    VideoResolution highest = null;
                    VideoResolution lowest  = null;
                    VideoResolution median  = null;

                    FindResolutions(getResolutionsAvailable(options), out highest, out lowest, out median);

                    //8.	ONVIF Client invokes SetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1, Resolution = [Width1, Height1], Encoding = JPEG,
                    // where [Width1, Height1] is maximum resolution from the Options.JPEG.ResolutionsAvailable)
                    // to change video encoder configuration.

                    VideoEncoderConfiguration unchanged = Utils.CopyMaker.CreateCopy(encoderConfig);

                    encoderConfig.Encoding   = encodingUnderTest;
                    encoderConfig.Resolution = highest;

                    AdjustVideoEncoderConfiguration(encodingUnderTest, encoderConfig, options);
                    switch (encodingUnderTest)
                    {
                    case VideoEncoding.MPEG4:
                        AdjustMpeg4VideoEncoderConfiguration(encoderConfig, options, false);
                        break;

                    case VideoEncoding.H264:
                        AdjustH264VideoEncoderConfiguration(encoderConfig, options, false);
                        break;
                    }

                    SetVideoEncoderConfiguration(encoderConfig, false);

                    //9.	Verify the SetVideoEncoderConfigurationResponse message from the DUT.

                    changeLog.ModifiedVideoEncoderConfigurations.Add(unchanged);

                    //10.	ONVIF Client invokes GetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1) to get video encoder configuration.
                    //11.	Verify the GetVideoEncoderConfigurationResponse message (ConfigurationToken =
                    // VECToken1, Resolution = [Width1, Height1], Encoding = JPEG) from the DUT. Check
                    // that new setting for Resolution and Encoding was applied.

                    VideoEncoderConfiguration actual = GetVideoEncoderConfiguration(unchanged.token);

                    string reason = string.Empty;
                    // check encoding and resolutions
                    bool ok = ConfigurationValid(actual, encodingUnderTest, highest, out reason);
                    Assert(ok, reason, "Check that the DUT accepted values passed");

                    //12.	ONVIF Client invokes GetStreamUriRequest message (Profile Token, RTP-Unicast,
                    // UDP transport) to retrieve media stream URI for the selected media profile.
                    //13.	DUT sends RTSP URI and parameters defining the lifetime of the URI like
                    // ValidUntilConnect, ValidUntilReboot and Timeout in the GetStreamUriResponse message.
                    StreamSetup streamSetup = new StreamSetup();
                    streamSetup.Transport          = new Transport();
                    streamSetup.Transport.Protocol = TransportProtocol.UDP;
                    streamSetup.Stream             = StreamType.RTPUnicast;

                    MediaUri streamUri = GetStreamUri(streamSetup, profile.token);

                    //14.	ONVIF Client verifies the RTSP media stream URI provided by the DUT.
                    //15.	ONVIF Client invokes RTSP DESCRIBE request.
                    //16.	DUT sends 200 OK message and SDP information.
                    //17.	 ONVIF Client invokes RTSP SETUP request with transport parameter as RTP/UDP.
                    //18.	DUT sends 200 OK message and the media stream information.
                    //19.	ONVIF Client invokes RTSP PLAY request.
                    //20.	DUT sends 200 OK message and starts media streaming.
                    //21.	DUT sends JPEG RTP media stream to ONVIF Client over UDP. Verify that stream
                    // has JPEG encoding and [Width1, Height1] resolution.
                    //22.	DUT sends RTCP sender report to ONVIF Client.
                    //23.	DUT validates the received RTP and RTCP packets, decodes and renders them.
                    //24.	ONVIF Client invokes RTSP TEARDOWN control request at the end of media
                    // streaming to terminate the RTSP session.
                    //25.	DUT sends 200 OK Response and terminates the RTSP Session.
                    //TestTool.Tests.Common.Media.VideoUtils.AdjustVideo(_videoForm, _username, _password, _messageTimeout,
                    //                                                   streamSetup.Transport.Protocol,
                    //                                                   streamSetup.Stream, streamUri, encoderConfig);
                    //ValidateStreamSequence(false, true);
                    DoSequence(encoderConfig, streamUri);

                    //26.	ONVIF Client invokes SetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1, Resolution = [Width2, Height2], Encoding = JPEG,
                    // where [Width2, Height2] is minimum resolution from the Options.JPEG.ResolutionsAvailable) to change video encoder configuration.
                    //27.	Verify the SetVideoEncoderConfigurationResponse message from the DUT.
                    encoderConfig.Encoding   = encodingUnderTest;
                    encoderConfig.Resolution = lowest;

                    AdjustVideoEncoderConfiguration(encodingUnderTest, encoderConfig, options);

                    SetVideoEncoderConfiguration(encoderConfig, false);

                    //28.	ONVIF Client invokes GetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1) to get video encoder configuration.
                    //29.	Verify the GetVideoEncoderConfigurationResponse message (ConfigurationToken =
                    // VECToken1, Resolution = [Width2, Height2], Encoding = JPEG, where [Width2, Height2]) from the DUT. Check that new setting for Resolution and Encoding was applied.
                    actual = GetVideoEncoderConfiguration(unchanged.token);
                    ok     = ConfigurationValid(actual, encodingUnderTest, lowest, out reason);
                    Assert(ok, reason, "Check that the DUT accepted values passed");

                    //30.	ONVIF Client invokes GetStreamUriRequest message (Profile Token, RTP-Unicast,
                    // UDP transport) to retrieve media stream URI for the selected media profile.
                    //31.	DUT sends RTSP URI and parameters defining the lifetime of the URI like ValidUntilConnect, ValidUntilReboot and Timeout in the GetStreamUriResponse message.

                    streamUri = GetStreamUri(streamSetup, profile.token);

                    //32.	ONVIF Client verifies the RTSP media stream URI provided by the DUT.
                    //33.	ONVIF Client invokes RTSP DESCRIBE request.
                    //34.	DUT sends 200 OK message and SDP information.
                    //35.	 ONVIF Client invokes RTSP SETUP request with transport parameter as RTP/UDP.
                    //36.	DUT sends 200 OK message and the media stream information.
                    //37.	ONVIF Client invokes RTSP PLAY request.
                    //38.	DUT sends 200 OK message and starts media streaming.
                    //39.	DUT sends JPEG RTP media stream to ONVIF Client over UDP. Verify that stream has JPEG encoding and [Width2, Height2] resolution.
                    //40.	DUT sends RTCP sender report to ONVIF Client.
                    //41.	DUT validates the received RTP and RTCP packets, decodes and renders them.
                    //42.	ONVIF Client invokes RTSP TEARDOWN control request at the end of media streaming to terminate the RTSP session.
                    //43.	DUT sends 200 OK Response and terminates the RTSP Session.
                    //TestTool.Tests.Common.Media.VideoUtils.AdjustVideo(_videoForm, _username, _password, _messageTimeout,
                    //                                                   streamSetup.Transport.Protocol,
                    //                                                   streamSetup.Stream, streamUri, encoderConfig);
                    //ValidateStreamSequence(false, true);
                    DoSequence(encoderConfig, streamUri);

                    //44.	ONVIF Client invokes SetVideoEncoderConfigurationRequest message (ConfigurationToken = VECToken1, Resolution = [Width3, Height3], Encoding = JPEG, where [Width3, Height3] is middle resolution from the Options.JPEG.ResolutionsAvailable) to change video encoder configuration.
                    //45.	Verify the SetVideoEncoderConfigurationResponse message from the DUT.
                    encoderConfig.Encoding   = encodingUnderTest;
                    encoderConfig.Resolution = median;

                    AdjustVideoEncoderConfiguration(encodingUnderTest, encoderConfig, options);

                    SetVideoEncoderConfiguration(encoderConfig, false);

                    //46.	ONVIF Client invokes GetVideoEncoderConfigurationRequest message (ConfigurationToken = VECToken1) to get video encoder configuration.
                    //47.	Verify the GetVideoEncoderConfigurationResponse message (ConfigurationToken = VECToken1, Resolution = [Width3, Height3], Encoding = JPEG) from the DUT. Check that new setting for Resolution and Encoding was applied.

                    actual = GetVideoEncoderConfiguration(unchanged.token);
                    ok     = ConfigurationValid(actual, encodingUnderTest, median, out reason);
                    Assert(ok, reason, "Check that the DUT accepted values passed");

                    //48.	ONVIF Client invokes GetStreamUriRequest message (Profile Token, RTP-Unicast, UDP transport) to retrieve media stream URI for the selected media profile.
                    //49.	DUT sends RTSP URI and parameters defining the lifetime of the URI like ValidUntilConnect, ValidUntilReboot and Timeout in the GetStreamUriResponse message.
                    streamUri = GetStreamUri(streamSetup, profile.token);

                    //50.	ONVIF Client verifies the RTSP media stream URI provided by the DUT.
                    //51.	ONVIF Client invokes RTSP DESCRIBE request.
                    //52.	DUT sends 200 OK message and SDP information.
                    //53.	 ONVIF Client invokes RTSP SETUP request with transport parameter as RTP/UDP.
                    //54.	DUT sends 200 OK message and the media stream information.
                    //55.	ONVIF Client invokes RTSP PLAY request.
                    //56.	DUT sends 200 OK message and starts media streaming.
                    //57.	DUT sends JPEG RTP media stream to ONVIF Client over UDP. Verify that stream has JPEG encoding and [Width3, Height3] resolution.
                    //58.	DUT sends RTCP sender report to ONVIF Client.
                    //59.	DUT validates the received RTP and RTCP packets, decodes and renders them.
                    //60.	ONVIF Client invokes RTSP TEARDOWN control request at the end of media streaming to terminate the RTSP session.
                    //61.	DUT sends 200 OK Response and terminates the RTSP Session.
                    //TestTool.Tests.Common.Media.VideoUtils.AdjustVideo(_videoForm, _username, _password, _messageTimeout,
                    //                                                   streamSetup.Transport.Protocol,
                    //                                                   streamSetup.Stream, streamUri, encoderConfig);
                    //ValidateStreamSequence(false, true);
                    DoSequence(encoderConfig, streamUri);

                    //62.	Repeat steps 5-61 for the rest of the Video Encoder Configurations supported by the DUT, using different multicast ports and the same multicast addresses for Video Encoder Configurations.
                }

                if (!NoProfilesForEncoding)
                {
                    break;
                }
            }

            if (NoProfilesForEncoding)
            {
                RunStep(() =>
                {
                    throw new Exception(string.Format("No profiles for {0}", GetVideoCodecName(encodingUnderTest)));
                }, string.Format("Check that at least one profile was found ({0} not supported?)", GetVideoCodecName(encodingUnderTest)));
            }
        }
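        // A rough, self-contained sketch of the RTSP control sequence that the step comments
        // above describe and that DoSequence drives internally: DESCRIBE -> SETUP -> PLAY ->
        // TEARDOWN. This is NOT the test tool's implementation: host/port/path are placeholders,
        // authentication, SDP parsing and RTP/RTCP validation are omitted, and SETUP would
        // normally target the track control URL taken from the SDP rather than the presentation URI.
        static string SendRtsp(System.Net.Sockets.NetworkStream stream, string request)
        {
            byte[] bytes = System.Text.Encoding.ASCII.GetBytes(request);
            stream.Write(bytes, 0, bytes.Length);
            byte[] buffer = new byte[8192];
            int read = stream.Read(buffer, 0, buffer.Length);
            return System.Text.Encoding.ASCII.GetString(buffer, 0, read);   // expect "RTSP/1.0 200 OK ..."
        }

        static void RtspSequenceSketch(string host, int port, string path)
        {
            using (System.Net.Sockets.TcpClient client = new System.Net.Sockets.TcpClient(host, port))
            using (System.Net.Sockets.NetworkStream stream = client.GetStream())
            {
                string uri = string.Format("rtsp://{0}:{1}{2}", host, port, path);

                // DESCRIBE -> 200 OK plus SDP describing the media stream
                SendRtsp(stream, "DESCRIBE " + uri + " RTSP/1.0\r\nCSeq: 1\r\nAccept: application/sdp\r\n\r\n");

                // SETUP with an RTP/UDP transport -> 200 OK plus a Session header
                string setup = SendRtsp(stream, "SETUP " + uri + " RTSP/1.0\r\nCSeq: 2\r\n" +
                                                "Transport: RTP/AVP;unicast;client_port=5000-5001\r\n\r\n");
                string session = "";
                foreach (string line in setup.Split('\n'))
                {
                    if (line.StartsWith("Session:"))
                    {
                        session = line.Substring("Session:".Length).Trim().Split(';')[0];
                    }
                }

                // PLAY -> 200 OK, streaming starts (RTP on client_port, RTCP reports on client_port + 1)
                SendRtsp(stream, "PLAY " + uri + " RTSP/1.0\r\nCSeq: 3\r\nSession: " + session + "\r\n\r\n");

                // TEARDOWN -> 200 OK, the DUT terminates the RTSP session
                SendRtsp(stream, "TEARDOWN " + uri + " RTSP/1.0\r\nCSeq: 4\r\nSession: " + session + "\r\n\r\n");
            }
        }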
        protected void AllResolutionsTest(MediaConfigurationChangeLog changeLog,
                                          VideoEncoding encodingUnderTest,
                                          GetResolutionsAvailable getResolutionsAvailable)
        {
            //3.	ONVIF Client invokes GetVideoEncoderConfigurationsRequest message to retrieve
            // video configuration list.
            //4.	Verify the GetVideoEncoderConfigurationsResponse message.

            VideoEncoderConfiguration[] encoderConfigurations = GetVideoEncoderConfigurations();
            CheckVideoEncoderConfigurationsList(encoderConfigurations);
            bool NoProfilesForEncoding = true;

            foreach (VideoEncoderConfiguration encoderConfig in encoderConfigurations)
            {
                //5.	Find or create a media profile with a Video Source Configuration and
                // a Video Encoder Configuration with token VECToken1 that supports JPEG encoding,
                // where VECToken1 is the first video encoder configuration token from the
                // GetVideoEncoderConfigurationsResponse message (see Annex A.14). If this is not possible,
                // skip steps 6-26 and go to step 27.
                Profile profile = GetProfileForSpecificConfigurationAndCodec(
                    encoderConfig.token, encodingUnderTest, changeLog);

                if (profile == null)
                {
                    continue;
                }
                if (NoProfilesForEncoding)
                {
                    NoProfilesForEncoding = false;
                }

                //6.	ONVIF Client invokes GetVideoEncoderConfigurationOptionsRequest message
                // (ProfileToken = “Profile1”, where “Profile1” is profile token from the step 5)
                // to get video encoder configuration options.
                //7.	Verify the GetVideoEncoderConfigurationOptionsResponse message from the DUT.
                VideoEncoderConfigurationOptions options = GetVideoEncoderConfigurationOptions(
                    null, profile.token);

                Assert(CheckVideoSupport(options, encodingUnderTest),
                       string.Format("{0} encoding is not compatible with current configuration",
                                     GetVideoCodecName(encodingUnderTest)),
                       string.Format("Validate {0} options", GetVideoCodecName(encodingUnderTest)));

                VideoResolution[] ResolutionsAvailable = getResolutionsAvailable(options);
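                // The getResolutionsAvailable delegate picks the codec-specific resolution list out of
                // the options (e.g. options.JPEG.ResolutionsAvailable, options.MPEG4.ResolutionsAvailable
                // or options.H264.ResolutionsAvailable, depending on encodingUnderTest).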

                VideoEncoderConfiguration unchanged = Utils.CopyMaker.CreateCopy(encoderConfig);
                bool addToChangeLog = true;

                foreach (VideoResolution resolution in ResolutionsAvailable)
                {
                    //8.	ONVIF Client invokes SetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1, Resolution = [Width1, Height1], Encoding = JPEG,
                    // where [Width1, Height1] is the first resolution
                    // from the Options.JPEG.ResolutionsAvailable) to change video encoder configuration.
                    //9.	Verify the SetVideoEncoderConfigurationResponse message from the DUT.

                    encoderConfig.Encoding   = encodingUnderTest;
                    encoderConfig.Resolution = resolution;

                    AdjustVideoEncoderConfiguration(encodingUnderTest, encoderConfig, options);
                    switch (encodingUnderTest)
                    {
                    case VideoEncoding.MPEG4:
                        AdjustMpeg4VideoEncoderConfiguration(encoderConfig, options, false);
                        break;

                    case VideoEncoding.H264:
                        AdjustH264VideoEncoderConfiguration(encoderConfig, options, false);
                        break;
                    }

                    SetVideoEncoderConfiguration(encoderConfig, false);

                    if (addToChangeLog)
                    {
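                        // store the original configuration once per VEC so it can be restored after the test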
                        changeLog.ModifiedVideoEncoderConfigurations.Add(unchanged);
                        addToChangeLog = false;
                    }


                    //10.	ONVIF Client invokes GetVideoEncoderConfigurationRequest message
                    // (ConfigurationToken = VECToken1) to get video encoder configuration.
                    //11.	Verify the GetVideoEncoderConfigurationResponse message
                    //(ConfigurationToken = VECToken1, Resolution = [Width1, Height1], Encoding = JPEG)
                    // from the DUT. Check that new setting for Resolution and Encoding was applied.

                    VideoEncoderConfiguration actual = GetVideoEncoderConfiguration(unchanged.token);

                    string reason = string.Empty;
                    // check encoding and resolutions
                    bool ok = ConfigurationValid(actual, encodingUnderTest, resolution, out reason);
                    Assert(ok, reason, "Check that the DUT accepted the values passed");

                    //12.	ONVIF Client invokes GetStreamUriRequest message (Profile Token, RTP-Unicast,
                    // UDP transport) to retrieve media stream URI for the selected media profile.
                    //13.	DUT sends RTSP URI and parameters defining the lifetime of the URI like
                    // ValidUntilConnect, ValidUntilReboot and Timeout in the GetStreamUriResponse message.

                    StreamSetup streamSetup = new StreamSetup();
                    streamSetup.Transport          = new Transport();
                    streamSetup.Transport.Protocol = TransportProtocol.UDP;
                    streamSetup.Stream             = StreamType.RTPUnicast;

                    MediaUri streamUri = GetStreamUri(streamSetup, profile.token);

                    //14.	ONVIF Client verifies the RTSP media stream URI provided by the DUT.
                    //15.	ONVIF Client invokes RTSP DESCRIBE request.
                    //16.	DUT sends 200 OK message and SDP information.
                    //17.	ONVIF Client invokes RTSP SETUP request with transport parameter as RTP/UDP.
                    //18.	DUT sends 200 OK message and the media stream information.
                    //19.	ONVIF Client invokes RTSP PLAY request.
                    //20.	DUT sends 200 OK message and starts media streaming.
                    //21.	DUT sends JPEG RTP media stream to ONVIF Client over UDP. Verify that stream has JPEG encoding and [Width1, Height1] resolution.
                    //22.	DUT sends RTCP sender report to ONVIF Client.
                    //23.	ONVIF Client validates the received RTP and RTCP packets, decodes and renders them.
                    //24.	ONVIF Client invokes RTSP TEARDOWN control request at the end of media streaming to terminate the RTSP session.
                    //25.	DUT sends 200 OK Response and terminates the RTSP Session.
                    //TestTool.Tests.Common.Media.VideoUtils.AdjustVideo(_videoForm, _username,
                    //    _password, _messageTimeout, streamSetup.Transport.Protocol, streamSetup.Stream,
                    //    streamUri, encoderConfig);
                    //ValidateStreamSequence(false, true);
                    DoSequence(encoderConfig, streamUri);
                }
            }

            if (NoProfilesForEncoding)
            {
                RunStep(() =>
                {
                    throw new Exception(string.Format("No profiles for {0}", GetVideoCodecName(encodingUnderTest)));
                }, string.Format("Check that at least one profile was found ({0} not supported?)", GetVideoCodecName(encodingUnderTest)));
            }
        }
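        // Assumed usage of AllResolutionsTest (not part of the original listing): the caller
        // supplies the encoding under test plus a delegate that selects the matching resolution
        // list from the VideoEncoderConfigurationOptions, and wraps the run in RunTest with a
        // cleanup step that restores the device configuration. The method name is hypothetical.
        public void AllSupportedJpegResolutionsStreaming()
        {
            MediaConfigurationChangeLog changeLog = new MediaConfigurationChangeLog();

            RunTest(
                () =>
            {
                // iterate over every resolution advertised in Options.JPEG.ResolutionsAvailable
                AllResolutionsTest(changeLog,
                                   VideoEncoding.JPEG,
                                   options => options.JPEG.ResolutionsAvailable);
            },
                () =>
            {
                RestoreMediaConfiguration(changeLog);
            });
        }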
Example n. 30
0
        public void GuarenteedNumberOfVideoEncoderInstancesMulticastReduced()
        {
            MediaConfigurationChangeLog changeLog = new MediaConfigurationChangeLog();

            RunTest(
                () =>
            {
                // list of used VEC tokens for RTP-Multicast streaming
                List <string> MulticastTokenList = new List <string>();

                // list of used VSC tokens
                List <string> VscTokenList = new List <string>();

                // lists of used ip addresses and ports for RTP-Multicast streaming
                List <int> usedMulticastPorts  = new List <int>();
                List <string> usedMulticastIPs = new List <string>();

                MultipleStreamTestReduced(changeLog, (ref Profile profile) =>
                {
                    if (!VscTokenList.Contains(profile.VideoSourceConfiguration.token))
                    {
                        VscTokenList.Add(profile.VideoSourceConfiguration.token);

                        // we should clear this for every new Video Source Configuration
                        if (0 != MulticastTokenList.Count)
                        {
                            MulticastTokenList.Clear();
                        }
                    }

                    StreamSetup streamSetup        = new StreamSetup();
                    streamSetup.Transport          = new Transport();
                    streamSetup.Transport.Protocol = TransportProtocol.UDP;
                    streamSetup.Stream             = StreamType.RTPMulticast;

                    if (MulticastTokenList.Contains(profile.VideoEncoderConfiguration.token))
                    {
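                        // This VEC token has already been used for RTP-Multicast streaming with the
                        // current video source configuration; null the ref parameter so the caller
                        // skips this profile instead of configuring a second stream for the same VEC.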
                        profile = null;
                    }
                    else
                    {
                        MulticastTokenList.Add(profile.VideoEncoderConfiguration.token);

                        string addressVideo = "";
                        int portVideo       = 0;

                        string addressAudio = "";
                        int portAudio       = 0;

                        string addressMetadata = "";
                        int portMetadata       = 0;

                        if (profile.VideoEncoderConfiguration != null)
                        {
                            addressVideo = GetMulticastAddress3(usedMulticastIPs);
                            usedMulticastIPs.Add(addressVideo);
                            portVideo = GetMulticastPort2(usedMulticastPorts);
                            usedMulticastPorts.Add(portVideo);

                            VideoEncoderConfiguration configCopy = Utils.CopyMaker.CreateCopy(profile.VideoEncoderConfiguration);
                            changeLog.TrackModifiedConfiguration(configCopy);
                            SetMulticast(profile.VideoEncoderConfiguration.Multicast, IPType.IPv4, addressVideo, portVideo);
                            SetVideoEncoderConfiguration(profile.VideoEncoderConfiguration, false, true);
                        }

                        if (profile.AudioEncoderConfiguration != null)
                        {
                            addressAudio = GetMulticastAddress3(usedMulticastIPs);
                            usedMulticastIPs.Add(addressAudio);
                            portAudio = GetMulticastPort2(usedMulticastPorts);
                            usedMulticastPorts.Add(portAudio);
                        }

                        if (profile.MetadataConfiguration != null)
                        {
                            addressMetadata = GetMulticastAddress3(usedMulticastIPs);
                            usedMulticastIPs.Add(addressMetadata);
                            portMetadata = GetMulticastPort2(usedMulticastPorts);
                            usedMulticastPorts.Add(portMetadata);
                        }
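                        // Push the allocated multicast addresses/ports into the remaining profile
                        // configurations; changeLog records the original values so that
                        // RestoreMediaConfiguration can roll them back in the cleanup step.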

                        SetMulticastSettings(profile, IPType.IPv4, changeLog,
                                             addressAudio, portAudio,
                                             addressVideo, portVideo,
                                             addressMetadata, portMetadata);
                    }

                    return(streamSetup);
                });
            },
                () =>
            {
                RestoreMediaConfiguration(changeLog);
            }
                );
        }
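        // GetMulticastAddress3 and GetMulticastPort2 are internal helpers of the test tool and are
        // not shown in this example. A minimal sketch of what such allocators might do is below
        // (hypothetical names NextFreeMulticastAddress/NextFreeMulticastPort, assuming the
        // administratively scoped 239.0.0.0/8 range and even RTP ports with the odd port for RTCP).
        static string NextFreeMulticastAddress(System.Collections.Generic.List<string> used)
        {
            // walk 239.0.0.1 .. 239.0.0.254 until an address not handed out yet is found
            for (int last = 1; last < 255; last++)
            {
                string candidate = string.Format("239.0.0.{0}", last);
                if (!used.Contains(candidate))
                {
                    return candidate;
                }
            }
            throw new System.InvalidOperationException("No free multicast address left");
        }

        static int NextFreeMulticastPort(System.Collections.Generic.List<int> used)
        {
            // RTP uses the even port; the following odd port is implicitly taken by RTCP
            for (int port = 40000; port < 41000; port += 2)
            {
                if (!used.Contains(port) && !used.Contains(port + 1))
                {
                    return port;
                }
            }
            throw new System.InvalidOperationException("No free multicast port left");
        }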