Example #1
1
        /// <summary>
        /// Computes a dense 3D point cloud from a stereo frame pair using GPU
        /// block matching, reprojecting the disparity map through the rig's Q matrix.
        /// </summary>
        /// <param name="stereoParams">Calibrated stereo rig parameters (supplies Q).</param>
        /// <param name="stereoFrame">The left/right raw frame pair.</param>
        /// <param name="disparityImg">Receives the computed disparity map; the caller owns and must dispose it.</param>
        /// <returns>3D points reprojected from the disparity map.</returns>
        private MCvPoint3D32f[] Get3DFeatures(StereoCameraParams stereoParams, VideoSource.StereoFrameSequenceElement stereoFrame, out Image<Gray, short> disparityImg)
        {
            // NOTE(review): the previous version also constructed an unused StereoSGBM
            // solver (with mostly-zero parameters) for a commented-out CPU path:
            //   stereoSolver.FindStereoCorrespondence(leftImg, rightImg, dispImg);
            // That dead allocation has been removed; restore from history if a CPU
            // fallback is ever needed.
            using (var gpuSBM = new Emgu.CV.GPU.GpuStereoBM(128, 19))
            using (var leftImg = new Image<Gray, byte>(stereoFrame.LeftRawFrame))
            using (var rightImg = new Image<Gray, byte>(stereoFrame.RightRawFrame))
            using (var gpuLeftImg = new Emgu.CV.GPU.GpuImage<Gray, byte>(leftImg))
            using (var gpuRightImg = new Emgu.CV.GPU.GpuImage<Gray, byte>(rightImg))
            using (var gpuDispImg = new Emgu.CV.GPU.GpuImage<Gray, byte>(leftImg.Size))
            {
                //GPU stereo correspondence
                gpuSBM.FindStereoCorrespondence(gpuLeftImg, gpuRightImg, gpuDispImg, null);

                // Fix: the old code pre-allocated a throwaway Image<Gray, short> that was
                // immediately replaced (leak) and also never disposed the intermediate
                // ToImage() result.
                Image<Gray, short> dispMap;
                using (var rawDisp = gpuDispImg.ToImage())
                {
                    dispMap = rawDisp.Convert<Gray, short>();
                }

                var points = PointCollection.ReprojectImageTo3D(dispMap, stereoParams.Q);
                disparityImg = dispMap;
                return points;
            }
        }
Example #2
1
        /// <summary>
        /// Computes a disparity map for a stereo frame pair using GPU block matching.
        /// Block size and disparity count come from the UI sliders.
        /// </summary>
        /// <param name="stereoFrame">The left/right raw frame pair.</param>
        /// <returns>The disparity map; the caller owns and must dispose it.</returns>
        private Image<Gray, short> GetDispMap(VideoSource.StereoFrameSequenceElement stereoFrame)
        {
            int numDisparities = GetSliderValue(Num_Disparities);
            int sadWindow = GetSliderValue(SAD_Window);

            // NOTE(review): the remaining sliders (min disparity, P1/P2, disp12MaxDiff,
            // prefilter cap, uniqueness ratio, speckle window/range) only fed a
            // StereoSGBM solver used by a commented-out CPU path:
            //   stereoSolver.FindStereoCorrespondence(leftImg, rightImg, dispImg);
            // That dead solver has been removed; the GPU block matcher below only uses
            // the disparity count and the SAD window size.
            using (var gpuSBM = new Emgu.CV.GPU.GpuStereoBM(numDisparities, sadWindow))
            using (var leftImg = new Image<Gray, byte>(stereoFrame.LeftRawFrame))
            using (var rightImg = new Image<Gray, byte>(stereoFrame.RightRawFrame))
            using (var gpuLeftImg = new Emgu.CV.GPU.GpuImage<Gray, byte>(leftImg))
            using (var gpuRightImg = new Emgu.CV.GPU.GpuImage<Gray, byte>(rightImg))
            using (var gpuDispImg = new Emgu.CV.GPU.GpuImage<Gray, byte>(leftImg.Size))
            {
                //GPU stereo correspondence
                gpuSBM.FindStereoCorrespondence(gpuLeftImg, gpuRightImg, gpuDispImg, null);

                // Fix: the old code pre-allocated a throwaway Image<Gray, short> that was
                // immediately replaced (leak) and never disposed the intermediate
                // ToImage() result.
                using (var rawDisp = gpuDispImg.ToImage())
                {
                    return rawDisp.Convert<Gray, short>();
                }
            }
        }
Example #3
0
        /// <summary>
        /// Stops the video source (if one is bound), raises the camera-stopped
        /// event, and then unbinds the source.
        /// </summary>
        public void Stop()
        {
            VideoSource?.Stop();

            RaiseCameraStoppedEvent();

            UnbindVideoSource();
        }
Example #4
0
        /// <summary>
        /// Accesses a frame after ResetOutputFormat has invalidated it.
        /// </summary>
        public void FrameObjectDisposedCaseFive()
        {
            var index  = new Index("h264_720p_hp_5.1_3mbps_vorbis_styled_and_unstyled_subs_suzumiya.ffindex");
            var source = index.VideoSource("h264_720p_hp_5.1_3mbps_vorbis_styled_and_unstyled_subs_suzumiya.mkv", 0);
            var frame  = source.GetFrame(10);

            // Resetting the output format invalidates previously fetched frames.
            source.ResetOutputFormat();

            // Touch the invalidated frame.
            char dummy = frame.FrameType;
        }
        /// <summary>
        /// Provides the video source built from <see cref="Resource"/>.
        /// </summary>
        /// <param name="serviceProvider">The service provider.</param>
        /// <returns>A video source, or null when no resource is set.</returns>
        public object ProvideValue(IServiceProvider serviceProvider)
        {
            return Resource == null
                ? null
                : VideoSource.FromResource(Resource);
        }
Example #6
0
        /// <summary>
        /// Checks whether the source carries an availability attribute that
        /// includes the given flag.
        /// </summary>
        public static bool IsAvailable(this VideoSource src, SourceAvailability available)
        {
            var attr = src.GetPossibleAttribute <VideoSource, VideoSourceAvailabilityAttribute>();

            // No attribute means the source is never available.
            return attr != null && attr.SourceAvailability.HasFlag(available);
        }
        /// <summary>
        /// Resolves a file-based video source to its on-disk path; yields null
        /// for non-file sources or missing files.
        /// </summary>
        public Task <string> LoadVideoAsync(VideoSource source, CancellationToken cancellationToken = new CancellationToken())
        {
            var fileSource = source as FileVideoSource;
            string path = null;

            if (!string.IsNullOrEmpty(fileSource?.File) && File.Exists(fileSource.File))
            {
                path = fileSource.File;
            }

            return Task.FromResult <string>(path);
        }
Example #8
0
        /// <summary>
        /// Checks whether the source carries an availability attribute whose
        /// M/E availability includes the given block.
        /// </summary>
        public static bool IsAvailable(this VideoSource src, MixEffectBlockId meId)
        {
            var attr = src.GetPossibleAttribute <VideoSource, VideoSourceAvailabilityAttribute>();

            // No attribute means the source is never available.
            return attr != null && attr.MeAvailability.Includes(meId);
        }
        /// <summary>
        /// Converts a string into a video source: absolute non-file URIs are
        /// loaded as URIs, everything else as a bundled resource.
        /// </summary>
        public override object ConvertFromInvariantString(string value)
        {
            if (String.IsNullOrWhiteSpace(value))
            {
                throw new InvalidOperationException("Cannot convert null or whitespace to ImageSource");
            }

            Uri uri;
            if (Uri.TryCreate(value, UriKind.Absolute, out uri) && uri.Scheme != "file")
            {
                return VideoSource.FromUri(value);
            }

            return VideoSource.FromResource(value);
        }
Example #10
0
        /// <summary>
        /// Sets up recording for one video stream: storage goes under
        /// "&lt;GlobalData.Path&gt;/&lt;videoId&gt;_&lt;streamId&gt;" and display events feed the recorder.
        /// </summary>
        public VideoStorager(string videoId, int streamId)
        {
            VideoId_StreamId = $"{videoId}_{streamId}";
            var recordingPath = System.IO.Path.Combine(GlobalData.Path, VideoId_StreamId);
            _syncRec = new SyncRecorder(recordingPath);

            var source = VideoSourcesCmd.Instance.GetVideoSource(videoId, streamId);
            _videoSourceCmd = new VideoSourceCmd(source);
            _videoSourceCmd.VideoDisplayEvent += _syncRec.Set;
        }
Example #11
0
        /// <summary>
        /// Binds the video source, starts it (if one was bound), and raises
        /// the camera-started event.
        /// </summary>
        public void Start()
        {
            BindVideoSource();

            VideoSource?.Start();

            RaiseCameraStartedEvent();
        }
Example #12
0
        /// <summary>
        /// Loads the video from the specified source.
        /// </summary>
        /// <param name="source">The source of the video file.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>The path to the video file, or null when the source is not file-based.</returns>
        public Task <string> LoadVideoAsync(VideoSource source, CancellationToken cancellationToken = default(CancellationToken))
        {
            var fileVideoSource = source as FileVideoSource;
            var path = string.IsNullOrEmpty(fileVideoSource?.File)
                ? null
                : fileVideoSource.File;

            return Task.FromResult(path);
        }
Example #13
0
        /// <summary>
        /// Builds an RTSP client for the given source using the configured
        /// client address (logging, but not aborting, on address errors).
        /// </summary>
        public IRtspClient Create(VideoSource videoSource)
        {
            Exception addressError;
            var address = IPUtils.GetIPAddressFromString(_appConfigurationFacade.RtspClientAddress, out addressError);

            if (addressError != null)
            {
                LogManager.GetLogger("RtspClientFactory").Error("Error getting RtspClient address: " + addressError);
            }

            return new RTSPClient.RtspClient(videoSource, address);
        }
Example #14
0
        /// <summary>
        /// Removes the video source with the given id, persists the updated
        /// list, and raises the deletion event. No-op when the id is unknown.
        /// </summary>
        /// <param name="videoSourceId">Id of the source to delete.</param>
        public void DeleteVideoSource(Guid videoSourceId)
        {
            int index = _videoSources.FindIndex(vs => vs.Id == videoSourceId);

            // Fix: FindIndex returns -1 when nothing matches; indexing with -1
            // previously threw ArgumentOutOfRangeException.
            if (index < 0)
            {
                return;
            }

            VideoSource videoSource = _videoSources[index];

            _videoSources.RemoveAt(index);
            _videoSourceDatabase.Save(_videoSources.ToArray());
            OnVideoSourceDeleted?.Invoke(videoSource);
        }
Example #15
0
        /// <summary>
        /// Removes the RTSP client tracked for this source (if any), detaches
        /// its handlers, and stops it.
        /// </summary>
        protected void StopRtspClient(VideoSource videoSource)
        {
            IRtspClient client;
            if (!_rtspClientsDictionary.TryRemove(videoSource.Id, out client))
            {
                return;
            }

            // Detach all handlers, then stop the client.
            client.Received_Rtp  -= RTSPClientReceivedData;
            client.Received_Rtcp -= RTSPClientReceivedData;
            client.OnStopped     -= RTSPClientStopped;
            client.Stop();
        }
Example #16
0
        /// <summary>
        /// Loads the video from the specified source.
        /// </summary>
        /// <param name="source">The source of the video file.</param>
        /// <param name="cancellationToken">The cancellation token.</param>
        /// <returns>The absolute URI of the video, or null when the source is not URI-based.</returns>
        public Task <string> LoadVideoAsync(VideoSource source, CancellationToken cancellationToken = default(CancellationToken))
        {
            var uriVideoSource = source as UriVideoSource;
            var path = uriVideoSource?.Uri?.AbsoluteUri;

            return Task.FromResult(path);
        }
Example #17
0
        /// <summary>
        /// Notifies listeners that a connection was removed (only when a source
        /// is known) and drops matching entries from the connection list.
        /// </summary>
        protected void ProcessConnectionRemoved(Guid connectionId, VideoSource videoSource)
        {
            if (videoSource != null)
            {
                OnConnectionRemoved?.Invoke(connectionId, videoSource);
            }

            lock (_rtspList)
            {
                _rtspList.RemoveAll(connection => connection.Id == connectionId);
            }
        }
Example #18
0
 /// <summary>
 /// Picks the handler matching the concrete source type; URI handling is the
 /// fallback.
 /// </summary>
 public static IVideoSourceHandler Create(VideoSource source)
 {
     if (source is FileVideoSource)
     {
         return new FileVideoSourceHandler();
     }

     return source is StreamVideoSource
         ? (IVideoSourceHandler)new StreamVideoSourceHandler()
         : new UriVideoSourceHandler();
 }
Example #19
0
        /// <summary>
        /// Plays the bundled intro clip on the native player; does nothing when
        /// the web player is in use.
        /// </summary>
        internal void PlayIntro()
        {
            if (UseWebPlayer)
            {
                return;
            }

            VideoPlayerView.Source = VideoSource.FromResource("AvanadeStudioIntro.mp4");
            VideoPlayerView.Play();
            this.ForceLayout();

            VideoPlayerView.VideoEnded += VideoPlayerView_IntroVideoEnded;
        }
Example #20
0
        // File-type groups used by btAddInputFile_Click (compared case-insensitively).
        private static readonly string[] StillImageExtensions = { ".BMP", ".JPG", ".JPEG", ".GIF", ".PNG", ".TIF", ".TIFF" };
        private static readonly string[] AudioOnlyExtensions = { ".WAV", ".MP3", ".OGG", ".AAC", ".M4A", ".WMA" };

        // True when ext equals any candidate, ignoring case.
        private static bool IsExtensionOneOf(string ext, string[] candidates)
        {
            foreach (var candidate in candidates)
            {
                if (string.Compare(ext, candidate, StringComparison.OrdinalIgnoreCase) == 0)
                {
                    return true;
                }
            }

            return false;
        }

        /// <summary>
        /// Adds the files picked in the open dialog to the editor timeline:
        /// still images (2 seconds each), audio-only files, or video files
        /// (their video and audio streams are both added).
        /// </summary>
        private async void btAddInputFile_Click(object sender, EventArgs e)
        {
            if (OpenDialog1.ShowDialog() != DialogResult.OK)
            {
                return;
            }

            VideoEdit1.Video_FrameRate = Convert.ToDouble(cbFrameRate.Text);

            // Resize target; 0x0 means "keep the source size".
            int customWidth  = 0;
            int customHeight = 0;
            if (cbResize.Checked)
            {
                customWidth  = Convert.ToInt32(edWidth.Text);
                customHeight = Convert.ToInt32(edHeight.Text);
            }

            foreach (var s in OpenDialog1.FileNames)
            {
                lbFiles.Items.Add(s);

                // Fix: the former per-extension string.Compare chains are replaced by
                // table lookups — same extensions, same OrdinalIgnoreCase comparison.
                var ext = GetFileExt(s);
                if (IsExtensionOneOf(ext, StillImageExtensions))
                {
                    await VideoEdit1.Input_AddImageFileAsync(s, TimeSpan.FromMilliseconds(2000), null, VideoEditStretchMode.Letterbox, 0, customWidth, customHeight);
                }
                else if (IsExtensionOneOf(ext, AudioOnlyExtensions))
                {
                    var audioFile = new AudioSource(s, null, null, string.Empty, 0, 1.0);
                    await VideoEdit1.Input_AddAudioFileAsync(audioFile, null, 0);
                }
                else
                {
                    // Anything else is treated as video: add both its streams.
                    var audioFile = new AudioSource(s, null, null, s, 0, 1.0);
                    var videoFile = new VideoSource(s, null, null, VideoEditStretchMode.Letterbox, 0, 1.0);

                    await VideoEdit1.Input_AddVideoFileAsync(videoFile, null, 0, customWidth, customHeight);

                    await VideoEdit1.Input_AddAudioFileAsync(audioFile, null, 0);
                }
            }
        }
Example #21
0
        /// <summary>
        /// Stops playback: signals the player to stop, stops and releases the
        /// video source, and records the stopped state.
        /// </summary>
        public virtual void Stop()
        {
            if (Player == null)
            {
                return;
            }

            Player.SignalToStop();
            VideoSource?.Stop();
            VideoSource = null;
            PlayerState = Enums.PlayerStates.Stop;
        }
Example #22
0
        /// <summary>
        /// Creates the local video track: builds a surface-texture helper, wires the
        /// capturer into a new video source, starts capture, and attaches the local sink.
        /// </summary>
        /// <param name="videoCapturer">The capturer that will feed the track.</param>
        /// <returns>The enabled local video track.</returns>
        private VideoTrack CreateVideoTrack(IVideoCapturer videoCapturer)
        {
            _surfaceTextureHelper = SurfaceTextureHelper.Create("CaptureThread", _eglBase.EglBaseContext);

            // NOTE(review): the boolean passed to CreateVideoSource is 'true' — in the
            // WebRTC factory API this flag typically marks a screencast source; confirm
            // that is the intent here.
            _videoSource = _factory.CreateVideoSource(true);
            videoCapturer.Initialize(_surfaceTextureHelper, _context, _videoSource.CapturerObserver);
            videoCapturer.StartCapture(VideoWidth, VideoHeight, 30); // 30 fps

            _localVideoTrack = _factory.CreateVideoTrack("ARDAMSv0", _videoSource);
            _localVideoTrack.SetEnabled(true);
            _localVideoTrack.AddSink(_localVideoSink);
            return(_localVideoTrack);
        }
Example #23
0
        /// <summary>
        /// Reacts to property changes: toggles full screen or reloads the
        /// player source when the anime changes.
        /// </summary>
        protected override void OnPropertyChanged([CallerMemberName] string propertyName = null)
        {
            base.OnPropertyChanged(propertyName);

            switch (propertyName)
            {
            case "IsFullScreen":
                ToggleFullScreen();
                break;

            case "Anime":
                VideoPlayer.Source = VideoSource.FromUri(Anime.Links);
                break;
            }
        }
Example #24
0
        // Pauses the live video feed while a modal camera-details dialog is shown,
        // then resumes the feed when the dialog closes.
        public void OpenCameraDetailsHandler(object param)
        {
            VideoSource.SignalToStop();
            var cameraDetail = new CameraDetailsView();

            // NOTE(review): CloseAction is read from the view's *current* DataContext
            // before the new one is assigned — this only works if CameraDetailsView
            // sets its own DataContext in its constructor; otherwise the cast throws.
            // Confirm against CameraDetailsView.
            cameraDetail.DataContext = new CameraDetailsViewModel(param as string)
            {
                CloseAction = ((CameraDetailsViewModel)cameraDetail.DataContext).CloseAction
            };

            cameraDetail.ShowDialog();
            VideoSource.Start();
        }
    //------------------------------------------------------------------------------------
    //------------------------------------------------------------------------------------
    /// <summary>
    /// Enumerates the system's video input devices and registers each one as a
    /// VideoSource.
    /// </summary>
    public DeviceManager()
    {
        m_DevicesCollection = new FilterInfoCollection(FilterCategory.VideoInputDevice);

        // Fix: the former null check on a freshly constructed object was dead code —
        // a constructor can never yield null.
        for (int i = 0; i < m_DevicesCollection.Count; ++i)
        {
            var device = m_DevicesCollection[i];
            m_VideoSources.Add(new VideoSource(device.Name, device.MonikerString));
        }
    }
        /// <summary>
        /// Builds the player state machine; every state shares the single video
        /// source and the supplied frame display. Playback begins in the
        /// initial state.
        /// </summary>
        public PlayerStateController(IFrameDisplay frameDisplay)
        {
            _source = new VideoSource();

            InitialState         = new InitialState(this, _source, frameDisplay);
            CurrentState         = InitialState;
            StoppedState         = new StoppedState(this, _source, frameDisplay);
            PlayingState         = new PlayingState(this, _source, frameDisplay);
            PausedState          = new PausedState(this, _source, frameDisplay);
            ReccordingState      = new ReccordingState(this, _source, frameDisplay);
            PauseReccordingState = new PauseReccordingState(this, _source, frameDisplay);
            RewindingState       = new RewindingState(this, _source, frameDisplay);
            ForwardingState      = new ForwardingState(this, _source, frameDisplay);
        }
        /// <summary>
        /// Creates the controller and all of its player states, starting in the
        /// initial state. All states share one video source and display.
        /// </summary>
        public PlayerStateController(IFrameDisplay frameDisplay)
        {
            _source = new VideoSource();

            InitialState = new InitialState(this, _source, frameDisplay);
            CurrentState = InitialState;
            StoppedState = new StoppedState(this, _source, frameDisplay);
            PlayingState = new PlayingState(this, _source, frameDisplay);
            PausedState = new PausedState(this, _source, frameDisplay);
            ReccordingState = new ReccordingState(this, _source, frameDisplay);
            PauseReccordingState = new PauseReccordingState(this, _source, frameDisplay);
            RewindingState = new RewindingState(this, _source, frameDisplay);
            ForwardingState = new ForwardingState(this, _source, frameDisplay);
        }
Example #28
0
        /// <summary>
        /// Adds an already created camera.
        /// </summary>
        /// <param name="camera">The camera to add</param>
        public void AddCamera(VideoSource camera)
        {
            var name = camera.Name;

            lock (m_lockObject)
            {
                // The first camera registered becomes the primary source.
                m_primarySourceName = m_primarySourceName ?? name;

                m_sources.Add(name, camera);
            }
        }
        /// <summary>
        /// Recording state: launches the external "VideoReccorder" process and a
        /// background thread that transfers frames to it. Both run above normal
        /// priority.
        /// </summary>
        public ReccordingState(PlayerStateController playerStateController, VideoSource videoSource, IFrameDisplay frameDisplay) : base(playerStateController, videoSource, frameDisplay)
        {
            _videoTransfert        = new VideoTranfert();
            _process               = Process.Start("VideoReccorder");
            _process.PriorityClass = ProcessPriorityClass.AboveNormal;

            // Start the thread that transfers frames to the recorder process.
            _threadTransfertFrame = new Thread(TransfertToRecorder)
            {
                Priority = ThreadPriority.AboveNormal
            };
            _threadTransfertFrame.Start();
        }
        /// <summary>
        /// Determines whether a source is available on a device with the given
        /// profile, optionally skipping some internal port types entirely.
        /// </summary>
        public static bool IsAvailable(this VideoSource src, DeviceProfile profile, params InternalPortType[] ignorePortTypes)
        {
            if (!src.IsValid())
            {
                return false;
            }

            VideoSourceTypeAttribute sourceType = src.GetAttribute <VideoSource, VideoSourceTypeAttribute>();
            if (ignorePortTypes.Contains(sourceType.PortType))
            {
                return false;
            }

            // Map the port type onto the profile capacity it must fit within.
            switch (sourceType.PortType)
            {
            case InternalPortType.Black:
            case InternalPortType.ColorBars:
                return true;

            case InternalPortType.Auxiliary:
                return sourceType.Me1Index <= profile.Auxiliaries;

            case InternalPortType.ColorGenerator:
                return sourceType.Me1Index <= profile.ColorGenerators;

            case InternalPortType.External:
                return sourceType.Me1Index <= profile.Sources.Count;

            case InternalPortType.MEOutput:
                return sourceType.Me1Index <= profile.MixEffectBlocks;

            case InternalPortType.Mask:
                // Me2Index applies only when a second M/E block exists.
                return profile.MixEffectBlocks > 1
                    ? sourceType.Me2Index <= profile.UpstreamKeys
                    : sourceType.Me1Index <= profile.UpstreamKeys;

            case InternalPortType.MediaPlayerFill:
            case InternalPortType.MediaPlayerKey:
                return sourceType.Me1Index <= profile.MediaPlayers;

            case InternalPortType.SuperSource:
                return sourceType.Me1Index <= profile.SuperSource;

            default:
                Debug.Fail(String.Format("Invalid source type:{0}", sourceType.PortType));
                return false;
            }
        }
Example #31
0
        /// <summary>
        /// Page that plays the video clip for a single digit.
        /// </summary>
        /// <param name="numberInput">The digit to show, "1" through "9".</param>
        public numbers(string numberInput)
        {
            InitializeComponent();
            number = numberInput;

            // Collapses the former nine-branch if/else chain: every branch loaded
            // "Numbers - <digit>.mp4" for its digit. Inputs outside "1".."9" leave
            // the player source unset, exactly as before.
            if (number != null && number.Length == 1 && number[0] >= '1' && number[0] <= '9')
            {
                videoPlayer.Source = VideoSource.FromResource($"Numbers - {number}.mp4");
            }
        }
Example #32
0
        /// <summary>
        /// Media viewer page: one child page per photo, or a single draggable
        /// video player for video / animated-gif statuses.
        /// </summary>
        /// <param name="status">The status whose attached media is displayed.</param>
        /// <param name="inx">Index of the photo to show first (photos only).</param>
        public MediaPage(DataStatus status, int inx = 0)
        {
            viewing = status;
            Style   = (Style)Application.Current.Resources["backgroundStyle"];

            if (status.ExtendMedias[0].Type == "photo")
            {
                // One content page per attached photo.
                foreach (var media in status.ExtendMedias)
                {
                    Children.Add(new ContentPage()
                    {
                        Content = MakeMediaView(media)
                    });
                }
                CurrentPage = Children[inx];
            }
            else
            {
                videoView = new VideoPlayer();
                if (status.ExtendMedias[0].Type == "animated_gif")
                {
                    videoView.IsVideoOnly = true;
                }
                // Drag-to-dismiss: the player follows the pan; releasing it far
                // enough vertically closes the page, otherwise it snaps back.
                var gesture = new PanGestureRecognizer();
                gesture.PanUpdated += (sender, e) =>
                {
                    switch (e.StatusType)
                    {
                    case GestureStatus.Running:
                        videoView.TranslationX = e.TotalX;
                        videoView.TranslationY = e.TotalY;
                        break;

                    case GestureStatus.Completed:
                        // Dismiss threshold: more than 1/8 of the page height.
                        if (Math.Abs(videoView.TranslationY) > (Height / 8))
                        {
                            App.Navigation.RemovePage(this);
                        }
                        videoView.TranslateTo(0, 0);
                        break;
                    }
                };
                videoView.GestureRecognizers.Add(gesture);
                Children.Add(new ContentPage()
                {
                    Content = videoView
                });
                videoView.Source = VideoSource.FromUri(PickVideoVariant(status.ExtendMedias[0].Video.Variants).URL);
            }
        }
Example #33
0
        /// <summary>
        /// Handles an RTSP OPTIONS request: resolves the video source for the
        /// requested URL, announces the connection, and replies to the client.
        /// </summary>
        private void RTSP_ProcessOptionsRequest(RtspRequestOptions message, RtspListener listener)
        {
            var requested_url = message.RtspUri.ToString();
            _logger.Info($"Connection {listener.ConnectionId} requested for url: {requested_url}");

            // The requested URL determines which video source feeds this connection.
            _videoSource = _requestUrlVideoSourceResolverStrategy.ResolveVideoSource(requested_url);
            OnConnectionAdded?.Invoke(Id, _videoSource);

            // Build and send the reply to OPTIONS.
            var optionsResponse = message.CreateResponse(_logger);
            listener.SendMessage(optionsResponse);
        }
Example #34
0
        //SEARCH: RENDER STEREO FRAME TO VIDEO FORM
        private void StereoStreamFrameRender(VideoSource.StereoFrameSequenceElement stereoFrame)
        {
            this.UpdateCurPrevMEMSOrient();
            if (!stereoFrame.IsNotFullFrame)
            {
                var leftImg = new Image<Bgr, byte>(stereoFrame.LeftRawFrame);
                var rightImg = new Image<Bgr, byte>(stereoFrame.RightRawFrame);
                Bitmap stuff1Bmp = null;

                if (this.useCalibratedStereoRenderCheckBox.Checked)
                {
                    if (this.StereoCameraParams != null)
                    {
                        var tmpLeft = this.StereoCameraParams.LeftIntrinsicCameraParameters.Undistort(leftImg);
                        var tmpRight = this.StereoCameraParams.RightIntrinsicCameraParameters.Undistort(rightImg);

                        CvInvoke.cvRemap(tmpLeft, leftImg, this.StereoCameraParams.LeftMapX, this.StereoCameraParams.LeftMapY, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));
                        CvInvoke.cvRemap(tmpRight, rightImg, this.StereoCameraParams.RightMapX, this.StereoCameraParams.RightMapY, (int)INTER.CV_INTER_LINEAR | (int)WARP.CV_WARP_FILL_OUTLIERS, new MCvScalar(0));

                        //Image<Gray, short> dispImg;
                        //var points = this.Get3DFeatures(this.StereoCameraParams, stereoFrame, out dispImg);
                        //var centroid = this.GetPoint3DCloudCentroid(points);
                        //Console.WriteLine("Centr: {0}, {1}, {2};", centroid.x, centroid.y, centroid.z);
                        //this.videoForm.RenderStereoFrame(dispImg.ToBitmap(), null);
                    }

                }

                var leftGrayImg = leftImg.Convert<Gray, byte>();
                var rightGrayImg = rightImg.Convert<Gray, byte>();

                Bitmap leftFrameRender;
                Bitmap rightFrameRender;

                if (this.renderGrayCheckBox.Checked)
                {
                    leftFrameRender = new Bitmap(leftGrayImg.ToBitmap());
                    rightFrameRender = new Bitmap(rightGrayImg.ToBitmap());
                }
                else
                {
                    leftFrameRender = new Bitmap(leftImg.ToBitmap());
                    rightFrameRender = new Bitmap(rightImg.ToBitmap());
                }

                if (this.showDepthMapCheckBox.Checked)
                {
                    //var features = this.opticFlowProcessor.GetFeaturesToTrack(
                    //    stereoFrame: frame,
                    //    useGpu: true);
                    var depthMap = this.opticFlowProcessor.GetDispMap(leftGrayImg, rightGrayImg, this.useGPUCheckBox.Checked, this.GetParametersForStereoMapSolver(this.useGPUCheckBox.Checked));
                    stuff1Bmp = depthMap.ToBitmap();

                    //update frame
                    this.prevStereoDepthFrame = this.currStereoDepthFrame;
                    this.currStereoDepthFrame = new DataSource.OpticFlowFrameContainer()
                    {
                        DepthMapImg = depthMap,
                        StereoFrame = new VideoSource.StereoFrameSequenceElement(stereoFrame)
                    };
                    ////
                }

                //try to use odometry
                if (this.perfOdometryCheckBox.Checked)
                {
                    if (this.StereoCameraParams != null)
                    {
                        var rotMatrix = new Matrix<double>(3, 3);
                        rotMatrix.SetIdentity();
                        var rotMatr = Utils.CvHelper.MatrixToArray(rotMatrix);
                        rotMatrix.Dispose();
                        if (this.prevMEMSRotMatr != null && this.currentMEMSRotMatr != null)
                        {
                            rotMatr = this.OrientationCalc.GetRotationMatrixBetweenTwoStates(this.prevMEMSRotMatr, this.currentMEMSRotMatr, this.orientCalibMatrix);
                        }
                        List<PointF> currFreatures;
                        List<PointF> prevFeatures;
                        Matrix<double> resRotation;
                        var featuresToTrackParams = this.GetVisualOdometerFeaturesToTrackParams();
                        var featuresOpticFlowParams = this.GetVisualOdometerFeaturesOpticFlowParams();
                        var disparitiesParams = this.GetVisualOdometerDisparitiesParams();
                        var tDiff = this.visualOdometer.GetTranslationAndRotation(
                            rotMatrArray: rotMatr,
                            prevFrame: this.prevStereoDepthFrame,
                            currFrame: this.currStereoDepthFrame,
                            cameraParams: this.StereoCameraParams,
                            currFeaturesList: out currFreatures,
                            prevFeaturesList: out prevFeatures,
                            resRotation: out resRotation,
                            featuresToTrackParams: featuresToTrackParams,
                            featuresOpticFlowParams: featuresOpticFlowParams,
                            disparitiesParams: disparitiesParams
                            );

                        if (resRotation != null)
                        {
                            this.svdDiffRotMatrix = resRotation.Mul(this.svdDiffRotMatrix);
                        }

                        if (tDiff != null)
                        {
                            if (!(double.IsNaN(tDiff.Value.x) || double.IsNaN(tDiff.Value.y) || double.IsNaN(tDiff.Value.z)))
                            {
                                this.position3d.x += tDiff.Value.x;
                                this.position3d.y += tDiff.Value.y;
                                this.position3d.z += tDiff.Value.z;

                                this.RenderTranslatoin(this.position3d);
                            }
                            Console.WriteLine("TRANSLATION: X={0}; Y={1}; Z={2}", tDiff.Value.x, tDiff.Value.y, tDiff.Value.z);
                            Console.WriteLine("POSITION: X={0}; Y={1}; Z={2}", position3d.x, position3d.y, position3d.z);
                        }
                        if (this.renderFraturesCheckBox.Checked)
                        {
                            if (currFreatures != null && prevFeatures != null)
                            {
                                var dotSize = new Size(10, 10);
                                var g = Graphics.FromImage(leftFrameRender);
                                for (int i = 0; i < currFreatures.Count; ++i)
                                {
                                    g.DrawEllipse(Pens.Red, currFreatures[i].X - dotSize.Width / 2, currFreatures[i].Y - dotSize.Height / 2, dotSize.Width, dotSize.Height);
                                    g.DrawLine(Pens.Red, currFreatures[i], prevFeatures[i]);
                                }
                            }
                        }
                    }
                }
                ////
                this.memsRenderForm.Invoke((MethodInvoker)delegate { this.RenderOrientationTransformation(Utils.CvHelper.MatrixToArray(this.svdDiffRotMatrix)); });
                //general lr render
                this.videoForm.RenderStereoFrame(leftFrameRender, rightFrameRender);

                if (stuff1Bmp != null)
                {
                    this.videoForm.RenderToStuffPictureBox1(stuff1Bmp);
                }
                stereoFrame.Dispose();
            }
        }
 /// <summary>
 /// Player state representing a paused recording session.
 /// All behavior is inherited from the base state; this constructor only
 /// forwards its dependencies to the base class.
 /// (NOTE(review): class name misspells "Recording" — renaming would break callers.)
 /// </summary>
 /// <param name="playerStateController">Controller that manages state transitions.</param>
 /// <param name="videoSource">Video source the player operates on.</param>
 /// <param name="frameDisplay">Display used to render frames.</param>
 public PauseReccordingState(PlayerStateController playerStateController, VideoSource videoSource, IFrameDisplay frameDisplay)
     : base(playerStateController, videoSource, frameDisplay)
 {
 }
 /// <summary>
 /// Makes the video source at the given index the active one and notifies
 /// it that its tab has been selected.
 /// </summary>
 /// <param name="num">Index of the source in the internal source collection.</param>
 private void SetActiveTab(int num)
 {
     var selected = (VideoSource)_videoSources[num];
     activeVideoSource = selected;
     selected.TabSelected();
 }
Example #37
0
        /// <summary>
        /// Handler for the "MJPEG" menu item: prompts for the URL of an MJPEG
        /// stream, maintains the recent-URL MRU list, and creates, registers
        /// and starts a new video source for the chosen stream.
        /// </summary>
        private void mjpegToolStripMenuItem_Click(object sender, EventArgs e)
        {
            URLForm form = new URLForm();
            form.Description = "Enter URL of an updating JPEG from a web camera";

            //Load recent URLs
            String[] urls = new String[recentURLs.Count];
            recentURLs.CopyTo(urls, 0);
            form.URLs = urls;

            form.StartPosition = FormStartPosition.CenterScreen;
            if (form.ShowDialog(this) == DialogResult.OK)
            {
                //remove existing item (so it will be placed at top of list)
                if (recentURLs.Contains(form.URL))
                {
                    recentURLs.Remove(form.URL);
                }
                //update recent URLs: drop the oldest entry once the limit is reached
                if (recentURLs.Count == RECENT_URL_LIMIT)
                {
                    recentURLs.RemoveAt(RECENT_URL_LIMIT - 1);
                }
                recentURLs.Add(form.URL);

                //open the stream
                String sourceName = showGetSourceNameBox();
                if (sourceName != null) //user didn't select cancel
                {
                    MJPEGStream s = new MJPEGStream();
                    s.Source = form.URL;
                    Log.info(String.Format("Video source: {0}", s.Source));
                    VideoSource v = new VideoSource(sourceName, s);
                    sources.Add(v);
                    v.RemoveSelected += new VideoSource.RemoveEventHandler(r_RemoveSelected);
                    //FIX: register the source's context menu in the Sources menu,
                    //matching cameraToolStripMenuItem_Click; previously MJPEG
                    //sources never appeared in that menu.
                    sourcesToolStripMenuItem.DropDown.Items.Add(v.ContextMenu);
                    v.setViewerGlobalStickey(showAllToolStripMenuItem.Checked);
                    globalOptions.updateViewer(v);
                    v.startCamera(); //start camera by default
                }
            }
        }
Example #38
0
        /// <summary>
        /// Handler for the "Camera" menu item: lets the user pick a local
        /// capture device, then creates, registers and starts a video source
        /// for it.
        /// </summary>
        private void cameraToolStripMenuItem_Click(object sender, EventArgs e)
        {
            CaptureDeviceForm form = new CaptureDeviceForm();
            form.StartPosition = FormStartPosition.CenterScreen;

            if (form.ShowDialog(this) != DialogResult.OK)
            {
                return; //user dismissed the device picker
            }

            // create video source for the selected capture device
            VideoCaptureDevice captureDevice = new VideoCaptureDevice();
            captureDevice.Source = form.Device;

            String sourceName = showGetSourceNameBox();
            if (sourceName == null) //user cancelled the name prompt
            {
                return;
            }

            Log.info(String.Format("Video source: {0}", captureDevice.Source));
            VideoSource source = new VideoSource(sourceName, captureDevice);
            sources.Add(source);
            source.RemoveSelected += new VideoSource.RemoveEventHandler(r_RemoveSelected);
            sourcesToolStripMenuItem.DropDown.Items.Add(source.ContextMenu);
            source.setViewerGlobalStickey(showAllToolStripMenuItem.Checked);
            globalOptions.updateViewer(source);
            source.startCamera(); //start camera by default
        }
Example #39
0
        /// <summary>
        /// Handles a new stereo frame from the stream provider. A frame that
        /// arrives while the previous one is still being rendered is dropped
        /// so rendering does not fall behind the stream.
        /// </summary>
        /// <remarks>
        /// NOTE(review): the re-entrancy flag is a plain bool, so this drop
        /// logic is only reliable if the event always fires on one thread —
        /// confirm the provider's threading model.
        /// </remarks>
        void StereoVideoStreamProvider_NewStereoFrameEvent(object sender, VideoSource.NewStereoFrameEventArgs e)
        {
            if (this.isNewStereoFrameInProcess)
            {
                return; //drop: a frame is already being processed
            }
            this.isNewStereoFrameInProcess = true;

            try
            {
                if (this.StereoVideoStreamProvider.IsFunctioning())
                {
                    this.StereoStreamFrameRender(e.NewStereoFrame);
                }
            }
            finally
            {
                //FIX: always clear the flag, even when rendering throws;
                //previously an exception left it set and every subsequent
                //frame was silently dropped forever.
                this.isNewStereoFrameInProcess = false;
            }
        }
Example #40
0
 /// <summary>
 /// Base class for player states driven by a multimedia timer. Creates a
 /// periodic timer and the callback delegate that forwards timer ticks to
 /// <c>Tick</c>.
 /// </summary>
 protected TimerState(PlayerStateController playerStateController, VideoSource videoSource, IFrameDisplay frameDisplay)
     : base(playerStateController, videoSource, frameDisplay)
 {
     _timeProc = new MultimediaTimer.TimeProc(Tick);
     var timer = new MultimediaTimer();
     timer.Mode = TimerMode.Periodic;
     _timer = timer;
 }
Example #41
0
        /// <summary>
        /// Adds an <see cref="IVideoSource"/> to the database. If an <see cref="IVideoSource"/>
        /// with the same path ID already exists, it is updated with the new settings instead
        /// (and only written back when at least one field actually changed).
        /// </summary>
        /// <param name="newSource"><see cref="EditableVideoSource"/> containing the new source data.</param>
        public async Task AddSourceAsync(EditableVideoSource newSource)
        {
            // Lazily initialize the connection on first use.
            if (!dbConnection.IsInitialized)
            {
                await dbConnection.InitializeAsync();
            }

            // Make sure the path row exists, then resolve its ID.
            await AddPathAsync(newSource.Path);
            var pathId = await GetPathIdAsync(newSource.Path);

            var source = new VideoSource
            {
                PathId = pathId,
                Name = newSource.Name,
                ContentType = newSource.ContentType,
                InfoSource = newSource.InfoSource,
                NoUpdate = newSource.NoUpdate
            };

            var existing = await dbConnection.FindAsync<VideoSource>(sm => sm.PathId == source.PathId);
            if (existing == null)
            {
                // No record for this path yet — insert a fresh one.
                await dbConnection.InsertAsync(source);
                return;
            }

            // Only touch the database when something actually changed.
            bool changed = existing.Name != source.Name
                || existing.ContentType != source.ContentType
                || existing.InfoSource != source.InfoSource
                || existing.NoUpdate != source.NoUpdate;
            if (changed)
            {
                existing.Name = source.Name;
                existing.ContentType = source.ContentType;
                existing.InfoSource = source.InfoSource;
                existing.NoUpdate = source.NoUpdate;
                await dbConnection.UpdateAsync(existing);
            }
        }
Example #42
0
 /// <summary>
 /// Selects the video at the given index: remembers the index and caches the
 /// corresponding entry from <c>Videos</c>.
 /// </summary>
 /// <param name="v">Index into the <c>Videos</c> collection.</param>
 public void SetVideo(int v)
 {
     // Store the index first, then look up the entry (original order kept so
     // the state after an out-of-range index is unchanged).
     a = v;
     video = Videos[v];
 }
Example #43
0
 /// <summary>
 /// Pushes the global viewer options onto a video source, but only when that
 /// source has opted in to using the global settings.
 /// </summary>
 /// <param name="v">Video source whose options should be synchronized.</param>
 public void updateViewer(VideoSource v)
 {
     if (!v.Options.UseGlobal)
     {
         return; //source keeps its own per-viewer settings
     }

     v.ViewerOpacity = globalOpacity;
     v.Options.EnableRecording = globalEnableRecording;
     v.Options.RecordFolder = globalRecordFolder;
     v.Options.EnableAlertSound = globalEnableAlertSound;
     v.Options.AlertSoundFile = globalAlertSoundFile;
     v.Options.Codec = globalCodec;
     v.Options.EnableAlwaysShow = globalEnableAlwaysShow;
     v.Options.EnableMotionAlert = globalEnableMotionAlert;
     v.Options.DetectorType = globalDetectorType;
 }
Example #44
0
 /// <summary>
 /// Common base for player states; stores the collaborators every state
 /// needs to operate.
 /// </summary>
 protected BaseState(PlayerStateController playerStateController, VideoSource videoSource, IFrameDisplay frameDisplay)
 {
     // Plain field captures; the assignments are independent of each other.
     _frameDisplay = frameDisplay;
     _videoSource = videoSource;
     _playerStateController = playerStateController;
 }
Example #45
0
 /// <summary>
 /// Player state representing rewinding playback. All behavior is inherited
 /// from the base state; this constructor only forwards its dependencies.
 /// </summary>
 /// <param name="playerStateController">Controller that manages state transitions.</param>
 /// <param name="videoSource">Video source the player operates on.</param>
 /// <param name="frameDisplay">Display used to render frames.</param>
 public RewindingState(PlayerStateController playerStateController, VideoSource videoSource, IFrameDisplay frameDisplay)
     : base(playerStateController, videoSource, frameDisplay)
 {
 }
Example #46
0
 /// <summary>
 /// Handles a new stereo frame from the MEMS data provider by forwarding it
 /// straight to the stereo render pipeline.
 /// NOTE(review): unlike the stream-provider handler, this one has no
 /// re-entrancy guard — confirm frames cannot arrive faster than they render.
 /// </summary>
 void StereoMEMSDataProvider_NewStereoFrameEvent(object sender, VideoSource.NewStereoFrameEventArgs e)
 {
     this.StereoStreamFrameRender(e.NewStereoFrame);
 }
Example #47
0
 /// <summary>
 /// Parse all the values that are shared between shows and movies:
 /// video quality, video source, container, video/audio codec, and the
 /// "sample" marker. Each recognized token is recorded in <c>Index</c>.
 /// </summary>
 /// <param name="input">Input string</param>
 /// <param name="dir">Switch whenever it is a directory name.</param>
 private void ParseShared(String input, Boolean dir = false)
 {
     String inputCl = helperDictionary.CleanFileName(input);
     Int32 TmpStart;
     String TmpString;
     #region videoQuality
     if (videoQuality == VideoQuality.Unknown)
     {
         TmpString = Check(inputCl, helperDictionary.VideoQualityStrings, out TmpStart);
         videoQuality = helperDictionary.StrToVideoQuality(TmpString);
         if (TmpString.Length > 0)
             Index.Add(new StringLocation(TmpString, TmpStart, TmpString.Length, true, dir));
     }
     #endregion
     #region videoSource
     if (videoSource == VideoSource.Unknown)
     {
         TmpString = Check(inputCl, helperDictionary.VideoSourceStrings, out TmpStart);
         videoSource = helperDictionary.StrToVideoSource(TmpString);
         if (TmpString.Length > 0)
             Index.Add(new StringLocation(TmpString, TmpStart, TmpString.Length, true, dir));
     }
     #endregion
     #region container
     //FIX: use short-circuiting && instead of the non-short-circuit & operator.
     if (container == Container.Unknown && !dir)
     {
         //The container comes from the file extension alone; it is not searched
         //for inside the name, so no Index entry is recorded for it.
         container = helperDictionary.StrToContainer(fileExt);
     }
     #endregion
     #region videoCodec
     if (videoCodec == VideoCodec.Unknown)
     {
         TmpString = Check(inputCl, helperDictionary.VideoCodecStrings, out TmpStart);
         videoCodec = helperDictionary.StrToVideoCodec(TmpString);
         if (TmpString.Length > 0)
             Index.Add(new StringLocation(TmpString, TmpStart, TmpString.Length, true, dir));
     }
     #endregion
     #region audioCodec
     if (audioCodec == AudioCodec.Unknown)
     {
         TmpString = Check(inputCl, helperDictionary.AudioCodecStrings, out TmpStart);
         audioCodec = helperDictionary.StrToAudioCodec(TmpString);
         if (TmpString.Length > 0)
             Index.Add(new StringLocation(TmpString, TmpStart, TmpString.Length, true, dir));
     }
     #endregion
     #region sample
     //Check if our file is a sample: name contains "sample" and size is under 1 GiB.
     if (!sample)
     {
         //FIX: ordinal search — "sample" is an ASCII literal, culture rules must not apply (CA1310).
         TmpStart = inputCl.IndexOf("sample", StringComparison.Ordinal);
         //FIX: use short-circuiting && instead of the non-short-circuit & operator.
         if (TmpStart > -1 && (fileSize < 1024 * 1024 * 1024))
         {
             sample = true;
             Index.Add(new StringLocation("sample", TmpStart, 6, true, dir));
         }
     }
     #endregion
 }
Example #48
0
 /// <summary>
 /// Initial player state before any playback action has been taken. All
 /// behavior is inherited from the base state; this constructor only
 /// forwards its dependencies.
 /// </summary>
 /// <param name="playerStateController">Controller that manages state transitions.</param>
 /// <param name="videoSource">Video source the player operates on.</param>
 /// <param name="frameDisplay">Display used to render frames.</param>
 public InitialState(PlayerStateController playerStateController, VideoSource videoSource, IFrameDisplay frameDisplay)
     : base(playerStateController, videoSource, frameDisplay)
 {
 }
Example #49
0
 /// <summary>
 /// Creates the sending side of the link: wraps the given webcam as a video
 /// input and opens a TCP output to the given endpoint.
 /// </summary>
 /// <param name="webcam">Capture device to read frames from.</param>
 /// <param name="ip">Destination host address.</param>
 /// <param name="poort">Destination port.</param>
 public ShortSide(VideoSource.CaptureDevice webcam, string ip, int poort)
 {
     // Construct in the original order (video first, then socket) so the
     // partially-initialized state on failure is unchanged.
     var input = new VideoInput(webcam);
     var output = new TCPOut(ip, poort);
     video = input;
     socket = output;
 }