Example #1
        private void SetupMarkerTracking(VideoBrush videoBrush)
        {
            IVideoCapture captureDevice = null;

            captureDevice = new PhoneCameraCapture(videoBrush);
            captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480, ImageFormat.B8G8R8A8_32, false);
            ((PhoneCameraCapture)captureDevice).UseLuminance = true;

            if (betterFPS)
            {
                captureDevice.MarkerTrackingImageResizer = new HalfResizer();
            }

            // Add this video capture device to the scene so that it can be used for the marker tracker
            scene.AddVideoCaptureDevice(captureDevice);

            NyARToolkitIdTracker tracker = new NyARToolkitIdTracker();

            if (captureDevice.MarkerTrackingImageResizer != null)
            {
                tracker.InitTracker((int)(captureDevice.Width * captureDevice.MarkerTrackingImageResizer.ScalingFactor),
                                    (int)(captureDevice.Height * captureDevice.MarkerTrackingImageResizer.ScalingFactor),
                                    "camera_para.dat");
            }
            else
            {
                tracker.InitTracker(captureDevice.Width, captureDevice.Height, "camera_para.dat");
            }

            // Set the marker tracker to use for our scene
            scene.MarkerTracker = tracker;
        }
Example #2
 /// <summary>
 /// Adds a camera snapshot capture instance.
 /// </summary>
 /// <param name="videoType">Camera type</param>
 /// <param name="capture">Camera snapshot capture instance</param>
 public void Add(int videoType, IVideoCapture capture)
 {
     lock (_ListLock)
     {
         _videoCaptures.Add(videoType, capture);
     }
 }
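
A read-side counterpart would normally follow the same locking discipline. A minimal sketch, assuming _videoCaptures is a Dictionary<int, IVideoCapture> guarded by the same _ListLock (the TryGet name is hypothetical):

 public bool TryGet(int videoType, out IVideoCapture capture)
 {
     lock (_ListLock)
     {
         // TryGetValue avoids a KeyNotFoundException for unknown camera types.
         return _videoCaptures.TryGetValue(videoType, out capture);
     }
 }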
Example #3
        /// <summary>
        /// Media muxing thread function.
        /// </summary>
        private void MuxingProcess()
        {
            while (videoCaptures.Count > 0 || videoFiles.Count > 0)
            {
                if (videoFiles.Count > 0)
                {
                    string videoFile;
                    lock (videoFiles)
                    {
                        videoFile = videoFiles.Dequeue();
                    }

                    IVideoCapture videoCapture = null;
                    for (int i = 0; i < videoCaptures.Count; i++)
                    {
                        // Check whether this file matches an attached VideoCapture
                        if (videoCaptures[i].GetEncoder().videoSavePath == videoFile)
                        {
                            videoCapture = videoCaptures[i];
                            lock (videoCaptures)
                            {
                                videoCaptures.RemoveAt(i);
                            }
                            break; // Stop scanning once the matching capture has been removed
                        }
                    }

                    if (videoCapture == null)
                    {
                        // Skip if no matching VideoCapture was found
                        continue;
                    }

                    // Start muxing process
                    if (!StartMux(videoCapture))
                    {
                        // Skip if muxing did not succeed
                        continue;
                    }
                }
                else
                {
                    muxReady.WaitOne();
                }
            }

            // Clean up the audio file
            if (File.Exists(audioFile))
            {
                File.Delete(audioFile);
                audioFile = null;
            }
            //if (File.Exists(audioFile2))
            //{
            //  File.Delete(audioFile2);
            //  audioFile2 = null;
            //}

            muxInitiated = false;
        }
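
The loop above drains the videoFiles queue and parks on the muxReady wait handle when the queue is empty. For comparison, a sketch of the same consumer written with BlockingCollection<string> from System.Collections.Concurrent, which folds the queue, the lock, and the wait handle into a single type; the pendingFiles field and the FindAndDetachCapture helper are hypothetical, and the producer side would call Add instead of enqueueing and signaling:

        private readonly BlockingCollection<string> pendingFiles = new BlockingCollection<string>();

        private void MuxingProcessAlternative()
        {
            // GetConsumingEnumerable blocks until an item arrives and completes
            // once the producer calls pendingFiles.CompleteAdding().
            foreach (string videoFile in pendingFiles.GetConsumingEnumerable())
            {
                IVideoCapture videoCapture = FindAndDetachCapture(videoFile); // hypothetical lookup
                if (videoCapture != null)
                {
                    StartMux(videoCapture);
                }
            }
        }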
Example #4
        private void SetupMarkerTracking(VideoBrush videoBrush)
        {
            IVideoCapture captureDevice = null;

            if (useStaticImage)
            {
                captureDevice = new NullCapture();
                captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._320x240,
                                               ImageFormat.B8G8R8A8_32, false);
                if (useSingleMarker)
                {
                    ((NullCapture)captureDevice).StaticImageFile = "MarkerImageHiro.jpg";
                }
                else
                {
                    ((NullCapture)captureDevice).StaticImageFile = "MarkerImage_320x240";
                }

                scene.ShowCameraImage = true;
            }
            else
            {
                captureDevice = new PhoneCameraCapture(videoBrush);
                captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
                                               ImageFormat.B8G8R8A8_32, false);
                ((PhoneCameraCapture)captureDevice).UseLuminance = true;

                if (betterFPS)
                {
                    captureDevice.MarkerTrackingImageResizer = new HalfResizer();
                }
            }

            // Add this video capture device to the scene so that it can be used for
            // the marker tracker
            scene.AddVideoCaptureDevice(captureDevice);

#if USE_PATTERN_MARKER
            NyARToolkitTracker tracker = new NyARToolkitTracker();
#else
            NyARToolkitIdTracker tracker = new NyARToolkitIdTracker();
#endif

            if (captureDevice.MarkerTrackingImageResizer != null)
            {
                tracker.InitTracker((int)(captureDevice.Width * captureDevice.MarkerTrackingImageResizer.ScalingFactor),
                                    (int)(captureDevice.Height * captureDevice.MarkerTrackingImageResizer.ScalingFactor),
                                    "camera_para.dat");
            }
            else
            {
                tracker.InitTracker(captureDevice.Width, captureDevice.Height, "camera_para.dat");
            }

            // Set the marker tracker to use for our scene
            scene.MarkerTracker = tracker;
        }
Example #5
 public void AttachVideoCapture(IVideoCapture videoCapture)
 {
     if (!videoCaptures.Contains(videoCapture))
     {
         lock (videoCaptures)
         {
             videoCaptures.Add(videoCapture);
         }
     }
 }
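
Note that the Contains check above runs outside the lock, so two threads can both pass the test before either adds, attaching the same capture twice. A minimal variant that closes that window by checking inside the lock:

 public void AttachVideoCapture(IVideoCapture videoCapture)
 {
     lock (videoCaptures)
     {
         // Test-and-add under one lock so no other thread can insert the
         // same capture between the Contains check and the Add call.
         if (!videoCaptures.Contains(videoCapture))
         {
             videoCaptures.Add(videoCapture);
         }
     }
 }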
Example #6
        private void SetupCalibration()
        {
            leftCaptureDevice = new DirectShowCapture2();
            leftCaptureDevice.InitVideoCapture(leftDeviceID, FrameRate._30Hz, Resolution._640x480,
                                               ImageFormat.R8G8B8_24, false);

            // Add left video capture device to the scene for rendering left eye image
            scene.AddVideoCaptureDevice(leftCaptureDevice);

            rightCaptureDevice = new DirectShowCapture2();
            rightCaptureDevice.InitVideoCapture(rightDeviceID, FrameRate._30Hz, Resolution._640x480,
                                                ImageFormat.R8G8B8_24, false);

            // Add right video capture device to the scene for rendering right eye image
            scene.AddVideoCaptureDevice(rightCaptureDevice);

            // Create holders for retrieving the captured video images
            leftImagePtr  = Marshal.AllocHGlobal(leftCaptureDevice.Width * leftCaptureDevice.Height * 3);
            rightImagePtr = Marshal.AllocHGlobal(rightCaptureDevice.Width * rightCaptureDevice.Height * 3);

            // Associate each video device with an eye
            scene.LeftEyeVideoID  = 0;
            scene.RightEyeVideoID = 1;

            scene.ShowCameraImage = true;

            float markerSize = 32.4f;

            // Initialize a marker tracker for tracking a marker array used for calibration
            markerTracker = new ALVARMarkerTracker();
            markerTracker.MaxMarkerError = 0.02f;
            markerTracker.ZNearPlane     = 0.1f;
            markerTracker.ZFarPlane      = 1000;
            markerTracker.InitTracker(leftCaptureDevice.Width, leftCaptureDevice.Height, LEFT_CALIB, markerSize);
            ((StereoCamera)scene.CameraNode.Camera).LeftProjection = markerTracker.CameraProjection;

            // Add another marker detector for tracking right video capture device
            ALVARDllBridge.alvar_add_marker_detector(markerSize, 5, 2);

            ALVARDllBridge.alvar_add_camera(RIGHT_CALIB, rightCaptureDevice.Width, rightCaptureDevice.Height);
            double[] projMat = new double[16];
            double   cameraFovX = 0, cameraFovY = 0;

            ALVARDllBridge.alvar_get_camera_params(1, projMat, ref cameraFovX, ref cameraFovY, 1000, 0.1f);
            ((StereoCamera)scene.CameraNode.Camera).RightProjection = new Matrix(
                (float)projMat[0], (float)projMat[1], (float)projMat[2], (float)projMat[3],
                (float)projMat[4], (float)projMat[5], (float)projMat[6], (float)projMat[7],
                (float)projMat[8], (float)projMat[9], (float)projMat[10], (float)projMat[11],
                (float)projMat[12], (float)projMat[13], (float)projMat[14], (float)projMat[15]);

            // Add a marker array to be tracked
            markerID = markerTracker.AssociateMarker("ALVARGroundArray.xml");

            relativeTransforms = new List <Matrix>();
        }
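
Marshal.AllocHGlobal returns unmanaged memory that the garbage collector never reclaims, so the two image buffers above need an explicit release. A hedged teardown sketch; the ReleaseImageBuffers name and the existence of a suitable cleanup hook are assumptions:

        private void ReleaseImageBuffers()
        {
            // Pair every AllocHGlobal with a FreeHGlobal to avoid a native memory leak.
            if (leftImagePtr != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(leftImagePtr);
                leftImagePtr = IntPtr.Zero;
            }
            if (rightImagePtr != IntPtr.Zero)
            {
                Marshal.FreeHGlobal(rightImagePtr);
                rightImagePtr = IntPtr.Zero;
            }
        }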
Example #7
 public MainViewModel()
 {
     _webCam = (new DefaultVideoCaptureFactory()).CreateVideoCapture(
         DeviceManager.GetVideoInputDevices().First().Name,
         new DefaultVideoResolutionSelector
         {
             MaxWidth    = 1600,
             MinBitCount = 24
         });
     _webCam.CaptureFrame += OnFrameCaptured;
     UpdateCaptureState(_canCapture);
 }
Example #8
 public DebugViewModel(IGame game,
                       IUiRepository repo,
                       ISettings settings,
                       IVideoCapture capture)
 {
     Run            = new DelegateCommand(RunCommand);
     Drag           = new DelegateCommand(ExecuteDrag);
     StartRecording = new DelegateCommand(ExecuteStartRecording);
     EndRecording   = new DelegateCommand(ExecuteEndRecording);
     _repo          = repo;
     _game          = game;
     _settings      = settings;
     _capture       = capture;
 }
Example #9
 public override void ClearSnapShot()
 {
     try
     {
         IVideoCapture capture = VideoCaptureManager.Instance[(int)VideoServerType.DaHua];
         if (capture != null)
         {
             capture.ClearCapture(this.VideoSource);
         }
     }
     catch (Exception ex)
     {
         Ralid.GeneralLibrary.ExceptionHandling.ExceptionPolicy.HandleException(ex);
     }
 }
Example #10
        /// <summary>
        /// Processes the video image captured from an initialized video capture device.
        /// </summary>
        /// <param name="captureDevice">An initialized video capture device</param>
        public void ProcessImage(IVideoCapture captureDevice, IntPtr imagePtr)
        {
            String channelSeq = "";
            int    nChannels  = 1;

            switch (captureDevice.Format)
            {
            case ImageFormat.R5G6B5_16:
            case ImageFormat.R8G8B8_24:
                channelSeq = "RGB";
                nChannels  = 3;
                break;

            case ImageFormat.R8G8B8A8_32:
                channelSeq = "RGBA";
                nChannels  = 4;
                break;

            case ImageFormat.B8G8R8_24:
                channelSeq = "BGR";
                nChannels  = 3;
                break;

            case ImageFormat.B8G8R8A8_32:
                channelSeq = "BGRA";
                nChannels  = 4;
                break;

            case ImageFormat.A8B8G8R8_32:
                channelSeq = "ARGB";
                nChannels  = 4;
                break;
            }

            featureFound = ALVARDllBridge.alvar_detect_feature(0, nChannels, channelSeq, channelSeq,
                                                               imagePtr, MinInlierRatio, MinMappedPoints, ref inlierRatio, ref mappedPoints);

            if (featureFound)
            {
                ALVARDllBridge.alvar_get_feature_pose(poseMats);

                lastMarkerMatrix = new Matrix(
                    (float)poseMats[0], (float)poseMats[1], (float)poseMats[2], (float)poseMats[3],
                    (float)poseMats[4], (float)poseMats[5], (float)poseMats[6], (float)poseMats[7],
                    (float)poseMats[8], (float)poseMats[9], (float)poseMats[10], (float)poseMats[11],
                    (float)poseMats[12], (float)poseMats[13], (float)poseMats[14], (float)poseMats[15]);
            }
        }
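
The format-to-channel mapping above recurs verbatim in Examples #14 and #21 below. A sketch of a shared helper that all three trackers could call; the GetChannelInfo name is hypothetical and not part of the original tracker API:

        private static void GetChannelInfo(ImageFormat format, out int nChannels, out string channelSeq)
        {
            switch (format)
            {
            case ImageFormat.R5G6B5_16:
            case ImageFormat.R8G8B8_24:   nChannels = 3; channelSeq = "RGB";  break;
            case ImageFormat.R8G8B8A8_32: nChannels = 4; channelSeq = "RGBA"; break;
            case ImageFormat.B8G8R8_24:   nChannels = 3; channelSeq = "BGR";  break;
            case ImageFormat.B8G8R8A8_32: nChannels = 4; channelSeq = "BGRA"; break;
            case ImageFormat.A8B8G8R8_32: nChannels = 4; channelSeq = "ARGB"; break;
            default:                      nChannels = 1; channelSeq = "";     break;
            }
        }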
Example #11
        /// <summary>
        /// Video transcode thread function.
        /// </summary>
        private void TranscodeProcess()
        {
            while (videoCaptures.Count > 0 || videoFiles.Count > 0)
            {
                if (videoFiles.Count > 0)
                {
                    string videoFile;
                    lock (videoFiles)
                    {
                        videoFile = videoFiles.Dequeue();
                    }

                    IVideoCapture videoCapture = null;
                    for (int i = 0; i < videoCaptures.Count; i++)
                    {
                        // Check whether this file matches an attached VideoCapture
                        if (videoCaptures[i].GetEncoder().videoSavePath == videoFile)
                        {
                            videoCapture = videoCaptures[i];
                            lock (videoCaptures)
                            {
                                videoCaptures.RemoveAt(i);
                            }
                            break; // Stop scanning once the matching capture has been removed
                        }
                    }

                    if (videoCapture == null)
                    {
                        // Skip if no matching VideoCapture was found
                        continue;
                    }

                    // Start transcode process
                    if (!StartTranscode(videoCapture))
                    {
                        // Skip if transcoding did not succeed
                        continue;
                    }
                }
                else
                {
                    transcodeReady.WaitOne();
                }
            }

            transcodeInitiated = false;
        }
Example #12
        // Start the video/audio muxing process; this is a blocking function
        public bool StartMuxProcess(IVideoCapture videoCapture)
        {
            FFmpegEncoder ffmpegEncoder = videoCapture.GetFFmpegEncoder();
            string        videoSavePath = string.Format("{0}capture_{1}x{2}_{3}_{4}.mp4",
                                                        Config.saveFolder,
                                                        ffmpegEncoder.outputFrameWidth, ffmpegEncoder.outputFrameHeight,
                                                        Utils.GetTimeString(),
                                                        Utils.GetRandomString(5));
            IntPtr nativeAPI = FFmpegEncoder_StartMuxProcess(
                ffmpegEncoder.bitrate,
                videoSavePath,
                ffmpegEncoder.videoSavePath,
                audioSavePath,
                ffmpegPath);

            if (nativeAPI == IntPtr.Zero)
            {
                OnError(EncoderErrorCode.MUXING_FAILED_TO_START);
                return(false);
            }
            // Make sure the merged file was generated
            int waitCount = 0;

            while (!File.Exists(videoSavePath))
            {
                if (waitCount++ < 100)
                {
                    Thread.Sleep(500);
                }
                else
                {
                    OnError(EncoderErrorCode.MUXING_FAILED);
                    FFmpegEncoder_CleanMuxProcess(nativeAPI);
                    return(false);
                }
            }

            FFmpegEncoder_CleanMuxProcess(nativeAPI);
            //Debug.LogFormat(LOG_FORMAT, "Muxing process finish!");

            // Video capture callback
            videoCapture.OnAudioMuxingComplete(videoSavePath);

            return(true);
        }
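
The wait loop above polls every 500 ms for up to 100 tries, roughly 50 seconds in total. A small hypothetical helper that expresses the same wait as a single timeout, so the call site reduces to if (!WaitForFile(videoSavePath, 50000)) { ... }:

        private static bool WaitForFile(string path, int timeoutMs, int pollMs = 500)
        {
            // Poll until the file appears or the timeout elapses.
            for (int waited = 0; waited < timeoutMs; waited += pollMs)
            {
                if (File.Exists(path))
                {
                    return true;
                }
                Thread.Sleep(pollMs);
            }
            return File.Exists(path);
        }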
Example #13
        private void SetupMarkerTracking()
        {
            IVideoCapture captureDevice = null;

            if (useStaticImage)
            {
                captureDevice = new NullCapture();
                captureDevice.InitVideoCapture(1, FrameRate._30Hz, Resolution._800x600,
                                               ImageFormat.R8G8B8_24, false);
                ((NullCapture)captureDevice).StaticImageFile = "MarkerImage";
            }
            else
            {
                // Create our video capture device that uses DirectShow library. Note that
                // the combinations of resolution and frame rate that are allowed depend on
                // the particular video capture device. Thus, setting incorrect resolution
                // and frame rate values may cause exceptions or simply be ignored, depending
                // on the device driver. The values set here work for a Microsoft VX 6000
                // and many other webcams.
                captureDevice = new DirectShowCapture2();
                captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
                                               ImageFormat.R8G8B8_24, false);
            }

            // Add this video capture device to the scene so that it can be used for
            // the marker tracker
            scene.AddVideoCaptureDevice(captureDevice);

            IMarkerTracker tracker = null;


            // Create an optical marker tracker that uses ALVAR library
            tracker = new ALVARMarkerTracker();
            ((ALVARMarkerTracker)tracker).MaxMarkerError = 0.02f;
            tracker.InitTracker(captureDevice.Width, captureDevice.Height, "calib.xml", 9.0);

            // Set the marker tracker to use for our scene
            scene.MarkerTracker = tracker;

            // Display the camera image in the background. Note that this parameter should
            // be set after adding at least one video capture device to the Scene class.
            scene.ShowCameraImage = true;
        }
Example #14
        /// <summary>
        /// Processes the video image captured from an initialized video capture device.
        /// </summary>
        /// <param name="captureDevice">An initialized video capture device</param>
        public void ProcessImage(IVideoCapture captureDevice, IntPtr imagePtr)
        {
            String channelSeq = "";
            int    nChannels  = 1;

            switch (captureDevice.Format)
            {
            case ImageFormat.R5G6B5_16:
            case ImageFormat.R8G8B8_24:
                channelSeq = "RGB";
                nChannels  = 3;
                break;

            case ImageFormat.R8G8B8A8_32:
                channelSeq = "RGBA";
                nChannels  = 4;
                break;

            case ImageFormat.B8G8R8_24:
                channelSeq = "BGR";
                nChannels  = 3;
                break;

            case ImageFormat.B8G8R8A8_32:
                channelSeq = "BGRA";
                nChannels  = 4;
                break;

            case ImageFormat.A8B8G8R8_32:
                channelSeq = "ARGB";
                nChannels  = 4;
                break;
            }

            int interestedMarkerNums = singleMarkerIDs.Count;
            int foundMarkerNums      = 0;

            ALVARDllBridge.alvar_detect_marker(detectorID, cameraID, nChannels, channelSeq, channelSeq,
                                               imagePtr, singleMarkerIDsPtr, ref foundMarkerNums, ref interestedMarkerNums,
                                               max_marker_error, max_track_error);

            Process(interestedMarkerNums, foundMarkerNums);
        }
Example #15
        /// <summary>
        /// Captures a snapshot image.
        /// </summary>
        /// <param name="path">Path of the saved snapshot image</param>
        /// <param name="timeout">Timeout in milliseconds</param>
        /// <returns>True if the snapshot succeeded</returns>
        public override bool SnapShotTo(ref string path, int timeout, bool force)
        {
            bool success = false;

            try
            {
                //lock (_StatusLock)
                //{
                //    if (video.Image != null)
                //    {
                //        Ralid.GeneralLibrary.ImageHelper.SaveImage(video.Image, path);
                //        success = true;
                //    }
                //}

                if (_IsReadyForSnapshot)
                {
                    // Snapshot taken while the video is not open
                    IVideoCapture capture = VideoCaptureManager.Instance[(int)VideoServerType.XinLuTong];
                    if (capture != null)
                    {
                        path    = capture.CapturePicture(this.VideoSource, force);
                        success = !string.IsNullOrEmpty(path);
                    }
                }
                else
                {
                    // Snapshot taken while the video is open
                    if (video.Image != null)
                    {
                        Ralid.GeneralLibrary.ImageHelper.SaveImage(video.Image, path);
                        success = true;
                    }
                }
            }
            catch (Exception ex)
            {
                Ralid.GeneralLibrary.ExceptionHandling.ExceptionPolicy.HandleException(ex);
            }
            return(success);
        }
Example #16
        private void SetupCalibration()
        {
            if (useImageSequence)
            {
                captureDevice = new NullCapture();
            }
            else
            {
                captureDevice = new DirectShowCapture();
            }
            captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
                                           ImageFormat.R8G8B8_24, false);
            if (useImageSequence)
            {
                imageNames = Directory.GetFiles(imageDirectory);

                if (imageNames != null && imageNames.Length > 0)
                {
                    ((NullCapture)captureDevice).StaticImageFile = imageNames[0];
                }
                else
                {
                    MessageBox.Show("No images are found in " + imageDirectory + " for static image calibration");
                    this.Exit();
                }
            }

            // Add this video capture device to the scene so that it can be used for
            // the marker tracker
            scene.AddVideoCaptureDevice(captureDevice);

            imagePtr = Marshal.AllocHGlobal(captureDevice.Width * captureDevice.Height * 3);

            scene.ShowCameraImage = true;

            // Initialize the ALVAR camera
            ALVARDllBridge.alvar_init();
            ALVARDllBridge.alvar_add_camera(null, captureDevice.Width, captureDevice.Height);
        }
Example #17
        private void SetupMarkerTracking(VideoBrush videoBrush)
        {
            IVideoCapture captureDevice = null;

            if (useStaticImage)
            {
                captureDevice = new NullCapture();
                captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._320x240,
                                               ImageFormat.B8G8R8A8_32, false);
                if (useSingleMarker)
                {
                    ((NullCapture)captureDevice).StaticImageFile = "MarkerImageHiro.jpg";
                }
                else
                {
                    ((NullCapture)captureDevice).StaticImageFile = "MarkerImage_320x240";
                }

                scene.ShowCameraImage = true;
            }
            else
            {
                captureDevice = new PhoneCameraCapture(videoBrush);
                captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
                                               ImageFormat.B8G8R8A8_32, false);
                ((PhoneCameraCapture)captureDevice).UseLuminance = true;

                if (betterFPS)
                {
                    captureDevice.MarkerTrackingImageResizer = new HalfResizer();
                }
            }

            // Add this video capture device to the scene so that it can be used for
            // the marker tracker
            scene.AddVideoCaptureDevice(captureDevice);

#if USE_PATTERN_MARKER
            NyARToolkitTracker tracker = new NyARToolkitTracker();
#else
            NyARToolkitIdTracker tracker = new NyARToolkitIdTracker();
#endif

            if (captureDevice.MarkerTrackingImageResizer != null)
            {
                tracker.InitTracker((int)(captureDevice.Width * captureDevice.MarkerTrackingImageResizer.ScalingFactor),
                                    (int)(captureDevice.Height * captureDevice.MarkerTrackingImageResizer.ScalingFactor),
                                    "camera_para.dat");
            }
            else
            {
                tracker.InitTracker(captureDevice.Width, captureDevice.Height, "camera_para.dat");
            }

            // Set the marker tracker to use for our scene
            scene.MarkerTracker = tracker;

#if USE_PATTERN_MARKER
            if (useSingleMarker)
            {
                groundMarkerNode = new MarkerNode(scene.MarkerTracker, "patt.hiro", 16, 16, markerSize, 0.7f);
            }
            else
            {
                groundMarkerNode = new MarkerNode(scene.MarkerTracker, "NyARToolkitGroundArray.xml",
                                                  NyARToolkitTracker.ComputationMethod.Average);
            }
#else
            //groundMarkerNode = new MarkerNode(scene.MarkerTracker, "CurveballArray.xml",
            //    NyARToolkitTracker.ComputationMethod.Average);
            groundMarkerNode = new MarkerNode(scene.MarkerTracker, "NyARToolkitIDGroundArray.xml",
                                              NyARToolkitTracker.ComputationMethod.Average);
#endif
            scene.RootNode.AddChild(groundMarkerNode);
        }
Example #18
 public void AttachVideoCapture(IVideoCapture videoCapture)
 {
     videoCaptures.Add(videoCapture);
 }
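
Unlike Example #5, this overload adds without taking a lock or checking for duplicates, so it is only safe when a single thread attaches captures; otherwise the locked variant sketched after Example #5 is the safer pattern.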
Example #19
        // Start the video/audio muxing process; this is a blocking function
        private bool StartMux(IVideoCapture videoCapture)
        {
            EncoderBase encoder       = videoCapture.GetEncoder();
            string      videoSavePath = string.Format("{0}capture_{1}x{2}_{3}_{4}.{5}",
                                                      saveFolderFullPath,
                                                      encoder.outputFrameWidth, encoder.outputFrameHeight,
                                                      Utils.GetTimeString(),
                                                      Utils.GetRandomString(5),
                                                      Utils.GetEncoderPresetExt(encoder.encoderPreset));

            if (customFileName != null)
            {
                videoSavePath = string.Format("{0}{1}.{2}",
                                              saveFolderFullPath,
                                              customFileName,
                                              Utils.GetEncoderPresetExt(encoder.encoderPreset));
            }

            // Make sure the merged file was generated
            int waitCount = 0;

            while (!File.Exists(videoSavePath) && waitCount++ < 10)
            {
                Thread.Sleep(1000);
                IntPtr nativeAPI = IntPtr.Zero;
                if (audioFile2 != null)
                {
                    nativeAPI = FFmpegEncoder_StartMuxingProcess2(
                        encoder.encoderPreset,
                        encoder.bitrate,
                        verticalFlip,
                        horizontalFlip,
                        videoSavePath,
                        encoder.videoSavePath,
                        audioFile,
                        audioFile2,
                        ffmpegFullPath,
                        false);
                }
                else
                {
                    nativeAPI = FFmpegEncoder_StartMuxingProcess(
                        encoder.encoderPreset,
                        encoder.bitrate,
                        verticalFlip,
                        horizontalFlip,
                        videoSavePath,
                        encoder.videoSavePath,
                        audioFile,
                        ffmpegFullPath,
                        false);
                }

                if (nativeAPI == IntPtr.Zero)
                {
                    OnError(EncoderErrorCode.MUXING_FAILED_TO_START);
                    return(false);
                }

                FFmpegEncoder_CleanMuxingProcess(nativeAPI);
            }

            if (waitCount >= 10)
            {
                return(false);
            }

            // VideoCapture muxer complete callback
            videoCapture.OnMuxerComplete(videoSavePath);
            OnComplete(videoSavePath);

            // Clean up the video file that has no sound
            if (File.Exists(encoder.videoSavePath))
            {
                File.Delete(encoder.videoSavePath);
                encoder.videoSavePath = "";
            }

            //Debug.LogFormat(LOG_FORMAT, "Muxing process finish!");

            return(true);
        }
Example #20
        public void ProcessImage(IVideoCapture captureDevice, byte[] imagePtr)
        {
            if (captureDevice.Format != ImageFormat.B8G8R8A8_32)
                throw new MarkerException("Only ImageFormat.B8G8R8A8_32 format is acceptable for NyARToolkitTracker");

            // initialize the detector right before the image processing
            if (!started)
            {
                nyARSensor = new NyARSensor(nyARIntSize);

                nyARRaster = new DsBGRX32Raster(nyARIntSize.w, nyARIntSize.h);
                nyARSensor.update(nyARRaster);
                started = true;
            }

            nyARRaster.SetBuffer(imagePtr);
            nyARSensor.update(nyARRaster);
            nyARSensor.updateTimeStamp();
            
            markerSystem.update(nyARSensor);
            
            UpdateMarkerTransforms();
        }
Example #21
        /// <summary>
        /// Processes the video image captured from an initialized video capture device. 
        /// </summary>
        /// <param name="captureDevice">An initialized video capture device</param>
        public void ProcessImage(IVideoCapture captureDevice, IntPtr imagePtr)
        {
            String channelSeq = "";
            int nChannels = 1;
            switch(captureDevice.Format)
            {
                case ImageFormat.R5G6B5_16:
                case ImageFormat.R8G8B8_24:
                    channelSeq = "RGB";
                    nChannels = 3;
                    break;
                case ImageFormat.R8G8B8A8_32:
                    channelSeq = "RGBA";
                    nChannels = 4;
                    break;
                case ImageFormat.B8G8R8_24:
                    channelSeq = "BGR";
                    nChannels = 3;
                    break;
                case ImageFormat.B8G8R8A8_32:
                    channelSeq = "BGRA";
                    nChannels = 4;
                    break;
                case ImageFormat.A8B8G8R8_32:
                    channelSeq = "ARGB";
                    nChannels = 4;
                    break;
            }

            int interestedMarkerNums = singleMarkerIDs.Count;
            int foundMarkerNums = 0;

            ALVARDllBridge.alvar_detect_marker(detectorID, cameraID, nChannels, channelSeq, channelSeq,
                imagePtr, singleMarkerIDsPtr, ref foundMarkerNums, ref interestedMarkerNums,
                max_marker_error, max_track_error);

            Process(interestedMarkerNums, foundMarkerNums);
        }
Example #22
        private void SetupCalibration()
        {
            leftCaptureDevice = new DirectShowCapture2();
            leftCaptureDevice.InitVideoCapture(leftDeviceID, FrameRate._30Hz, Resolution._640x480,
                ImageFormat.R8G8B8_24, false);

            // Add left video capture device to the scene for rendering left eye image
            scene.AddVideoCaptureDevice(leftCaptureDevice);

            rightCaptureDevice = new DirectShowCapture2();
            rightCaptureDevice.InitVideoCapture(rightDeviceID, FrameRate._30Hz, Resolution._640x480,
                ImageFormat.R8G8B8_24, false);

            // Add right video capture device to the scene for rendering right eye image
            scene.AddVideoCaptureDevice(rightCaptureDevice);

            // Create holders for retrieving the captured video images
            leftImagePtr = Marshal.AllocHGlobal(leftCaptureDevice.Width * leftCaptureDevice.Height * 3);
            rightImagePtr = Marshal.AllocHGlobal(rightCaptureDevice.Width * rightCaptureDevice.Height * 3);

            // Associate each video device with an eye
            scene.LeftEyeVideoID = 0;
            scene.RightEyeVideoID = 1;

            scene.ShowCameraImage = true;

            float markerSize = 32.4f;

            // Initialize a marker tracker for tracking a marker array used for calibration
            markerTracker = new ALVARMarkerTracker();
            markerTracker.MaxMarkerError = 0.02f;
            markerTracker.ZNearPlane = 0.1f;
            markerTracker.ZFarPlane = 1000;
            markerTracker.InitTracker(leftCaptureDevice.Width, leftCaptureDevice.Height, LEFT_CALIB, markerSize);
            ((StereoCamera)scene.CameraNode.Camera).LeftProjection = markerTracker.CameraProjection;

            // Add another marker detector for tracking right video capture device
            ALVARDllBridge.alvar_add_marker_detector(markerSize, 5, 2);

            ALVARDllBridge.alvar_add_camera(RIGHT_CALIB, rightCaptureDevice.Width, rightCaptureDevice.Height);
            double[] projMat = new double[16];
            double cameraFovX = 0, cameraFovY = 0;
            ALVARDllBridge.alvar_get_camera_params(1, projMat, ref cameraFovX, ref cameraFovY, 1000, 0.1f);
            ((StereoCamera)scene.CameraNode.Camera).RightProjection = new Matrix(
                (float)projMat[0], (float)projMat[1], (float)projMat[2], (float)projMat[3],
                (float)projMat[4], (float)projMat[5], (float)projMat[6], (float)projMat[7],
                (float)projMat[8], (float)projMat[9], (float)projMat[10], (float)projMat[11],
                (float)projMat[12], (float)projMat[13], (float)projMat[14], (float)projMat[15]);

            // Add a marker array to be tracked
            markerID = markerTracker.AssociateMarker("ALVARGroundArray.xml");

            relativeTransforms = new List<Matrix>();
        }
Example #23
        /// <summary>
        /// Captures a snapshot image.
        /// </summary>
        /// <param name="path">Path of the saved snapshot image</param>
        /// <param name="timeout">Timeout in milliseconds</param>
        /// <returns>True if the snapshot succeeded</returns>
        public override bool SnapShotTo(ref string path, int timeout, bool force)
        {
            bool success = false;

            try
            {
                if (_IsReadyForSnapshot)
                {
                    // Snapshot taken while the video is not open
                    IVideoCapture capture = VideoCaptureManager.Instance[(int)VideoServerType.DaHua];
                    if (capture != null)
                    {
                        path    = capture.CapturePicture(this.VideoSource, force);
                        success = !string.IsNullOrEmpty(path);
                    }
                }
                else
                {
                    lock (_StatusLock)
                    {
                        ////// Subscribe to device event messages
                        ////if (m_nRealLoadPic == 0)
                        ////{
                        ////    m_nRealLoadPic = DHClient.DHRealLoadPicture(m_nLoginID, VideoSource.Channel, EventIvs.EVENT_IVS_ALL, anaCallback, 0);
                        ////}
                        ////// Set the storage path for uploaded images
                        ////snapPath = path;

                        ////// Trigger a manual snapshot test
                        ////MANUAL_SNAP_PARAMETER snap = new MANUAL_SNAP_PARAMETER();
                        ////snap.nChannel = VideoSource.Channel;
                        ////IntPtr ptr = Marshal.AllocHGlobal(Marshal.SizeOf(snap));
                        ////Marshal.StructureToPtr(snap, ptr, false);
                        ////bool bRet = DHClient.DHControlDevice(m_nLoginID, CtrlType.DH_MANUAL_SNAP, ptr, 1000);
                        ////System.Runtime.InteropServices.Marshal.FreeHGlobal(ptr);

                        ////// Wait for the image upload
                        ////_SnapEvent.WaitOne(timeout);

                        ////// Clear the storage path for uploaded images
                        ////snapPath = string.Empty;

                        //////// Unsubscribe from event messages
                        //////if (m_nRealLoadPic != 0)
                        //////{
                        //////    DHClient.DHStopLoadPic(m_nRealLoadPic);
                        //////    m_nRealLoadPic = 0;
                        //////}

                        //if (m_realPlayH != 0)
                        //{
                        //    success = DHClient.DHCapturePicture(m_realPlayH, path);
                        //}

                        // Snapshot taken while the video is open
                        if (m_realPlayH != 0)
                        {
                            success = DHClient.DHCapturePicture(m_realPlayH, path);
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Ralid.GeneralLibrary.ExceptionHandling.ExceptionPolicy.HandleException(ex);
            }
            return(success);
        }
Example #24
        public void ProcessImage(IVideoCapture captureDevice, byte[] imagePtr)
        {
            if (captureDevice.Format != ImageFormat.B8G8R8A8_32)
                throw new MarkerException("Only ImageFormat.B8G8R8A8_32 format is acceptable for NyARToolkitTracker");

            // initialize the detector right before the image processing
            if (!started)
            {
                multiDetector = new MarkerDetector(param, codes.ToArray(), pattSizes.ToArray(),
                    codes.Count, raster.getBufferType());
                multiDetector.setContinueMode(continuousMode);
                started = true;
            }

            raster.SetBuffer(imagePtr);

            UpdateMarkerTransforms();
        }
Example #25
        /// <summary>
        /// Initializes a new instance of the <see cref="MediaCaptureService"/> class.
        /// </summary>
        public MediaCaptureService()
        {
#if UWP
            this.videoCaptureManager = new VideoCaptureManager();
#endif
        }
Example #26
        private void SetupCalibration()
        {
            if (useImageSequence)
                captureDevice = new NullCapture();
            else
                captureDevice = new DirectShowCapture2();
            captureDevice.InitVideoCapture(0, FrameRate._30Hz, Resolution._640x480,
                ImageFormat.R8G8B8_24, false);
            if (useImageSequence)
            {
                imageNames = Directory.GetFiles(imageDirectory);

                if (imageNames != null && imageNames.Length > 0)
                {
                    ((NullCapture)captureDevice).StaticImageFile = imageNames[0];
                }
                else
                {
                    MessageBox.Show("No images are found in " + imageDirectory + " for static image calibration");
                    this.Exit();
                }
            }

            // Add this video capture device to the scene so that it can be used for
            // the marker tracker
            scene.AddVideoCaptureDevice(captureDevice);

            imagePtr = Marshal.AllocHGlobal(captureDevice.Width * captureDevice.Height * 3);

            scene.ShowCameraImage = true;

            // Initialize the ALVAR camera
            ALVARDllBridge.alvar_init();
            ALVARDllBridge.alvar_add_camera(null, captureDevice.Width, captureDevice.Height);
        }