Code example #1
        void OnPostRender()
        {
            if (isRecording)
            {
                if (frameCount >= maxframeCount ||
                    recordingFrameRgbMat.width() != Screen.width || recordingFrameRgbMat.height() != Screen.height)
                {
                    OnRecButtonClick();
                    return;
                }

                frameCount++;

                // Take a screenshot of the current frame.
                // (It should go here, right?)
                screenCapture.ReadPixels(new UnityEngine.Rect(0, 0, Screen.width, Screen.height), 0, 0);
                screenCapture.Apply();

                Utils.texture2DToMat(screenCapture, recordingFrameRgbMat);
                Imgproc.cvtColor(recordingFrameRgbMat, recordingFrameRgbMat, Imgproc.COLOR_RGB2BGR);

                Imgproc.putText(recordingFrameRgbMat, frameCount.ToString(), new Point(recordingFrameRgbMat.cols() - 70, 30), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(recordingFrameRgbMat, "SavePath:", new Point(5, recordingFrameRgbMat.rows() - 30), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(0, 0, 255), 2, Imgproc.LINE_AA, false);
                Imgproc.putText(recordingFrameRgbMat, savePath, new Point(5, recordingFrameRgbMat.rows() - 8), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 0, Imgproc.LINE_AA, false);

                writer.write(recordingFrameRgbMat);
            }
        }
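
For context, OnPostRender() above assumes that a screen-sized Texture2D, an RGB Mat, and an opened VideoWriter already exist. Below is a minimal sketch of that setup, modeled on OpenCVForUnity's VideoWriter API. Only the identifiers already used above (isRecording, frameCount, maxframeCount, screenCapture, recordingFrameRgbMat, writer, savePath, OnRecButtonClick) come from the original; the initialization values (codec, fps, file name) are illustrative assumptions, not the original project's code.

        // --- Sketch (assumption): the recording state OnPostRender() relies on. ---
        bool isRecording;
        int frameCount;
        int maxframeCount = 300;                 // stop after this many frames.
        Texture2D screenCapture;
        Mat recordingFrameRgbMat;
        VideoWriter writer;
        string savePath;

        public void OnRecButtonClick()
        {
            if (!isRecording)
            {
                // Allocate a screen-sized texture and Mat, then open the writer
                // (MJPG into an .avi file is used here only as an example codec).
                screenCapture = new Texture2D(Screen.width, Screen.height, TextureFormat.RGB24, false);
                recordingFrameRgbMat = new Mat(Screen.height, Screen.width, CvType.CV_8UC3);
                savePath = Application.persistentDataPath + "/capture.avi";
                writer = new VideoWriter(savePath, VideoWriter.fourcc('M', 'J', 'P', 'G'), 30, new Size(Screen.width, Screen.height), true);
                frameCount = 0;
                isRecording = writer.isOpened();
            }
            else
            {
                // Stop recording and release the writer so the file is finalized.
                isRecording = false;
                writer.release();
            }
        }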
Code example #2
 public void Write(Mat img)
 {
     _writer.write(img);
 }
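
A thin wrapper like Write() usually also owns the writer's creation and release. The following is only a sketch of such a wrapper around OpenCVForUnity's VideoWriter; the class name and the Open/Release members are hypothetical, since only Write() and _writer appear in the snippet above.

 using OpenCVForUnity.CoreModule;
 using OpenCVForUnity.VideoioModule;

 public class VideoRecorder
 {
     VideoWriter _writer;

     // Opens the underlying writer; returns false if OpenCV could not create the file.
     public bool Open(string path, int fourcc, double fps, Size frameSize)
     {
         _writer = new VideoWriter(path, fourcc, fps, frameSize, true);
         return _writer.isOpened();
     }

     public void Write(Mat img)
     {
         // The Mat is expected to match the frameSize passed to Open();
         // VideoWriter does not resize frames for you.
         _writer.write(img);
     }

     public void Release()
     {
         // Finalizes the output file; Write() must not be called afterwards.
         _writer.release();
     }
 }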
Code example #3
        public IEnumerator MakeVideo(TMPro.TMP_Text progressDisplay, TMPro.TMP_Text statusDisplay)
        {
            // 1. Create video capture
            m_vidCapturer = new VideoCapture(m_vidName);
            // capture validity check
            if (!m_vidCapturer.isOpened())
            {
                m_vidCapturer.release();
                m_vidCapturer = null;
                DanbiUtils.LogErr($"Failed to open the selected video at {m_vidName}");
                yield break;
            }

            // 2. init persistent resources
            // (VideoCapture.get(3) / get(4) return the frame width / height.)
            receivedFrameMat  = new Mat((int)m_vidCapturer.get(4), (int)m_vidCapturer.get(3), CvType.CV_8UC4); // CV_8UC4 (RGBA).
            distortedFrameMat = new Mat((int)m_vidCapturer.get(4), (int)m_vidCapturer.get(3), CvType.CV_8UC4); // CV_8UC4 (RGBA).
            texForVideoFrame  = new Texture2D((int)m_vidCapturer.get(3), (int)m_vidCapturer.get(4), TextureFormat.RGBA32, false);

            // 3. calc the video frame count.
            m_currentFrameCount = 0;
            m_maxFrameCount     = (int)m_vidCapturer?.get(DanbiOpencvVideoCapturePropID.frame_count);

            // 4. get a codec (fourcc) for the video writer.
            // NOTE: MJPG caused an error here.
            int codec_fourcc = DanbiOpencvVideoCodec_fourcc.get_fourcc_videoCodec(m_videoCodec);

            if (codec_fourcc == -999)
            {
                DanbiUtils.LogErr($"codec is invalid! codec propID -> {codec_fourcc}");
                yield break;
            }

            // 5. create a video writer
            var frameSize = new Size(m_vidCapturer.get(3), m_vidCapturer.get(4)); // (frame width, frame height)

            m_vidWriter = new VideoWriter(m_savedVideoPathAndName, codec_fourcc, m_targetFrameRate, frameSize, true);

            // while (m_currentFrameCount < m_dbgMaxFrameCount)
            while (m_currentFrameCount < m_maxFrameCount - 1)
            {
                if (m_isSaving)
                {
                    break;
                }

                // read the next frame into 'receivedFrameMat'.
                if (!m_vidCapturer.read(receivedFrameMat))
                {
                    DanbiUtils.LogErr($"Failed to read the current video frame! <No next frame>");
                    break;
                }

                // testRT = new Mat((int)receivedFrameMat.get(4), (int)receivedFrameMat.get(3), CvType.CV_8UC4);
                // OpenCVForUnity.ImgprocModule.Imgproc.cvtColor(receivedFrameMat, testRT, OpenCVForUnity.ImgprocModule.Imgproc.COLOR_RGBA)

                if (receivedFrameMat.empty())
                {
                    DanbiUtils.LogErr("Frame failed to receive the captured frame from the video!");
                    break;
                }

                Utils.matToTexture2D(receivedFrameMat, texForVideoFrame);

                yield return StartCoroutine(DistortCurrentFrame(texForVideoFrame));

                Utils.texture2DToMat(texForVideoFrame, distortedFrameMat);

                if (distortedFrameMat.empty())
                {
                    DanbiUtils.LogErr("Frame failed to receive the distorted result!");
                    break;
                }

                // write the distorted frame into the video writer.
                m_vidWriter.write(distortedFrameMat);

                // TODO: update the text with DanbiStatusDisplayHelper
                // progressDisplayText.text = $"Start to warp" +
                //   "(500 / 25510) " +
                //   "(1.96001%)";
                // TODO: update the text with DanbiStatusDisplayHelper
                // statusDisplayText.text = "Image generating succeed!";

                ++m_currentFrameCount;
            }

            // dispose resources.
            m_vidCapturer.release();
            m_vidWriter.release();
            receivedFrameMat.release();
            distortedFrameMat.release();
            texForVideoFrame = null;

            // reset flags
            DanbiManager.instance.renderFinished = false;
            m_isSaving = false;

            Application.runInBackground = false;

            // wait until the saved video file exists on disk.
            yield return new WaitUntil(() => new System.IO.FileInfo(m_savedVideoPathAndName).Exists);

            // open the save location with the OS default handler.
            System.Diagnostics.Process.Start(m_savedVideoPath);
        }
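
A readability note on MakeVideo(): the magic numbers passed to VideoCapture.get() are OpenCV property IDs (3 = CAP_PROP_FRAME_WIDTH, 4 = CAP_PROP_FRAME_HEIGHT). A small hypothetical helper using OpenCVForUnity's named constants would make that explicit; GetFrameSize is not part of the original code.

        // Requires: using OpenCVForUnity.CoreModule; using OpenCVForUnity.VideoioModule;
        static Size GetFrameSize(VideoCapture capture)
        {
            int width  = (int)capture.get(Videoio.CAP_PROP_FRAME_WIDTH);   // equivalent to capture.get(3)
            int height = (int)capture.get(Videoio.CAP_PROP_FRAME_HEIGHT);  // equivalent to capture.get(4)
            return new Size(width, height);
        }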