Example #1
0
        /// <summary>
        /// Saves captured LMFrame JSON data to a .txt file and the raw serialized
        /// Leap Motion frame to a companion .raw file, then resets the capture UI
        /// back to the pre-capture state.
        /// </summary>
        /// <param name="content">LMFrame JSON data to write.</param>
        /// <param name="humanSign">Gesture name; determines the target folder.</param>
        void saveFile(string content, string humanSign)
        {
            string folderPath  = FileHelper.captureDataFolderPath(humanSign);
            string fileName    = captureDataFileNameGetNext(folderPath);
            string txtFilePath = Path.Combine(folderPath, fileName + ".txt");

            Debug.WriteLine("Begin save");
            // Append mode (true); the using-block guarantees the writer is
            // flushed and closed even if WriteLine throws.
            using (StreamWriter sw = new StreamWriter(txtFilePath, true))
            {
                // WriteLine appends a trailing newline after the JSON payload.
                sw.WriteLine(content);
            }

            string rawFile = Path.Combine(folderPath, fileName + ".raw");

            byte[] rawData           = currentLeapMotionFrame.Serialize;
            bool   isSuccessSaveFile = ByteArrayToFile(rawFile, rawData);
            if (!isSuccessSaveFile)
            {
                // Surface the failure instead of silently discarding the result.
                Debug.WriteLine("Failed to save raw frame file: " + rawFile);
            }

            //UNCOMMENT TO ENABLE COMPRESSION
            //Helper.FileHelper.zipFileSaveInFolder(txtFilePath);

            mCaptureStatus = CaptureStatus.PreCapture;
            updateUIByCaptureStatus();
            Debug.WriteLine("Save successful");
        }
Example #2
0
        /// <summary>
        /// Handles a double-click in the gesture tree view: a sign node (text
        /// containing "(") selects that sign for capture; any other node is
        /// treated as a capture-data file name and loaded for preview.
        /// </summary>
        private void treeViewMouseDoubleClick(object sender, MouseEventArgs e)
        {
            //
            // Get the selected node.
            //
            TreeNode node = trvHumanSign.SelectedNode;

            // Double-clicking empty space leaves SelectedNode null; bail out
            // instead of throwing a NullReferenceException on node.Text.
            if (node == null)
            {
                return;
            }

            string nodeStr = node.Text;

            if (nodeStr.Contains("("))
            {
                nodeStr = clearNodeName(nodeStr);
                // selected Sign
                choosedSign         = nodeStr;
                lbSelectedSign.Text = "Selected Sign :" + nodeStr;
                txbHumanSign.Text   = nodeStr;
                isGenerateImage     = false;
                mCaptureStatus      = CaptureStatus.PreviewSign;
                updateUIByCaptureStatus();
            }
            else
            {
                mCaptureStatus = CaptureStatus.PreviewSign;
                updateUIByCaptureStatus();
                // load json - nodeStr = fileName
                loadCaptureData(nodeStr);
            }
        }
Example #3
0
 /// <summary>
 /// Enters the pre-auto-capture state once the auto-capture settings validate.
 /// </summary>
 private void btnAutoCapture_Click(object sender, EventArgs e)
 {
     // Guard clause: leave the current state untouched when settings are invalid.
     if (!checkAutoCaptureSettings())
     {
         return;
     }

     mCaptureStatus = CaptureStatus.PreAutoCapture;
     updateUIByCaptureStatus();
 }
Example #4
0
        /// <summary>
        /// Handle audio process complete when capture audio.
        /// Marks the capture session READY, notifies <c>OnComplete</c> subscribers
        /// with the finished file path, and logs success.
        /// </summary>
        /// <param name="savePath">Final muxing video path.</param>
        public void OnAudioMuxingComplete(string savePath)
        {
            // Pipeline is idle again; a new capture session may be started.
            status = CaptureStatus.READY;

            // Notify listeners where the finalized video was written.
            OnComplete(this, savePath);

            Debug.LogFormat(LOG_FORMAT, "Video generated success!");
        }
    /// <summary>
    /// Begins a Seurat capture session: stores the headbox configuration, builds
    /// a low-discrepancy (Hammersley-style) set of camera sample positions
    /// inside the headbox, sorts them, and creates the color/depth capture
    /// cameras.
    /// </summary>
    /// <param name="headbox">Headbox defining the capture volume, cameras, and sampling density.</param>
    /// <param name="capture_dir">Directory that will receive capture output.</param>
    /// <param name="max_frames">Maximum number of frames to capture.</param>
    /// <param name="status_interface">Callback interface that receives progress updates.</param>
    public void BeginCapture(CaptureHeadbox headbox, string capture_dir, int max_frames, CaptureStatus status_interface)
    {
        start_time_ = Time.realtimeSinceStartup;

        headbox_          = headbox;
        dynamic_range_    = headbox_.dynamic_range_;
        samples_per_face_ = (int)headbox_.samples_per_face_;
        capture_dir_      = capture_dir;
        capture_frame_    = 0;
        status_interface_ = status_interface;
        max_frames_       = max_frames;
        status_interface_.SendProgress("Capturing Images...", 0.0f);
        List <Vector3> samples = new List <Vector3>();

        // Use Hammersly point set to distribute samples.
        // NOTE(review): when samples_per_face_ == 1 the x coordinate divides by
        // zero (float division -> NaN/Infinity); confirm samples_per_face_ >= 2.
        for (int position_sample_index = 0; position_sample_index < samples_per_face_; ++position_sample_index)
        {
            // x ramps uniformly over [0,1]; y and z use radical inverses in
            // bases 2 and 3 to form the low-discrepancy sequence.
            Vector3 headbox_position = new Vector3(
                (float)position_sample_index / (float)(samples_per_face_ - 1),
                RadicalInverse((ulong)position_sample_index, 2),
                RadicalInverse((ulong)position_sample_index, 3));
            // Scale the unit-cube sample to headbox dimensions and recenter it.
            headbox_position.Scale(headbox.size_);
            headbox_position -= headbox.size_ * 0.5f;
            // Headbox samples are in camera space; transform to world space.
            headbox_position = headbox.transform.TransformPoint(headbox_position);
            samples.Add(headbox_position);
        }

        // Sort samples by distance from center of the headbox.
        // NOTE(review): sqrMagnitude here measures distance from the WORLD
        // origin, because the samples were already transformed to world space
        // above — confirm this ordering is the intended behavior.
        samples.Sort(delegate(Vector3 a, Vector3 b) {
            float length_a = a.sqrMagnitude;
            float length_b = b.sqrMagnitude;
            return(length_a.CompareTo(length_b));
        });
        // Replace the sample closest to the center of the headbox with a sample at
        // exactly the center. This is important because Seurat requires
        // sampling information at the center of the headbox.
        samples[0] = headbox.transform.position;

        samples_ = samples;
        // Note this uses a modified version of Unity's standard internal depth
        // capture shader. See the shader in Assets/builtin_shaders/
        // DefaultResourcesExtra/Internal-DepthNormalsTexture.shader.
        render_depth_shader_ = Shader.Find("GoogleVR/Seurat/CaptureEyeDepth");

        capture_manifest_ = new JsonManifest.Capture();

        // Setup cameras
        color_camera_ = headbox_.ColorCamera;

        depth_camera_object_ = new GameObject("Depth Camera");
        depth_camera_        = depth_camera_object_.AddComponent <Camera>();
        //Checks if we are using HDRP, if so, we need to add additional components.
    #if UNITY_RENDER_PIPELINE_HDRP
        OverrideMaterialRenderer overrideMaterialRenderer = depth_camera_object_.AddComponent <OverrideMaterialRenderer>();
        overrideMaterialRenderer.EnableOverride();
    #endif
    }
Example #6
0
 /// <summary>
 /// Unity Start: caches the UDP keyboard and frame-saver components, pins the
 /// application frame rate to 60, resets the capture state to Clean, and kicks
 /// off asynchronous MediaCapture initialization.
 /// </summary>
 void Start()
 {
     Debug.Log(TAG + " Start() thread ID is " + Thread.CurrentThread.ManagedThreadId);
     udpKeyboard = GetComponent <UDPKeyboardInput>();
     frameSaver  = GetComponent <FrameSaver>();
     Application.targetFrameRate = 60;
     captureStatus = CaptureStatus.Clean;
     InitializeMediaCaptureAsyncWrapper();
 }
Example #7
0
        /// <summary>
        /// Switches the app into the given capture mode: highlights the pressed
        /// button, hides the main form, and opens the capture form.
        /// </summary>
        /// <param name="_button">Button that triggered capture; highlighted light blue.</param>
        /// <param name="captureStatus">Capture mode to activate.</param>
        private void StartCapture(Button _button, CaptureStatus captureStatus)
        {
            myCaptureStatus   = captureStatus;
            _button.BackColor = Color.LightBlue;

            // Hide() already sets Visible = false, so the previous explicit
            // assignment was redundant and has been removed.
            this.Hide();
            this.Invalidate();

            System.Console.WriteLine("hide main form");
            myCaptureForm.StartCapture();
        }
    /// <summary>
    /// Stops the media frame reader if it is currently running.
    /// </summary>
    /// <returns>True when the reader was stopped; false on incorrect status.</returns>
    private async Task <bool> StopFrameReaderAsync()
    {
        // Only a reader in the Running state can be stopped.
        bool isRunning = (captureStatus == CaptureStatus.Running);

        if (!isRunning)
        {
            Debug.Log(TAG + ": StopFrameReaderAsync() fails because of incorrect status");
            return false;
        }

        await frameReader.StopAsync();

        // The reader remains initialized and can be restarted later.
        captureStatus = CaptureStatus.Initialized;
        Debug.Log(TAG + ": StopFrameReaderAsync() is successful");
        return true;
    }
Example #9
0
        /// <summary>
        /// Initializes the training form: loads app settings into the singleton,
        /// connects the Leap Motion controller/listener, builds the gesture tree
        /// view (creating the alphabet folders on first run), and selects the
        /// first gesture.
        /// </summary>
        public FrameDataForm()
        {
            InitializeComponent();
            // We will create a new AppSetting.txt if this file does not exist.
            AppSetting.CheckAndCreateIfHaveNoAppSetting();
            JObject settObj = AppSetting.GetAppSetting();

            LMSingleton.Instance.compareDownPercent = (float)settObj[AppSetting.settingKeyCompareDownPercent];
            LMSingleton.Instance.compareTopPercent  = (float)settObj[AppSetting.settingKeyCompareTopPercent];

            LMSingleton.Instance.currentForm = LMSingleton.FormName.Training;

            // (Removed two unused locals that merely read the values back.)

            dictHumanSign   = new Dictionary <string, Model.LMFrame>();
            this.controller = new Controller();
            this.listener   = new LeapEventListener(this);
            controller.AddListener(listener);
            mCaptureStatus = CaptureStatus.PreviewSign;
            updateUIByCaptureStatus();
            Debug.WriteLine("Load Done");

            mTrain = new TrainModule();


            string[] signFolders = FileHelper.CheckSignFolders();
            // We will create alphabet folders if there is no folder found.
            if (signFolders.Length == 0)
            {
                FileHelper.createABCFolderForFirstTime();

                signFolders = FileHelper.CheckSignFolders();
            }
            LMSingleton.Instance.signFolders = signFolders;

            for (int i = 0; i < signFolders.Length; i++)
            {
                TreeNode node = new TreeNode(signFolders[i]);
                trvHumanSign.Nodes.Add(node);
            }

            reloadListHumanSign();

            // Guard against an empty tree (e.g. folder creation failed) so the
            // constructor cannot throw on Nodes[0].
            if (trvHumanSign.Nodes.Count > 0)
            {
                trvHumanSign.SelectedNode = trvHumanSign.Nodes[0];
                choosedSign         = clearNodeName(trvHumanSign.SelectedNode.Text);
                lbSelectedSign.Text = "Selected Gesture :" + choosedSign;
            }

            initView();
        }
Example #10
0
        /// <summary>
        /// Handle callbacks for the video encoder complete.
        /// </summary>
        /// <param name="savePath">Video save path.</param>
        private void OnEncoderComplete(string savePath)
        {
            // Software (ffmpeg) encoding with audio still has a muxing step left.
            bool audioStillPending = !hardwareEncoding && captureAudio;

            if (audioStillPending)
            {
                // Pending for ffmpeg audio capture and muxing
                status = CaptureStatus.PENDING;
            }
            else
            {
                // No audio capture required, done!
                status = CaptureStatus.READY;

                OnComplete(this, savePath);

                Debug.LogFormat(LOG_FORMAT, "Video capture session success!");
            }
        }
    /// <summary>
    /// Unity Start: caches the UDP keyboard and frame-saver components, pins the
    /// frame rate to 60, resets capture state, starts MediaCapture
    /// initialization, and caches the target video width/height implied by the
    /// selected media capture profile.
    /// </summary>
    void Start()
    {
        Debug.Log(TAG + " Start() thread ID is " + Thread.CurrentThread.ManagedThreadId);
        udpKeyboard = GetComponent <UDPKeyboardInput>();
        frameSaver  = GetComponent <FrameSaver>();
        Application.targetFrameRate = 60;
        captureStatus = CaptureStatus.Clean;
        InitializeMediaCaptureAsyncWrapper();

        // Cache values to target width and height from media capture
        // profiles enum.
        switch (mediaCaptureProfiles)
        {
        case MediaCaptureProfiles.HL1_1280x720:
            _targetVideoWidth  = 1280;
            _targetVideoHeight = 720;
            break;

        case MediaCaptureProfiles.HL1_1408x792:
            _targetVideoWidth  = 1408;
            _targetVideoHeight = 792;
            break;

        case MediaCaptureProfiles.HL1_1344x756:
            _targetVideoWidth  = 1344;
            _targetVideoHeight = 756;
            break;

        case MediaCaptureProfiles.HL1_896x504:
            _targetVideoWidth  = 896;
            _targetVideoHeight = 504;
            break;

        case MediaCaptureProfiles.HL2_2272x1278:
            _targetVideoWidth  = 2272;
            _targetVideoHeight = 1278;
            break;

        case MediaCaptureProfiles.HL2_896x504:
            _targetVideoWidth  = 896;
            _targetVideoHeight = 504;
            break;

        default:
            // Unknown profile: _targetVideoWidth/_targetVideoHeight keep
            // whatever values they already had.
            break;
        }
    }
Example #12
0
        /// <summary>
        /// Locates the FFmpeg and GPU encoder components, wires their completion
        /// callbacks, marks the capture session READY, and optionally starts
        /// capturing immediately.
        /// </summary>
        private void Awake()
        {
            if (ffmpegEncoder == null)
            {
                ffmpegEncoder = GetComponentInChildren <FFmpegEncoder>(true);
                if (ffmpegEncoder == null)
                {
                    Debug.LogErrorFormat(LOG_FORMAT,
                                         "Component FFmpegEncoder not found, please use prefab or follow the document to set up video capture.");
                    return;
                }
            }

            // ffmpegEncoder is guaranteed non-null here (we returned above
            // otherwise), so the former redundant null check was removed.
            ffmpegEncoder.OnComplete += OnEncoderComplete;

            if (gpuEncoder == null)
            {
                gpuEncoder = GetComponentInChildren <GPUEncoder>(true);
                if (gpuEncoder == null)
                {
                    // Not fatal: software (ffmpeg) encoding still works.
                    Debug.LogErrorFormat(LOG_FORMAT,
                                         "Component hardware encoder not found, please use prefab or follow the document to set up video capture.");
                }
            }

#if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
            // GPU encoding is only activated on Windows builds/editors.
            if (gpuEncoder != null)
            {
                gpuEncoder.gameObject.SetActive(true);
                gpuEncoder.OnComplete += OnEncoderComplete;
            }
#endif

            status = CaptureStatus.READY;

            if (startOnAwake)
            {
                StartCapture();
            }
        }
    /// <summary>
    /// Starts the media frame reader; requires status Initialized and moves to
    /// Running on success.
    /// </summary>
    /// <returns>True when the reader started successfully; otherwise false.</returns>
    private async Task <bool> StartFrameReaderAsync()
    {
        Debug.Log(TAG + " StartFrameReaderAsync() thread ID is " + Thread.CurrentThread.ManagedThreadId);
        if (captureStatus != CaptureStatus.Initialized)
        {
            Debug.Log(TAG + ": StartFrameReaderAsync() fails because of incorrect status");
            return(false);
        }

        MediaFrameReaderStartStatus status = await frameReader.StartAsync();

        if (status == MediaFrameReaderStartStatus.Success)
        {
            Debug.Log(TAG + ": StartFrameReaderAsync() is successful");
            captureStatus = CaptureStatus.Running;
            return(true);
        }
        else
        {
            // Fixed copy/paste bug: this is the failure branch, so log failure
            // (the original message claimed success here).
            Debug.Log(TAG + ": StartFrameReaderAsync() is not successful, status = " + status);
            return(false);
        }
    }
Example #14
0
        /// <summary>
        /// Stop capturing and produce the finalized video. Note that the video file may not be completely written when this method returns. In order to know when the video file is complete, register <c>OnComplete</c> delegate.
        /// </summary>
        /// <returns>True when a running session was stopped; false if no session had been started.</returns>
        public bool StopCapture()
        {
            if (status != CaptureStatus.STARTED)
            {
                Debug.LogWarningFormat(LOG_FORMAT, "Video capture session not start yet!");
                return(false);
            }

            if (offlineRender)
            {
                // Restore maximumDeltaTime states.
                Time.maximumDeltaTime = originalMaximumDeltaTime;
            }

            // pending for video encoding process
            status = CaptureStatus.STOPPED;

            // Hardware (GPU) and software (ffmpeg) encoding paths are mutually
            // exclusive; stop whichever one actually started.
            if (hardwareEncoding && gpuEncoder.captureStarted)
            {
                gpuEncoder.StopCapture();
            }

            if (!hardwareEncoding && ffmpegEncoder.captureStarted)
            {
                ffmpegEncoder.StopCapture();

                // Audio is captured/muxed by a separate FFmpegMuxer singleton;
                // stop it too if it was running.
                if (captureAudio && FFmpegMuxer.singleton && FFmpegMuxer.singleton.captureStarted)
                {
                    FFmpegMuxer.singleton.StopCapture();
                }

                Debug.LogFormat(LOG_FORMAT, "Video capture session stopped, generating video...");
            }

            return(true);
        }
Example #15
0
        /// <summary>
        /// Toggles auto-capture: a "Stop" button returns to sign preview; any
        /// other label reads the auto-capture settings from the UI and starts a
        /// capture run.
        /// </summary>
        private void btnStartAutCapture_Click(object sender, EventArgs e)
        {
            string btnText = ((Button)sender).Text;

            if (btnText == "Stop")
            {
                mCaptureStatus = CaptureStatus.PreviewSign;
                updateUIByCaptureStatus();
            }
            else
            {
                lastAutocaptureTime = Helper.TimeHelper.currentTime();
                autoCaptureCount    = 0;

                if (checkAutoCaptureSettings())
                {
                    // Only change state once the settings validate; previously
                    // the status flipped to PreCapture even when validation
                    // failed, leaving the state and the UI out of sync.
                    mCaptureStatus        = CaptureStatus.PreCapture;
                    autoCaptureTimeOut    = int.Parse(txbTimeOut.Text);
                    autoCaptureFileCount  = int.Parse(txbFileCount.Text);
                    autoCaptureStartIndex = int.Parse(txbStartIndex.Text);
                    updateUIByCaptureStatus();
                }
            }
        }
 /// <summary>
 /// Unity Start: resets the capture state to Clean and begins asynchronous
 /// MediaCapture initialization.
 /// </summary>
 void Start()
 {
     captureStatus = CaptureStatus.Clean;
     InitializeMediaCaptureAsyncWrapper();
 }
Example #17
0
 /// <summary>
 /// Leaves capture mode: resets the capture status, restores the button's
 /// default color, and re-triggers the copy handler.
 /// </summary>
 /// <param name="_button">Button whose highlight is cleared.</param>
 private void StopCapture(Button _button)
 {
     myCaptureStatus   = CaptureStatus.CAPTURE_NOT;
     _button.BackColor = defaultColor;
     // Reuses the copy-button click handler directly; called here with null
     // sender/args — presumably the handler ignores them (TODO confirm).
     button_copyP_Click(null, null);
 }
    /// <summary>
    /// Initializes MediaCapture for HoloLens 1 or 2: picks the camera source
    /// group by display name, initializes capture in the device-appropriate
    /// sharing mode, selects the supported format closest to the target
    /// resolution and frame rate, and creates the frame reader.
    /// </summary>
    /// <returns>True on success; false on incorrect status or any setup failure.</returns>
    private async Task <bool> InitializeMediaCaptureAsync()
    {
        if (captureStatus != CaptureStatus.Clean)
        {
            Debug.Log(TAG + ": InitializeMediaCaptureAsync() fails because of incorrect status");
            return(false);
        }

        if (mediaCapture != null)
        {
            return(false);
        }

        var allGroups = await MediaFrameSourceGroup.FindAllAsync();

        int selectedGroupIndex = -1;

        for (int i = 0; i < allGroups.Count; i++)
        {
            var group = allGroups[i];
            Debug.Log(group.DisplayName + ", " + group.Id);
            // for HoloLens 1
            if (group.DisplayName == "MN34150")
            {
                selectedGroupIndex = i;
                HL = 1;
                Debug.Log(TAG + ": Selected group " + i + " on HoloLens 1");
                break;
            }
            // for HoloLens 2
            else if (group.DisplayName == "QC Back Camera")
            {
                selectedGroupIndex = i;
                HL = 2;
                Debug.Log(TAG + ": Selected group " + i + " on HoloLens 2");
                break;
            }
        }

        if (selectedGroupIndex == -1)
        {
            Debug.Log(TAG + ": InitializeMediaCaptureAsyncTask() fails because there is no suitable source group");
            return(false);
        }

        // Initialize mediacapture with the source group.
        mediaCapture = new MediaCapture();
        MediaStreamType mediaStreamType = MediaStreamType.VideoPreview;

        if (HL == 1)
        {
            var settings = new MediaCaptureInitializationSettings {
                SourceGroup = allGroups[selectedGroupIndex],
                // This media capture can share streaming with other apps.
                SharingMode = MediaCaptureSharingMode.SharedReadOnly,
                // Only stream video and don't initialize audio capture devices.
                StreamingCaptureMode = StreamingCaptureMode.Video,
                // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
                // instead of preferring GPU D3DSurface images.
                MemoryPreference = MediaCaptureMemoryPreference.Cpu
            };
            await mediaCapture.InitializeAsync(settings);

            Debug.Log(TAG + ": MediaCapture is successfully initialized in SharedReadOnly mode for HoloLens 1.");
            mediaStreamType = MediaStreamType.VideoPreview;
        }
        else if (HL == 2)
        {
            string deviceId = allGroups[selectedGroupIndex].Id;
            // Look up for all video profiles
            IReadOnlyList <MediaCaptureVideoProfile> profileList = MediaCapture.FindAllVideoProfiles(deviceId);
            //MediaCaptureVideoProfile selectedProfile;
            //IReadOnlyList<MediaCaptureVideoProfile> profileList = MediaCapture.FindKnownVideoProfiles(deviceId, KnownVideoProfile.VideoConferencing);

            // Initialize mediacapture with the source group.
            var settings = new MediaCaptureInitializationSettings {
                SourceGroup = allGroups[selectedGroupIndex],
                //VideoDeviceId = deviceId,
                //VideoProfile = profileList[0],
                // This media capture can share streaming with other apps.
                SharingMode = MediaCaptureSharingMode.ExclusiveControl,
                // Only stream video and don't initialize audio capture devices.
                StreamingCaptureMode = StreamingCaptureMode.Video,
                // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
                // instead of preferring GPU D3DSurface images.
                MemoryPreference = MediaCaptureMemoryPreference.Cpu
            };
            await mediaCapture.InitializeAsync(settings);

            Debug.Log(TAG + ": MediaCapture is successfully initialized in ExclusiveControl mode for HoloLens 2.");
            mediaStreamType = MediaStreamType.VideoRecord;
        }



        try {
            var mediaFrameSourceVideo         = mediaCapture.FrameSources.Values.Single(x => x.Info.MediaStreamType == mediaStreamType);
            MediaFrameFormat targetResFormat  = null;
            float            framerateDiffMin = 60f;
            foreach (var f in mediaFrameSourceVideo.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height))
            {
                // Check current media frame source resolution versus target resolution
                if (f.VideoFormat.Width == _targetVideoWidth && f.VideoFormat.Height == _targetVideoHeight)
                {
                    // Cast before dividing: Numerator/Denominator are integers,
                    // so the original integer division truncated fractional
                    // frame rates (e.g. 30000/1001 -> 29 instead of 29.97),
                    // skewing the closest-framerate selection.
                    float framerateDiff = Mathf.Abs((float)f.FrameRate.Numerator / f.FrameRate.Denominator - _targetVideoFrameRate);
                    // First resolution match is always taken; later matches only
                    // when they are strictly closer to the target frame rate.
                    if (targetResFormat == null || framerateDiff < framerateDiffMin)
                    {
                        targetResFormat  = f;
                        framerateDiffMin = framerateDiff;
                    }
                }
            }
            if (targetResFormat == null)
            {
                targetResFormat = mediaFrameSourceVideo.SupportedFormats[0];
                Debug.Log(TAG + ": Unable to choose the selected format, fall back");
            }
            // choose the smallest resolution
            //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
            // choose the specific resolution
            //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => (x.VideoFormat.Width == 1344 && x.VideoFormat.Height == 756)).FirstOrDefault();
            await mediaFrameSourceVideo.SetFormatAsync(targetResFormat);

            Debug.Log(TAG + ": mediaFrameSourceVideo.SetFormatAsync()");
            frameReader = await mediaCapture.CreateFrameReaderAsync(mediaFrameSourceVideo, targetResFormat.Subtype);

            Debug.Log(TAG + ": mediaCapture.CreateFrameReaderAsync()");
            frameReader.FrameArrived += OnFrameArrived;
            videoWidth  = Convert.ToInt32(targetResFormat.VideoFormat.Width);
            videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
            Debug.Log(TAG + ": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
                      ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator);
        }
        catch (Exception e) {
            Debug.Log(TAG + ": FrameReader is not initialized");
            Debug.Log(TAG + ": Exception: " + e);
            return(false);
        }

        captureStatus = CaptureStatus.Initialized;
        return(true);
    }
Example #19
0
        /// <summary>
        /// Leap Motion frame handling: drives the capture state machine. In
        /// PreAutoCapture it only previews frames; in PreCapture it counts down
        /// to the next auto-capture; in Capture it converts the frame to an
        /// LMFrame, shows it, saves it, and advances or finishes the run.
        /// </summary>
        /// <param name="frame">A frame get from leap motion</param>
        void newFrameHandler(Frame frame)
        {
            long currentMillis = Helper.TimeHelper.currentTime();

            // capture only 1 frame for 1  humnan sign

            if (mCaptureStatus == CaptureStatus.PreAutoCapture)
            {
                // Preview-only mode: throttle UI updates to at most one per
                // delayMillisecondToRecord interval.
                if (currentMillis - lastMillisecond > delayMillisecondToRecord)
                {
                    Debug.WriteLine("Show image");
                    lastMillisecond = currentMillis;
                    showFrameInUIByFrame(frame);
                }

                return;
            }

            if (mCaptureStatus == CaptureStatus.PreCapture)
            {
                // auto capture each time. status change to capture
                long autoCaptureCountdown = currentMillis - lastAutocaptureTime;
                if (autoCaptureCountdown > autoCaptureTimeOut)
                {
                    // Timeout elapsed: transition to Capture and fall through to
                    // the capture path below.
                    lastAutocaptureTime = currentMillis;
                    mCaptureStatus      = CaptureStatus.Capture;
                    lbCaptureText.Text  = "Capturing ....";
                }
                else
                {
                    lbCaptureText.Text = "Capture in " + autoCaptureCountdown + " milliseconds";

                    // Still counting down: just throttle-preview the frame.
                    if (currentMillis - lastMillisecond > delayMillisecondToRecord)
                    {
                        Debug.WriteLine("Show image");
                        lastMillisecond = currentMillis;
                        showFrameInUIByFrame(frame);
                    }

                    return;
                }
            }
            if (mCaptureStatus != CaptureStatus.Capture)
            {
                return;
            }
            // Mark this frame as consumed so only one frame is captured per sign.
            mCaptureStatus = CaptureStatus.FinishCapture;

            // Leap Motion Frame will be convert to LMFrame
            LMFrame lmFrame;

            try
            {
                lmFrame = new LMFrame(frame, choosedSign);
            }
            catch (Exception e)
            {
                // NOTE(review): "\\n" writes a literal backslash-n into the debug
                // file, not a newline — confirm whether that is intended.
                FileHelper.saveDebugString("LMFrame create : " + "frame data: " + frame.ToString() + "\\n" + e.Data.ToString());
                return;
            }

            // Skip frames without a left camera image.
            if (lmFrame.LeftCamImg == null)
            {
                return;
            }


            // Save test data
            currentLeapMotionFrame = frame;

            showFrameInUI(lmFrame);

            saveJSONData(lmFrame);

            // End the auto-capture run once the requested file count is reached;
            // otherwise advance to the next capture.
            if (autoCaptureCount + 2 > autoCaptureFileCount)
            {
                lbCaptureText.Text = "Auto capture finished.";
                mCaptureStatus     = CaptureStatus.PreviewSign;
                reloadListHumanSign();
                updateUIByCaptureStatus();
            }
            else
            {
                autoCaptureCount++;
            }
        }
    /// <summary>
    /// Initializes MediaCapture on the first available frame source group in
    /// shared read-only mode, logs every frame source and its supported formats,
    /// selects the frame source at index <c>id</c> with its first supported
    /// format, and creates the frame reader.
    /// </summary>
    /// <returns>True on success; false on incorrect status or any setup failure.</returns>
    private async Task <bool> InitializeMediaCaptureAsync()
    {
        if (captureStatus != CaptureStatus.Clean)
        {
            Debug.Log(TAG + " " + id + ": InitializeMediaCaptureAsync() fails because of incorrect status");
            return(false);
        }

        // Already initialized elsewhere; refuse to initialize twice.
        if (mediaCapture != null)
        {
            return(false);
        }

        var allGroups = await MediaFrameSourceGroup.FindAllAsync();

        foreach (var group in allGroups)
        {
            Debug.Log(group.DisplayName + ", " + group.Id);
        }

        if (allGroups.Count <= 0)
        {
            Debug.Log(TAG + " " + id + ": InitializeMediaCaptureAsync() fails because there is no MediaFrameSourceGroup");
            return(false);
        }

        // Initialize mediacapture with the source group.
        mediaCapture = new MediaCapture();
        var settings = new MediaCaptureInitializationSettings {
            SourceGroup = allGroups[0],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.SharedReadOnly,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };

        await mediaCapture.InitializeAsync(settings);

        Debug.Log(TAG + " " + id + ": MediaCapture is successfully initialized in shared mode.");

        // logging all frame source information
        string logString = "";

        foreach (var frameSource in mediaCapture.FrameSources)
        {
            var info = frameSource.Value.Info;
            logString += info.Id + ", " + info.MediaStreamType + ", " + info.SourceKind + "\n";
            logString += "Total number of SupportedFormats is " + frameSource.Value.SupportedFormats.Count + "\n";
            foreach (var format in frameSource.Value.SupportedFormats)
            {
                logString += format.VideoFormat.Width + " x " + format.VideoFormat.Height + ", Major type: " + format.MajorType + ", Subtype: " + format.Subtype +
                             ", Framerate: " + format.FrameRate.Numerator + "/" + format.FrameRate.Denominator + "\n";
            }
        }
        Debug.Log(logString);
        // NOTE(review): ElementAt(id) relies on the enumeration order of the
        // FrameSources dictionary values — confirm this index is stable.
        MediaFrameSource targetFrameSource = mediaCapture.FrameSources.Values.ElementAt(id);
        MediaFrameFormat targetResFormat   = targetFrameSource.SupportedFormats[0];

        try {
            // choose the smallest resolution
            //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => x.VideoFormat.Width * x.VideoFormat.Height).FirstOrDefault();
            // choose the specific resolution
            //var targetResFormat = mediaFrameSourceVideoPreview.SupportedFormats.OrderBy(x => (x.VideoFormat.Width == 1344 && x.VideoFormat.Height == 756)).FirstOrDefault();
            await targetFrameSource.SetFormatAsync(targetResFormat);

            frameReader = await mediaCapture.CreateFrameReaderAsync(targetFrameSource, targetResFormat.Subtype);

            frameReader.FrameArrived += OnFrameArrived;
            videoWidth  = Convert.ToInt32(targetResFormat.VideoFormat.Width);
            videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
            Debug.Log(TAG + " " + id + ": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
                      ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator +
                      ", Major type: " + targetResFormat.MajorType + ", Subtype: " + targetResFormat.Subtype);
        }
        catch (Exception e) {
            Debug.Log(TAG + " " + id + ": FrameReader is not initialized");
            Debug.Log(TAG + " " + id + ": Exception: " + e);
            return(false);
        }

        captureStatus = CaptureStatus.Initialized;
        return(true);
    }
    /// <summary>
    /// Initializes MediaCapture on the source group at index <c>group_id</c> in
    /// shared read-only mode, selects the frame source at index <c>id</c> with
    /// its first supported format, and creates the frame reader.
    /// </summary>
    /// <returns>True on success; false on incorrect status or any setup failure.</returns>
    private async Task <bool> InitializeMediaCaptureAsync()
    {
        if (captureStatus != CaptureStatus.Clean)
        {
            Debug.Log(": InitializeMediaCaptureAsync() fails because of incorrect status");
            return(false);
        }

        // Already initialized elsewhere; refuse to initialize twice.
        if (mediaCapture != null)
        {
            return(false);
        }

        var allGroups = await MediaFrameSourceGroup.FindAllAsync();

        foreach (var group in allGroups)
        {
            Debug.Log(group.DisplayName + ", " + group.Id);
        }

        if (allGroups.Count <= 0)
        {
            Debug.Log(": InitializeMediaCaptureAsync() fails because there is no MediaFrameSourceGroup");
            return(false);
        }

        // Initialize mediacapture with the source group.
        mediaCapture = new MediaCapture();
        var settings = new MediaCaptureInitializationSettings {
            SourceGroup = allGroups[group_id],
            // This media capture can share streaming with other apps.
            SharingMode = MediaCaptureSharingMode.SharedReadOnly,
            // Only stream video and don't initialize audio capture devices.
            StreamingCaptureMode = StreamingCaptureMode.Video,
            // Set to CPU to ensure frames always contain CPU SoftwareBitmap images
            // instead of preferring GPU D3DSurface images.
            MemoryPreference = MediaCaptureMemoryPreference.Cpu
        };

        await mediaCapture.InitializeAsync(settings);

        Debug.Log(": MediaCapture is successfully initialized in shared mode.");

        // NOTE(review): the source GROUP is chosen by group_id but the frame
        // SOURCE by a different index `id`, relying on dictionary enumeration
        // order — confirm both indices are intentional and stable.
        MediaFrameSource targetFrameSource = mediaCapture.FrameSources.Values.ElementAt(id);
        MediaFrameFormat targetResFormat   = targetFrameSource.SupportedFormats[0];

        try {
            await targetFrameSource.SetFormatAsync(targetResFormat);

            frameReader = await mediaCapture.CreateFrameReaderAsync(targetFrameSource, targetResFormat.Subtype);

            frameReader.FrameArrived += OnFrameArrived;
            videoWidth  = Convert.ToInt32(targetResFormat.VideoFormat.Width);
            videoHeight = Convert.ToInt32(targetResFormat.VideoFormat.Height);
            Debug.Log(": FrameReader is successfully initialized, " + videoWidth + "x" + videoHeight +
                      ", Framerate: " + targetResFormat.FrameRate.Numerator + "/" + targetResFormat.FrameRate.Denominator +
                      ", Major type: " + targetResFormat.MajorType + ", Subtype: " + targetResFormat.Subtype);
        }
        catch (Exception e) {
            Debug.Log(": FrameReader is not initialized");
            Debug.Log(": Exception: " + e);
            return(false);
        }

        captureStatus = CaptureStatus.Initialized;
        return(true);
    }
Example #22
0
 /// <summary>
 /// Switches to the Capture state and refreshes the UI; the frame handler
 /// records the next Leap Motion frame while in this state.
 /// </summary>
 private void btnCapture_Click(object sender, EventArgs e)
 {
     mCaptureStatus = CaptureStatus.Capture;
     updateUIByCaptureStatus();
 }
Example #23
0
        /// <summary>
        /// Initialize the attributes of the capture session and start capture.
        /// Validates preconditions (status, ffmpeg executable, input texture),
        /// syncs the save folder with <c>Config</c>, clamps the frame rate to
        /// [18, 120], selects GPU or software (ffmpeg) encoding, optionally
        /// wires up ffmpeg audio muxing, and starts the low-priority garbage
        /// collection thread.
        /// </summary>
        /// <returns>
        /// true if the session started; false when a precondition fails or the
        /// selected encoder fails to start (OnError is invoked before each
        /// false return).
        /// </returns>
        public bool StartCapture()
        {
            // Only one capture session may run at a time.
            if (status != CaptureStatus.READY)
            {
                Debug.LogWarningFormat(LOG_FORMAT, "Previous video capture session not finish yet!");
                OnError(this, CaptureErrorCode.VIDEO_CAPTURE_ALREADY_IN_PROGRESS);
                return(false);
            }

            // Capture requires the ffmpeg executable configured in Config.
            if (!File.Exists(Config.ffmpegPath))
            {
                Debug.LogErrorFormat(LOG_FORMAT,
                                     "FFmpeg not found, please follow document and add ffmpeg executable before start capture!");
                OnError(this, CaptureErrorCode.FFMPEG_NOT_FOUND);
                return(false);
            }

            // Frames are read from this render texture; nothing to capture without it.
            if (inputTexture == null)
            {
                Debug.LogErrorFormat(LOG_FORMAT, "Input render texture not found, please attach input render texture!");
                OnError(this, CaptureErrorCode.INPUT_TEXTURE_NOT_FOUND);
                return(false);
            }

            // Keep this component's saveFolder and the global Config in sync:
            // fall back to the configured default when unset, otherwise push
            // this component's value into Config.
            if (string.IsNullOrEmpty(saveFolder))
            {
                saveFolder = Config.saveFolder;
            }
            else
            {
                Config.saveFolder = saveFolder;
            }

            // Clamp frame rate to the supported range [18, 120].
            if (frameRate < 18)
            {
                frameRate = 18;
                Debug.LogFormat(LOG_FORMAT, "Minimum frame rate is 18, set frame rate to 18.");
            }

            if (frameRate > 120)
            {
                frameRate = 120;
                Debug.LogFormat(LOG_FORMAT, "Maximum frame rate is 120, set frame rate to 120.");
            }

            // Offline render mode cannot capture audio; force it off.
            if (captureAudio && offlineRender)
            {
                Debug.LogFormat(LOG_FORMAT, "Audio capture not supported in offline render mode, disable audio capture!");
                captureAudio = false;
            }

#if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
            // Windows: enable GPU encoding only when licensed, supported and
            // not explicitly disabled; otherwise fall back to software encoding.
            if (Config.isFreeTrial())
            {
                Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported in free trial version, fall back to software encoding.");
                hardwareEncoding = false;
            }
            else if (!softwareEncodingOnly &&
                     gpuEncoder.instantiated &&
                     gpuEncoder.IsSupported())
            {
                hardwareEncoding = true;
            }
            else
            {
                Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported in this device, fall back to software encoding.");
            }
#endif

#if UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX
            // macOS: GPU encoding is never available.
            Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported on macOS system, fall back to software encoding.");
            hardwareEncoding = false;
#endif

            // Init ffmpeg audio capture: the FFmpegMuxer component is attached
            // to the scene's AudioListener, created once on first use.
            if (!hardwareEncoding && captureAudio && !FFmpegMuxer.singleton)
            {
                AudioListener listener = FindObjectOfType <AudioListener>();
                if (!listener)
                {
                    Debug.LogFormat(LOG_FORMAT, "AudioListener not found, disable audio capture!");
                    captureAudio = false;
                }
                else
                {
                    listener.gameObject.AddComponent <FFmpegMuxer>();
                }
            }

            if (hardwareEncoding)
            {
                // init GPU encoding settings
                GPUEncoderSettings();

                if (!gpuEncoder.StartCapture())
                {
                    OnError(this, CaptureErrorCode.VIDEO_CAPTURE_START_FAILED);
                    return(false);
                }
            }
            else
            {
                // init ffmpeg encoding settings
                FFmpegEncoderSettings();

                if (!ffmpegEncoder.StartCapture())
                {
                    OnError(this, CaptureErrorCode.VIDEO_CAPTURE_START_FAILED);
                    return(false);
                }

                if (captureAudio)
                {
                    // start ffmpeg audio encoding (shared muxer is started
                    // once; this capture then attaches to it)
                    if (!FFmpegMuxer.singleton.captureStarted)
                    {
                        FFmpegMuxer.singleton.StartCapture();
                    }
                    FFmpegMuxer.singleton.AttachVideoCapture(this);
                }
            }

            // Update current status.
            status = CaptureStatus.STARTED;

            // Start garbage collect thread.
            if (!garbageThreadRunning)
            {
                garbageThreadRunning = true;

                // Tear down any stale thread left over from a previous
                // session before starting a fresh one.
                if (garbageCollectionThread != null &&
                    garbageCollectionThread.IsAlive)
                {
                    garbageCollectionThread.Abort();
                    garbageCollectionThread = null;
                }

                garbageCollectionThread              = new Thread(GarbageCollectionProcess);
                garbageCollectionThread.Priority     = System.Threading.ThreadPriority.Lowest;
                garbageCollectionThread.IsBackground = true;
                garbageCollectionThread.Start();
            }

            if (offlineRender)
            {
                // Backup maximumDeltaTime states.
                // NOTE(review): pinning maximumDeltaTime to fixedDeltaTime
                // presumably makes stepping deterministic during offline
                // render — confirm against StopCapture/restore logic.
                originalMaximumDeltaTime = Time.maximumDeltaTime;
                Time.maximumDeltaTime    = Time.fixedDeltaTime;
            }

            Debug.LogFormat(LOG_FORMAT, "Video capture session started.");
            return(true);
        }
        /// <summary>
        /// Scene-view tool GUI: lets the user drag out a rectangular capture
        /// area and draws the floating "Screen Capture" tool window.
        /// </summary>
        /// <param name="window">Editor window the tool GUI is rendered into.</param>
        public override void OnToolGUI(EditorWindow window)
        {
            Event current = Event.current;

            // Register a default control so mouse events not claimed by any
            // other handle reach this tool.
            if (current.type == EventType.Layout)
            {
                HandleUtility.AddDefaultControl(0);
            }

            // Shared tail of the MouseUp / MouseLeaveWindow branches:
            // screenshot the dragged-out rectangle and reset the selection.
            void CaptureSelectionAndReset()
            {
                SimpleScreencapture.DoCaptureOnSelection(mouseRect);
                Debug.Log(mouseRect + " : Captured.");
                mouseRect = Rect.zero;
            }

            if (current.type == EventType.MouseDown && currentCaptureStatus != CaptureStatus.Idle)
            {
                // Anchor the selection rectangle at the press position
                // (GUI-space coordinates).
                mouseRect.x = current.mousePosition.x;
                mouseRect.y = current.mousePosition.y;

                current.Use();

                Debug.Log(mouseRect);
            }

            if (current.type == EventType.MouseDrag && currentCaptureStatus != CaptureStatus.Idle)
            {
                // Grow the rectangle toward the current pointer position.
                mouseRect.width  = Mathf.Abs(current.mousePosition.x - mouseRect.x);
                mouseRect.height = Mathf.Abs(current.mousePosition.y - mouseRect.y);

                currentCaptureStatus = CaptureStatus.CapturingOnArea;

                current.Use();
            }

            if (current.type == EventType.MouseUp && currentCaptureStatus != CaptureStatus.Idle)
            {
                current.Use();

                currentCaptureStatus = CaptureStatus.Idle;

                CaptureSelectionAndReset();
            }

            // Leaving the window mid-drag also completes the capture.
            if (current.type == EventType.MouseLeaveWindow && currentCaptureStatus == CaptureStatus.CapturingOnArea)
            {
                current.Use();

                CaptureSelectionAndReset();
                currentCaptureStatus = CaptureStatus.Idle;
            }

            Handles.BeginGUI();

            // Visual feedback for the area currently being selected.
            if (currentCaptureStatus != CaptureStatus.Idle)
            {
                Handles.DrawSolidRectangleWithOutline(mouseRect, new Color(0.5f, 0.5f, 0.5f, 0.25f), Color.yellow);
            }

            // Floating tool window pinned near the bottom-right corner.
            GUILayout.Window(1234, new Rect(window.position.width - 170, window.position.height - 125, 160, 115), (id) =>
            {
                GUILayout.BeginVertical("Box");

                status = EditorGUILayout.BeginFoldoutHeaderGroup(status, new GUIContent("Resolution"));

                if (status)
                {
                    SimpleScreencapture.selectedResolution = EditorGUILayout.Popup(SimpleScreencapture.selectedResolution, SimpleScreencapture.GetRenderResolutions());

                    // Index 4 exposes the scale slider — presumably the
                    // "custom" resolution entry; verify against GetRenderResolutions().
                    if (SimpleScreencapture.selectedResolution == 4)
                    {
                        hSliderValue = EditorGUILayout.Slider(hSliderValue, 0.05f, 8f);
                    }
                }

                EditorGUILayout.EndFoldoutHeaderGroup();

                GUILayout.EndVertical();

                GUILayout.BeginHorizontal("Box");

                GUIStyle style = new GUIStyle(GUI.skin.button);

                // Capture the current scene at the selected resolution/scale.
                if (GUILayout.Button(captureIconContent, style, GUILayout.Width(32), GUILayout.Height(32)))
                {
                    currentCaptureStatus = CaptureStatus.Idle;
                    SimpleScreencapture.DoCaptureOnCurrentSceneBySize(SimpleScreencapture.selectedResolution, hSliderValue);
                }

                // Enter area-selection mode; the drag handlers above take over.
                if (GUILayout.Button(new GUIContent(selCaptureIcon, "Capture Selected"), GUILayout.Width(32), GUILayout.Height(32)))
                {
                    currentCaptureStatus = CaptureStatus.PrepareForCaptureArea;
                    SceneView.lastActiveSceneView.ShowNotification(new GUIContent("Select an area for capturing."), 3);
                }

                // Capture from all active cameras.
                if (GUILayout.Button(multiCaptureIconContent, style, GUILayout.Width(32), GUILayout.Height(32)))
                {
                    currentCaptureStatus = CaptureStatus.Idle;
                    SimpleScreencapture.DoCaptureOnActiveCameras(hSliderValue);
                }

                // Reveal the screenshot output location.
                if (GUILayout.Button(exploreIconContent, style, GUILayout.Width(32), GUILayout.Height(32)))
                {
                    currentCaptureStatus = CaptureStatus.Idle;
                    SimpleScreencapture.DoExploreScreenshotDepot();
                }

                GUILayout.EndHorizontal();

                GUI.DragWindow();
            }, new GUIContent("Screen Capture"), GUILayout.Width(150));

            Handles.EndGUI();
        }