示例#1
0
    /// <summary>
    /// Picks the largest supported camera resolution and its highest frame
    /// rate, then asynchronously creates a VideoCapture instance (without
    /// hologram content) and starts video mode with app + mic audio.
    /// Progress and failures are written to the info log.
    /// </summary>
    void StartVideoCaptureTest()
    {
        DebugManager.Instance.PrintToInfoLog("Cam Init");

        // Largest available resolution by total pixel count.
        Resolution resolution = VideoCapture.SupportedResolutions
            .OrderByDescending(res => res.width * res.height)
            .First();

        DebugManager.Instance.PrintToInfoLog(resolution.ToString());

        // Fastest frame rate supported at that resolution.
        float frameRate = VideoCapture.GetSupportedFrameRatesForResolution(resolution)
            .OrderByDescending(fps => fps)
            .First();

        DebugManager.Instance.PrintToInfoLog("Res: " + resolution + ", fps:" + frameRate);

        VideoCapture.CreateAsync(false, capture =>
        {
            if (capture == null)
            {
                DebugManager.Instance.PrintToInfoLog("Failed to create VideoCapture Instance!");
                return;
            }

            m_VideoCapture = capture;
            DebugManager.Instance.PrintToInfoLog("Created VideoCapture Instance!");

            // hologramOpacity 0 => record the camera feed only, no holograms.
            CameraParameters cameraParameters = new CameraParameters
            {
                hologramOpacity        = 0.0f,
                frameRate              = frameRate,
                cameraResolutionWidth  = resolution.width,
                cameraResolutionHeight = resolution.height,
                pixelFormat            = CapturePixelFormat.BGRA32
            };

            m_VideoCapture.StartVideoModeAsync(cameraParameters,
                                               VideoCapture.AudioState.ApplicationAndMicAudio,
                                               OnStartedVideoCaptureMode);
        });
    }
示例#2
0
    /// <summary>
    /// Toggles the capture UI icons into the "recording" state, then
    /// asynchronously creates a VideoCapture instance and starts video mode
    /// (mic audio only) at the largest supported resolution and its highest
    /// frame rate.
    /// </summary>
    public void OnVideoCapture()
    {
        VideoCapture_Icon.SetActive(false);
        VideoCapture_Icon2.SetActive(true);

        // Largest available resolution by total pixel count.
        Resolution cameraResolution = VideoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();

        VideoCapture.CreateAsync(false, delegate(VideoCapture videoCapture) {
            if (videoCapture != null)
            {
                m_VideoCapture        = videoCapture;
                // Fastest frame rate supported at the chosen resolution.
                float cameraFramerate = VideoCapture.GetSupportedFrameRatesForResolution(cameraResolution).OrderByDescending((fps) => fps).First();

                // hologramOpacity 0 => record the camera feed only, no holograms.
                CameraParameters cameraParameters       = new CameraParameters();
                cameraParameters.hologramOpacity        = 0.0f;
                cameraParameters.frameRate              = cameraFramerate;
                cameraParameters.cameraResolutionWidth  = cameraResolution.width;
                cameraParameters.cameraResolutionHeight = cameraResolution.height;
                cameraParameters.pixelFormat            = CapturePixelFormat.BGRA32;

                m_VideoCapture.StartVideoModeAsync(cameraParameters,
                                                   VideoCapture.AudioState.MicAudio,
                                                   OnStartedVideoCaptureMode);
            }
            else
            {
                // Creation failed: previously this was silently ignored and the
                // icons stayed stuck in the "recording" state. Log the failure
                // and restore the idle UI so the user can retry.
                Debug.LogError("Failed to create VideoCapture Instance!");
                VideoCapture_Icon.SetActive(true);
                VideoCapture_Icon2.SetActive(false);
            }
        });
    }
    /// <summary>
    /// Enforces a single CamStreamManager instance and begins asynchronous
    /// creation of a color-source VideoCapture; the result is delivered to
    /// OnVideoCaptureInstanceCreated.
    /// </summary>
    private void Awake()
    {
        if (instance == null)
        {
            instance = this;
            // Request only the color camera stream.
            VideoCapture.CreateAsync(OnVideoCaptureInstanceCreated, new[] { SourceKind.COLOR });
        }
        else
        {
            Debug.LogError("Cannot create two instances of CamStreamManager.");
        }
    }
示例#4
0
    // Loads the Barracuda model and creates its inference worker, then sets up
    // the image input source. The input path is chosen at compile time:
    //   WEBCAM && UNITY_WSA  -> HoloLens/UWP VideoCapture stream
    //   WEBCAM && !UNITY_WSA -> regular WebCamTexture
    //   !WEBCAM              -> a single static input image rendered to a Tensor
    void Start()
    {
        // Cap rendering at 60 fps so capture/inference get predictable timing.
        Application.targetFrameRate = 60;

        m_RuntimeModel = ModelLoader.Load(inputModel, false);
        m_Worker       = WorkerFactory.CreateWorker(WorkerFactory.Type.ComputePrecompiled, m_RuntimeModel, false);

#if (WEBCAM)
#if UNITY_WSA
        // Largest supported resolution by pixel count, and its fastest frame rate.
        Resolution cameraResolution = VideoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
        Debug.Log(cameraResolution);

        float cameraFramerate = VideoCapture.GetSupportedFrameRatesForResolution(cameraResolution).OrderByDescending((fps) => fps).First();
        Debug.Log(cameraFramerate);

        VideoCapture.CreateAsync(false, delegate(VideoCapture videoCapture)
        {
            if (videoCapture != null)
            {
                m_VideoCapture = videoCapture;
                //Debug.Log("Created VideoCapture Instance!");

                // hologramOpacity 0 => capture the raw camera feed, no holograms.
                CameraParameters cameraParameters       = new CameraParameters();
                cameraParameters.hologramOpacity        = 0.0f;
                cameraParameters.frameRate              = cameraFramerate;
                cameraParameters.cameraResolutionWidth  = cameraResolution.width;
                cameraParameters.cameraResolutionHeight = cameraResolution.height;
                cameraParameters.pixelFormat            = CapturePixelFormat.BGRA32;

                m_VideoCapture.StartVideoModeAsync(cameraParameters,
                                                   VideoCapture.AudioState.ApplicationAndMicAudio,
                                                   OnStartedVideoCaptureMode);
            }
            else
            {
                Debug.LogError("Failed to create VideoCapture Instance!");
            }
        });
#else
        // Non-UWP platforms: use the default webcam via WebCamTexture.
        m_WebcamTexture = new WebCamTexture();
        m_WebcamTexture.Play();
#endif
#else
        // Static-image path: blit the input image through the postprocess
        // material into a temporary RT, then wrap it as a 3-channel Tensor.
        // NOTE(review): targetRT is never released here — presumably freed
        // elsewhere (or leaked); confirm RenderTexture.ReleaseTemporary is
        // called after m_Input is consumed.
        var targetRT = RenderTexture.GetTemporary(inputResolutionX, inputResolutionY, 0);
        Graphics.Blit(inputImage, targetRT, postprocessMaterial);
        m_Input = new Tensor(targetRT, 3);

        //m_Input = new Tensor(1, inputResolutionY, inputResolutionX, 3);
#endif
    }
示例#5
0
    /// <summary>
    /// Starts a recording session if one is not already in progress. The
    /// VideoCapture instance is only created on the UWP x86 player; on other
    /// platforms the recording flag is still set but no capture is started.
    /// </summary>
    public void TakeVideo()
    {
        //VideoImage.gameObject.SetActive(false);
        //ShowImage.gameObject.SetActive(false);

        // Ignore repeated presses while a recording is already running.
        if (isRecording)
        {
            return;
        }

        isRecording = true;
        print("开始录像...");

        if (Application.platform == RuntimePlatform.WSAPlayerX86)
        {
            VideoCapture.CreateAsync(false, StartVideoCapture);
        }
    }
示例#6
0
        // Creates a VideoCapture instance (with hologram content mixed in,
        // first argument = true) and starts video mode with app + mic audio.
        // Only compiled for Windows Store builds outside the editor.
        // NOTE(review): cameraParameters is not declared in this method —
        // presumably a field on this class set elsewhere; confirm it is
        // initialized before this runs.
        void StartVideoCaptureTest()
        {
#if UNITY_METRO && !UNITY_EDITOR
            VideoCapture.CreateAsync(true, delegate(VideoCapture videoCapture)
            {
                if (videoCapture != null)
                {
                    m_VideoCapture = videoCapture;
                    Debug.Log("Created VideoCapture Instance!");

                    m_VideoCapture.StartVideoModeAsync(cameraParameters,
                                                       VideoCapture.AudioState.ApplicationAndMicAudio,
                                                       OnStartedVideoCaptureMode);
                }
                else
                {
                    Debug.LogError("Failed to create VideoCapture Instance!");
                }
            });
#endif
        }
示例#7
0
 /// <summary>
 /// Begins asynchronous creation of a VideoCapture instance (no hologram
 /// content); the result is delivered to OnVideoCaptureCreated.
 /// </summary>
 private void StartVideoCapture() => VideoCapture.CreateAsync(false, OnVideoCaptureCreated);
示例#8
0
 /// <summary>
 /// Begins asynchronous creation of a VideoCapture instance (no hologram
 /// content); OnVideoCaptureCreated receives the created instance.
 /// </summary>
 public void TakeVideo() => VideoCapture.CreateAsync(false, OnVideoCaptureCreated);
示例#9
0
 // Use this for initialization.
 // Kicks off creation of a VideoCapture instance (with hologram content,
 // first argument = true); onVideoCaptureCreated receives the result.
 void Start()
 {
     VideoCapture.CreateAsync(true, onVideoCaptureCreated);
     // NOTE(review): this flag is set before the async callback has fired,
     // so it reports "started" even if creation later fails — consider
     // moving it into onVideoCaptureCreated.
     recordingStarted = true;
     Debug.Log("--- Trying to start recording!");
 }
示例#10
0
 /// <summary>
 /// Stores the asset manager for later use by the capture callbacks, then
 /// begins asynchronous creation of a VideoCapture instance (no hologram
 /// content); OnVideoCaptureCreated receives the result.
 /// </summary>
 /// <param name="am">Asset manager retained for the capture pipeline.</param>
 public void TakeVideo(AssetManager am)
 {
     this.assetManager = am;
     VideoCapture.CreateAsync(false, OnVideoCaptureCreated);
 }
示例#11
0
 /// <summary>
 /// Begins asynchronous creation of a VideoCapture instance with hologram
 /// content included (first argument = true); OnVideoCaptureCreated
 /// receives the created instance.
 /// </summary>
 public void StartRecordingVideo() => VideoCapture.CreateAsync(true, OnVideoCaptureCreated);
示例#12
0
 // From https://docs.microsoft.com/en-us/windows/mixed-reality/locatable-camera-in-unity
 // Supposedly starts recording at some point, once a VideoCapture instance exists.
 // Creates the instance asynchronously (no hologram content);
 // OnVideoCaptureCreated receives the result.
 void Start()
 {
     Debug.Log("Pre Create Async");
     VideoCapture.CreateAsync(false, OnVideoCaptureCreated);
     // Note: this logs immediately — CreateAsync has only been queued,
     // not completed, when this line runs.
     Debug.Log("Post create async");
 }