Example 1
    private async void Start()
    {
        try
        {
            // Get components
            _user = GetComponent<UserInput>();

            // Show startup status
            StatusBlock.text = "Starting";
#if ENABLE_WINMD_SUPPORT
            // Start camera capture
            _mediaCapturer = new MediaCapturer();
            await _mediaCapturer.StartCapturing();

            StatusBlock.text = $"Camera started. Running!";

            // Run processing loop in separate parallel Task
            _isRunning = true;
            await Task.Run(async () =>
            {
                var client = new CSClient();
                while (_isRunning)
                {
                    using (var videoFrame = _mediaCapturer.GetLatestFrame())
                    {
                        var bitmap = videoFrame?.SoftwareBitmap;
                        if (bitmap != null)
                        {
                            var data   = await EncodedBytes(bitmap, Windows.Graphics.Imaging.BitmapEncoder.JpegEncoderId);
                            var result = await client.FaceAnalysesAsync(data);

                            StringBuilder builder = new StringBuilder();
                            if (result != null)
                            {
                                foreach (var item in result)
                                {
                                    builder.AppendLine($"Face ID: {item.faceId}");
                                    builder.AppendLine("Emotions");
                                    builder.AppendLine($"Anger: {item.faceAttributes.emotion.anger}");
                                    builder.AppendLine($"Happy: {item.faceAttributes.emotion.happiness}");
                                }
                            }
                            UnityEngine.WSA.Application.InvokeOnAppThread(() =>
                            {
                                StatusBlock.text = builder.ToString();
                            }, true);
                        }
                    }
                }
            });
#endif
        }
        catch (Exception ex)
        {
            StatusBlock.text = $"Error init: {ex.Message}";
            Debug.LogError(ex);
        }
    }
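
Example 1 relies on an EncodedBytes helper that is not shown: it turns the captured SoftwareBitmap into a JPEG byte array before the image is sent to the face-analysis client. A minimal sketch of such a helper, assuming it sits in the same script and uses the standard BitmapEncoder/InMemoryRandomAccessStream pattern (only the name and signature come from the call site above; the body is an assumption):

#if ENABLE_WINMD_SUPPORT
    // Assumed helper: encodes a SoftwareBitmap into a byte[] using the given encoder (e.g. JPEG).
    // Needs: using System; using System.Runtime.InteropServices.WindowsRuntime;
    //        using System.Threading.Tasks; using Windows.Graphics.Imaging; using Windows.Storage.Streams;
    private async Task<byte[]> EncodedBytes(SoftwareBitmap bitmap, Guid encoderId)
    {
        using (var stream = new InMemoryRandomAccessStream())
        {
            // Write the bitmap into an in-memory stream with the requested encoder
            var encoder = await BitmapEncoder.CreateAsync(encoderId, stream);
            encoder.SetSoftwareBitmap(bitmap);
            await encoder.FlushAsync();

            // Copy the encoded stream back into a managed byte array
            var bytes = new byte[stream.Size];
            stream.Seek(0);
            await stream.ReadAsync(bytes.AsBuffer(), (uint)stream.Size, InputStreamOptions.None);
            return bytes;
        }
    }
#endif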
Example 2
    async void Start()
    {
        try
        {
            // Get components
            _user = GetComponent<UserInput>();

            // Load model
            StatusBlock.text = $"Loading ONNX ...";
#if ENABLE_WINMD_SUPPORT
            _file = await Package.Current.InstalledLocation.GetFileAsync("model.onnx");

            _dnnModel = new ObjectDetection(new[] { "Arc Mouse", "Surface Book", "Surface Pro" });
            await _dnnModel.Init(_file);

            StatusBlock.text = $"Loaded model. Starting camera...";

            // Configure camera to return frames fitting the model input size
            _mediaCapturer = new MediaCapturer();
            await _mediaCapturer.StartCapturing(416, 416);

            StatusBlock.text = $"Camera started. Running!";

            // Run processing loop in separate parallel Task
            _isRunning = true;
            await Task.Run(async () =>
            {
                while (_isRunning)
                {
                    using (var videoFrame = _mediaCapturer.GetLatestFrame())
                    {
                        if (videoFrame != null)
                        {
                            await EvaluateFrame(videoFrame);
                        }
                    }
                }
            });
#endif
        }
        catch (Exception ex)
        {
#if ENABLE_WINMD_SUPPORT
            string filename = _file != null ? _file.Name : "nofile";
            StatusBlock.text = $"File: {filename}, Error init: {ex.Message}";
#endif
            Debug.LogError(ex);
        }
    }
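
Every example uses a project-specific MediaCapturer with StartCapturing, GetLatestFrame and (in Example 4) IsCapturing/GetTestFrame. Its implementation is not included here; a rough sketch on top of the UWP MediaCapture/MediaFrameReader APIs could look like the following. Only the class and member names come from the call sites; the parameter types and the body are assumptions, and the requested width/height is not used to select a matching camera format:

#if ENABLE_WINMD_SUPPORT
    // Assumed sketch of the MediaCapturer used above, built on Windows.Media.Capture.Frames.
    // Needs: using System.Linq; using System.Threading.Tasks; using Windows.Media;
    //        using Windows.Media.Capture; using Windows.Media.Capture.Frames;
    public class MediaCapturer
    {
        private MediaCapture _capture;
        private MediaFrameReader _reader;

        public bool IsCapturing { get; private set; }

        public async Task StartCapturing(uint width = 0, uint height = 0)
        {
            // Initialize the camera for CPU-accessible video frames
            _capture = new MediaCapture();
            await _capture.InitializeAsync(new MediaCaptureInitializationSettings
            {
                StreamingCaptureMode = StreamingCaptureMode.Video,
                MemoryPreference = MediaCaptureMemoryPreference.Cpu
            });

            // Use the first color frame source; a real implementation would also
            // pick a format close to the requested width/height here
            var source = _capture.FrameSources.Values
                .First(s => s.Info.SourceKind == MediaFrameSourceKind.Color);

            _reader = await _capture.CreateFrameReaderAsync(source);
            await _reader.StartAsync();
            IsCapturing = true;
        }

        public VideoFrame GetLatestFrame()
        {
            // Returns null when no new frame has arrived yet; the caller disposes the VideoFrame
            var frameReference = _reader?.TryAcquireLatestFrame();
            return frameReference?.VideoMediaFrame?.GetVideoFrame();
        }
    }
#endif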
Example 3
    async void Start()
    {
        try
        {
            // Get components
            _tts          = GetComponent<TextToSpeech>();
            _user         = GetComponent<UserInput>();
            _user.Tapped += SayLastSeenObject;

            // Load model
            StatusBlock.text = $"Loading {SqueezeNetModel.ModelFileName} ...";
            _dnnModel        = new SqueezeNetModel();
            await _dnnModel.LoadModelAsync(ShouldUseGpu);

            StatusBlock.text = $"Loaded model. Starting camera...";

#if ENABLE_WINMD_SUPPORT
            // Configure camera to return frames fitting the model input size
            _mediaCapturer = new MediaCapturer();
            await _mediaCapturer.StartCapturing(
                _dnnModel.InputWidth,
                _dnnModel.InputHeight);

            StatusBlock.text = $"Camera started. Running!";

            // Run processing loop in separate parallel Task
            _isRunning = true;
            await Task.Run(async () =>
            {
                while (_isRunning)
                {
                    using (var videoFrame = _mediaCapturer.GetLatestFrame())
                    {
                        await EvaluateFrame(videoFrame);
                    }
                }
            });
#endif
        }
        catch (Exception ex)
        {
            StatusBlock.text = $"Error init: {ex.Message}";
            Debug.LogError(ex);
        }
    }
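
The loop in Examples 2–4 hands each frame to an EvaluateFrame method that is not reproduced here. For the SqueezeNet variant it would evaluate the frame with the loaded model and push the result to the UI thread, just as Example 1 does for the face results. In the sketch below, _dnnModel.EvaluateAsync and the TopResultsFormatted property on its result are hypothetical placeholders for whatever the project's model wrapper actually exposes:

#if ENABLE_WINMD_SUPPORT
    // Assumed sketch: evaluates a single camera frame and shows the result on the app thread.
    // _dnnModel.EvaluateAsync and result.TopResultsFormatted are hypothetical members.
    private async Task EvaluateFrame(Windows.Media.VideoFrame frame)
    {
        if (frame == null)
        {
            return;
        }

        var result = await _dnnModel.EvaluateAsync(frame);

        // Unity objects may only be touched from the app thread
        UnityEngine.WSA.Application.InvokeOnAppThread(() =>
        {
            StatusBlock.text = result.TopResultsFormatted;
        }, false);
    }
#endif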
Example 4
    async void Start()
    {
        try
        {
            // Get components
            _tts  = GetComponent<TextToSpeech>();
            _user = GetComponent<UserInput>();

            // Load model
            StatusBlock.text = $"Loading {SqueezeNetModel.ModelFileName} ...";
            _dnnModel        = new SqueezeNetModel();
            await _dnnModel.LoadModelAsync(ShouldUseGpu);

            StatusBlock.text = $"Loaded model. Starting camera...";

#if ENABLE_WINMD_SUPPORT
            // Configure camera to return frames fitting the model input size
            try
            {
                _mediaCapturer = new MediaCapturer();
                await _mediaCapturer.StartCapturing(
                    _dnnModel.InputWidth,
                    _dnnModel.InputHeight);

                StatusBlock.text = $"Camera started. Running!";
            }
            catch (Exception ex)
            {
                StatusBlock.text = $"Failed to start camera: {ex.Message}. Using loaded/picked image.";
            }
            // Preload a fallback frame if there's no camera, e.g. when testing in the emulator
            if (!_mediaCapturer.IsCapturing)
            {
                await _mediaCapturer.GetTestFrame();
            }

            // Run processing loop in separate parallel Task
            _isRunning = true;
            await Task.Run(async () =>
            {
                while (_isRunning)
                {
                    if (_mediaCapturer.IsCapturing)
                    {
                        using (var videoFrame = _mediaCapturer.GetLatestFrame())
                        {
                            await EvaluateFrame(videoFrame);
                        }
                    }
                    // Fall back to the test frame if there's no camera, e.g. when testing in the emulator
                    else
                    {
                        var loadedFrame = await _mediaCapturer.GetTestFrame();
                        await EvaluateFrame(loadedFrame);
                    }
                }
            });
#endif
        }
        catch (Exception ex)
        {
            StatusBlock.text = $"Error init: {ex.Message}";
            Debug.LogError(ex);
        }
    }
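
Example 4 additionally calls _mediaCapturer.GetTestFrame() to obtain a fallback frame when no camera is available, for instance in the emulator. A possible sketch of such a member on the MediaCapturer, loading a packaged image and wrapping it in a VideoFrame (the file name "test.jpg" and its location are assumptions):

#if ENABLE_WINMD_SUPPORT
    // Assumed sketch: loads a packaged test image and wraps it in a VideoFrame.
    // The file name "test.jpg" is an assumption. Needs: using System.Threading.Tasks;
    public async Task<Windows.Media.VideoFrame> GetTestFrame()
    {
        var file = await Windows.ApplicationModel.Package.Current.InstalledLocation
            .GetFileAsync("test.jpg");

        using (var stream = await file.OpenAsync(Windows.Storage.FileAccessMode.Read))
        {
            // Decode to BGRA8 premultiplied, the format typically expected by WinML and the encoders
            var decoder = await Windows.Graphics.Imaging.BitmapDecoder.CreateAsync(stream);
            var bitmap = await decoder.GetSoftwareBitmapAsync(
                Windows.Graphics.Imaging.BitmapPixelFormat.Bgra8,
                Windows.Graphics.Imaging.BitmapAlphaMode.Premultiplied);

            return Windows.Media.VideoFrame.CreateWithSoftwareBitmap(bitmap);
        }
    }
#endif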