Пример #1
0
    /// <summary>
    /// Boots the procedural simulation: seeds the RNG, starts music playback,
    /// precomputes the emotion track and builds the scene. Does nothing beyond
    /// seeding when <paramref name="data"/> is null.
    /// </summary>
    /// <param name="data">Precomputed track/emotion data; null skips the whole startup.</param>
    protected void RunSimulationInternal(DataContainer data)
    {
        //Application.targetFrameRate = 30;
        this.EmotionEngine = new EmotionEngine();
        // Fixed base plus current millisecond: reproducible-ish but varies per run.
        this.Seed          = 14041956 + System.DateTime.Now.Millisecond;
        this.RNG           = new System.Random(Seed);

        if (data != null)
        {
            this.CurrentTime      = 0f;
            this.Running          = true;
            this.musicSource.clip = musicTrack;
            this.musicSource.Play();

            // 1024 is presumably the analysis window size in samples — TODO confirm
            // against EmotionEngine.Initialize.
            EmotionEngine.Initialize(musicTrack.length, audioSignal, data, 1024);
            EmotionEngine.Precompute();

            this.EventDispatcher = gameObject.AddComponent <ProceduralEventDispatcher>();
            this.EventDispatcher.Initialize();

            debugPanel.ShowPanel();

            // Director must be alive before the scene is built
            ProceduralCameraDirector.Instance.InitializeDirector(EmotionEngine);

            ItemFactory.Instance.BuildItem(Quaternion.identity, Vector3.forward, Vector3.forward, sceneRootId);

            // ... and update the spatial grid after that
            ProceduralCameraDirector.Instance.InitializeGrid();
            ProceduralCameraDirector.Instance.StartFirstShot();
        }
    }
Пример #2
0
 /// <summary>
 /// Wires this view up to a timeline and builds a fresh, precomputed
 /// emotion engine over the given audio samples.
 /// </summary>
 /// <param name="timeline">Timeline UI whose duration drives the engine.</param>
 /// <param name="samples">Raw audio signal to analyze.</param>
 /// <param name="container">Track/emotion data fed to the engine.</param>
 /// <param name="windowSize">Analysis window size passed to the engine; defaults to the previously hard-coded 1024.</param>
 public void Initialize(UITimeline timeline, float[] samples, DataContainer container, int windowSize = 1024)
 {
     this.timeline = timeline;
     engine        = new EmotionEngine();
     engine.Initialize(timeline.Duration, samples, container, windowSize);
     engine.Precompute();
 }
Пример #3
0
    /// <summary>
    /// Samples the engine's emotion spectrum at the current normalized
    /// playback time, or returns an empty spectrum while not running.
    /// </summary>
    public EmotionSpectrum GetCurrentEmotion()
    {
        // Guard clause: outside a running simulation there is nothing to sample.
        if (!Running)
        {
            return new EmotionSpectrum();
        }

        return EmotionEngine.GetSpectrum(CurrentTimeNormalized);
    }
    /// <summary>
    /// Prepares the camera director: caches its dependencies, registers for
    /// procedural events and resets the shot state so no stale cut is active.
    /// </summary>
    /// <param name="engine">Precomputed emotion engine driving cut decisions.</param>
    public void InitializeDirector(EmotionEngine engine)
    {
        emotionEngine = engine;
        nextShot      = new ShotInformation();
        grid          = GetComponent <InterestPointGrid>();

        // Listen for emotion/beat events dispatched by the engine.
        ProceduralEngine.Instance.EventDispatcher.AddListener(this);

        // Start with an explicitly invalid shot so the first cut is forced.
        currentShot = new ShotInformation { valid = false };
    }
Пример #5
0
        /// <summary>
        /// Click handler: sends the previously captured photo to the Emotion API
        /// and shows the dominant emotion of the first detected face.
        /// async void is acceptable here only because this is a top-level event handler.
        /// </summary>
        private async void BtnAnalizaFoto_Click(object sender, System.EventArgs e)
        {
            if (Plugin.Connectivity.CrossConnectivity.Current.IsConnected)
            {
                try
                {
                    // streamCopy doubles as the "a photo was taken" flag.
                    if (streamCopy != null)
                    {
                        btnAnalizaFoto.Visibility = Android.Views.ViewStates.Invisible;
                        Toast.MakeText(this, "Detectando emociones... ", ToastLength.Short).Show();

                        using (EmotionEngine emoEngine = new EmotionEngine(ApiKey))
                        {
                            // NOTE(review): streamCopy is rewound here but the bytes sent to the
                            // API come from imagePath instead — confirm the stream rewind is
                            // still needed, or whether streamCopy should be the upload source.
                            streamCopy.Seek(0, SeekOrigin.Begin);
                            FaceEmotion[] emotionFaces = await emoEngine.CalculateEmotion(this.GetImageAsByteArray(imagePath));

                            if (emotionFaces.Length > 0)
                            {
                                // Only the first face's scores are reported.
                                emocionProminente = emoEngine.DetectEmocion(emotionFaces[0].scores);

                                // Strip the descriptive Spanish prefix from the engine's phrase.
                                emocionProminente = emocionProminente.Replace("Un rostro parece estar ", "");

                                // Map the full "no emotion" sentence to the short label.
                                emocionProminente = emocionProminente.Replace("Un rostro parece no mostrar emociones (Neutral)", "NEUTRAL");

                                txtResultado.Text += "Emoción prominente: " + emocionProminente;
                                btnRegistrarResultados.Visibility = Android.Views.ViewStates.Visible;
                            }
                            else
                            {
                                Toast.MakeText(this, "No se detectaron caras.", ToastLength.Short).Show();
                            }
                        }
                    }
                    else
                    {
                        txtResultado.Text = "--no se ha seleccionado una imagen---";
                    }
                }
                catch (Exception ex)
                {
                    // Broad catch is deliberate for a UI handler: surface any failure as a toast.
                    Toast.MakeText(this, "Error: " + ex.Message, ToastLength.Short).Show();
                }
            }
            else
            {
                Toast.MakeText(this, "No hay una conexión disponible.", ToastLength.Long).Show();
            }
        }
Пример #6
0
        /// <summary>
        /// Lets the user pick a JPEG, runs it through the Emotion API, draws a red
        /// rectangle around every detected face and shows each face's emotion.
        /// async void is acceptable here only because this is a top-level event handler.
        /// </summary>
        private async void BrowseButton_Click(object sender, RoutedEventArgs e)
        {
            var openDlg = new Microsoft.Win32.OpenFileDialog();

            openDlg.Filter = "JPEG Image(*.jpg)|*.jpg";
            bool?result = openDlg.ShowDialog(this);

            // FIX: ShowDialog returns bool? — the old `!(bool)result` threw
            // InvalidOperationException when the result was null. `!= true`
            // covers both null and false.
            if (result != true)
            {
                return;
            }

            string filePath = openDlg.FileName;

            Uri         fileUri      = new Uri(filePath);
            BitmapImage bitmapSource = new BitmapImage();

            bitmapSource.BeginInit();
            bitmapSource.CacheOption = BitmapCacheOption.None;
            bitmapSource.UriSource   = fileUri;
            bitmapSource.EndInit();

            FacePhoto.Source = bitmapSource;

            Title = "Detectando emociones...";

            using (EmotionEngine emotionEngine = new EmotionEngine(this.ClaveSuscripcion))
            {
                FaceEmotion[] emotionFaces = await emotionEngine.CalculateEmotion(this.GetImageAsByteArray(filePath));

                if (emotionFaces.Length > 0)
                {
                    DrawingVisual  visual         = new DrawingVisual();
                    DrawingContext drawingContext = visual.RenderOpen();
                    drawingContext.DrawImage(bitmapSource,
                                             new Rect(0, 0, bitmapSource.Width, bitmapSource.Height));
                    // Scale from the image's native DPI to WPF's 96-DPI coordinate space.
                    double dpi          = bitmapSource.DpiX;
                    double resizeFactor = 96 / dpi;

                    // pintando caras
                    foreach (FaceEmotion face in emotionFaces)
                    {
                        drawingContext.DrawRectangle(
                            Brushes.Transparent,
                            new Pen(Brushes.Red, 2),
                            new Rect(
                                face.faceRectangle.left * resizeFactor,
                                face.faceRectangle.top * resizeFactor,
                                face.faceRectangle.width * resizeFactor,
                                face.faceRectangle.height * resizeFactor
                                )
                            );

                        // reconociendo emociones
                        MessageBox.Show(emotionEngine.DetectEmocion(face.scores), "Emotion API", MessageBoxButton.OK, MessageBoxImage.Information);
                    }

                    // Moved out of the loop: the face count is constant, so set it once.
                    Title = $"{emotionFaces.Length} rostro(s) detectados.";

                    drawingContext.Close();
                    RenderTargetBitmap faceWithRectBitmap = new RenderTargetBitmap(
                        (int)(bitmapSource.PixelWidth * resizeFactor),
                        (int)(bitmapSource.PixelHeight * resizeFactor),
                        96,
                        96,
                        PixelFormats.Pbgra32);

                    faceWithRectBitmap.Render(visual);
                    FacePhoto.Source = faceWithRectBitmap;
                }
                else
                {
                    MessageBox.Show("No existen rostros detectados.", "Emotion API", MessageBoxButton.OK, MessageBoxImage.Information);
                    Title = "Emotion API Demo";
                }
            }
        }
    /// <summary>
    /// This method doesn't say the specific cut, but it constrains
    /// the time for searching interesting events. It is mostly
    /// dependent on current emotion.
    /// </summary>
    /// <param name="e">The emotion event whose timestamp anchors the cut window.</param>
    /// <returns>A cut range normalized by the total engine duration (0..1 fractions).</returns>
    public CutRange EvaluateCutRangeForEvent(EmotionEvent e)
    {
        CutRange        range = new CutRange();
        EmotionSpectrum emotionAtEventTime = emotionEngine.GetSpectrum(e.timestamp);
        CoreEmotion     coreEmotion        = EmotionEngine.FindMainEmotion(emotionAtEventTime);

        // In seconds
        // Base window per dominant emotion; the min/max bounds are themselves
        // randomized so repeated events don't produce identical pacing.
        switch (coreEmotion)
        {
        case CoreEmotion.Joy:
            range.minCutTime = ProceduralEngine.RandomRange(1f, 2f);
            range.maxCutTime = ProceduralEngine.RandomRange(7f, 8f);
            break;

        case CoreEmotion.Trust:
            range.minCutTime = ProceduralEngine.RandomRange(2f, 5f);
            range.maxCutTime = ProceduralEngine.RandomRange(7f, 10f);
            break;

        case CoreEmotion.Fear:
            range.minCutTime = ProceduralEngine.RandomRange(1f, 2f);
            range.maxCutTime = ProceduralEngine.RandomRange(4f, 6f);
            break;

        case CoreEmotion.Surprise:
            range.minCutTime = ProceduralEngine.RandomRange(1.5f, 2f);
            range.maxCutTime = ProceduralEngine.RandomRange(2f, 4f);
            break;

        case CoreEmotion.Sadness:
            range.minCutTime = ProceduralEngine.RandomRange(1f, 1.5f);
            range.maxCutTime = ProceduralEngine.RandomRange(2f, 4f);
            break;

        case CoreEmotion.Disgust:
            range.minCutTime = ProceduralEngine.RandomRange(1f, 2f);
            range.maxCutTime = ProceduralEngine.RandomRange(3f, 4f);
            break;

        case CoreEmotion.Anger:
            range.minCutTime = ProceduralEngine.RandomRange(.3f, 1f);
            range.maxCutTime = ProceduralEngine.RandomRange(1f, 3f);
            break;

        case CoreEmotion.Anticipation:
            range.minCutTime = ProceduralEngine.RandomRange(2f, 4f);
            range.maxCutTime = ProceduralEngine.RandomRange(4f, 5f);
            break;
        }

        // Scale the base window by what kind of event this is.
        switch (e.type)
        {
        case EmotionEvent.EmotionEventType.Start:
            // Longer cuts when showing for first time
            range.minCutTime *= e.chunkDelimitsSegment ? 1f : .75f;
            range.maxCutTime *= e.chunkDelimitsSegment ? 1f : .75f;
            break;

        case EmotionEvent.EmotionEventType.End:
            // Longer cuts when something disappears for good
            range.minCutTime *= e.chunkDelimitsSegment ? 1.5f : 1f;
            range.maxCutTime *= e.chunkDelimitsSegment ? 1.5f : 1f;
            break;

        case EmotionEvent.EmotionEventType.LocalMaximum:
            // No-op multipliers kept for symmetry with the other cases.
            range.minCutTime *= 1f;
            range.maxCutTime *= 1f;
            break;

        case EmotionEvent.EmotionEventType.LocalMinimum:
            range.minCutTime *= 2f;
            range.maxCutTime *= 2f;
            break;
        }

        TrackChunkData structureData = emotionEngine.GetCurrentStructureData(e.timestamp);

        if (structureData != null)
        {
            // More intense -> shorter
            // Squaring biases toward shorter cuts only at high intensity.
            float normalizedStructuralIntensity = Mathf.Pow(structureData.GetIntensity(e.timestamp), 2f);
            range.minCutTime *= 1.35f - normalizedStructuralIntensity * .5f;
            range.maxCutTime *= 1.35f - normalizedStructuralIntensity * .5f;

            // TODO: decide if we need further modifications of cut time based on type.
            // Intensity curve should cover most I think
            StructureType currentStructure = emotionEngine.GetStructureAtTime(e.timestamp);

            switch (currentStructure)
            {
            case StructureType.None:
                break;

            case StructureType.Sustain:
                break;

            case StructureType.Increasing:
                break;

            case StructureType.Decreasing:
                break;
            }
        }

        // Clamp away from zero so downstream timing never divides by ~0.
        range.minCutTime = Mathf.Max(0.01f, range.minCutTime);
        range.maxCutTime = Mathf.Max(0.02f, range.maxCutTime);

        // The multipliers above can invert the bounds; swap so min <= max.
        float tmp = range.minCutTime;

        range.minCutTime = Mathf.Min(range.minCutTime, range.maxCutTime);
        range.maxCutTime = Mathf.Max(tmp, range.maxCutTime);

        // Normalize times
        range.minCutTime /= ProceduralEngine.Instance.Duration;
        range.maxCutTime /= ProceduralEngine.Instance.Duration;
        return(range);
    }
Пример #8
0
        /// <summary>
        /// Lets the user take or pick a photo, sends it to the Emotion API and
        /// writes the per-face emotion summary into the view model.
        /// async void is tolerated here because this acts as a fire-and-forget
        /// UI command; all exceptions are caught and surfaced as an alert.
        /// </summary>
        private async void TakePhoto()
        {
            try
            {
                await CrossMedia.Current.Initialize();

                MediaFile file;

                // update  1
                // tomar la foto desde la camara o desde la biblioteca de imagenes
                string imageOption = await App.Current.MainPage.DisplayActionSheet("Emotion API Demo",
                                                                                   "Cancelar", null, "Tomar foto de cámara", "Tomar foto de librería");

                if (imageOption == "Tomar foto de cámara")
                {
                    model.EmotionResult = "Tomando foto...";

                    if (!CrossMedia.Current.IsCameraAvailable || !CrossMedia.Current.IsTakePhotoSupported)
                    {
                        await App.Current.MainPage.DisplayAlert("Emotion API Demo", "Error: Cámara no disponible.", "Ok");

                        model.EmotionResult = "";
                        return;
                    }

                    file = await CrossMedia.Current.TakePhotoAsync(new StoreCameraMediaOptions
                    {
                        DefaultCamera = CameraDevice.Front,
                        Directory     = "Sample",
                        Name          = "test.jpg"
                    });

                    if (file == null)
                    {
                        return;
                    }
                }
                else if (imageOption == "Tomar foto de librería")
                {
                    model.EmotionResult = "Seleccionando foto...";

                    file = await CrossMedia.Current.PickPhotoAsync();

                    if (file == null)
                    {
                        return;
                    }
                }
                else
                {
                    return;
                }

                model.PhotoSource = ImageSource.FromStream(() =>
                {
                    var stream = file.GetStream();
                    return(stream);
                });

                model.EmotionResult = "Detectando emoción(es)...";

                byte[] imageBytes;
                // FIX: the stream returned by GetStream() was never disposed.
                using (var sourceStream = file.GetStream())
                using (MemoryStream ms = new MemoryStream())
                {
                    sourceStream.CopyTo(ms);
                    imageBytes = ms.ToArray();
                }

                // FIX: EmotionEngine is disposable and was leaked here; the other
                // callers in this codebase wrap it in `using` — do the same.
                using (EmotionEngine emotionEngine = new EmotionEngine(EmotionApiSubsctiptionKey))
                {
                    FaceEmotion[] emotionFaces = await emotionEngine.CalculateEmotion(imageBytes);

                    if (emotionFaces.Length > 0)
                    {
                        StringBuilder builder = new StringBuilder();

                        builder.Append($"Número de rostros detectados: {emotionFaces.Length}\n");

                        foreach (var face in emotionFaces)
                        {
                            builder.Append($"{emotionEngine.DetectEmocion(face.scores)}\n");
                        }

                        model.EmotionResult = builder.ToString();
                    }
                    else
                    {
                        await App.Current.MainPage.DisplayAlert("Emotion API Demo", "No existen rostros detectados.", "OK");

                        model.EmotionResult = "";
                    }
                }
            }
            catch (Exception ex)
            {
                await App.Current.MainPage.DisplayAlert("Emotion API Demo", $"Error: {ex.Message}.", "Ok");

                model.EmotionResult = "";
            }
        }