/// <summary>
/// Draws the emotion spectrum as a closed radial line shape inside <paramref name="container"/>
/// using immediate-mode GL. Call order (PushMatrix → SetPass → LoadOrtho → Begin/End → PopMatrix)
/// is significant and must not change.
/// </summary>
/// <param name="container">Target rect in screen pixels; normalized to [0,1] for GL.LoadOrtho space.</param>
/// <param name="color">Unused — per-vertex colors come from EmotionVector.GetColorForAngle instead.</param>
/// <param name="spectrum">Spectrum sampled by angle to get the radial magnitude.</param>
protected void DrawEmotionShape(Rect container, Color color, EmotionSpectrum spectrum)
{
    GL.PushMatrix();
    lineMaterial.SetPass(0);
    GL.LoadOrtho();
    GL.Begin(GL.LINE_STRIP);

    // Transform container from pixel coordinates to normalized [0,1] ortho coordinates.
    container.x /= Screen.width;
    container.width /= Screen.width;
    container.y /= Screen.height;
    container.height /= Screen.height;

    Vector3 scale = new Vector3(container.size.x, container.size.y, 0f);
    // Offset lands on the container center so the radial shape is drawn around it.
    Vector3 offset = new Vector3(container.x, container.y, 0f) + scale * .5f;

    int subdivisions = EmotionSpectrum.SAMPLE_COUNT;
    float width = Mathf.Abs(outerRadius - innerRadius);

    // subdivisions + 1 iterations so the strip closes back on the first point (t = 0 and t = 1 coincide).
    for (int i = 0; i < subdivisions + 1; i++)
    {
        float t = i / (float)subdivisions;
        float angle = t * Mathf.PI * 2f;
        // Spectrum value maps to a radius between innerRadius and outerRadius.
        float r = spectrum[angle] * width + innerRadius;

        GL.Color(EmotionVector.GetColorForAngle(angle));

        Vector3 point = new Vector3(Mathf.Cos(angle), Mathf.Sin(angle), 0f) * r;
        GL.Vertex(Vector3.Scale(point, scale) + offset);
    }

    GL.End();
    GL.PopMatrix();
}
/// <summary>
/// Smooth the emotion signal a bit so we can find smooth features.
/// Box filter with a window of ~4 beats; writes smoothEmotionSignal and SmoothEnergySignal.
/// </summary>
protected void FilterSignal()
{
    int samples = emotionalSignal.Length;
    float window = BeatDurationNormalized * 4f;
    int sampleWindow = (int)(window * samples);
    int halfWindow = sampleWindow / 2;

    smoothEmotionSignal = new EmotionSpectrum[samples];
    SmoothEnergySignal = new float[samples];

    for (int x = 0; x < samples; ++x)
    {
        EmotionSpectrum avg = new EmotionSpectrum();
        int accumulated = 0;

        // No time for gaussian ;)
        for (int dx = -halfWindow; dx <= halfWindow; ++dx)
        {
            int index = x + dx;

            if (index >= 0 && index < samples)
            {
                avg += emotionalSignal[index];
                accumulated++;
            }
        }

        // Divide by the number of samples actually accumulated, not the nominal window size:
        // the old divisor (sampleWindow) under-weighted the edges of the signal, was off by one
        // in the interior (the loop covers sampleWindow + 1 samples), and divided by zero
        // when the window rounded down to 0. 'accumulated' is always >= 1 (dx == 0 is always in range).
        smoothEmotionSignal[x] = avg / accumulated;
        SmoothEnergySignal[x] = smoothEmotionSignal[x].GetTotalEnergy();
    }
}
/// <summary>
/// Samples up to 4 weighted interest points and returns the first one that is not
/// buried under the floor; returns null if every try fails the occlusion check.
/// </summary>
protected virtual InterestPoint FindInterestPoint(EmotionSpectrum globalEmotion, float normalizedTime)
{
    int tries = 4;

    for (int i = 0; i < tries; ++i)
    {
        InterestPoint p = SampleInterestPoint(globalEmotion, normalizedTime);

        // Make sure our interest point is not buried under the floor
        // (implicit UnityEngine.Object bool: p is non-null and not destroyed).
        if (p)
        {
            // Lowest point the item could plausibly occupy, scaled by the point's size and world scale.
            Vector3 lowestPossiblePoint = p.transform.position - Vector3.up * p.size * p.transform.lossyScale.y * 2f;

            // Start the ray at the top of the grid bounds, directly above that lowest point.
            Vector3 skyPoint = lowestPossiblePoint;
            skyPoint.y = grid.bounds.size.y + grid.bounds.center.y;

            Ray ray = new Ray(skyPoint, -Vector3.up);
            RaycastHit hit;

            // Cast straight down against the Floor layer only. A hit means floor geometry sits
            // between the sky and the point's lowest extent -> the point is buried, so reject it.
            if (Physics.Raycast(ray, out hit, Vector3.Distance(skyPoint, lowestPossiblePoint), 1 << LayerMask.NameToLayer("Floor")))
            {
                //Debug.DrawLine(skyPoint, hit.point, Color.red, 60f);
            }
            else
            {
                //Debug.DrawLine(skyPoint + Vector3.right, lowestPossiblePoint + Vector3.right, Color.cyan, 60f);
                return(p);
            }
        }
    }

    return(null);
}
/// <summary>
/// Advances an in-progress shot: picks an interest point if it has none yet,
/// otherwise keeps sampling camera strategies for it. If the current interest
/// point produced no strategies after enough frames, it is discarded and retried.
/// </summary>
protected void CompleteShot(ref ShotInformation shot)
{
    if (!shot.valid)
        return;

    // Give up on an interest point that yielded no usable strategies after 10+ frames.
    bool noStrategiesFound = (frames > 10) && (shot.interestPoint != null) && (shot.sampledStrategies.Count == 0);

    if (noStrategiesFound)
    {
        shot.interestPoint = null;
        frames = 0;
    }

    if (shot.interestPoint != null)
    {
        SampleStrategies(shot.sampledStrategies, shot.interestPoint, shot.startEvent, shot.duration * ProceduralEngine.Instance.Duration);
        return;
    }

    // No interest point yet (or it was just discarded): find one for the shot's start time.
    EmotionSpectrum emotionAtShotStart = emotionEngine.GetSmoothSpectrum(shot.startEvent.timestamp);
    shot.interestPoint = FindInterestPoint(emotionAtShotStart, shot.startEvent.timestamp);
}
// This evaluation has no post process (TODO: in the future make a cache per track?)
/// <summary>
/// Accumulates the contribution of every chunk in <paramref name="track"/> that
/// spans <paramref name="normalizedTime"/>. The result is doubled when the last
/// contributing chunk was the first one in the track.
/// </summary>
public EmotionSpectrum EvaluateTrack(TrackData track, float normalizedTime)
{
    EmotionSpectrum result = new EmotionSpectrum();
    int lastChunkIndex = 0;

    for (int i = 0; i < track.chunks.Count; ++i)
    {
        TrackChunkData chunk = track.chunks[i];
        bool containsTime = (chunk.start <= normalizedTime) && (chunk.end >= normalizedTime);

        if (!containsTime)
            continue;

        result += chunk.Evaluate(normalizedTime);
        lastChunkIndex = i;
    }

    // First chunk is super important!
    if (lastChunkIndex == 0)
        result *= 2f;

    return result;
}
// TODO: if we need more performance, multithread this
/// <summary>
/// Precomputes the downsampled emotion signal, its finite-difference derivative,
/// and the per-sample total energy across the whole audio signal.
/// </summary>
protected void CacheEmotionSignal()
{
    // We don't need very high precision
    int sampleCount = audioSignal.Length / downsampleRate;
    float dt = 1f / (float)sampleCount;

    this.emotionalSignal = new EmotionSpectrum[sampleCount];
    this.emotionalDerivativeSignal = new EmotionSpectrum[sampleCount];
    this.TotalEnergySignal = new float[sampleCount];

    for (int i = 0; i < sampleCount; ++i)
    {
        EmotionSpectrum current = Compute(i * dt);

        emotionalSignal[i] = current;
        TotalEnergySignal[i] = current.GetTotalEnergy();

        // Backward difference; the first sample has no predecessor, so its derivative is zero.
        emotionalDerivativeSignal[i] = (i == 0)
            ? new EmotionSpectrum()
            : (current - emotionalSignal[i - 1]) / dt;
    }
}
/// <summary>
/// Configures the dolly strategy at shot start: camera damping/noise driven by the
/// current anticipation level, then a weighted random pick of the movement direction.
/// NOTE: the order of the random draws (SelectRandomWeighted, then RandomRange) is part
/// of the deterministic RNG stream — do not reorder.
/// </summary>
protected override void OnStart()
{
    EmotionSpectrum currentEmotion = ProceduralEngine.Instance.GetCurrentEmotion();
    float expectation = currentEmotion.Dot(new EmotionSpectrum(EmotionVector.GetCoreEmotion(CoreEmotion.Anticipation)));

    // High expectation -> snappier camera (less damping) and a bit more handheld noise.
    camera.RotationDampingTime = .1f + Mathf.Lerp(.4f, 0f, Mathf.Clamp01(expectation - 2f));
    camera.PositionDampingTime = .1f + Mathf.Lerp(.4f, 0f, Mathf.Clamp01(expectation - 2f));
    camera.SetNoiseParameters(Mathf.Clamp(expectation * .4f, 0f, .25f), .75f);

    // Normalized bounds size doubles as per-axis weight: elongated axes are favored.
    Vector3 boundsAxis = mainInterestPoint.AssociatedItemBounds.size.normalized;

    List <KeyValuePair <Vector3, float> > possibleDirections = new List <KeyValuePair <Vector3, float> >();

    // Chance of following the frustum average
    possibleDirections.Add(new KeyValuePair <Vector3, float>(mainInterestAxis, .5f));

    // Chance of picking a dolly direction based on the item boundaries
    possibleDirections.Add(new KeyValuePair <Vector3, float>(mainInterestPoint.transform.right * boundsAxis.x, boundsAxis.x));
    possibleDirections.Add(new KeyValuePair <Vector3, float>(mainInterestPoint.transform.up * boundsAxis.y, boundsAxis.y));
    possibleDirections.Add(new KeyValuePair <Vector3, float>(mainInterestPoint.transform.forward * boundsAxis.z, boundsAxis.z));

    // Chance of doing a dolly in/out: dolly out while the music structure is decreasing, in otherwise.
    float inOutDirection = ProceduralEngine.Instance.EmotionEngine.GetStructureAtTime(ProceduralEngine.Instance.CurrentTimeNormalized) == StructureType.Decreasing ? -1f : 1f;
    possibleDirections.Add(new KeyValuePair <Vector3, float>(GetForward() * inOutDirection, .5f));

    movementDirection = ProceduralEngine.SelectRandomWeighted(possibleDirections, x => x.Value).Key.normalized;

    // 50% chance of tracking the interest point during the dolly.
    keepAttention = ProceduralEngine.RandomRange(0f, 1f) > .5f; // TODO: associate this with the rotation smoothness/lag, and with emotions (e.g. sadness lags, expectation keeps)
}
/// <summary>
/// Per-frame dolly update: moves the camera linearly along the precomputed
/// movement direction over the shot, optionally keeping the interest point framed.
/// </summary>
protected override void OnUpdateStrategy()
{
    // Removed an unused per-frame fetch of ProceduralEngine.Instance.GetCurrentEmotion();
    // its result was never read in this method.
    CameraPosition = Vector3.Lerp(initialPosition, initialPosition + movementDirection * speed * shotDuration, CameraTimeNormalized);

    if (keepAttention)
        CameraRotation = GetViewDirectionForInterestPoint(mainInterestPoint, Composition);
}
/// <summary>
/// Component-wise division of two spectra. No zero guard — mirrors the other
/// component-wise operators; callers must ensure <paramref name="b"/> has no zero samples.
/// </summary>
public static EmotionSpectrum operator /(EmotionSpectrum a, EmotionSpectrum b)
{
    EmotionSpectrum result = new EmotionSpectrum();
    int i = 0;

    while (i < SAMPLE_COUNT)
    {
        result.samples[i] = a.samples[i] / b.samples[i];
        ++i;
    }

    return result;
}
/// <summary>
/// Uniformly scales every sample of the spectrum by <paramref name="scalar"/>.
/// </summary>
public static EmotionSpectrum operator *(EmotionSpectrum a, float scalar)
{
    EmotionSpectrum scaled = new EmotionSpectrum();

    for (int index = 0; index < SAMPLE_COUNT; ++index)
        scaled.samples[index] = a.samples[index] * scalar;

    return scaled;
}
/// <summary>
/// Inner product of this spectrum with <paramref name="s"/> —
/// the sample-wise products summed over all samples.
/// </summary>
public float Dot(EmotionSpectrum s)
{
    float accum = 0f;

    for (int index = 0; index < SAMPLE_COUNT; ++index)
    {
        accum += samples[index] * s.samples[index];
    }

    return accum;
}
/// <summary>
/// This heuristic is useful for two cases:
/// - Trying to find the main interest point
/// - Trying to evaluate different views for the same interest point (shot heuristic)
/// </summary>
/// <param name="currentEmotion">Global emotion to match against this point's internal spectrum.</param>
/// <param name="normalizedTime">Normalized playback time used for energy/state lookups.</param>
/// <param name="primaryInterest">When true, adds heatmap and state-machine bonuses (main-IP search only).</param>
/// <returns>Non-negative score; 0 when the GameObject is inactive.</returns>
public float EvaluateHeuristic(EmotionSpectrum currentEmotion, float normalizedTime, bool primaryInterest = false)
{
    // If this GO is inactive just ignore this IP
    // This is useful for state machines
    if (!gameObject.activeInHierarchy)
        return(0f);

    // Base importance plus how strongly the current emotion resonates with this point.
    float heuristic = importance;
    heuristic += emotionalImpact * currentEmotion.Dot(internalSpectrum);

    // If this is the primary interest point, we want to make sure it is
    // close to other interesting places, so biasing the importance with a heatmap is useful.
    if (primaryInterest)
    {
        // Interesting: while being on an important place is good, we must not make unimportant elements important.
        // Thus we need to be careful with this multiplier
        heuristic += .35f * ProceduralCameraDirector.Instance.GetGrid().GetAverageImportanceForPosition(transform.position);

        // This is somewhat of a hack. The correct idea is to
        // predict if this state is going to be triggered, but that is not trivial
        if (stateMachine != null)
        {
            float stateEmotionResponse = stateMachine.GetGlobalAffinityInTime(normalizedTime) + stateMachine.GetTrackAffinityInTime(normalizedTime);
            heuristic += emotionalImpact * stateEmotionResponse;

            // Active (or activating) state machines are twice as interesting to frame.
            if (stateMachine.CurrentState == EmotionStateMachineState.State.Enabled || stateMachine.CurrentState == EmotionStateMachineState.State.IntroTransition)
                heuristic *= 2f;
        }
    }

    // Let's try to favor small or big objects based on general flow
    Vector3 worldScale = transform.lossyScale * size;
    // World size clamped to 200 units, then normalized to [0,1].
    float worldSizeNormalized = Mathf.Min(worldScale.magnitude, 200f) / 200f;
    float smoothEnergy = ProceduralEngine.Instance.EmotionEngine.GetSmoothEnergy(normalizedTime) / ProceduralEngine.Instance.EmotionEngine.MaxEnergy;

    // We assume smoothEnergy to be the normalized size we favor
    // (offset 0 -> perfect size match -> doubled heuristic; large offset -> unchanged).
    float sizeTargetOffset = Mathf.Abs(smoothEnergy - worldSizeNormalized) * 2f;
    heuristic = Mathf.Lerp(heuristic * 2f, heuristic, sizeTargetOffset);

    // TODO: ideas:
    // - Is it being lit right now? Or in shadow?
    // - If it is reflective/specular, where would be a good place to look at it from?
    // - Is it moving?
    return(heuristic);
}
/// <summary>
/// Repaint-only GUI pass: eases the displayed spectrum toward the engine's current
/// one and draws it inside the UI rect's world-space corners.
/// </summary>
public void OnGUI()
{
    if (Event.current.type != EventType.Repaint)
        return;

    if (engine == null)
        return;

    // Exponential smoothing toward the live spectrum so the shape animates softly.
    EmotionSpectrum target = engine.GetSpectrum(timeline.CurrentIndicatorNormalized);
    currentSpectrum = EmotionSpectrum.Lerp(currentSpectrum, target, .3f);

    rect.GetWorldCorners(shapeCorners);
    Rect container = new Rect(shapeCorners[0], shapeCorners[2] - shapeCorners[0]);
    DrawEmotionShape(container, Color.magenta, currentSpectrum);
}
/// <summary>
/// Roulette-wheel selection of an interest point, weighted by each point's heuristic.
/// </summary>
/// <returns>The sampled point; the last point as a fallback against float round-off.</returns>
public InterestPoint SampleInterestPoint(EmotionSpectrum currentEmotion, float normalizedTime)
{
    // Evaluate each heuristic exactly once and cache it. The previous version evaluated
    // every point twice (once for the sum, once while scanning), doubling the cost and
    // risking a desynced wheel if the heuristic is not perfectly stable between calls.
    List<float> weights = new List<float>();
    float sum = 0f;

    foreach (InterestPoint ip in interestPoints)
    {
        float w = ip.EvaluateHeuristic(currentEmotion, normalizedTime, true);
        weights.Add(w);
        sum += w;
    }

    float value = (float)ProceduralEngine.Instance.RNG.NextDouble() * sum;
    int index = 0;

    foreach (InterestPoint ip in interestPoints)
    {
        value -= weights[index++];

        if (value <= 0f)
            return ip;
    }

    // Float round-off can leave 'value' marginally positive after the scan.
    return interestPoints.Last();
}
/// <summary>
/// Builds this point's internal spectrum from its primary affinity and registers it
/// with the camera director; flags the director as unavailable if it cannot be reached.
/// </summary>
public void Awake()
{
    if (DirectorNotAvailable)
        return;

    internalSpectrum = new EmotionSpectrum(EmotionVector.GetCoreEmotion(primaryAffinity));

    if (!ProceduralCameraDirector.IsAvailable())
    {
        // Remember the failure so later instances skip the lookup entirely.
        DirectorNotAvailable = true;
        return;
    }

    ProceduralCameraDirector.Instance.RegisterInterestPoint(this);
}
// Eh, this is a very simplistic way of approaching the problem
/// <summary>
/// Returns the core emotion whose canonical spectrum best correlates (dot product)
/// with <paramref name="e"/>. Defaults to Anger when nothing scores above zero.
/// </summary>
public static CoreEmotion FindMainEmotion(EmotionSpectrum e)
{
    CoreEmotion best = CoreEmotion.Anger;
    float bestScore = 0f;

    // TODO: Add some fuzziness by picking the best 3 emotions found, or something
    foreach (CoreEmotion candidate in System.Enum.GetValues(typeof(CoreEmotion)))
    {
        EmotionSpectrum candidateSpectrum = new EmotionSpectrum(EmotionVector.GetCoreEmotion(candidate));
        float score = candidateSpectrum.Dot(e);

        if (score > bestScore)
        {
            best = candidate;
            bestScore = score;
        }
    }

    return best;
}
/// <summary>
/// Evaluates the combined emotion spectrum at <paramref name="normalizedTime"/>:
/// sums every active chunk's contribution, then runs the result through all filters.
/// </summary>
public EmotionSpectrum Compute(float normalizedTime)
{
    EmotionSpectrum result = new EmotionSpectrum();

    // Re-check the time range defensively even though GetChunksAtTime pre-filters.
    foreach (TrackChunkData chunk in GetChunksAtTime(normalizedTime))
    {
        bool active = (chunk.start <= normalizedTime) && (chunk.end >= normalizedTime);

        if (active)
            result += chunk.Evaluate(normalizedTime);
    }

    // Post-process chain, applied in registration order.
    foreach (EmotionFilter f in filters)
        result = f.Filter(normalizedTime, result);

    return result;
}
/// <summary>
/// Linear interpolation between two spectra; t = 0 yields a, t = 1 yields b.
/// Keeps the two-product form a*(1-t) + b*t so float results match exactly.
/// </summary>
public static EmotionSpectrum Lerp(EmotionSpectrum a, EmotionSpectrum b, float t)
{
    EmotionSpectrum weightedA = a * (1f - t);
    EmotionSpectrum weightedB = b * t;
    return weightedA + weightedB;
}
/// <summary>
/// This method doesn't say the specific cut, but it constraints
/// the time for searching interesting events. It is mostly
/// dependent on current emotion.
/// NOTE: the sequence of RandomRange calls is part of the deterministic RNG
/// stream — do not reorder or skip draws.
/// </summary>
/// <param name="e">The emotion event the cut range is built around.</param>
/// <returns>Cut range in normalized (0..1) time, min guaranteed &lt;= max.</returns>
public CutRange EvaluateCutRangeForEvent(EmotionEvent e)
{
    CutRange range = new CutRange();
    EmotionSpectrum emotionAtEventTime = emotionEngine.GetSpectrum(e.timestamp);
    CoreEmotion coreEmotion = EmotionEngine.FindMainEmotion(emotionAtEventTime);

    // Base cut length per dominant emotion, in seconds (normalized at the end).
    switch (coreEmotion)
    {
    case CoreEmotion.Joy:
        range.minCutTime = ProceduralEngine.RandomRange(1f, 2f);
        range.maxCutTime = ProceduralEngine.RandomRange(7f, 8f);
        break;

    case CoreEmotion.Trust:
        range.minCutTime = ProceduralEngine.RandomRange(2f, 5f);
        range.maxCutTime = ProceduralEngine.RandomRange(7f, 10f);
        break;

    case CoreEmotion.Fear:
        range.minCutTime = ProceduralEngine.RandomRange(1f, 2f);
        range.maxCutTime = ProceduralEngine.RandomRange(4f, 6f);
        break;

    case CoreEmotion.Surprise:
        range.minCutTime = ProceduralEngine.RandomRange(1.5f, 2f);
        range.maxCutTime = ProceduralEngine.RandomRange(2f, 4f);
        break;

    case CoreEmotion.Sadness:
        range.minCutTime = ProceduralEngine.RandomRange(1f, 1.5f);
        range.maxCutTime = ProceduralEngine.RandomRange(2f, 4f);
        break;

    case CoreEmotion.Disgust:
        range.minCutTime = ProceduralEngine.RandomRange(1f, 2f);
        range.maxCutTime = ProceduralEngine.RandomRange(3f, 4f);
        break;

    case CoreEmotion.Anger:
        range.minCutTime = ProceduralEngine.RandomRange(.3f, 1f);
        range.maxCutTime = ProceduralEngine.RandomRange(1f, 3f);
        break;

    case CoreEmotion.Anticipation:
        range.minCutTime = ProceduralEngine.RandomRange(2f, 4f);
        range.maxCutTime = ProceduralEngine.RandomRange(4f, 5f);
        break;
    }

    // Scale by event type.
    switch (e.type)
    {
    case EmotionEvent.EmotionEventType.Start:
        // Longer cuts when showing for first time
        range.minCutTime *= e.chunkDelimitsSegment ? 1f : .75f;
        range.maxCutTime *= e.chunkDelimitsSegment ? 1f : .75f;
        break;

    case EmotionEvent.EmotionEventType.End:
        // Longer cuts when something disappears for good
        range.minCutTime *= e.chunkDelimitsSegment ? 1.5f : 1f;
        range.maxCutTime *= e.chunkDelimitsSegment ? 1.5f : 1f;
        break;

    case EmotionEvent.EmotionEventType.LocalMaximum:
        range.minCutTime *= 1f;
        range.maxCutTime *= 1f;
        break;

    case EmotionEvent.EmotionEventType.LocalMinimum:
        range.minCutTime *= 2f;
        range.maxCutTime *= 2f;
        break;
    }

    TrackChunkData structureData = emotionEngine.GetCurrentStructureData(e.timestamp);

    if (structureData != null)
    {
        // More intense -> shorter
        float normalizedStructuralIntensity = Mathf.Pow(structureData.GetIntensity(e.timestamp), 2f);
        range.minCutTime *= 1.35f - normalizedStructuralIntensity * .5f;
        range.maxCutTime *= 1.35f - normalizedStructuralIntensity * .5f;

        // TODO: decide if we need further modifications of cut time based on type.
        // Intensity curve should cover most I think
        StructureType currentStructure = emotionEngine.GetStructureAtTime(e.timestamp);

        switch (currentStructure)
        {
        case StructureType.None:
            break;

        case StructureType.Sustain:
            break;

        case StructureType.Increasing:
            break;

        case StructureType.Decreasing:
            break;
        }
    }

    // Clamp to sane positive values, then enforce min <= max.
    range.minCutTime = Mathf.Max(0.01f, range.minCutTime);
    range.maxCutTime = Mathf.Max(0.02f, range.maxCutTime);

    float tmp = range.minCutTime;
    range.minCutTime = Mathf.Min(range.minCutTime, range.maxCutTime);
    range.maxCutTime = Mathf.Max(tmp, range.maxCutTime);

    // Normalize times
    range.minCutTime /= ProceduralEngine.Instance.Duration;
    range.maxCutTime /= ProceduralEngine.Instance.Duration;

    return(range);
}
/// <summary>
/// Initializes the internal spectrum from the configured affinity, gathers all
/// child interest points, and forwards to the subclass hook.
/// </summary>
public void Awake()
{
    internalSpectrum = new EmotionSpectrum(EmotionVector.GetCoreEmotion(emotionAffinity));

    // Collect every InterestPoint nested under this object.
    InterestPoint[] childPoints = GetComponentsInChildren <InterestPoint>();
    interestPoints = new List <InterestPoint>(childPoints);

    OnAwake();
}
/// <summary>
/// How strongly the global emotion at normalized time <paramref name="nT"/>
/// resonates with this object's internal spectrum (dot product); uses the
/// smoothed spectrum when 'smooth' is set.
/// </summary>
public float GetGlobalAffinityInTime(float nT)
{
    EmotionSpectrum globalEmotion;

    if (smooth)
        globalEmotion = ProceduralEngine.Instance.EmotionEngine.GetSmoothSpectrum(nT);
    else
        globalEmotion = ProceduralEngine.Instance.EmotionEngine.GetSpectrum(nT);

    return globalEmotion.Dot(internalSpectrum);
}