// Coroutine: loads a movie through the attached FaceVideoAVProWMV component,
// waits for it to produce an output texture, sizes the capture texture and
// RGB byte buffer to match, then begins recording from the video.
IEnumerator SetImportMovieCoroutine(string folder, string filename)
{
    if (Movie == null)
    {
        Debug.LogError("You must attach a FaceVideoAVProWMV component to your camera.");
        yield break;
    }

    if (!Movie.LoadMovie(folder, filename, false))
    {
        Debug.LogError("LoadMovie failed. Make sure the codec you're using is supported.");
        yield break;
    }

    // Wait one frame at a time until the movie has produced a texture.
    while (Movie.OutputTexture == null)
    {
        yield return 0;
    }

    Movie.PositionFrames = 0;

    int texWidth = Movie.OutputTexture.width;
    int texHeight = Movie.OutputTexture.height;
    frameTexture = new Texture2D(texWidth, texHeight, TextureFormat.ARGB32, false);

    Logger.Log("FacePlus initializing...");
    FacePlus.InitBufferTracker(texWidth, texHeight, Movie.FrameRate);

    // Three bytes (RGB) per pixel.
    frameBuffer = new byte[3 * texWidth * texHeight];

    StartRecordingFromVideo();
}
/// <summary>
/// Switches capture to offline (video) mode: stops live tracking if active,
/// initializes FacePlus first if it hasn't started, then launches the
/// import-movie coroutine for the given file.
/// </summary>
/// <param name="folder">Directory containing the movie file.</param>
/// <param name="filename">Movie file name within <paramref name="folder"/>.</param>
public void SetImportMovie(string folder, string filename)
{
    if (Live)
    {
        StopLiveTracking();
    }

    // Shared tail for both branches below (was duplicated verbatim):
    // log the source and start the import coroutine.
    Action beginImport = () =>
    {
        Logger.Log("Setting import movie from " + folder + "/" + filename);
        StartCoroutine(SetImportMovieCoroutine(folder, filename));
    };

    if (!FacePlus.IsInitStarted)
    {
        Logger.Info("FacePlus initializing (video)...");
        FacePlus.Init("VGA");
        float startTime = Time.time;
        StartCoroutine(FacePlus.AfterInit((success) =>
        {
            float timePassed = Time.time - startTime;
            Logger.Log("FacePlus initialized (success: " + success + ") in " + timePassed + "s");
            beginImport();
        }));
    }
    else
    {
        beginImport();
    }
}
/// <summary>
/// Ends a live-tracking session: stops the tracker, clears the Live flag,
/// and tears FacePlus down.
/// </summary>
public void StopLiveTracking()
{
    Logger.Info("Stopping live tracking...");
    FacePlus.StopTracking();
    Live = false;
    FacePlus.Teardown();
}
/// <summary>
/// Starts a live (camera) tracking session. Verifies native-plugin
/// connectivity, initializes FacePlus against the selected capture device,
/// and — once initialization completes — spins up the background tracking
/// thread on success, or clears the Live flag on failure.
/// </summary>
public void StartLiveTracking()
{
    // Round-trip a known value through the native layer to prove the plugin is reachable.
    Logger.Log("FacePlus connectivity: " + (FacePlus.Echo(123) == 123 ? "Pass" : "FAIL"));
    Logger.Log("FacePlus initializing...");

    FacePlus.Init("VGA@CAM" + FacePlus.DeviceID.ToString());
    Live = true;

    float initStarted = Time.time;
    StartCoroutine(FacePlus.AfterInit((bool success) =>
    {
        float elapsed = Time.time - initStarted;
        Logger.Info("FacePlus completed initialization.");
        Logger.Log("FacePlus initialized (success: " + success + ") in " + elapsed + "s");

        if (success)
        {
            Live = true;
            state = CaptureState.Live;
            Logger.Debug("starting tracking thread");
            FacePlus.TrackForeverThreaded();
            Logger.Debug("done starting tracking thread");
        }
        else
        {
            Live = false;
        }
    }));
}
/// <summary>
/// Authenticates against the FacePlus service on a background thread.
/// Exactly one of <paramref name="onSuccess"/> / <paramref name="onFailure"/>
/// is invoked (from that background thread) with the server's login message;
/// onFailure also receives a human-readable reason. A call made while another
/// login is already in flight is silently ignored.
/// </summary>
/// <param name="user">Account email/username.</param>
/// <param name="password">Account password.</param>
/// <param name="onSuccess">Invoked with the server message on success.</param>
/// <param name="onFailure">Invoked with (reason, server message) on failure.</param>
/// <param name="client">Client identifier forwarded to FacePlus.Login.</param>
public static void Login(string user, string password, Action <string> onSuccess, Action <string, string> onFailure, string client)
{
    if (IsLoggingIn)
    {
        return;
    }
    IsLoggingIn = true;

    Thread loginThread = new Thread(() =>
    {
        try
        {
            int result = FacePlus.Login(user, password, client);
            string loginMessage = FacePlus.LoginString();

            if (!Enum.IsDefined(typeof(LoginCode), result))
            {
                // Server returned a code we don't recognize.
                onFailure("Login Failed (Server Code: " + result + ").", loginMessage);
            }
            else
            {
                switch ((LoginCode)result)
                {
                case LoginCode.Success:
                    User = new UserRecord() { Email = user };
                    onSuccess(loginMessage);
                    break;
                case LoginCode.Forbidden:
                    onFailure("Incorrect username or password.", loginMessage);
                    break;
                case LoginCode.InternalError:
                    onFailure("Login Failed, Internal Error (" + result + ").", loginMessage);
                    break;
                case LoginCode.ServerError:
                    onFailure("Server Error (500). Try again later.", loginMessage);
                    break;
                default:
                    onFailure("Login Failed (Server Code: " + result + ").", loginMessage);
                    break;
                }
            }
        }
        finally
        {
            // Always clear the flag — previously, a callback that threw would
            // leave IsLoggingIn stuck true and every later Login() would be
            // silently ignored.
            IsLoggingIn = false;
        }
    });
    loginThread.Start();

    // Busy-wait until the worker thread reports alive before returning.
    // NOTE(review): a spin loop is wasteful — Start() already schedules the
    // thread; confirm whether any caller actually depends on IsAlive here.
    while (!loginThread.IsAlive) { }
}
/// <summary>
/// Pulls the current tracking vector from FacePlus and writes each mapped
/// channel's value (Offset + Scale * raw) into channelMapping. Mapped
/// channels the tracker did not report this frame are reset to their Offset.
/// Fires OnChannelsUpdated when done.
/// </summary>
void UpdateChannels()
{
    if (!FacePlus.IsInitSuccessful)
    {
        return;
    }
    if (!FacePlus.IsTracking)
    {
        return;
    }

#if !UNITY_4_2 && UNITY_EDITOR
    // Don't fight the editor's animation preview mode.
    if (UnityEditor.AnimationUtility.InAnimationMode())
    {
        return;
    }
#endif

    float[] channelVector = FacePlus.GetCurrentVector();

    // HashSet instead of List: the Contains checks in the fallback loop below
    // become O(1) instead of O(n), avoiding accidental O(n^2) per frame.
    var doneKeys = new HashSet <string>();
    for (int i = 0; i < channelVector.Length; i++)
    {
        string channel = FacePlus.GetChannelName(i);
        if (channelMapping.ContainsKey(channel))
        {
            doneKeys.Add(channel);
            // Single dictionary lookup reused for Offset/Scale/Amount.
            var mapping = channelMapping[channel];
            mapping.Amount = mapping.Offset + (mapping.Scale * channelVector[i]);
        }
    }

    // Any mapped channel absent from this frame's vector falls back to its offset.
    foreach (var shape in channelMapping.Keys)
    {
        if (!doneKeys.Contains(shape))
        {
            channelMapping[shape].Amount = channelMapping[shape].Offset;
        }
    }

    if (OnChannelsUpdated != null)
    {
        OnChannelsUpdated();
    }
}
/// <summary>
/// Copies the current frameTexture pixels into a tightly packed 3-bytes-per-
/// pixel buffer (r, g, b; alpha dropped), remapping positions through
/// GetUnityIndex/GetBufferIndex, then hands the buffer to FacePlus for
/// synchronous tracking.
/// </summary>
/// <returns>The tracker's result, or false if FacePlus is not initialized or
/// the buffer size does not match the texture.</returns>
bool UpdateFrameBuffer()
{
    if (!FacePlus.IsInitSuccessful)
    {
        return false;
    }

    Color32[] colors = frameTexture.GetPixels32();

    // NOTE(review): this local allocates a fresh buffer every call and shadows
    // the frameBuffer field sized in SetImportMovieCoroutine — consider
    // reusing that field to avoid per-frame garbage; kept as-is pending
    // confirmation that the field is always sized for the current texture.
    byte[] frameBuffer = new byte[3 * frameTexture.width * frameTexture.height];
    if (colors.Length * 3 != frameBuffer.Length)
    {
        Debug.LogError("Frame buffer size mismatch.");
        return false;
    }

    int width = frameTexture.width;
    int height = frameTexture.height;

    for (int x = 0; x < width; x++)
    {
        for (int y = 0; y < height; y++)
        {
            int unityIndex = GetUnityIndex(x, y, width, height);
            int bufferIndex = GetBufferIndex(x, y, width, height);
            frameBuffer[bufferIndex] = colors[unityIndex].r;
            frameBuffer[bufferIndex + 1] = colors[unityIndex].g;
            frameBuffer[bufferIndex + 2] = colors[unityIndex].b;
        }
    }

    var result = FacePlus.TrackSynchBuffer(frameBuffer, OutputDebugImages);
    return result;
}
/// <summary>
/// Clears the cached user record and logs out of the FacePlus service.
/// </summary>
public static void Logout()
{
    User = null;
    FacePlus.Logout();
}
/// <summary>
/// Unity lifecycle callback: when this component is disabled, stop the
/// tracking thread and tear FacePlus down.
/// </summary>
void OnDisable()
{
    Logger.Debug("Disabling. Stopping tracking...");
    FacePlus.StopTracking(); // stop the tracking thread
    FacePlus.Teardown();     // relinquish control of the camera
}