/// <summary>
/// Derives auto-adjust parameters from the body shape of the currently loaded VRM,
/// applies them as if they had arrived from the message handlers, and finally
/// reports the decided values back to the config window.
/// </summary>
private void AutoAdjust()
{
    if (_vrmRoot == null)
    {
        return;
    }

    // Steps:
    // 1. Decide various parameters from the loaded model's body shape.
    // 2. Apply them as though they were received as ordinary messages.
    // 3. Send the decided parameters over to the config side.
    var adjustParams = new AutoAdjustParameters();
    try
    {
        var animator = _vrmRoot.GetComponent<Animator>();

        // The subroutines all read the animator's humanoid bones,
        // but they split up roughly by body part.
        SetHandSizeRelatedParameters(animator, adjustParams);
        // Eyebrows are driven by blend shapes rather than bones.
        SetEyebrowParameters(adjustParams);
        AdjustCameraPosition(animator);

        SendParameterRelatedCommands(adjustParams);
        // Step 3: report the decided parameters to the config side.
        sender.SendCommand(MessageFactory.Instance.AutoAdjustResults(adjustParams));
    }
    catch (Exception ex)
    {
        LogOutput.Instance.Write(ex);
    }
}
/// <summary>
/// Wires up VRoid-related commands. In this build both requests are answered
/// with a "load canceled" notification (VRoid loading is not available here).
/// </summary>
/// <param name="receiver">Source of commands from the config window.</param>
/// <param name="sender">Channel used to notify the config window.</param>
public void Initialize(IMessageReceiver receiver, IMessageSender sender)
{
    _sender = sender;

    void NotifyCanceled() =>
        _sender?.SendCommand(MessageFactory.Instance.VRoidModelLoadCanceled());

    receiver.AssignCommandHandler(VmmCommands.OpenVRoidSdkUi, _ => NotifyCanceled());
    receiver.AssignCommandHandler(VmmCommands.RequestLoadVRoidWithId, _ => NotifyCanceled());
}
/// <summary>
/// Subscribes to config-window commands (MIDI note-to-motion map loading and the
/// note-on redirection flag) and to incoming MIDI note-on events, which may trigger
/// word-to-motion items when a mapping exists and the cooldown has elapsed.
/// </summary>
private void Start()
{
    _handler.Commands.Subscribe(c =>
    {
        switch (c.Command)
        {
            case MessageCommandNames.LoadMidiNoteToMotionMap:
                LoadMidiNoteToMotionMap(c.Content);
                break;
            case MessageCommandNames.RequireMidiNoteOnMessage:
                _redirectNoteOnMessageToIpc = c.ToBoolean();
                break;
        }
    });

    _midiInputObserver.NoteOn.Subscribe(noteNumber =>
    {
        if (_redirectNoteOnMessageToIpc)
        {
            _sender?.SendCommand(MessageFactory.Instance.MidiNoteOn(noteNumber));
        }

        // Single dictionary lookup via TryGetValue instead of the original
        // ContainsKey + indexer pair (same behavior, one lookup instead of two).
        if (UseMidiInput &&
            _cooldownCount <= 0 &&
            _noteNumberToMotionMap.TryGetValue(noteNumber, out var motionItem))
        {
            RequestExecuteWordToMotionItem?.Invoke(motionItem);
            _cooldownCount = cooldownTime;
        }
    });
}
/// <summary>
/// Application-quit hook: holds the quit until the release sequence has run.
/// Returns true only once every release item has completed.
/// </summary>
private bool OnApplicationWantsToQuit()
{
    // Already torn down: let the application quit.
    if (_releaseCompleted.Value)
    {
        return true;
    }

    // Teardown still in flight: keep blocking the quit.
    if (_releaseRunning.Value)
    {
        return false;
    }

    _releaseRunning.Value = true;

    // Pre-step: things like MMF channels must be closed at this point already.
    foreach (var item in _releaseItems)
    {
        item.ReleaseBeforeCloseConfig();
    }
    _sender?.SendCommand(MessageFactory.Instance.CloseConfigWindow());

    // Nothing to release: should not happen in practice, but handle it for completeness.
    if (_releaseItems.Count == 0)
    {
        _releaseCompleted.Value = true;
        _releaseRunning.Value = false;
        return true;
    }

    ReleaseItemsAsync();
    return _releaseCompleted.Value;
}
/// <summary>
/// Derives auto-adjust parameters from the loaded VRM's body shape, applies them
/// as if they had come in via the message dispatcher, and sends the results to
/// the config window. Device layout reset is dispatched separately and reports
/// back with its own message when done.
/// </summary>
private void AutoAdjust()
{
    if (_vrmRoot == null)
    {
        return;
    }

    // Steps:
    // 1. Decide parameters from the loaded model's body shape.
    // 2. Apply them as though they arrived from the message handlers.
    // 3. Send the decided parameters over to the config side.
    var adjustParams = new AutoAdjustParameters();
    try
    {
        var animator = _vrmRoot.GetComponent<Animator>();

        // These subroutines read the animator's humanoid bones, split by body part.
        SetHandSizeRelatedParameters(animator, adjustParams);
        AdjustCameraPosition(animator);

        // Device layout adjustment: a separate message fires when it finishes.
        _dispatcher.ReceiveCommand(new ReceivedCommand(VmmCommands.ResetDeviceLayout));

        // Step 3: report the decided parameters to the config side.
        _sender.SendCommand(MessageFactory.Instance.AutoAdjustResults(adjustParams));
    }
    catch (Exception ex)
    {
        LogOutput.Instance.Write(ex);
    }
}
/// <summary>
/// Binds face-control settings (auto blink preference, default "fun" blend value)
/// to incoming commands, answers blend shape name queries, and pushes the blend
/// shape name list to the config window whenever a VRM finishes initializing.
/// </summary>
/// <param name="receiver">Source of commands/queries from the config window.</param>
/// <param name="sender">Channel used to push notifications to the config window.</param>
/// <param name="faceControlManager">Target whose settings are driven by the messages.</param>
public FaceControlManagerMessageIo(
    IMessageReceiver receiver,
    IMessageSender sender,
    FaceControlManager faceControlManager)
{
    receiver.AssignCommandHandler(
        VmmCommands.AutoBlinkDuringFaceTracking,
        message => faceControlManager.PreferAutoBlinkOnWebCamTracking = message.ToBoolean());

    receiver.AssignCommandHandler(
        VmmCommands.FaceDefaultFun,
        message => faceControlManager.DefaultBlendShape.FaceDefaultFunValue = message.ParseAsPercentage());

    receiver.AssignQueryHandler(
        VmmQueries.GetBlendShapeNames,
        query => query.Result = string.Join("\t", faceControlManager.BlendShapeStore.GetBlendShapeNames()));

    faceControlManager.VrmInitialized += () =>
    {
        var joinedNames = string.Join("\t", faceControlManager.BlendShapeStore.GetBlendShapeNames());
        sender.SendCommand(MessageFactory.Instance.SetBlendShapeNames(joinedNames));
    };

    // Eyebrow-related blend shape assignment is handled by a dedicated receiver.
    var _ = new BlendShapeAssignReceiver(receiver, faceControlManager);
}
/// <summary>
/// Updates the iFacialMocap trouble message and notifies the config window,
/// but only when the message actually changed (avoids redundant IPC traffic).
/// </summary>
/// <param name="message">New trouble message; empty means "no trouble".</param>
private void SetTroubleMessage(string message)
{
    if (_troubleMessage == message)
    {
        return;
    }

    _troubleMessage = message;
    _sender.SendCommand(
        MessageFactory.Instance.ExTrackerSetIFacialMocapTroubleMessage(_troubleMessage));
}
/// <summary>
/// Serializes an error notification to JSON and asks the config window to show it.
/// </summary>
/// <param name="title">Dialog title.</param>
/// <param name="content">Dialog body text.</param>
/// <param name="errorLevel">Severity, sent as its integer value.</param>
public void SendError(string title, string content, ErrorLevel errorLevel)
{
    var indicateData = new ErrorIndicateData()
    {
        title = title,
        content = content,
        level = (int)errorLevel,
    };
    _sender.SendCommand(
        MessageFactory.Instance.RequestShowError(JsonUtility.ToJson(indicateData)));
}
/// <summary>
/// After a VRM loads, sends the config window the comma-joined names of all
/// blend shape clips that are not part of the standard (basic) set.
/// </summary>
/// <param name="info">Loaded-model info carrying the blend shape avatar.</param>
private void OnVrmLoaded(VrmLoadedInfo info)
{
    var extraClipNames = info.blendShape
        .BlendShapeAvatar
        .Clips
        .Select(c => c.BlendShapeName)
        .Where(n => !BasicNames.Contains(n));

    var joined = string.Join(",", extraClipNames);
    _sender.SendCommand(MessageFactory.Instance.ExtraBlendShapeClipNames(joined));
}
/// <summary>
/// Subscribes to VRM load completion and, each time a model loads, sends the
/// config window the comma-joined names of blend shape clips outside the
/// standard (basic) set.
/// </summary>
/// <param name="vrmLoadable">Source of the VrmLoaded event.</param>
/// <param name="sender">Channel used to notify the config window.</param>
public void Initialize(IVRMLoadable vrmLoadable, IMessageSender sender)
{
    vrmLoadable.VrmLoaded += info =>
    {
        var extraClipNames = info.blendShape
            .BlendShapeAvatar
            .Clips
            .Select(c => c.BlendShapeName)
            .Where(n => !BasicNames.Contains(n));

        sender.SendCommand(
            MessageFactory.Instance.ExtraBlendShapeClipNames(string.Join(",", extraClipNames)));
    };
}
/// <summary>
/// Timer-triggered Azure Function (every 10 minutes): sets up the crawler,
/// fetches the latest blockchain data, and forwards it as a command to the
/// configured endpoint. Failures are logged and never rethrown, so one bad
/// run does not disable the timer.
/// </summary>
/// <param name="myTimer">Timer trigger metadata (unused).</param>
/// <param name="log">Function logger.</param>
public void Run([TimerTrigger("0 */10 * * * *")] TimerInfo myTimer, ILogger log)
{
    try
    {
        if (!_crawler.SetupCrawler())
        {
            return;
        }

        _sender.SendCommand(_builder.BuildCommand(
            new List<object> { _crawler.Fetch() },
            _crawler.ExposeEndpoint()));

        // Message template instead of string interpolation (CA2254) so the
        // timestamp is captured as a structured property by the provider.
        log.LogInformation(
            "BlockchainDataFetcher ran to completion @ {Time}",
            DateTime.UtcNow.AsUtc());
    }
    catch (Exception ex)
    {
        // Pass the exception object to LogError so the provider records the full
        // stack trace and inner exceptions, instead of flattening Message /
        // StackTrace / Source into the message text.
        log.LogError(ex, "An error has occured @ {Time}", DateTime.UtcNow.AsUtc());
    }
}
/// <summary>
/// Coroutine that launches the WPF config-window process (outside the editor)
/// and tells it this Unity process's id. Waits two frames first so other
/// scripts' initialization and first Update have already run.
/// </summary>
private IEnumerator ActivateWpf()
{
    string wpfPath = GetWpfPath();
    if (!File.Exists(wpfPath))
    {
        yield break;
    }

    // Delay slightly: better to start after other scripts' init + first Update.
    yield return null;
    yield return null;

    var startInfo = new ProcessStartInfo()
    {
        FileName = wpfPath,
        Arguments = "/channelId " + MmfChannelIdSource.ChannelId,
    };
#if !UNITY_EDITOR
    Process.Start(startInfo);
    _sender.SendCommand(MessageFactory.Instance.SetUnityProcessId(Process.GetCurrentProcess().Id));
#endif
}
/// <summary>
/// Runs external-tracker calibration and reports the resulting calibration data
/// (serialized as JSON) to the config window. No-op unless a tracker is connected.
/// </summary>
public void Calibrate()
{
    // Calibrating before tracking starts makes no sense - forbid it.
    if (!Connected)
    {
        return;
    }

    // NOTE: in the current implementation calibration completes immediately, and the
    // result is all calibration data bundled into a single value, hence this shape.
    CurrentProvider.Calibrate();
    var calibData = new ExternalTrackerCalibrationData()
    {
        iFacialMocap = iFacialMocapReceiver.CalibrationData,
    };
    _sender.SendCommand(
        MessageFactory.Instance.ExTrackerCalibrateComplete(JsonUtility.ToJson(calibData)));

    // POINT: values tend to jump right after calibration, so blend them in the same
    // way as when recovering from tracking loss.
    _trackedCount = 0f;
}
/// <summary>
/// Wires MIDI note-on input to word-to-motion items: commands from the config
/// window load the note-to-motion map and toggle note-on redirection over IPC,
/// and each incoming note may fire a mapped motion item.
/// </summary>
/// <param name="receiver">Source of commands from the config window.</param>
/// <param name="sender">Channel used to redirect note-on events over IPC.</param>
/// <param name="midiObserver">Source of MIDI note-on events.</param>
public MidiToWordToMotion(IMessageReceiver receiver, IMessageSender sender, MidiInputObserver midiObserver)
{
    receiver.AssignCommandHandler(
        VmmCommands.LoadMidiNoteToMotionMap,
        c => LoadMidiNoteToMotionMap(c.Content));
    receiver.AssignCommandHandler(
        VmmCommands.RequireMidiNoteOnMessage,
        c => _redirectNoteOnMessageToIpc = c.ToBoolean());

    _midiObserver = midiObserver.NoteOn.Subscribe(noteNumber =>
    {
        if (_redirectNoteOnMessageToIpc)
        {
            sender.SendCommand(MessageFactory.Instance.MidiNoteOn(noteNumber));
        }

        // Single dictionary lookup via TryGetValue instead of the original
        // ContainsKey + indexer pair (same behavior, one lookup instead of two).
        if (_noteNumberToMotionMap.TryGetValue(noteNumber, out var motionItem))
        {
            RequestExecuteWordToMotionItem?.Invoke(motionItem);
        }
    });
}
/// <summary>
/// On startup, forwards face-tracker calibration results to the config window
/// whenever calibration completes.
/// </summary>
private void Start()
{
    var faceTracker = GetComponent<FaceTracker>();
    faceTracker.CalibrationCompleted +=
        data => sender.SendCommand(MessageFactory.Instance.SetCalibrateFaceData(data));
}
/// <summary>
/// Forwards face-tracker calibration results to the config window whenever
/// calibration completes.
/// </summary>
/// <param name="sender">Channel used to notify the config window.</param>
/// <param name="faceTracker">Source of the CalibrationCompleted event.</param>
public CalibrationCompletedDataSender(IMessageSender sender, FaceTracker faceTracker)
{
    faceTracker.CalibrationCompleted += data =>
    {
        sender.SendCommand(MessageFactory.Instance.SetCalibrationFaceData(data));
    };
}
/// <summary>
/// Sends the config window the current blend shape names, tab-joined.
/// </summary>
private void SendBlendShapeNames()
{
    var joinedNames = string.Join("\t", TryGetBlendShapeNames());
    sender.SendCommand(MessageFactory.Instance.SetBlendShapeNames(joinedNames));
}
/// <summary>
/// Application-quit hook: never blocks the quit itself, just asks the config
/// window to close alongside the app.
/// </summary>
private bool OnApplicationWantsToQuit()
{
    // NOTE: we do not disturb app quit itself, just request config close too.
    sender?.SendCommand(MessageFactory.Instance.CloseConfigWindow());
    return true;
}