Example #1
0
        /// <summary>
        /// Creates a VMD facial key frame from an absolute time (in seconds), a truncated
        /// MLTD morph name and a morph weight.
        /// </summary>
        private VmdFacialFrame CreateFacialFrame(float time, [NotNull] string mltdTruncMorphName, float value)
        {
            // Convert seconds to an MLTD frame number; halve it when downsampling 60 fps → 30 fps.
            var mltdFrameNumber = (int)(time * FrameRate.Mltd);
            var frameIndex = _conversionConfig.Transform60FpsTo30Fps ? mltdFrameNumber / 2 : mltdFrameNumber;

            // Either translate the MLTD morph name to its MMD equivalent, or keep it as-is.
            var expressionName = _conversionConfig.TranslateFacialExpressionNamesToMmd
                ? MorphUtils.LookupMorphName(mltdTruncMorphName)
                : mltdTruncMorphName;

            var facialFrame = new VmdFacialFrame(frameIndex, expressionName)
            {
                Weight = value
            };

            return facialFrame;
        }
Example #2
0
        /// <summary>
        /// Reads one facial key frame from the underlying stream.
        /// VMD field order: expression name (15 bytes), frame index, weight.
        /// </summary>
        private VmdFacialFrame ReadFacialFrame()
        {
            // Read the fields in file order before constructing the frame.
            var expressionName = ReadString(15);
            var frameIndex     = _reader.ReadInt32();
            var weight         = _reader.ReadSingle();

            return new VmdFacialFrame
            {
                FacialExpressionName = expressionName,
                FrameIndex           = frameIndex,
                Weight               = weight
            };
        }
Example #3
0
        /// <summary>
        /// Reads the facial frame section (a count-prefixed list) and stores it on the motion.
        /// </summary>
        private void ReadFacialFrames([NotNull] VmdMotion motion)
        {
            // The section starts with the number of frames that follow.
            var count = _reader.ReadInt32();
            var facialFrames = new VmdFacialFrame[count];

            for (var index = 0; index < facialFrames.Length; index++)
            {
                facialFrames[index] = ReadFacialFrame();
            }

            motion.FacialFrames = facialFrames;
        }
Example #4
0
 /// <summary>
 /// Serializes one facial key frame in VMD field order:
 /// expression name (fixed 15 bytes), frame index, then weight.
 /// </summary>
 private void WriteFacialFrame([NotNull] VmdFacialFrame frame)
 {
     var expressionName = frame.FacialExpressionName;

     // The morph name occupies a fixed-width 15-byte field.
     WriteString(expressionName, 15);
     _writer.Write(frame.FrameIndex);
     _writer.Write(frame.Weight);
 }
        /// <summary>
        /// Builds the VMD facial key frames (lip sync plus facial expressions) for one idol
        /// from MLTD scenario data.
        /// </summary>
        /// <param name="scenarioObject">Scenario data containing lip-sync and expression controls.</param>
        /// <param name="songPosition">Idol position in the song; expression controls are filtered by <c>songPosition - 1</c>.</param>
        /// <returns>The generated facial frames, in generation order (not sorted by frame index).</returns>
        private static IReadOnlyList<VmdFacialFrame> CreateFacialFrames([NotNull] ScenarioObject scenarioObject, int songPosition)
        {
            // Maps an absolute time (seconds) and a truncated MLTD morph name to a VMD facial frame.
            VmdFacialFrame CreateFacialFrame(float time, string mltdTruncMorphName, float value)
            {
                // MLTD animates at 60 fps. (Magic constant; other call sites use FrameRate.Mltd — TODO unify.)
                var n = (int)(time * 60.0f);
                int frameIndex;

                if (ConversionConfig.Current.Transform60FpsTo30Fps)
                {
                    // Halve the frame number when downsampling 60 fps → 30 fps.
                    frameIndex = n / 2;
                }
                else
                {
                    frameIndex = n;
                }

                string expressionName;

                if (ConversionConfig.Current.TranslateFacialExpressionNamesToMmd)
                {
                    expressionName = MorphUtils.LookupMorphName(mltdTruncMorphName);
                }
                else
                {
                    expressionName = mltdTruncMorphName;
                }

                var frame = new VmdFacialFrame(frameIndex, expressionName);

                frame.Weight = value;

                return frame;
            }

            var facialFrameList = new List<VmdFacialFrame>();

            // Lip motion
            {
                var lipSyncControls = scenarioObject.Scenario.Where(s => s.Type == ScenarioDataType.LipSync).ToArray();

                Debug.Assert(lipSyncControls.Length > 0, "Lip-sync controls should exist.");
                Debug.Assert(lipSyncControls[0].Param == 54, "The first control op should be 54.");
                Debug.Assert(lipSyncControls[lipSyncControls.Length - 1].Param == 54, "The last control op should be 54.");

                // Seconds over which a mouth morph fades in/out around each vowel.
                const float lipTransitionTime = 0.2f;

                for (var i = 0; i < lipSyncControls.Length; i++)
                {
                    var sync        = lipSyncControls[i];
                    var currentTime = (float)sync.AbsoluteTime;
                    var hasNext     = i < lipSyncControls.Length - 1;
                    var hasPrev     = i > 0;

                    switch (sync.Param)
                    {
                    case 0:
                    case 1:
                    case 2:
                    case 3:
                    case 4:
                    case 50:
                        // The whole song ends with a "mouse-closed" (54) op.
                        Debug.Assert(hasNext, "The song should end with control op 54 (mouse closed).");
                        // The whole song starts with a "mouse-closed" (54) op.
                        Debug.Assert(hasPrev, "The song should start with control op 54 (mouse closed).");

                        // With the asserts above, indexing [i - 1] and [i + 1] below is safe.
                        string morphName;

                        switch (sync.Param)
                        {
                        case 0:
                            morphName = "M_a";
                            break;

                        case 1:
                            morphName = "M_i";
                            break;

                        case 2:
                            morphName = "M_u";
                            break;

                        case 3:
                            morphName = "M_e";
                            break;

                        case 4:
                            morphName = "M_o";
                            break;

                        case 50:
                            morphName = "M_n";
                            break;

                        default:
                            // Unreachable: the outer switch already restricted Param to the cases above.
                            // Use the (paramName, actualValue, message) overload so the message is not
                            // mistaken for a parameter name.
                            throw new ArgumentOutOfRangeException(nameof(sync.Param), sync.Param, "Not possible.");
                        }

                        var prevTime = (float)lipSyncControls[i - 1].AbsoluteTime;

                        // Fade the morph in: start from weight 0 either lipTransitionTime before the
                        // vowel, or at the previous control if it is closer.
                        if (currentTime - prevTime > lipTransitionTime)
                        {
                            facialFrameList.Add(CreateFacialFrame(currentTime - lipTransitionTime, morphName, 0));
                        }
                        else
                        {
                            facialFrameList.Add(CreateFacialFrame(prevTime, morphName, 0));
                        }

                        facialFrameList.Add(CreateFacialFrame(currentTime, morphName, 1));

                        var nextTime = (float)lipSyncControls[i + 1].AbsoluteTime;

                        // Fade the morph out: hold full weight until lipTransitionTime before the next
                        // control when there is room, otherwise drop straight to 0 at the next control.
                        if (nextTime - currentTime > lipTransitionTime)
                        {
                            facialFrameList.Add(CreateFacialFrame(nextTime - lipTransitionTime, morphName, 1));
                            facialFrameList.Add(CreateFacialFrame(nextTime, morphName, 0));
                        }
                        else
                        {
                            facialFrameList.Add(CreateFacialFrame(nextTime, morphName, 0));
                        }

                        break;

                    case 54:
                        // Mouth closed: zero every mouth morph at this time.
                        facialFrameList.Add(CreateFacialFrame(currentTime, "M_a", 0));
                        facialFrameList.Add(CreateFacialFrame(currentTime, "M_i", 0));
                        facialFrameList.Add(CreateFacialFrame(currentTime, "M_u", 0));
                        facialFrameList.Add(CreateFacialFrame(currentTime, "M_e", 0));
                        facialFrameList.Add(CreateFacialFrame(currentTime, "M_o", 0));
                        facialFrameList.Add(CreateFacialFrame(currentTime, "M_n", 0));
                        break;

                    default:
                        throw new ArgumentOutOfRangeException(nameof(sync.Param), sync.Param, null);
                    }
                }
            }

            // Facial expression
            {
                var expControls = scenarioObject.Scenario.Where(s => s.Type == ScenarioDataType.FacialExpression && s.Idol == songPosition - 1).ToArray();

                Debug.Assert(expControls.Length > 0, "Expression controls should exist.");

                // Note that here we don't process blinkings (which happens in MLTD)
                for (var i = 0; i < expControls.Length; i++)
                {
                    var exp         = expControls[i];
                    var currentTime = (float)exp.AbsoluteTime;

                    // Seconds over which eyelids transition between open and closed.
                    const float eyeBlinkTime = 0.1f;

                    var eyesClosedRatio = exp.EyeClosed ? 1.0f : 0.0f;

                    facialFrameList.Add(CreateFacialFrame(currentTime, "E_metoji_r", eyesClosedRatio));
                    facialFrameList.Add(CreateFacialFrame(currentTime, "E_metoji_l", eyesClosedRatio));

                    // When the eye state flipped, anchor the previous state shortly before this
                    // control so the eyelids animate instead of snapping.
                    if (i > 0)
                    {
                        if (expControls[i - 1].EyeClosed != exp.EyeClosed)
                        {
                            facialFrameList.Add(CreateFacialFrame(currentTime - eyeBlinkTime, "E_metoji_r", 1 - eyesClosedRatio));
                            facialFrameList.Add(CreateFacialFrame(currentTime - eyeBlinkTime, "E_metoji_l", 1 - eyesClosedRatio));
                        }
                    }

                    // (The original wrapped the rest of this body in a no-op do/while(false); removed.)
                    var expressionKey = exp.Param;

                    if (!ConversionConfig.Current.FacialExpressionMappings.ContainsKey(expressionKey))
                    {
                        Trace.TraceWarning("Facial expression key {0} is not found (at time {1}), using default emotion instead.", exp.Param, currentTime);

                        expressionKey = 0;
                    }

                    // Seconds over which one expression blends into the next (8 frames at 60 fps).
                    const float faceTransitionTime = 0.1333333f;

                    foreach (var kv in ConversionConfig.Current.FacialExpressionMappings[expressionKey])
                    {
                        // Eyelid morphs are driven by the EyeClosed handling above, so skip them here.
                        if ((kv.Key != "E_metoji_r" && kv.Key != "E_metoji_l"))
                        {
                            facialFrameList.Add(CreateFacialFrame(currentTime, kv.Key, kv.Value));
                        }
                    }

                    if (i > 0)
                    {
                        if (expControls[i - 1].Param != exp.Param)
                        {
                            var lastExpressionKey = expControls[i - 1].Param;

                            if (!ConversionConfig.Current.FacialExpressionMappings.ContainsKey(lastExpressionKey))
                            {
                                Trace.TraceWarning("Facial expression key {0} is not found (at time {1}), using default emotion instead.", expControls[i - 1].Param, (float)expControls[i - 1].AbsoluteTime);

                                lastExpressionKey = 0;
                            }

                            var expectedTransitionStartTime = currentTime - faceTransitionTime;

                            foreach (var kv in ConversionConfig.Current.FacialExpressionMappings[lastExpressionKey])
                            {
                                // TODO: Actually we should do a more thorough analysis, because in this time window the eye CAN be opened again so we actually need these values.
                                // But whatever. This case is rare. Fix it in the future.
                                if ((kv.Key != "E_metoji_r" && kv.Key != "E_metoji_l"))
                                {
                                    facialFrameList.Add(CreateFacialFrame(expectedTransitionStartTime, kv.Key, kv.Value));
                                }
                            }
                        }
                    }
                }
            }

            return facialFrameList;
        }
Example #6
0
        /// <summary>
        /// Reads a complete VMD motion from the underlying stream.
        /// Sections are read in fixed file order: header, bone frames, facial frames,
        /// camera frames, light frames, 4 unknown bytes, then optional IK frames.
        /// </summary>
        /// <returns>The parsed motion.</returns>
        /// <exception cref="FormatException">
        /// Thrown when the signature is wrong or unrecognized trailing data remains.
        /// </exception>
        private VmdMotion ReadMotion()
        {
            // Fixed 20-byte magic at the start of every VMD file.
            var signature = ReadString(20);

            if (signature != "Vocaloid Motion Data")
            {
                throw new FormatException("VMD signature is not found.");
            }

            var motion = new VmdMotion();

            // 10-byte version field, e.g. "0002"; parsed as an integer.
            var formatVersionString = ReadString(10);

            motion.Version   = Convert.ToInt32(formatVersionString);
            motion.ModelName = ReadString(20);

            // Section order is dictated by the file format; do not reorder these calls.
            ReadBoneFrames();
            ReadFacialFrames();
            ReadCameraFrames();
            ReadLightFrames();

            // Unknown 2: 4 bytes whose meaning is not handled here; skipped.
            _reader.ReadBytes(4);

            // IK frames are optional; present only if data remains after the light section.
            if (_reader.BaseStream.Position != _reader.BaseStream.Length)
            {
                ReadIKFrames();
            }

            // Anything still unread means the file has sections this reader does not support.
            if (_reader.BaseStream.Position != _reader.BaseStream.Length)
            {
                throw new FormatException("The VMD file may contain other data that this reader does not recognize.");
            }

            return(motion);

            // Each section below is a count-prefixed list of fixed-layout records.

            void ReadBoneFrames()
            {
                var frameCount = _reader.ReadInt32();
                var frames     = new VmdBoneFrame[frameCount];

                for (var i = 0; i < frameCount; ++i)
                {
                    frames[i] = ReadBoneFrame();
                }

                motion.BoneFrames = frames;
            }

            void ReadFacialFrames()
            {
                var frameCount = _reader.ReadInt32();
                var frames     = new VmdFacialFrame[frameCount];

                for (var i = 0; i < frameCount; ++i)
                {
                    frames[i] = ReadFacialFrame();
                }

                motion.FacialFrames = frames;
            }

            void ReadCameraFrames()
            {
                var frameCount = _reader.ReadInt32();
                var frames     = new VmdCameraFrame[frameCount];

                for (var i = 0; i < frameCount; ++i)
                {
                    frames[i] = ReadCameraFrame();
                }

                motion.CameraFrames = frames;
            }

            void ReadLightFrames()
            {
                var frameCount = _reader.ReadInt32();
                var frames     = new VmdLightFrame[frameCount];

                for (var i = 0; i < frameCount; ++i)
                {
                    frames[i] = ReadLightFrame();
                }

                motion.LightFrames = frames;
            }

            void ReadIKFrames()
            {
                var frameCount = _reader.ReadInt32();
                var frames     = new VmdIKFrame[frameCount];

                for (var i = 0; i < frameCount; ++i)
                {
                    frames[i] = ReadIKFrame();
                }

                motion.IKFrames = frames;
            }
        }