Example #1
    public EmotionEvent GetEventFromStructuralTrack(TrackData track, TrackChunkData chunk, int trackIndex, int chunkIndex)
    {
        EmotionEvent e = new EmotionEvent();

        e.trackIndex           = trackIndex;
        e.chunkIndex           = chunkIndex;
        e.associatedEmotion    = chunk.startData.GetSpectrum();
        e.chunkDelimitsSegment = false;
        e.harmonicDifference   = 0;

        // We only care about the ending of this chunk (for now)
        e.timestamp = chunk.end;
        e.intensity = chunk.GetIntensity(chunk.end);

        if (chunk.intensityCurve == IntensityCurve.LinearIncreasing)
        {
            // Buildup
            e.type = EmotionEvent.EmotionEventType.LocalMaximum;
        }
        else if (chunk.intensityCurve == IntensityCurve.LinearDecreasing)
        {
            // Decreasing
            e.type = EmotionEvent.EmotionEventType.LocalMinimum;
        }
        else
        {
            // Structure didn't change (repetition, etc)
            e.type = EmotionEvent.EmotionEventType.Sustain;
        }

        return(e);
    }
Example #2
    protected void DrawIntensityCurve(Rect container)
    {
        AnimationCurve curve = TrackChunkData.GetAnimationCurve(Chunk.IntensityCurveType);

        GL.PushMatrix();
        lineMaterial.SetPass(0);
        GL.LoadOrtho();
        GL.Begin(GL.LINE_STRIP);

        container.y      += 2f;
        container.height -= 2f;

        // Transform container
        container.x      /= Screen.width;
        container.width  /= Screen.width;
        container.y      /= Screen.height;
        container.height /= Screen.height;

        int subdivisions = 32;

        for (int i = 0; i <= subdivisions; ++i) // <= so the last vertex lands at t = 1, the container's right edge
        {
            float t = (i / (float)subdivisions);
            float y = container.y + curve.Evaluate(t) * container.height;
            float x = container.x + t * container.width;

            GL.Vertex(new Vector3(x, y, 0f));
        }

        GL.End();
        GL.PopMatrix();
    }
Example #3
    // This evaluation has no post-processing (TODO: add a per-track cache in the future?)
    public EmotionSpectrum EvaluateTrack(TrackData track, float normalizedTime)
    {
        EmotionSpectrum result = new EmotionSpectrum();

        int lastChunkIndex = 0;

        for (int i = 0; i < track.chunks.Count; ++i)
        {
            TrackChunkData chunk = track.chunks[i];

            if (chunk.start <= normalizedTime && chunk.end >= normalizedTime)
            {
                result        += chunk.Evaluate(normalizedTime);
                lastChunkIndex = i;
            }
        }

        // First chunk is super important!
        if (lastChunkIndex == 0)
        {
            result *= 2f;
        }

        return(result);
    }
Example #4
        // Method that parses the MIDI file and stores the results into lists
        private void FileEncoder()
        {
            PianoNoteList.Clear();
            tempoList.Clear();
            // Open the file at the current playback position's path in binary mode
            using (FileStream stream = new FileStream(files[counter], FileMode.Open, FileAccess.Read))
                using (BinaryReader reader = new BinaryReader(stream))
                {
                    headerChunk.cunkID = reader.ReadBytes(4);

                    // Parse the header chunk
                    if (BitConverter.IsLittleEndian)
                    {
                        // If this machine is little-endian, reverse the byte order

                        byte[] byteArray = reader.ReadBytes(4);
                        Array.Reverse(byteArray);
                        headerChunk.dataLength = BitConverter.ToInt32(byteArray, 0);

                        byteArray = reader.ReadBytes(2);
                        Array.Reverse(byteArray);
                        headerChunk.format = BitConverter.ToInt16(byteArray, 0);

                        byteArray = reader.ReadBytes(2);
                        Array.Reverse(byteArray);
                        headerChunk.tracks = BitConverter.ToInt16(byteArray, 0);

                        byteArray = reader.ReadBytes(2);
                        Array.Reverse(byteArray);
                        headerChunk.division = BitConverter.ToInt16(byteArray, 0);
                    }
                    else
                    {
                        headerChunk.dataLength = BitConverter.ToInt32(reader.ReadBytes(4), 0);
                        headerChunk.format     = BitConverter.ToInt16(reader.ReadBytes(2), 0);
                        headerChunk.tracks     = BitConverter.ToInt16(reader.ReadBytes(2), 0);
                        headerChunk.division   = BitConverter.ToInt16(reader.ReadBytes(2), 0);
                    }

                    TrackChunkData[] trackChunks = new TrackChunkData[headerChunk.tracks];
                    // Parse the track chunks
                    for (int i = 0; i < headerChunk.tracks; i++)
                    {
                        trackChunks[i].chunkID = reader.ReadBytes(4);
                        if (BitConverter.IsLittleEndian)
                        {
                            byte[] byteArray = reader.ReadBytes(4);
                            Array.Reverse(byteArray);
                            trackChunks[i].dataLength = BitConverter.ToInt32(byteArray, 0);
                        }
                        else
                        {
                            trackChunks[i].dataLength = BitConverter.ToInt32(reader.ReadBytes(4), 0);
                        }
                        trackChunks[i].data = reader.ReadBytes(trackChunks[i].dataLength);
                        // Extract events and delta times from each track's data
                        TrackDataAnalaysis(trackChunks[i].data);
                    }
                }
        }
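The little-endian branch above repeats the same reverse-then-convert pattern for every header field. As a minimal sketch (the helper names are illustrative, not part of the original project), a pair of big-endian readers would collapse that duplication, since MIDI stores all multi-byte values big-endian:

    // Hypothetical helpers: read big-endian values regardless of host byte order.
    static class BigEndianReader
    {
        public static short ReadInt16(System.IO.BinaryReader reader)
        {
            byte[] bytes = reader.ReadBytes(2);
            if (System.BitConverter.IsLittleEndian)
                System.Array.Reverse(bytes);
            return System.BitConverter.ToInt16(bytes, 0);
        }

        public static int ReadInt32(System.IO.BinaryReader reader)
        {
            byte[] bytes = reader.ReadBytes(4);
            if (System.BitConverter.IsLittleEndian)
                System.Array.Reverse(bytes);
            return System.BitConverter.ToInt32(bytes, 0);
        }
    }

With helpers like these, the header read reduces to four straight calls (dataLength, format, tracks, division) and stays correct on either host endianness.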
Example #5
    public override TrackChunkData GetChunkData()
    {
        TrackChunkData d = base.GetChunkData();

        d.startData = Data;
        d.endData   = Data;
        d.type      = ChunkType.Emotion;
        return(d);
    }
Example #6
    /// <summary>
    /// This method tries to define important aspects of how chunks are placed.
    /// Right now it checks simple patterns, but it can be extended to find interesting arrangements in the future
    /// </summary>
    protected float GetIntensityForChunkStartEvent(TrackData track, TrackChunkData chunk, int chunkIndex)
    {
        // First chunk in track, important
        if (chunkIndex == 0)
        {
            return(2f);
        }

        float          intensity     = chunk.GetIntensity(chunk.start);
        TrackChunkData previousChunk = track.chunks[chunkIndex - 1];

        // We care about any change in variation, because it can visually drive something
        bool  variationChanged      = (previousChunk.isVariation != chunk.isVariation);
        float variationContribution = (variationChanged ? 2f : 1f);

        float timeDifference = chunk.start - previousChunk.end;
        float falloff        = MeasureDurationNormalized * 2f;

        // Basically, the closer the chunk, the more impact it will generate
        // TODO: maybe add nonlinear impact responses?
        float timeImpact             = 1f - Mathf.Clamp01(timeDifference / falloff);
        float timeImpactContribution = Mathf.Lerp(1f, 2f, timeImpact);

        // As harmony progresses, so does intensity. When it restarts, the impact is big
        // Roughly, standard 4 measure progressions will generate at most 2f
        int   hDifference = Mathf.Abs(chunk.harmonySequenceNumber - previousChunk.harmonySequenceNumber);
        float harmonicProgressionContribution = Mathf.Pow(1.2f, hDifference);

        // Harmony is broken if timing is off
        if (timeImpact < .45f)
        {
            harmonicProgressionContribution = 1f;
        }

        // On the contrary, if a long time passed between chunks, we have a nostalgia effect
        float longFalloff  = MeasureDurationNormalized * 12f;
        float memoryImpact = Mathf.Clamp01(timeDifference / longFalloff);

        if (memoryImpact > .5f)
        {
            timeImpactContribution += (memoryImpact - .5f) * 2f;
        }

        // If the actual intensities of the chunks are very different at this point, then it may also be important
        float intensityDifference       = Mathf.Abs(chunk.GetIntensity(chunk.start) - previousChunk.GetIntensity(chunk.start));
        float intensityDiffContribution = Mathf.Lerp(1f, 2f, intensityDifference);

        // TODO in the future: do pattern matching to see if the specific arrangement of chunks changes or is stable,
        // by searching on a window of +-8 measures
        // A simple example: look for repetition in the past 8 measures, for every repeated chunk add some intensity (and also add expectation)

        return(intensity * variationContribution * timeImpactContribution * harmonicProgressionContribution * intensityDiffContribution);
    }
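As a rough worked example of how these factors multiply (the values are illustrative): a base intensity of 0.8, a variation change (×2), a moderately close previous chunk (timeImpact = 0.5, so a time contribution of 1.5), a harmonic step of 2 (1.2² ≈ 1.44), and an intensity gap of 0.3 (contribution 1.3) would return roughly 0.8 × 2 × 1.5 × 1.44 × 1.3 ≈ 4.5.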
Example #7
    public virtual TrackChunkData GetChunkData()
    {
        TrackChunkData d = new TrackChunkData();

        d.type                  = ChunkType.None;
        d.start                 = Snap ? GetSnappedPosition(Position) : Position;
        d.end                   = Snap ? GetSnappedPosition(Position + Width) : (Position + Width);
        d.intensityCurve        = IntensityCurveType;
        d.harmonySequenceNumber = HarmonySequenceNumber;
        d.isVariation           = IsVariation;
        return(d);
    }
Example #8
        //main--------------------
        public static void ReadMidi(string filePath, int _baseScale, float magniSpeed /* speed multiplier */)
        {
            // Create the lists
            int baseScale = _baseScale; // Length of a quarter note

            var headerData = new HeaderChunkData();

            TrackChunkData[] trackChunks;
            var b_noteDataList = new List <Bfr_NoteData>();
            var b_tempDataList = new List <Bfr_TempData>();

            // Read the file (does not return until reading is finished!)
            using (var file = new FileStream(filePath, FileMode.Open, FileAccess.Read))
                using (var reader = new BinaryReader(file))
                {
                    //-------- Header parsing -------
                    HeaderDataAnaly(ref headerData, reader);

                    //-------- Track parsing -------
                    trackChunks = new TrackChunkData[headerData.tracks]; // Track count comes from the header

                    for (int i = 0; i < trackChunks.Length; i++)         // Loop once per track
                    {
                        TrackDataAnaly(ref trackChunks, reader, i);

                        // Hand off to performance-data parsing
                        TrackMusicAnaly(trackChunks[i].data, headerData, ref b_noteDataList, ref b_tempDataList);
                    }
                }

            MstimeFix(ref b_noteDataList, ref b_tempDataList); // Handle BPM changes during the song

            // Convert to the data we actually want
            a_noteDataList = new List <Aftr_NoteData>();
            AftrNoteCreate(b_noteDataList, headerData.timeBase, baseScale);

            a_tempDataList = new List <Aftr_TempData>();
            AftrTempCreate(b_tempDataList, baseScale, magniSpeed);

            // Logging below
            DataTestLog(headerData);
            DataTestLog(trackChunks);
            DataTestLog(b_tempDataList);
            DataTestLog(b_noteDataList);
            DataTestLog(a_noteDataList);
            DataTestLog(a_tempDataList);
        }
Example #9
    protected bool IsChunkSegmentDelimiter(TrackData track, TrackChunkData chunk, int chunkIndex, EmotionEvent.EmotionEventType type)
    {
        float timeDifference = 0f;

        if (type == EmotionEvent.EmotionEventType.Start && chunkIndex > 0)
        {
            TrackChunkData previousChunk = track.chunks[chunkIndex - 1];
            timeDifference = Mathf.Abs(chunk.start - previousChunk.end);
        }
        else if (type == EmotionEvent.EmotionEventType.End && chunkIndex < track.chunks.Count - 1)
        {
            TrackChunkData nextChunk = track.chunks[chunkIndex + 1];
            timeDifference = Mathf.Abs(nextChunk.start - chunk.end);
        }

        float threshold = MeasureDurationNormalized * 4f;

        return(timeDifference > threshold);
    }
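As an illustration (the measure length is hypothetical): if MeasureDurationNormalized were 0.01, i.e. one measure spans 1% of the piece, then a gap of more than 0.04 in normalized time between this chunk and its neighbor (four measures) would mark the chunk as a segment delimiter.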
Example #10
    /// <summary>
    /// In contrast with GetIntensityForChunkStartEvent, this method only looks for
    /// differences known a priori, for example, that this may be the last chunk for some time.
    /// Most of the cases will be covered by GetIntensityForChunkStartEvent.
    /// Start events will usually drive camera cuts, while end events will probably drive only the emotion state machine
    /// </summary>
    protected float GetIntensityForChunkEndEvent(TrackData track, TrackChunkData chunk, int chunkIndex)
    {
        float intensity = chunk.GetIntensity(chunk.end);

        if (chunkIndex < track.chunks.Count - 1)
        {
            TrackChunkData nextChunk = track.chunks[chunkIndex + 1];

            float timeDifference = nextChunk.start - chunk.end;
            float falloff        = MeasureDurationNormalized * 4f;

            // If this is the last chunk in some time, make its ending more impactful
            if (timeDifference > falloff)
            {
                intensity *= 2f;
            }
        }

        return(intensity);
    }
Example #11
    public EmotionEvent GetEventFromChunkStart(TrackData track, TrackChunkData chunk, int trackIndex, int chunkIndex)
    {
        EmotionEvent e = new EmotionEvent();

        e.trackIndex           = trackIndex;
        e.chunkIndex           = chunkIndex;
        e.type                 = EmotionEvent.EmotionEventType.Start;
        e.timestamp            = chunk.start;
        e.intensity            = GetIntensityForChunkStartEvent(track, chunk, chunkIndex);
        e.associatedEmotion    = chunk.startData.GetSpectrum(); // TODO: expectation/surprise
        e.chunkDelimitsSegment = IsChunkSegmentDelimiter(track, chunk, chunkIndex, e.type);
        e.harmonicDifference   = 0;

        if (chunkIndex > 0)
        {
            e.harmonicDifference = Mathf.Abs(chunk.harmonySequenceNumber - track.chunks[chunkIndex - 1].harmonySequenceNumber);
        }

        return(e);
    }
Example #12
    /// <summary>
    /// Events can be found at the beginning and at the end of the processing stage.
    /// At the beginning, chunks are analyzed and used as discrete indicators of events.
    /// At the end, a more complex search runs, looking for local minima/maxima and plot points.
    /// </summary>
    protected void PreAccumulateEvents()
    {
        List <EmotionEvent> foundEvents = new List <EmotionEvent>();

        for (int t = 0; t < container.tracks.Count; t++)
        {
            TrackData track = container.tracks[t];

            for (int c = 0; c < track.chunks.Count; c++)
            {
                TrackChunkData chunk = track.chunks[c];

                if (track == structuralTrack)
                {
                    foundEvents.Add(GetEventFromStructuralTrack(track, chunk, t, c));
                }
                else
                {
                    EmotionEvent startEvent = GetEventFromChunkStart(track, chunk, t, c);
                    EmotionEvent endEvent   = GetEventFromChunkEnd(track, chunk, t, c);

                    foundEvents.Add(startEvent);
                    foundEvents.Add(endEvent);
                }
            }
        }

        events.AddRange(foundEvents);

        foreach (EmotionEvent e in events)
        {
            if (float.IsNaN(e.intensity))
            {
                Debug.LogError("WHAAT" + e);
            }
        }
    }
Example #13
    protected ProceduralCameraStrategy BuildCameraStrategy(InterestPoint point, EmotionEvent e, float shotDuration)
    {
        List <KeyValuePair <ProceduralCameraStrategy, float> > strategies = new List <KeyValuePair <ProceduralCameraStrategy, float> >();

        float normalizedEnergy = ProceduralEngine.Instance.EmotionEngine.GetSmoothEnergy(e.timestamp) / ProceduralEngine.Instance.EmotionEngine.MaxEnergy;

        float overviewWeight = 0.2f;
        float dollyWeight    = 2f + (1f - normalizedEnergy);
        float orbitWeight    = 1f + normalizedEnergy;

        TrackChunkData structureChunk = ProceduralEngine.Instance.EmotionEngine.GetCurrentStructureData(e.timestamp);
        StructureType  structure      = ProceduralEngine.Instance.EmotionEngine.GetStructureAtTime(e.timestamp);

        switch (structure)
        {
        case StructureType.None:
            break;

        case StructureType.Sustain:
            break;

        case StructureType.Increasing:
            overviewWeight += (1f - structureChunk.GetIntensity(e.timestamp)) * 200f;
            break;

        case StructureType.Decreasing:
            overviewWeight += (1f - structureChunk.GetIntensity(e.timestamp)) * 200f;
            break;
        }

        strategies.Add(new KeyValuePair <ProceduralCameraStrategy, float>(new OverviewCameraStrategy(), overviewWeight));
        strategies.Add(new KeyValuePair <ProceduralCameraStrategy, float>(new DollyCameraStrategy(), dollyWeight));
        strategies.Add(new KeyValuePair <ProceduralCameraStrategy, float>(new OrbitCameraStrategy(), orbitWeight));

        return(ProceduralEngine.SelectRandomWeighted(strategies, x => x.Value).Key);
    }
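SelectRandomWeighted is not shown in this excerpt. A minimal sketch of the weighted pick it presumably performs (an assumption about its behavior, not the project's actual implementation):

    // Assumed helper: pick an element with probability proportional to its weight.
    public static T SelectRandomWeightedSketch<T>(System.Collections.Generic.List<T> items, System.Func<T, float> weight)
    {
        float total = 0f;
        foreach (T item in items)
            total += weight(item);

        float r = UnityEngine.Random.value * total;
        foreach (T item in items)
        {
            r -= weight(item);
            if (r <= 0f)
                return item;
        }
        return items[items.Count - 1]; // guard against floating-point rounding
    }

With the weights above, higher normalizedEnergy favors the orbit strategy, lower energy favors the dolly, and low structural intensity during increasing or decreasing sections heavily favors overview shots.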
Example #14
    public void LoadMSF(byte[] file)
    {
        noteList.Clear();
        tempoList.Clear();

        // using(var stream = new FileStream(fileAddres, FileMode.Open, FileAccess.Read))
        // using(var reader = new BinaryReader.ReadBytes(file)){

        var reader = file;

        var headerCH = new HeaderChunkData();

        // Chunk ID
        headerCH.chunkID = SubArraybyte(file, 4);


        // If this machine is little-endian, reverse the byte order
        if (BitConverter.IsLittleEndian)
        {
            // Header data length (fixed value of 6)
            var byteArray = SubArraybyte(file, 4);
            Array.Reverse(byteArray);
            headerCH.dataLength = BitConverter.ToInt32(byteArray, 0);
            // Format (2 bytes)
            byteArray = SubArraybyte(file, 2);
            Array.Reverse(byteArray);
            headerCH.format = BitConverter.ToInt16(byteArray, 0);
            // Track count (2 bytes)
            byteArray = SubArraybyte(file, 2);
            Array.Reverse(byteArray);
            headerCH.tracks = BitConverter.ToInt16(byteArray, 0);
            // Time base (2 bytes)
            byteArray = SubArraybyte(file, 2);
            Array.Reverse(byteArray);
            headerCH.division = BitConverter.ToInt16(byteArray, 0);
        }
        else
        {
            // Header data length (fixed value of 6)
            headerCH.dataLength = BitConverter.ToInt32(SubArraybyte(file, 4), 0);
            // Format (2 bytes)
            headerCH.format = BitConverter.ToInt16(SubArraybyte(file, 2), 0);
            // Track count (2 bytes)
            headerCH.tracks = BitConverter.ToInt16(SubArraybyte(file, 2), 0);
            // Time base (2 bytes)
            headerCH.division = BitConverter.ToInt16(SubArraybyte(file, 2), 0);
        }

        // Enter the track chunks
        var trackCH = new TrackChunkData[headerCH.tracks];

        // One iteration per track
        for (int i = 0; i < headerCH.tracks; i++)
        {
            // Chunk ID
            trackCH[i].chunkID = SubArraybyte(file, 4);

            // Convert if this machine is little-endian
            if (BitConverter.IsLittleEndian)
            {
                // Read the track's data length
                var byteArray = SubArraybyte(file, 4);
                Array.Reverse(byteArray);
                trackCH[i].dataLength = BitConverter.ToInt32(byteArray, 0);
            }
            else
            {
                trackCH[i].dataLength = BitConverter.ToInt32(SubArraybyte(file, 4), 0);
            }

            // Read the data section
            trackCH[i].data = SubArraybyte(file, trackCH[i].dataLength);

            // Pass on to track-data parsing
            TrackDataAnalys(trackCH[i].data, headerCH);
        }
    }
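SubArraybyte is not included in this excerpt. For the sequential reads above to work on a plain byte array, it has to keep an internal cursor; a plausible sketch (an assumption, not the original code):

    private int readOffset = 0;

    // Assumed helper: return the next 'length' bytes of 'source' and advance the
    // cursor, mimicking sequential BinaryReader.ReadBytes calls.
    private byte[] SubArraybyte(byte[] source, int length)
    {
        byte[] result = new byte[length];
        System.Array.Copy(source, readOffset, result, 0, length);
        readOffset += length;
        return result;
    }

Under that assumption, LoadMSF would also need to reset the cursor before parsing a new byte array.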
Example #15
    /// <summary>
    /// Loads a MIDI file and returns the information we need
    /// </summary>
    /// <returns>The loaded data on success, null on failure</returns>
    public MIDILoadData Load(string fileName)
    {
        var ret = new MIDILoadData();

        try
        {
            using (var stream = new FileStream(fileName, FileMode.Open, FileAccess.Read))
                using (var reader = new BinaryReader(stream))
                {
                    /* Enter the header chunk */
                    var headerChunk = new HeaderChunkData();

                    // Read the chunk ID
                    headerChunk.ChunkType = reader.ReadBytes(4);
                    // Is this really a header chunk?
                    if (
                        headerChunk.ChunkType[0] != 'M' ||
                        headerChunk.ChunkType[1] != 'T' ||
                        headerChunk.ChunkType[2] != 'h' ||
                        headerChunk.ChunkType[3] != 'd')
                    {
                        throw new FormatException("head chunk != MThd.");
                    }
                    // Convert if this machine is little-endian
                    if (BitConverter.IsLittleEndian)
                    {
                        // Header data length (fixed value of 6)
                        var byteArray = reader.ReadBytes(4);
                        Array.Reverse(byteArray);
                        headerChunk.DataLength = BitConverter.ToInt32(byteArray, 0);
                        // Format (2 bytes)
                        byteArray = reader.ReadBytes(2);
                        Array.Reverse(byteArray);
                        headerChunk.Format = BitConverter.ToInt16(byteArray, 0);
                        // Track count (2 bytes)
                        byteArray = reader.ReadBytes(2);
                        Array.Reverse(byteArray);
                        headerChunk.Tracks = BitConverter.ToInt16(byteArray, 0);
                        // Time base (2 bytes)
                        byteArray = reader.ReadBytes(2);
                        Array.Reverse(byteArray);
                        headerChunk.Division = BitConverter.ToInt16(byteArray, 0);
                    }
                    else
                    {
                        // Header data length (fixed value of 6)
                        headerChunk.DataLength = BitConverter.ToInt32(reader.ReadBytes(4), 0);
                        // Format (2 bytes)
                        headerChunk.Format = BitConverter.ToInt16(reader.ReadBytes(2), 0);
                        // Track count (2 bytes)
                        headerChunk.Tracks = BitConverter.ToInt16(reader.ReadBytes(2), 0);
                        // Time base (2 bytes)
                        headerChunk.Division = BitConverter.ToInt16(reader.ReadBytes(2), 0);
                    }

                    // Save the division (resolution) value
                    ret.Division = headerChunk.Division;

                    // Bail out if there are no tracks!
                    if (headerChunk.Tracks <= 0)
                    {
                        throw new Exception("no tracks exist.");
                    }

                    /* Enter the track chunks */
                    var trackChunks = new TrackChunkData[headerChunk.Tracks];

                    // Loop once per track
                    for (int i = 0; i < headerChunk.Tracks; i++)
                    {
                        // Read the chunk ID
                        trackChunks[i].ChunkType = reader.ReadBytes(4);
                        // Is this really a track chunk?
                        if (
                            trackChunks[i].ChunkType[0] != 'M' ||
                            trackChunks[i].ChunkType[1] != 'T' ||
                            trackChunks[i].ChunkType[2] != 'r' ||
                            trackChunks[i].ChunkType[3] != 'k')
                        {
                            throw new FormatException("track chunk != MTrk.");
                        }

                        // Convert if this machine is little-endian
                        if (BitConverter.IsLittleEndian)
                        {
                            // Read the track's data length
                            var byteArray = reader.ReadBytes(4);
                            Array.Reverse(byteArray);
                            trackChunks[i].DataLength = BitConverter.ToInt32(byteArray, 0);
                        }
                        else
                        {
                            trackChunks[i].DataLength = BitConverter.ToInt32(reader.ReadBytes(4), 0);
                        }

                        // Read the data section
                        trackChunks[i].Data = reader.ReadBytes(trackChunks[i].DataLength);

                        // Parse the data section
                        TrackDataAnalysis(
                            trackChunks[i].Data,
                            headerChunk.Division,
                            (n, s, b) =>
                        {
                            ret.NoteArray   = n;
                            ret.SoflanArray = s;
                            ret.BeatArray   = b;
                        },
                            () =>
                        {
                            throw new Exception("track data analysis failure.");
                        });
                    }
                }
        }
        catch (Exception e)
        {
            // Error message handling
            Debug.LogError("LoadMIDI Error: " + e);
            return(null);
        }

        return(ret);
    }
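A hedged usage sketch (the loader instance and file path are illustrative): callers should treat a null return as a failed load.

    MIDILoadData data = midiLoader.Load(Application.streamingAssetsPath + "/song.mid");

    if (data == null)
    {
        Debug.LogError("MIDI load failed.");
    }
    else
    {
        int division = data.Division; // time base from the header chunk
    }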
Example #16
    /// <summary>
    /// This method doesn't choose the specific cut, but it constrains
    /// the time window for searching interesting events. It mostly
    /// depends on the current emotion.
    /// </summary>
    public CutRange EvaluateCutRangeForEvent(EmotionEvent e)
    {
        CutRange        range = new CutRange();
        EmotionSpectrum emotionAtEventTime = emotionEngine.GetSpectrum(e.timestamp);
        CoreEmotion     coreEmotion        = EmotionEngine.FindMainEmotion(emotionAtEventTime);

        // In seconds
        switch (coreEmotion)
        {
        case CoreEmotion.Joy:
            range.minCutTime = ProceduralEngine.RandomRange(1f, 2f);
            range.maxCutTime = ProceduralEngine.RandomRange(7f, 8f);
            break;

        case CoreEmotion.Trust:
            range.minCutTime = ProceduralEngine.RandomRange(2f, 5f);
            range.maxCutTime = ProceduralEngine.RandomRange(7f, 10f);
            break;

        case CoreEmotion.Fear:
            range.minCutTime = ProceduralEngine.RandomRange(1f, 2f);
            range.maxCutTime = ProceduralEngine.RandomRange(4f, 6f);
            break;

        case CoreEmotion.Surprise:
            range.minCutTime = ProceduralEngine.RandomRange(1.5f, 2f);
            range.maxCutTime = ProceduralEngine.RandomRange(2f, 4f);
            break;

        case CoreEmotion.Sadness:
            range.minCutTime = ProceduralEngine.RandomRange(1f, 1.5f);
            range.maxCutTime = ProceduralEngine.RandomRange(2f, 4f);
            break;

        case CoreEmotion.Disgust:
            range.minCutTime = ProceduralEngine.RandomRange(1f, 2f);
            range.maxCutTime = ProceduralEngine.RandomRange(3f, 4f);
            break;

        case CoreEmotion.Anger:
            range.minCutTime = ProceduralEngine.RandomRange(.3f, 1f);
            range.maxCutTime = ProceduralEngine.RandomRange(1f, 3f);
            break;

        case CoreEmotion.Anticipation:
            range.minCutTime = ProceduralEngine.RandomRange(2f, 4f);
            range.maxCutTime = ProceduralEngine.RandomRange(4f, 5f);
            break;
        }

        switch (e.type)
        {
        case EmotionEvent.EmotionEventType.Start:
            // Longer cuts when showing for first time
            range.minCutTime *= e.chunkDelimitsSegment ? 1f : .75f;
            range.maxCutTime *= e.chunkDelimitsSegment ? 1f : .75f;
            break;

        case EmotionEvent.EmotionEventType.End:
            // Longer cuts when something disappears for good
            range.minCutTime *= e.chunkDelimitsSegment ? 1.5f : 1f;
            range.maxCutTime *= e.chunkDelimitsSegment ? 1.5f : 1f;
            break;

        case EmotionEvent.EmotionEventType.LocalMaximum:
            range.minCutTime *= 1f;
            range.maxCutTime *= 1f;
            break;

        case EmotionEvent.EmotionEventType.LocalMinimum:
            range.minCutTime *= 2f;
            range.maxCutTime *= 2f;
            break;
        }

        TrackChunkData structureData = emotionEngine.GetCurrentStructureData(e.timestamp);

        if (structureData != null)
        {
            // More intense -> shorter
            float normalizedStructuralIntensity = Mathf.Pow(structureData.GetIntensity(e.timestamp), 2f);
            range.minCutTime *= 1.35f - normalizedStructuralIntensity * .5f;
            range.maxCutTime *= 1.35f - normalizedStructuralIntensity * .5f;

            // TODO: decide if we need further modifications of cut time based on type.
            // Intensity curve should cover most I think
            StructureType currentStructure = emotionEngine.GetStructureAtTime(e.timestamp);

            switch (currentStructure)
            {
            case StructureType.None:
                break;

            case StructureType.Sustain:
                break;

            case StructureType.Increasing:
                break;

            case StructureType.Decreasing:
                break;
            }
        }

        range.minCutTime = Mathf.Max(0.01f, range.minCutTime);
        range.maxCutTime = Mathf.Max(0.02f, range.maxCutTime);

        float tmp = range.minCutTime;

        range.minCutTime = Mathf.Min(range.minCutTime, range.maxCutTime);
        range.maxCutTime = Mathf.Max(tmp, range.maxCutTime);

        // Normalize times
        range.minCutTime /= ProceduralEngine.Instance.Duration;
        range.maxCutTime /= ProceduralEngine.Instance.Duration;
        return(range);
    }
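For example, ignoring the event-type and structure scaling, a Joy event that rolled minCutTime = 1.5 s and maxCutTime = 7.5 s inside a 120 s piece would return a normalized range of roughly [0.0125, 0.0625].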
Example #17
 public override void InitializeFromSerializedData(UITimeline timeline, AbstractDataTrack <EmotionData> track, Rect container, TrackChunkData chunk)
 {
     base.InitializeFromSerializedData(timeline, track, container, chunk);
     this.Data = chunk.startData;
 }
Example #18
    public virtual void InitializeFromSerializedData(UITimeline timeline, AbstractDataTrack <T> track, Rect container, TrackChunkData chunk)
    {
        this.track                 = track;
        this.timeline              = timeline;
        this.container             = container;
        this.Position              = chunk.start;
        this.Width                 = chunk.end - chunk.start;
        this.Snap                  = true; // For now...
        this.HarmonySequenceNumber = chunk.harmonySequenceNumber;
        this.IsVariation           = chunk.isVariation;
        this.IntensityCurveType    = chunk.intensityCurve;
        UpdatePosition();

        UpdateChunkName(track.TrackName);
        chunkBackground.color = track.TrackColor;
    }
Example #19
    public void LoadMSF(string fileAddres)
    {
        noteList.Clear();
        tempoList.Clear();

        using (var stream = new FileStream(fileAddres, FileMode.Open, FileAccess.Read))
            using (var reader = new BinaryReader(stream)){
                var headerCH = new HeaderChunkData();

                // Chunk ID
                headerCH.chunkID = reader.ReadBytes(4);


                // If this machine is little-endian, reverse the byte order
                if (BitConverter.IsLittleEndian)
                {
                    // Header data length
                    var byteArray = reader.ReadBytes(4);
                    Array.Reverse(byteArray);
                    headerCH.dataLength = BitConverter.ToInt32(byteArray, 0);
                    // Format (2 bytes)
                    byteArray = reader.ReadBytes(2);
                    Array.Reverse(byteArray);
                    headerCH.format = BitConverter.ToInt16(byteArray, 0);
                    // Track count (2 bytes)
                    byteArray = reader.ReadBytes(2);
                    Array.Reverse(byteArray);
                    headerCH.tracks = BitConverter.ToInt16(byteArray, 0);
                    // Time base (2 bytes)
                    byteArray = reader.ReadBytes(2);
                    Array.Reverse(byteArray);
                    headerCH.division = BitConverter.ToInt16(byteArray, 0);
                }
                else
                {
                    // Header data length
                    headerCH.dataLength = BitConverter.ToInt32(reader.ReadBytes(4), 0);
                    // Format (2 bytes)
                    headerCH.format = BitConverter.ToInt16(reader.ReadBytes(2), 0);
                    // Track count (2 bytes)
                    headerCH.tracks = BitConverter.ToInt16(reader.ReadBytes(2), 0);
                    // Time base (2 bytes)
                    headerCH.division = BitConverter.ToInt16(reader.ReadBytes(2), 0);
                }

                // Track chunks
                var trackCH = new TrackChunkData[headerCH.tracks];

                // One iteration per track
                for (int i = 0; i < headerCH.tracks; i++)
                {
                    // Chunk ID
                    trackCH[i].chunkID = reader.ReadBytes(4);

                    // Convert if this machine is little-endian
                    if (BitConverter.IsLittleEndian)
                    {
                        // Read the track's data length
                        var byteArray = reader.ReadBytes(4);
                        Array.Reverse(byteArray);
                        trackCH[i].dataLength = BitConverter.ToInt32(byteArray, 0);
                    }
                    else
                    {
                        trackCH[i].dataLength = BitConverter.ToInt32(reader.ReadBytes(4), 0);
                    }

                    // Read the data section
                    trackCH[i].data = reader.ReadBytes(trackCH[i].dataLength);

                    // Pass on to track-data parsing
                    TrackDataAnalys(trackCH[i].data, headerCH);
                }
            }
    }