/* write in order:
 *   XM HEADER (Song properties)
 *   PATTERNS (HEADER, DATA)
 *   INSTRUMENTS (HEADER, SAMPLE HEADER, SAMPLE DATA)
 */
public byte[] Convert(SongData songData)
{
    MemoryStream songDataStream = new MemoryStream();

    int ticksPerRow = Settings.TicksRow;
    xmUtils = new XMUtils(songData, ticksPerRow);

    byte[] xmHeader = this.GetXMHeaderData(songData, ticksPerRow);
    byte[] instruments = this.GetAllInstrumentsData(songData.Instruments);
    byte[] patterns = this.GetAllPatternsData(songData.Patterns, songData.Instruments,
                                              songData.NumChannels, songData.NumMasterTracksColumns);

    songDataStream.Write(xmHeader, 0, xmHeader.Length);
    songDataStream.Write(patterns, 0, patterns.Length);
    songDataStream.Write(instruments, 0, instruments.Length);

    return songDataStream.ToArray();
}
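// --------------------------------------------------------------------------
// Illustrative sketch only, not part of the original converter: the standard
// 336-byte XM file header that GetXMHeaderData (not shown in this section) is
// expected to produce. Offsets follow the public XM format description; the
// method name and its parameters are assumptions made for this example, and a
// little-endian host is assumed (the real code uses the Utility helpers instead).
// --------------------------------------------------------------------------
private static byte[] BuildXMHeaderSketch(string songName, int songLength, int restartPosition,
                                          int numChannels, int numPatterns, int numInstruments,
                                          int defaultTempo, int defaultBPM, byte[] patternOrderTable)
{
    const int headerSize = 20 + 256;           // size counted from offset 60: 20 fixed bytes + 256-byte order table
    byte[] header = new byte[60 + headerSize]; // 336 bytes in total

    System.Text.Encoding.ASCII.GetBytes("Extended Module: ").CopyTo(header, 0);                     // ID text, 17 bytes
    System.Text.Encoding.ASCII.GetBytes(songName.PadRight(20).Substring(0, 20)).CopyTo(header, 17); // module name, 20 bytes
    header[37] = 0x1A;                                                                              // escape byte
    System.Text.Encoding.ASCII.GetBytes("FastTracker v2.00   ").CopyTo(header, 38);                 // tracker name, 20 bytes
    header[58] = 0x04;                                                                              // version 1.04,
    header[59] = 0x01;                                                                              // little endian

    BitConverter.GetBytes(headerSize).CopyTo(header, 60);             // header size, 4 bytes
    BitConverter.GetBytes((short)songLength).CopyTo(header, 64);      // song length (pattern order entries)
    BitConverter.GetBytes((short)restartPosition).CopyTo(header, 66);
    BitConverter.GetBytes((short)numChannels).CopyTo(header, 68);
    BitConverter.GetBytes((short)numPatterns).CopyTo(header, 70);
    BitConverter.GetBytes((short)numInstruments).CopyTo(header, 72);
    BitConverter.GetBytes((short)1).CopyTo(header, 74);               // flags: bit 0 = linear frequency table (choice assumed here)
    BitConverter.GetBytes((short)defaultTempo).CopyTo(header, 76);    // default tempo (ticks per row)
    BitConverter.GetBytes((short)defaultBPM).CopyTo(header, 78);      // default BPM
    Array.Copy(patternOrderTable, 0, header, 80, Math.Min(patternOrderTable.Length, 256));

    return header;
}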
private byte[] GetInstrumentHeaderData(InstrumentData instrumentData)
{
    //const int instrumentSize = 29;
    const int instrumentSize = 0x107;
    const int instrumentNameLen = 22;
    const byte instrumentType = 0;

    int numberOfSamples = instrumentData.Samples.Length;
    byte[] instrumentHeaderStream = new byte[instrumentSize];
    int offset;

    // instrument size
    offset = 0;
    Utility.PutInt4InByteArray(instrumentSize, instrumentHeaderStream, offset);

    // instrument name
    offset = 4;
    Array.Copy(Utility.GetBytesFromString(instrumentData.Name, instrumentNameLen), 0,
               instrumentHeaderStream, offset, instrumentNameLen);

    // instrument type
    offset = 26;
    instrumentHeaderStream[offset] = instrumentType;

    // number of samples
    offset = 27;
    Utility.PutInt2InByteArray(numberOfSamples, instrumentHeaderStream, offset);

    // sample header size, written only when the instrument owns samples
    if (numberOfSamples > 0)
    {
        const int sampleHeaderSize = 0x28;
        offset = 29;
        Utility.PutInt4InByteArray(sampleHeaderSize, instrumentHeaderStream, offset);
    }

    // sample keymap (one sample number per note)
    offset = 33;
    for (int i = 0; i < 96; i++)
    {
        instrumentHeaderStream[offset++] = (byte)instrumentData.KeyMap[i];
    }

    const int maxEnvPoints = 12;

    // volume envelope points
    offset = 129;
    byte[] envVolumePoints = XMUtils.GetEnvelopePointsValue(instrumentData.EnvVolumePoints,
                                                            instrumentData.VolumeSustainPoint,
                                                            instrumentData.VolumeLoopStart,
                                                            instrumentData.VolumeLoopEnd,
                                                            instrumentData.VolumeSustainEnabled,
                                                            instrumentData.VolumeLoopEnabled);
    int totalEnvVolumePoints = envVolumePoints.Length / 4;
    if (totalEnvVolumePoints > maxEnvPoints)
    {
        totalEnvVolumePoints = maxEnvPoints;
    }
    Array.Copy(envVolumePoints, 0, instrumentHeaderStream, offset, totalEnvVolumePoints * 4);

    // panning envelope points
    offset = 177;
    byte[] envPanningPoints = XMUtils.GetEnvelopePointsValue(instrumentData.EnvPanningPoints,
                                                             instrumentData.PanningSustainPoint,
                                                             instrumentData.PanningLoopStart,
                                                             instrumentData.PanningLoopEnd,
                                                             instrumentData.PanningSustainEnabled,
                                                             instrumentData.PanningLoopEnabled);
    int totalEnvPanningPoints = envPanningPoints.Length / 4;
    if (totalEnvPanningPoints > maxEnvPoints)
    {
        totalEnvPanningPoints = maxEnvPoints;
    }
    Array.Copy(envPanningPoints, 0, instrumentHeaderStream, offset, totalEnvPanningPoints * 4);

    // number of volume / panning envelope points
    offset = 225;
    instrumentHeaderStream[offset++] = (byte)(totalEnvVolumePoints);
    instrumentHeaderStream[offset++] = (byte)(totalEnvPanningPoints);

    // envelope sustain / loop point numbers
    offset = 227;
    instrumentHeaderStream[offset++] = XMUtils.GetPointNumber(envVolumePoints, instrumentData.VolumeSustainPoint);
    instrumentHeaderStream[offset++] = XMUtils.GetPointNumber(envVolumePoints, instrumentData.VolumeLoopStart);
    instrumentHeaderStream[offset++] = XMUtils.GetPointNumber(envVolumePoints, instrumentData.VolumeLoopEnd);
    instrumentHeaderStream[offset++] = XMUtils.GetPointNumber(envPanningPoints, instrumentData.PanningSustainPoint);
    instrumentHeaderStream[offset++] = XMUtils.GetPointNumber(envPanningPoints, instrumentData.PanningLoopStart);
    instrumentHeaderStream[offset++] = XMUtils.GetPointNumber(envPanningPoints, instrumentData.PanningLoopEnd);

    // volume / panning envelope type flags (on, sustain, loop)
    offset = 233;
    instrumentHeaderStream[offset++] = XMUtils.GetVolumePanningType(instrumentData.VolumeEnabled,
                                                                    instrumentData.VolumeSustainEnabled,
                                                                    instrumentData.VolumeLoopEnabled);
    instrumentHeaderStream[offset++] = XMUtils.GetVolumePanningType(instrumentData.PanningEnabled,
                                                                    instrumentData.PanningSustainEnabled,
                                                                    instrumentData.PanningLoopEnabled);

    // volume fadeout
    offset = 239;
    Utility.PutInt2InByteArray(instrumentData.VolumeFadeOut, instrumentHeaderStream, offset);

    return instrumentHeaderStream;
}
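// --------------------------------------------------------------------------
// Illustrative sketch only: each envelope area filled above is 48 bytes and
// holds up to 12 points, each stored as two 16-bit words (X = frame, Y = value).
// XMUtils.GetEnvelopePointsValue is assumed to emit this layout (its output is
// consumed 4 bytes per point above); the helper below just shows the packing
// for a hypothetical list of frame/value pairs.
// --------------------------------------------------------------------------
private static byte[] PackEnvelopePointsSketch(int[] frames, int[] values)
{
    const int maxPoints = 12;
    int count = Math.Min(Math.Min(frames.Length, values.Length), maxPoints);
    byte[] packed = new byte[maxPoints * 4];

    for (int i = 0; i < count; i++)
    {
        Utility.PutInt2InByteArray(frames[i], packed, i * 4);     // X: frame/tick position
        Utility.PutInt2InByteArray(values[i], packed, i * 4 + 2); // Y: envelope value
    }
    return packed;
}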
private byte[] GetPatternData(PatternData patternData, InstrumentData[] instruments, int numChannels, int numMasterTrackColumns)
{
    byte noteBit = 1;
    byte instrumentBit = 2;
    byte volumeColBit = 4;
    byte effectTypeBit = 8;
    byte effectParamBit = 16;
    byte emptyBit = 128;
    byte allValuesFilledBit = (byte)(noteBit + instrumentBit + volumeColBit + effectTypeBit + effectParamBit + emptyBit);

    MemoryStream patternDataStream = new MemoryStream();

    // A Carl Corcoran idea: track the last sample played on each channel
    System.Collections.Generic.Dictionary<int, SampleData?> playingSamplesMap =
        new System.Collections.Generic.Dictionary<int, SampleData?>();

    byte[] masterTrackCommand = new byte[2];
    bool isMasterTrackCommandUsed = false;

    // limit the number of MasterTrack columns to parse; any column beyond this value is ignored
    // NOTE: if parseOnlyGlobalVolumeFromMT is enabled, every effect other than Global Volume is ignored
    const int maxMasterTrackColumnToParse = 1;
    int numMasterTrackColumnsToParse = maxMasterTrackColumnToParse;

    // numMasterTrackColumns        -- count of MT columns in the module
    // numMasterTrackColumnsToParse -- count of MT columns to parse
    if (numMasterTrackColumnsToParse > numMasterTrackColumns)
    {
        numMasterTrackColumnsToParse = numMasterTrackColumns;
    }

    // parse only global volume from MasterTrack
    const bool parseOnlyGlobalVolumeFromMT = false;

    int currentMasterTrackIndex = 0;
    int masterTrackIndexLimitForCurrentRow = 0;

    for (int i = 0; i < patternData.TracksLineData.Length; i++)
    {
        xmUtils.ComputeTickPerRowForCurrentLine(patternData.TracksLineData, i, numChannels);

        int currentRow = i / numChannels;
        int currentChannel = i % numChannels + 1;

        if (currentChannel == 1)
        {
            if (isMasterTrackCommandUsed)
            {
                string errorMessage = string.Format("row {0}, channel {1}: {2}", currentRow, currentChannel, "Some MasterTrack commands were not used due to missing free effect command slots");
                OnReportProgress(new EventReportProgressArgs(errorMessage, MsgType.ERROR));
            }
            //currentMasterTrackColumnToParse = 0;
            isMasterTrackCommandUsed = false;
            currentMasterTrackIndex = currentRow * numMasterTrackColumns;
            masterTrackIndexLimitForCurrentRow = currentMasterTrackIndex + numMasterTrackColumnsToParse;
        }

        while (currentMasterTrackIndex < masterTrackIndexLimitForCurrentRow && !isMasterTrackCommandUsed)
        {
            MasterTrackLineData masterTrackLineData = patternData.MasterTrackLineData[currentMasterTrackIndex];
            if (masterTrackLineData.IsSet)
            {
                masterTrackCommand = xmUtils.GetCommandsFromMasterTrack(masterTrackLineData.EffectNumber, masterTrackLineData.EffectValue, parseOnlyGlobalVolumeFromMT);
                if (masterTrackCommand[0] + masterTrackCommand[1] > 0)
                {
                    isMasterTrackCommandUsed = true;
                    //break;
                }
            }
            //currentMasterTrackColumnToParse++;
            currentMasterTrackIndex++;
        }

        TrackLineData trackLineData = patternData.TracksLineData[i];
        if (trackLineData.IsSet || isMasterTrackCommandUsed)
        {
            byte compressionValue = emptyBit;
            byte xmNote = 0;
            byte xmInstrument = 0;
            byte xmVolume = 0;
            byte xmEffectNumber = 0;
            byte xmEffectValue = 0;
            bool isEffectCommandUsed = false;
            bool isVolumeCommandUsed = false;
            bool isPanningCommandUsed = false;

            if (trackLineData.Note != null)
            {
                try
                {
                    xmNote = XMUtils.GetXMNote(trackLineData.Note);
                    compressionValue = (byte)(compressionValue + noteBit);
                }
                catch (ConversionException e)
                {
                    string errorMessage = string.Format("row {0}, channel {1}: {2}", currentRow, currentChannel, e.Message);
                    OnReportProgress(new EventReportProgressArgs(errorMessage, MsgType.ERROR));
                }
            }

            if (trackLineData.Instrument != null)
            {
                compressionValue = (byte)(compressionValue + instrumentBit);
                xmInstrument = (byte)(Int16.Parse(trackLineData.Instrument, System.Globalization.NumberStyles.HexNumber) + 1);
                if (xmNote != 0)
                {
                    // figure out which sample will play for this note
                    int xmSample = xmUtils.GetPlayedSampleFromKeymap(xmNote, xmInstrument);
                    if (instruments[xmInstrument - 1].Samples.Length > xmSample)
                    {
                        playingSamplesMap[currentChannel] = instruments[xmInstrument - 1].Samples[xmSample];
                    }
                }
            }

            // the currently playing sample in the channel
            SampleData? currentlyPlayingSample = null;
            if (playingSamplesMap.ContainsKey(currentChannel))
            {
                currentlyPlayingSample = playingSamplesMap[currentChannel];
            }

            int sampleDefaultVolume = currentlyPlayingSample != null ? currentlyPlayingSample.Value.DefaultVolume : maxSampleVolume;
            float sampleVolume = 1.0f;
            if (currentlyPlayingSample != null)
            {
                sampleVolume = currentlyPlayingSample.Value.Volume;
            }

            if (trackLineData.EffectNumber != null)
            {
                try
                {
                    byte[] values = xmUtils.GetXMEffect(trackLineData.EffectNumber, trackLineData.EffectValue, xmNote, xmInstrument);
                    if ((values[0] + values[1]) > 0)
                    {
                        isEffectCommandUsed = true;
                        xmEffectNumber = values[0];
                        xmEffectValue = values[1];
                    }
                }
                catch (ConversionException e)
                {
                    string errorMessage = string.Format("row {0}, channel {1}: {2}", currentRow, currentChannel, e.Message);
                    OnReportProgress(new EventReportProgressArgs(errorMessage, MsgType.ERROR));
                }
            }

            // volume column (the volume column takes priority over panning)
            if (trackLineData.Volume != null)
            {
                xmVolume = xmUtils.GetVolumeColumnEffectFromVolume(trackLineData.Volume);
                isVolumeCommandUsed = xmVolume > 0;
                if (isVolumeCommandUsed == false && isEffectCommandUsed == false)
                {
                    // transpose any parseable command from the volume column to the effect columns
                    // G|U|D|I|O|B|Q|R|Y|C
                    byte[] values = xmUtils.TransposeVolumeToCommandEffect(trackLineData.Volume);
                    if ((values[0] + values[1]) > 0)
                    {
                        isEffectCommandUsed = true;
                        xmEffectNumber = values[0];
                        xmEffectValue = values[1];
                    }
                }
            }

            // only with VOLUME_SCALING_MODE = COLUMN
            if (Settings.VolumeScalingMode == VOLUME_SCALING_MODE.COLUMN)
            {
                bool sampleNeedsVolumeScaling = currentlyPlayingSample != null && currentlyPlayingSample.Value.Volume != 1.0;
                bool doesTriggerSample = trackLineData.Note != null && trackLineData.Instrument != null;

                // if a volume column command is used, scale it
                if (sampleNeedsVolumeScaling && isVolumeCommandUsed && XMExtras.IsVolumeSetOnVolumeColumn(xmVolume))
                {
                    try
                    {
                        xmVolume = XMExtras.ScaleVolumeFromVolumeCommand(xmVolume, sampleVolume);
                    }
                    catch (ConversionException e)
                    {
                        string errorMessage = string.Format("row {0}, channel {1}: {2}", currentRow, currentChannel, e.Message);
                        OnReportProgress(new EventReportProgressArgs(errorMessage, MsgType.ERROR));
                    }
                    sampleNeedsVolumeScaling = false;
                }

                // if an effect command is used, scale it
                if (sampleNeedsVolumeScaling && isEffectCommandUsed && XMExtras.IsVolumeSetOnEffectColumn(xmEffectNumber))
                {
                    try
                    {
                        xmEffectValue = XMExtras.ScaleVolumeFromEffectCommand(xmEffectValue, sampleVolume);
                    }
                    catch (ConversionException e)
                    {
                        string errorMessage = string.Format("row {0}, channel {1}: {2}", currentRow, currentChannel, e.Message);
                        OnReportProgress(new EventReportProgressArgs(errorMessage, MsgType.ERROR));
                    }
                    sampleNeedsVolumeScaling = false;
                }

                // if a sample is triggered and still needs volume scaling, check for any free slot
                if (sampleNeedsVolumeScaling && doesTriggerSample)
                {
                    // the real sample volume is relative to the default volume
                    sampleVolume *= (float)currentlyPlayingSample.Value.DefaultVolume / (float)maxSampleVolume;

                    // try to fill the volume column first
                    if (isVolumeCommandUsed == false)
                    {
                        try
                        {
                            xmVolume = XMExtras.ScaleVolumeFromVolumeCommand(sampleVolume);
                            isVolumeCommandUsed = true;
                        }
                        catch (ConversionException e)
                        {
                            string errorMessage = string.Format("row {0}, channel {1}: {2}", currentRow, currentChannel, e.Message);
                            OnReportProgress(new EventReportProgressArgs(errorMessage, MsgType.ERROR));
                        }
                        sampleNeedsVolumeScaling = false;
                    }

                    // then try to fill the effect column
                    if (sampleNeedsVolumeScaling && isEffectCommandUsed == false)
                    {
                        byte[] values = new byte[2];
                        // transpose any parseable command from the volume column to the effect columns
                        // G|U|D|J|K|Q|B|R|Y|C
                        try
                        {
                            values = XMExtras.ScaleVolumeFromEffectCommand(sampleVolume);
                        }
                        catch (Exception e)
                        {
                            string errorMessage = string.Format("row {0}, channel {1}: {2}", currentRow, currentChannel, e.Message);
                            OnReportProgress(new EventReportProgressArgs(errorMessage, MsgType.ERROR));
                        }
                        if ((values[0] + values[1]) > 0)
                        {
                            isEffectCommandUsed = true;
                            xmEffectNumber = values[0];
                            xmEffectValue = values[1];
                        }
                        sampleNeedsVolumeScaling = false;
                    }
                }

                // if the sample still needs scaling, a conversion error is logged
                if (sampleNeedsVolumeScaling)
                {
                    // no free slot available
                    string errorMessage = string.Format("row {0}, channel {1}: {2}", currentRow, currentChannel, "Cannot apply scaled volume for this channel due to missing free slots");
                    OnReportProgress(new EventReportProgressArgs(errorMessage, MsgType.ERROR));
                }
            }

            // delay column
            if (trackLineData.Delay != null)
            {
                if (isEffectCommandUsed == false)
                {
                    byte[] values = xmUtils.TransposeDelayToCommandEffect(trackLineData.Delay);
                    isEffectCommandUsed = true;
                    xmEffectNumber = values[0];
                    xmEffectValue = values[1];
                }
                else
                {
                    // no free slot available; a conversion error is logged
                    string errorMessage = string.Format("row {0}, channel {1}: {2}", currentRow, currentChannel, "Cannot apply delay for this channel due to missing free slots");
                    OnReportProgress(new EventReportProgressArgs(errorMessage, MsgType.ERROR));
                }
            }

            // panning column
            if (trackLineData.Panning != null)
            {
                if (isVolumeCommandUsed == false)
                {
                    xmVolume = xmUtils.GetVolumeColumnEffectFromPanning(trackLineData.Panning);
                    isPanningCommandUsed = xmVolume > 0;
                }
                if (isPanningCommandUsed == false && isEffectCommandUsed == false)
                {
                    byte[] values = xmUtils.TransposePanningToCommandEffect(trackLineData.Panning);
                    if ((values[0] + values[1]) > 0)
                    {
                        isEffectCommandUsed = true;
                        xmEffectNumber = values[0];
                        xmEffectValue = values[1];
                    }
                }
            }

            // apply global (MasterTrack) commands to the current effect column
            if (isMasterTrackCommandUsed && !isEffectCommandUsed)
            {
                isEffectCommandUsed = true;
                xmEffectNumber = masterTrackCommand[0];
                xmEffectValue = masterTrackCommand[1];
                //currentMasterTrackColumnToParse++;
                isMasterTrackCommandUsed = false;
            }

            // xm volume column binary switch
            if (isPanningCommandUsed || isVolumeCommandUsed)
            {
                compressionValue = (byte)(compressionValue + volumeColBit);
            }
            if (xmEffectNumber > 0)
            {
                compressionValue = (byte)(compressionValue + effectTypeBit);
            }
            if (xmEffectValue > 0)
            {
                compressionValue = (byte)(compressionValue + effectParamBit);
            }

            // This might require a little explanation.
            // A row/track cell that is not completely filled with note, instrument, volume column,
            // effect type and effect value is packaged this way:
            // the first byte describes which values are present, and the values follow from the second byte on.
            // An empty row/track cell is the single byte 128.
            // The field order, starting from the least significant bit, is:
            //
            //   bit 0: note
            //   bit 1: instrument
            //   bit 2: volume column
            //   bit 3: effect type
            //   bit 4: effect value
            //
            // So, for example, a value of 26 (binary 11010) means instrument, effect type and effect value
            // are filled, and the first byte will be 154 (128 + 26).
            // See the XM specs for a better idea.

            // write the package byte only if not all values are set
            if (compressionValue != allValuesFilledBit)
            {
                patternDataStream.WriteByte(compressionValue);
            }

            // check every bit of the package byte to know which values have to be stored
            if (((compressionValue & 0x1)) > 0)
            {
                patternDataStream.WriteByte(xmNote);
            }
            if (((compressionValue & 0x3) >> 1) > 0)
            {
                patternDataStream.WriteByte(xmInstrument);
            }
            if (((compressionValue & 0x7) >> 2) > 0)
            {
                patternDataStream.WriteByte(xmVolume);
            }
            if (((compressionValue & 0xf) >> 3) > 0)
            {
                patternDataStream.WriteByte(xmEffectNumber);
            }
            if (((compressionValue & 0x1f) >> 4) > 0)
            {
                patternDataStream.WriteByte(xmEffectValue);
            }
        }
        else
        {
            patternDataStream.WriteByte(emptyBit);
        }
    }

    return patternDataStream.ToArray();
}
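// --------------------------------------------------------------------------
// Illustrative sketch only: the XM pattern packing scheme used above, isolated
// for a single cell. A field value of 0 means "not set". For example, a cell
// with only instrument, effect type and effect value set produces the pack
// byte 154 (128 + 26) followed by those three bytes.
// --------------------------------------------------------------------------
private static void WritePackedCellSketch(MemoryStream stream, byte note, byte instrument,
                                          byte volume, byte effectType, byte effectValue)
{
    byte pack = 0x80; // bit 7 marks a packed cell
    if (note != 0)        { pack |= 0x01; }
    if (instrument != 0)  { pack |= 0x02; }
    if (volume != 0)      { pack |= 0x04; }
    if (effectType != 0)  { pack |= 0x08; }
    if (effectValue != 0) { pack |= 0x10; }

    if (pack != 0x9F) // write the pack byte only when not every field is present
    {
        stream.WriteByte(pack);
    }

    if ((pack & 0x01) != 0) { stream.WriteByte(note); }
    if ((pack & 0x02) != 0) { stream.WriteByte(instrument); }
    if ((pack & 0x04) != 0) { stream.WriteByte(volume); }
    if ((pack & 0x08) != 0) { stream.WriteByte(effectType); }
    if ((pack & 0x10) != 0) { stream.WriteByte(effectValue); }
}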
private byte[] GetSampleHeaderData(SampleData sampleData, int baseNote, int fineTune, int sampleLen,
                                   byte bitsPerSample, int chans, int sampleRate)
{
    MemoryStream stream = new MemoryStream();
    BinaryWriter writer = new BinaryWriter(stream);

    const byte deltaPackedSample = 0;
    const int maxNameLen = 22;
    bool isStereo = chans > 1;

    //int fineTune = 0;
    //int relNoteNumber = 0;
    //ModCommons.GetRelNoteAndFTuneProperties(sampleData.RelNoteNumber, sampleData.FineTune, sampleRate, out relNoteNumber, out fineTune);

    int offset;

    // sample length
    offset = 0;
    writer.Seek(offset, SeekOrigin.Begin);
    writer.Write(Utility.MakeByte4FromInt(sampleLen));

    // loop start
    offset = 4;
    writer.Seek(offset, SeekOrigin.Begin);
    writer.Write(Utility.MakeByte4FromInt(XMUtils.GetSampleLoopValue(sampleData.LoopStart, bitsPerSample, isStereo)));

    // loop length (the stored value is Loop End relative to LoopStart)
    offset = 8;
    writer.Seek(offset, SeekOrigin.Begin);
    writer.Write(Utility.MakeByte4FromInt(XMUtils.GetSampleLoopValue((sampleData.LoopEnd - sampleData.LoopStart), bitsPerSample, isStereo)));

    // default volume
    offset = 12;
    writer.Seek(offset, SeekOrigin.Begin);
    writer.Write((byte)sampleData.DefaultVolume);

    // finetune
    offset = 13;
    writer.Seek(offset, SeekOrigin.Begin);
    writer.Write((sbyte)fineTune);

    // sample type (loop mode, bit depth, stereo flag)
    offset = 14;
    writer.Seek(offset, SeekOrigin.Begin);
    byte loopMode = XMUtils.GetSampleLoopMode(sampleData.LoopMode);
    byte type = (byte)(loopMode + (bitsPerSample) + (isStereo ? 0x20 : 0));
    writer.Write(type);

    // panning
    offset = 15;
    writer.Seek(offset, SeekOrigin.Begin);
    writer.Write(XMUtils.GetPanning(sampleData.Panning));

    // relative note number
    offset = 16;
    writer.Seek(offset, SeekOrigin.Begin);
    writer.Write((sbyte)baseNote);
    //writer.Write((sbyte)XMUtil.GetRelNoteNumber(sampleData.RelNoteNumber, sampleRate, relNoteNumber, fineTune));

    // packing type
    offset = 17;
    writer.Seek(offset, SeekOrigin.Begin);
    writer.Write(deltaPackedSample);

    // sample name
    offset = 18;
    writer.Seek(offset, SeekOrigin.Begin);
    writer.Write(Utility.GetBytesFromString(sampleData.Name, maxNameLen));

    writer.Seek(0, SeekOrigin.Begin);
    return Utility.GetBytesFromStream(stream, stream.Length);
}
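// --------------------------------------------------------------------------
// Illustrative sketch only: the XM format stores sample data as deltas from the
// previous sample value (the SAMPLE DATA writer mentioned in Convert() is not
// shown in this section). For 8-bit signed PCM the encoding looks like the
// helper below; 16-bit data is delta-encoded the same way on 16-bit words.
// --------------------------------------------------------------------------
private static byte[] DeltaEncode8BitSketch(sbyte[] pcm)
{
    byte[] packed = new byte[pcm.Length];
    sbyte previous = 0;

    for (int i = 0; i < pcm.Length; i++)
    {
        packed[i] = unchecked((byte)(pcm[i] - previous)); // store the difference, wrapping on overflow
        previous = pcm[i];
    }
    return packed;
}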