/// <summary>
/// Creates a BASS decoding stream from the sample data held in <paramref name="input"/>.
/// </summary>
/// <param name="input">Sample stream plus its audio format.</param>
/// <returns>The BASS stream handle.</returns>
public static int GetBassStream(SampleStreamInfo input)
{
    int handle;
    Stream stream = input.Stream;
    stream.Seek(0, System.IO.SeekOrigin.Begin);
    byte[] buffer = Utility.GetBytesFromStream(stream, stream.Length);
    long length = buffer.Length;

    // now create a pinned handle, so that the Garbage Collector will not move this object
    // while BASS reads from it
    GCHandle _hGCFile = GCHandle.Alloc(buffer, GCHandleType.Pinned);

    Func<IntPtr, long, long, BASSFlag, int> actionStreamCreateFile;

    switch (input.Format)
    {
        case FORMAT.WAV:
        case FORMAT.AIFF:
        case FORMAT.MP3:
        case FORMAT.OGG:
            actionStreamCreateFile = Bass.BASS_StreamCreateFile;
            break;
        case FORMAT.FLAC:
            actionStreamCreateFile = BassFlac.BASS_FLAC_StreamCreateFile;
            break;
        case FORMAT.AAC:
            //actionStreamCreateFile = BassAac.BASS_AAC_StreamCreateFile;
            throw new NotImplementedException("AAC extension not supported");
        default:
            throw new NotImplementedException("Sample extension is not supported");
    }

    handle = actionStreamCreateFile(_hGCFile.AddrOfPinnedObject(), 0L, length, BASSFlag.BASS_STREAM_DECODE);
    return handle;
}
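// Note (illustrative sketch, not part of the original code): GetBassStream pins the managed
// buffer but never frees the GCHandle, so the buffer stays pinned until the process exits.
// One possible cleanup pattern is to remember each handle/GCHandle pair and release both
// together once the BASS stream is no longer needed. The names below (pinnedBuffers,
// FreeBassStream) are hypothetical and assume GetBassStream records the pair before
// returning; requires System.Collections.Generic.
private static readonly Dictionary<int, GCHandle> pinnedBuffers = new Dictionary<int, GCHandle>();

public static void FreeBassStream(int handle)
{
    Bass.BASS_StreamFree(handle);   // release the BASS decoding stream

    GCHandle hGCFile;
    if (pinnedBuffers.TryGetValue(handle, out hGCFile))
    {
        hGCFile.Free();             // unpin the managed sample buffer
        pinnedBuffers.Remove(handle);
    }
}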
/// <summary>
/// Returns the sample stream together with its audio format info.
/// </summary>
/// <param name="instrumentIndex">Zero-based instrument index inside the XRNS file.</param>
/// <param name="sampleIndex">Zero-based sample index within the instrument.</param>
/// <returns>A SampleStreamInfo; its Format is FORMAT.NONE when no matching sample is found.</returns>
public SampleStreamInfo GetSampleStreamInfo(int instrumentIndex, int sampleIndex)
{
    SampleStreamInfo output = new SampleStreamInfo();
    Stream outputStream = null;

    string captureSampleRegExpr = String.Format(@"SampleData/Instrument{0}.*/Sample{1}.*\.(wav|aiff?|ogg|flac|mp3|aac)$",
        instrumentIndex.ToString("00", CultureInfo.InvariantCulture),
        sampleIndex.ToString("00", CultureInfo.InvariantCulture));

    Regex regPattern = new Regex(captureSampleRegExpr, RegexOptions.IgnoreCase);

    ZipFile zipFile = null;

    try
    {
        zipFile = new ZipFile(filename);

        // look for the first zip entry whose path matches the instrument/sample pattern
        string sampleFilename = null;
        foreach (ZipEntry zip in zipFile)
        {
            Match matchInst = regPattern.Match(zip.Name);
            sampleFilename = matchInst.Value;
            if (matchInst.Success)
            {
                break;
            }
        }

        if (!String.IsNullOrEmpty(sampleFilename))
        {
            ZipEntry zipEntry = zipFile.GetEntry(sampleFilename);
            outputStream = new MemoryStream();

            using (StreamReader stream = new StreamReader(zipFile.GetInputStream(zipEntry)))
            {
                if (stream != null)
                {
                    stream.BaseStream.CopyTo(outputStream);
                    output.Stream = outputStream;

                    string extension = Path.GetExtension(sampleFilename).Substring(1);
                    if (extension.Equals("aif", StringComparison.InvariantCultureIgnoreCase))
                    {
                        extension = "aiff";
                    }
                    output.Format = (FORMAT)Enum.Parse(typeof(FORMAT), extension.ToUpper());
                }
            }
        }
        else
        {
            // Sample not found!
            System.Diagnostics.Debug.WriteLine("No sample caught!");
        }
    }
    catch (Exception)
    {
        throw;
    }
    finally
    {
        if (zipFile != null)
        {
            zipFile.Close();
        }
    }

    return output;
}
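// Illustrative usage (a minimal sketch; DumpSampleInfoExample is hypothetical and assumes
// the same xrnsManager field used by the conversion routines below). It shows the
// fetch -> decode-stream -> inspect -> free pattern those routines follow.
private void DumpSampleInfoExample(int instrumentIndex, int sampleIndex)
{
    SampleStreamInfo info = xrnsManager.GetSampleStreamInfo(instrumentIndex, sampleIndex);
    if (info.Format == FORMAT.NONE) // empty sample slot
    {
        return;
    }

    int handle = BassWrapper.GetBassStream(info);
    BASS_CHANNELINFO channelInfo = BassWrapper.GetBassChannelInfo(handle);
    long decodedLength = Bass.BASS_ChannelGetLength(handle);

    System.Diagnostics.Debug.WriteLine(String.Format("{0}: {1} Hz, {2} channel(s), {3} decoded bytes",
        info.Format, channelInfo.freq, channelInfo.chans, decodedLength));

    Bass.BASS_StreamFree(handle);
}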
/*
 * This takes all sample data (sample info + data):
 * the info part is stored from index 0 to 929,
 * the data starts from 930.
 */
private byte[] GetAllSamplesData(InstrumentData[] instruments)
{
    const int sampleInfoSize = 930;

    /*
     * Information about frequency is based on http://www.pouet.net/topic.php?which=8628
     * For a PAL machine:
     *   SampleRate = 7093789.2 / (Period * 2)
     *   C2 428 -> 8287.13691 Hz
     *   C3 214 -> 16574.2738 Hz
     * For a NTSC machine:
     *   SampleRate = 7159090 / (Period * 2)
     *   C2 428 -> 8363.423 Hz
     *   C3 214 -> 16726.846 Hz
     */
    const float PalFreq = 7093789.2f;
    const float NtscFreq = 7159090.0f;

    float SysFreq = Settings.NtscMode ? NtscFreq : PalFreq;

    int noteIndexMax = (Settings.ForceProTrackerCompatibility == PROTRACKER_COMPATIBILITY_MODE.A3MAX)
        ? ModUtils.NOTE_VALUE_A3
        : ModUtils.NOTE_VALUE_B3;

    const int maxSampleLengthMOD = 65536;

    int totalInstruments = instruments.Length;
    int offset;
    byte[] sampleInfo = new byte[sampleInfoSize];
    byte[] allSampleData;

    //XrnsReaderUtil xrnsReader = new XrnsReaderUtil(srcFileName);
    //XrnsManager xrnsManager = new XrnsManager();

    MemoryStream ms4SampleData = new MemoryStream();

    if (totalInstruments > maxInstruments)
    {
        totalInstruments = maxInstruments;
    }

    offset = 0;

    // initialize end loop values of all samples with value 1 (avoids crashes in ProTracker)
    for (int i = 0; i < sampleInfo.Length; i += 30)
    {
        sampleInfo[i + 29] = 1;
    }

    for (int ci = 0; ci < totalInstruments; ci++)
    {
        OnReportProgress(new EventReportProgressArgs(String.Format("Processing Sample {0}/{1} - {2}", (ci + 1), totalInstruments, instruments[ci].Name)));

        if (instruments[ci].Samples.Length > 1)
        {
            OnReportProgress(new EventReportProgressArgs(String.Format("More samples detected on instrument {0}", (ci + 1)), MsgType.ERROR));
        }

        byte[] sampleData = new byte[0];

        try
        {
            SampleStreamInfo originalSample = xrnsManager.GetSampleStreamInfo(ci, 0);
            //Stream originalSample = xrnsReader.GetInstrumentSample(ci, 0);

            // FORMAT.NONE means the sample slot is probably empty
            if (originalSample.Format != FORMAT.NONE)
            {
                int handle = BassWrapper.GetBassStream(originalSample);
                BASS_CHANNELINFO bassChannelInfo = BassWrapper.GetBassChannelInfo(handle);

                int origres = bassChannelInfo.origres;
                if (origres == 0) // some streams were reported to return an undefined resolution
                {
                    OnReportProgress(new EventReportProgressArgs("Sample bps detection failed, assuming 8 bits by default", MsgType.WARNING));
                    origres = 8;
                }

                long sampleLength = Bass.BASS_ChannelGetLength(handle);

                // the sample rate may be:
                // 1) the same as the original
                // 2) taken from the song settings
                int sampleRate;

                string freqFromIniStr = instruments[ci].Samples[0].SampleFreq;
                int freqFromIni = 0;
                int noteIndex = 0;
                int period = 0;

                if (freqFromIniStr == null) // the default case is C2 if no settings are provided
                {
                    noteIndex = ModUtils.NOTE_VALUE_C2;
                }
                else if (freqFromIniStr.Equals("Low"))
                {
                    noteIndex = ModUtils.NOTE_VALUE_C2;
                }
                else if (freqFromIniStr.Equals("High"))
                {
                    noteIndex = ModUtils.NOTE_VALUE_C3;
                }
                else if (freqFromIniStr.Equals("Maximum") || freqFromIniStr.Equals("Max"))
                {
                    noteIndex = noteIndexMax;
                }
                else if (freqFromIniStr.Equals("Original"))
                {
                    freqFromIni = bassChannelInfo.freq;
                }
                else
                {
                    if (freqFromIniStr.Length == 3)
                    {
                        period = modUtils.GetModNote(freqFromIniStr);
                    }
                    else
                    {
                        freqFromIni = int.Parse(freqFromIniStr);
                    }
                }

                if (freqFromIni > 0)
                {
                    sampleRate = freqFromIni;
                    OnReportProgress(new EventReportProgressArgs(String.Format("Sample {0} frequency manually adjusted to: {1} Hz", (ci + 1), sampleRate), MsgType.INFO));
                }
                else if (noteIndex > 0)
                {
                    sampleRate = (int)Math.Round(SysFreq / (ModUtils.PeriodsRange[noteIndex] * 2));
                    OnReportProgress(new EventReportProgressArgs(String.Format("Sample {0} frequency manually adjusted from {2} to: {1} Hz", (ci + 1), sampleRate, freqFromIniStr), MsgType.INFO));
                }
                else if (period > 0)
                {
                    sampleRate = (int)Math.Round(SysFreq / ((float)period * 2.0f));
                    OnReportProgress(new EventReportProgressArgs(String.Format("Sample {0} frequency manually adjusted from note value {2} to: {1} Hz", (ci + 1), sampleRate, freqFromIniStr), MsgType.INFO));
                }
                else
                {
                    noteIndex = ModUtils.NOTE_VALUE_C2;
                    sampleRate = (int)Math.Round(SysFreq / (ModUtils.PeriodsRange[noteIndex] * 2));
                    OnReportProgress(new EventReportProgressArgs(String.Format("Sample {0} frequency defaults to C2 frequency {1} Hz", (ci + 1), sampleRate), MsgType.INFO));
                }

                float ret = 0;
                Bass.BASS_ChannelGetAttribute(handle, BASSAttribute.BASS_ATTRIB_SRC, ref ret);
                //Console.WriteLine("BASS_ATTRIB_SRC " + ret);

                int sincPoints = instruments[ci].Samples[0].SincInterpolationPoints;
                if ((int)ret != sincPoints)
                {
                    OnReportProgress(new EventReportProgressArgs(String.Format("Altering number of Sinc Interpolation Points to {0}", sincPoints)));
                    Bass.BASS_ChannelSetAttribute(handle, BASSAttribute.BASS_ATTRIB_SRC, (float)sincPoints);
                    Bass.BASS_ChannelGetAttribute(handle, BASSAttribute.BASS_ATTRIB_SRC, ref ret);
                    //Console.WriteLine("BASS_ATTRIB_SRC_NEW " + ret);
                    if ((int)ret != sincPoints)
                    {
                        throw new ApplicationException("Failed to set number of Sinc Interpolation Points");
                    }
                }

                // MOD sample data is mono, 8 bit
                int mixer = BassWrapper.PlugChannelToMixer(handle, sampleRate, 1, 8);

                if (Settings.VolumeScalingMode == VOLUME_SCALING_MODE.SAMPLE && instruments[ci].Samples[0].Volume != 1.0f)
                {
                    OnReportProgress(new EventReportProgressArgs(String.Format("Ramping sample volume to value {0}", instruments[ci].Samples[0].Volume)));
                    BassWrapper.AdjustSampleVolume(handle, mixer, instruments[ci].Samples[0].Volume);
                }

                Stream stream = BassWrapper.GetModEncodedSample(mixer, sampleLength, Settings.ForceProTrackerCompatibility);

                Bass.BASS_StreamFree(handle);
                Bass.BASS_StreamFree(mixer);

                int originalChans = bassChannelInfo.chans;
                int originalBps = origres;

                modUtils.StoreSampleInfo(ci, (int)sampleLength, (int)stream.Length, sampleRate, originalChans, originalBps,
                    instruments[ci].Samples[0].RelNoteNumber, instruments[ci].Samples[0].FineTune, instruments[ci].Samples[0].Transpose);

                if (stream.Length > maxSampleLengthMOD)
                {
                    throw new ApplicationException(String.Format("Sample number {0} is too large: max size for mod is {1}. Current length is {2}",
                        (ci + 1), maxSampleLengthMOD, stream.Length));
                }

                // sample data is stored only if the sample doesn't exceed a length of 65536 bytes
                sampleData = Utility.GetBytesFromStream(stream, stream.Length);
                stream.Close();
            }
        }
        catch (Exception e)
        {
            OnReportProgress(new EventReportProgressArgs(e.Message, MsgType.ERROR));
        }

        if (instruments[ci].Samples.Length > 0)
        {
            // sample name, padded/truncated to 22 bytes
            Array.Copy(Utility.GetBytesFromString(instruments[ci].Name, 22), 0, sampleInfo, offset, 22);
            offset += 22;

            // sample length, stored as a word count in big endian
            Utility.PutInt2InByteArray((sampleData.Length / 2), true, sampleInfo, offset);
            offset += 2;

            // for any doubt just see in the mod specs how fineTune is stored
            sampleInfo[offset++] = (byte)(modUtils.GetSampleFineTune(ci) >> 4 & 0x0F);

            // default volume
            sampleInfo[offset++] = instruments[ci].Samples[0].DefaultVolume;

            //if (ModUtil.IsLoopSample(instruments[ci].Samples[0].LoopMode))
            if (sampleData.Length > 0 && instruments[ci].Samples[0].LoopMode.Equals("Off", StringComparison.OrdinalIgnoreCase) == false)
            {
                Utility.PutInt2InByteArray(modUtils.GetLoopValue(instruments[ci].Samples[0].LoopStart, ci), true, sampleInfo, offset);
                offset += 2;
                Utility.PutInt2InByteArray(modUtils.GetLoopValue(instruments[ci].Samples[0].LoopEnd - instruments[ci].Samples[0].LoopStart, ci), true, sampleInfo, offset);
                offset += 2;
            }
            else
            {
                offset += 4;
            }

            ms4SampleData.Write(sampleData, 0, sampleData.Length);
        }
        else
        {
            offset += 30;
        }
    }

    //xrnsReader.FreeResources();

    ms4SampleData.Seek(0, SeekOrigin.Begin);
    byte[] sampleChunkData = Utility.GetBytesFromStream(ms4SampleData, ms4SampleData.Length);

    allSampleData = new byte[sampleInfoSize + sampleChunkData.Length];
    Array.Copy(sampleInfo, 0, allSampleData, 0, sampleInfo.Length);
    Array.Copy(sampleChunkData, 0, allSampleData, sampleInfoSize, sampleChunkData.Length);

    return allSampleData;
}
/*
 * Builds the instruments block: for each instrument, the instrument header is written first,
 * then one header per sample, then the encoded sample data blocks in the same order.
 */
private byte[] GetAllInstrumentsData(InstrumentData[] instrumentsData)
{
    MemoryStream outputStream = new MemoryStream();
    //XrnsReaderUtil xrnsReader = new XrnsReaderUtil(srcFileName);

    for (int ci = 0; ci < instrumentsData.Length; ci++)
    {
        byte[] instrumentHeader = this.GetInstrumentHeaderData(instrumentsData[ci]);
        outputStream.Write(instrumentHeader, 0, instrumentHeader.Length);

        Stream[] encodedSample = new Stream[instrumentsData[ci].Samples.Length];

        for (int si = 0; si < instrumentsData[ci].Samples.Length; si++)
        {
            OnReportProgress(new EventReportProgressArgs(String.Format("Processing instrument {0}/{1}, sample {2}/{3} ", (ci + 1), instrumentsData.Length, (si + 1), instrumentsData[ci].Samples.Length)));

            byte[] sampleHeaderBuffer = new byte[0];
            byte bps = 8;
            int chans = 1;
            int sampleRate = 0;
            int sampleLength = 0;
            int baseNote = 0;
            int fineTune = 0;

            try
            {
                //Stream originalSample = xrnsManager.GetSampleStream(ci, si);
                SampleStreamInfo sampleStreamInfo = xrnsManager.GetSampleStreamInfo(ci, si);

                // FORMAT.NONE means the sample slot is probably empty
                if (sampleStreamInfo.Format != FORMAT.NONE)
                {
                    int handle = BassWrapper.GetBassStream(sampleStreamInfo);
                    BASS_CHANNELINFO bassChannelInfo = BassWrapper.GetBassChannelInfo(handle);

                    int origres = bassChannelInfo.origres;
                    if (origres == 0) // some streams were reported to return an undefined resolution
                    {
                        OnReportProgress(new EventReportProgressArgs("Sample bps detection failed, assuming 16 bits by default", MsgType.WARNING));
                        origres = 16;
                    }

                    long originalSampleLength = Bass.BASS_ChannelGetLength(handle);

                    int mixer = BassWrapper.PlugChannelToMixer(handle, bassChannelInfo.freq, bassChannelInfo.chans, origres);

                    if (Settings.VolumeScalingMode == VOLUME_SCALING_MODE.SAMPLE && instrumentsData[ci].Samples[si].Volume != 1.0f)
                    {
                        OnReportProgress(new EventReportProgressArgs(String.Format("Ramping sample volume to value {0}", instrumentsData[ci].Samples[si].Volume)));
                        BassWrapper.AdjustSampleVolume(handle, mixer, instrumentsData[ci].Samples[si].Volume);
                    }

                    Stream stream = BassWrapper.GetXMEncodedSample(mixer, originalSampleLength, bassChannelInfo.chans, origres);

                    Bass.BASS_StreamFree(handle);
                    Bass.BASS_StreamFree(mixer);

                    encodedSample[si] = stream;

                    bps = (byte)(origres > 8 ? 16 : 8);
                    chans = bassChannelInfo.chans;
                    sampleRate = bassChannelInfo.freq;
                    sampleLength = (int)encodedSample[si].Length;

                    xmUtils.StoreSampleInfo(ci, si, sampleLength, sampleRate, chans, bps,
                        instrumentsData[ci].Samples[si].RelNoteNumber, instrumentsData[ci].Samples[si].FineTune, instrumentsData[ci].Samples[si].Transpose);

                    baseNote = xmUtils.GetSampleBaseNote(ci, si);
                    fineTune = xmUtils.GetSampleFineTune(ci, si);
                }
            }
            catch (Exception e)
            {
                OnReportProgress(new EventReportProgressArgs(e.Message, MsgType.ERROR));
                throw;
            }

            sampleHeaderBuffer = GetSampleHeaderData(instrumentsData[ci].Samples[si], baseNote, fineTune, sampleLength, bps, chans, sampleRate);
            outputStream.Write(sampleHeaderBuffer, 0, sampleHeaderBuffer.Length);
        }

        // all sample headers have been written; now append the encoded sample data
        for (int si = 0; si < encodedSample.Length; si++)
        {
            if (encodedSample[si] != null)
            {
                encodedSample[si].Seek(0, SeekOrigin.Begin);
                byte[] encodedSampleBuffer = Utility.GetBytesFromStream(encodedSample[si], encodedSample[si].Length);
                outputStream.Write(encodedSampleBuffer, 0, encodedSampleBuffer.Length);
            }
        }
    }

    //xrnsReader.FreeResources();

    return outputStream.ToArray();
}
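// Illustrative caller (hypothetical; the real writer code sits outside this excerpt):
// the block returned by GetAllInstrumentsData is simply appended to the output stream
// after the XM header and pattern data have been written.
private void WriteInstrumentsBlockExample(Stream xmOutput, InstrumentData[] instrumentsData)
{
    byte[] instrumentsBlock = GetAllInstrumentsData(instrumentsData);
    xmOutput.Write(instrumentsBlock, 0, instrumentsBlock.Length);
}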