/// <summary>
/// Gets the length the audio should be adjusted to so that the same number of
/// repeats fits into the default loop length at the target BPM.
/// </summary>
/// <param name="length">The current audio length in seconds.</param>
/// <param name="sourceBpm">The source BPM.</param>
/// <param name="targetBpm">The target BPM.</param>
/// <returns>The adjusted audio length in seconds.</returns>
public static double GetAdjustedAudioLength(double length, decimal sourceBpm, decimal targetBpm)
{
    var sourceLoopLength = BpmHelper.GetDefaultLoopLength(sourceBpm);
    var repeatsPerLoop = Math.Round(sourceLoopLength / length, 0);

    // Bug fix: when the audio is more than twice the default loop length the
    // rounded repeat count is 0, and the original code divided by zero
    // (returning double.Infinity). Treat such audio as a single repeat.
    if (repeatsPerLoop < 1)
    {
        repeatsPerLoop = 1;
    }

    return GetDefaultLoopLength(targetBpm) / repeatsPerLoop;
}
/// <summary>
/// Saves a shortened version of the specified sample as a mono wave in the
/// Scratch folder and launches Scratch.
/// </summary>
/// <param name="sample">The sample.</param>
public static void LaunchShort(Sample sample)
{
    // Bug fix: the original check (== "") let a null application folder through.
    if (string.IsNullOrEmpty(_applicationFolder))
    {
        return;
    }

    SilenceHelper.GenerateSilenceAudioFile(GetSilenceFilename());
    SaveSample(sample);

    // Derive a BPM from the sample length and take an eighth of the default
    // loop at that BPM, capped at the sample's actual length.
    var bpm = BpmHelper.GetBpmFromLoopLength(sample.LengthSeconds);
    var loopLength = BpmHelper.GetDefaultLoopLength(bpm);
    var shortLength = Math.Min(loopLength / 8, sample.LengthSeconds);

    var gain = GetGain(sample.Filename);
    var scratchFilePath = Path.Combine(_applicationFolder, ScratchFile);
    AudioExportHelper.SaveAsMonoWave(sample.Filename, scratchFilePath, shortLength, gain);

    var scratchExePath = Path.Combine(_applicationFolder, ScratchExe);
    Process.Start(scratchExePath);
}
/// <summary>
/// Gets the audio stream tempo change expressed as a sample rate.
/// </summary>
/// <param name="track1">The track being faded out.</param>
/// <param name="track2">The track being faded into.</param>
/// <returns>The sample rate the first track needs to be changed to in order to match the second track.</returns>
private static float GetTrackTempoChangeAsSampleRate(Track track1, Track track2)
{
    if (track1 == null || track2 == null)
    {
        return DefaultSampleRate;
    }

    var tempoRatio = BpmHelper.GetTrackTempoChangeAsRatio(track1, track2);
    return track1.DefaultSampleRate * tempoRatio;
}
/// <summary>
/// Gets the track tempo change as a ratio (i.e. 1.02, 0.97 etc.).
/// </summary>
/// <param name="track1">The track being faded out.</param>
/// <param name="track2">The track being faded into.</param>
/// <returns>The ratio the first track needs to be multiplied by in order to match the second track.</returns>
public static float GetTrackTempoChangeAsRatio(Track track1, Track track2)
{
    if (track1 == null || track2 == null)
    {
        return 1f;
    }

    // Convert the percent change (e.g. +2%) into a multiplier (e.g. 1.02).
    var percentChange = (float)BpmHelper.GetAdjustedBpmPercentChange(track1.EndBpm, track2.StartBpm);
    return 1f + (percentChange / 100f);
}
/// <summary>
/// Sets the audio stream pitch to match another BPM.
/// </summary>
/// <param name="audioStream">The audio stream to change the pitch of.</param>
/// <param name="streamBpm">The stream BPM.</param>
/// <param name="matchBpm">The BPM to match.</param>
public static void SetPitchToMatchBpm(AudioStream audioStream, decimal streamBpm, decimal matchBpm)
{
    if (audioStream == null) return;
    if (audioStream.ChannelId == int.MinValue) return;

    var pitchPercentChange = (float)BpmHelper.GetAdjustedBpmPercentChange(streamBpm, matchBpm);

    Bass.BASS_ChannelSetAttribute(audioStream.ChannelId, BASSAttribute.BASS_ATTRIB_TEMPO_PITCH, pitchPercentChange);
    Thread.Sleep(1);
}
/// <summary>
/// Does an audio stream power down effect (a turntable-style spin-down:
/// frequency slides progressively lower while the volume fades out).
/// NOTE(review): despite the "Async" suffix this method blocks with
/// Thread.Sleep calls — presumably the caller runs it on a worker thread;
/// confirm at call sites.
/// </summary>
/// <param name="audioStream">The audio stream.</param>
private static void PowerDownAsync(AudioStream audioStream)
{
    if (audioStream == null || !audioStream.IsAudioLoaded())
    {
        return;
    }

    var freq = audioStream.DefaultSampleRate;
    // One 128th of the default loop length (in ms) is the base slide interval.
    var interval = (int)(BpmHelper.GetDefaultLoopLength(audioStream.Bpm) * 1000) / 128;

    // Start the volume slide down to silence over 8 base intervals.
    //lock (Lock)
    {
        Bass.BASS_ChannelSlideAttribute(audioStream.ChannelId, BASSAttribute.BASS_ATTRIB_VOL, 0F, interval * 8);
        Thread.Sleep(1);
    }

    // Repeatedly slide the frequency lower; each pass drops the target
    // frequency (percentValue / 1.2) and shortens the slide (interval * 0.9)
    // so the spin-down accelerates. Loop ends once freq falls to <= 100 Hz.
    var percentValue = 0.70;
    while (freq > 100)
    {
        percentValue = percentValue / 1.2;
        interval = (int)(interval * 0.9D);
        freq = (int)(audioStream.DefaultSampleRate * percentValue);
        // Skip the Bass call (and the wait) once the target is below the
        // floor or the channel has been unloaded; freq keeps shrinking, so
        // the loop still terminates.
        if (freq <= 100 || audioStream.ChannelId == int.MinValue)
        {
            continue;
        }
        //lock (Lock)
        {
            Bass.BASS_ChannelSlideAttribute(audioStream.ChannelId, BASSAttribute.BASS_ATTRIB_FREQ, freq, interval);
            Thread.Sleep(1);
        }
        // Let the slide play out before starting the next, lower slide.
        Thread.Sleep(interval);
    }

    Pause(audioStream);

    if (!audioStream.IsAudioLoaded())
    {
        return;
    }

    // Restore the channel to its normal frequency and full volume so it is
    // ready to be played again.
    //lock (Lock)
    {
        Bass.BASS_ChannelSetAttribute(audioStream.ChannelId, BASSAttribute.BASS_ATTRIB_FREQ, audioStream.DefaultSampleRate);
        Thread.Sleep(1);
    }
    SetVolume(audioStream, 100M);
}
/// <summary>
/// Sets the tempo of a BASS channel so that its BPM matches another BPM.
/// </summary>
/// <param name="channel">The BASS channel id.</param>
/// <param name="streamBpm">The channel's current BPM.</param>
/// <param name="matchBpm">The BPM to match.</param>
internal static void SetTempoToMatchBpm(int channel, decimal streamBpm, decimal matchBpm)
{
    if (channel == int.MinValue) return;

    var tempoPercentChange = (float)BpmHelper.GetAdjustedBpmPercentChange(streamBpm, matchBpm);

    Bass.BASS_ChannelSetAttribute(channel, BASSAttribute.BASS_ATTRIB_TEMPO, tempoPercentChange);
    Thread.Sleep(1);

    // Use a shorter processing sequence when speeding up, a longer one when slowing down.
    var sequenceMilliseconds = matchBpm > streamBpm ? 20 : 82;
    Bass.BASS_ChannelSetAttribute(channel, BASSAttribute.BASS_ATTRIB_TEMPO_OPTION_SEQUENCE_MS, sequenceMilliseconds);
    Thread.Sleep(1);
}
/// <summary>
/// Sets a track's tempo to match another track's tempo.
/// </summary>
/// <param name="changeTrack">The track to change the tempo of.</param>
/// <param name="matchTrack">The track to match the BPM of.</param>
public static void SetTrackTempoToMatchAnotherTrack(Track changeTrack, Track matchTrack)
{
    if (changeTrack == null || matchTrack == null) return;
    if (!changeTrack.IsAudioLoaded()) return;
    if (!matchTrack.IsAudioLoaded()) return;

    // Match the outgoing track's end BPM to the incoming track's start BPM.
    var tempoPercentChange = (float)BpmHelper.GetAdjustedBpmPercentChange(changeTrack.EndBpm, matchTrack.StartBpm);

    Bass.BASS_ChannelSetAttribute(changeTrack.ChannelId, BASSAttribute.BASS_ATTRIB_TEMPO, tempoPercentChange);
    Thread.Sleep(1);
}
/// <summary>
/// Loads the tag details for an mp3 file, caching results per filename.
/// </summary>
/// <param name="filename">The mp3 filename.</param>
/// <returns>The tag details, or null when the file is not an mp3.</returns>
/// <exception cref="Exception">Thrown when the tags cannot be read from the file.</exception>
public static TagDetails LoadTags(string filename)
{
    var extension = Path.GetExtension(filename);
    if (extension == null)
    {
        return null;
    }

    // Idiom fix: case-insensitive compare instead of allocating via ToLower().
    if (!extension.Equals(".mp3", StringComparison.OrdinalIgnoreCase))
    {
        return null;
    }

    // Return the cached entry if this file has already been read
    // (single lookup via TryGetValue instead of ContainsKey + indexer).
    lock (_tagDetails)
    {
        TagDetails cached;
        if (_tagDetails.TryGetValue(filename, out cached))
        {
            return cached;
        }
    }

    var tags = BassTags.BASS_TAG_GetFromFile(filename);
    if (tags == null)
    {
        throw new Exception("Cannot load tags for file " + filename);
    }

    var tagDetails = new TagDetails
    {
        Title = tags.title,
        Artist = tags.artist,
        Album = tags.album,
        AlbumArtist = tags.albumartist,
        Genre = tags.genre,
        Gain = tags.replaygain_track_peak
    };

    // Bug fix: also skip a null key (the original only checked for "").
    var key = tags.NativeTag("InitialKey");
    if (!string.IsNullOrEmpty(key))
    {
        tagDetails.Key = key;
    }

    decimal bpm;
    if (decimal.TryParse(tags.bpm, out bpm))
    {
        tagDetails.Bpm = BpmHelper.NormaliseBpm(bpm);
    }

    var duration = TimeSpan.FromSeconds(tags.duration);
    if (duration.TotalMilliseconds != 0)
    {
        tagDetails.Length = (decimal)duration.TotalMilliseconds / 1000;
    }

    // Track numbers may be stored as "3/12"; take the part before the slash.
    int trackNumber;
    var trackNumberTag = (tags.track + "/").Split('/')[0].Trim();
    if (int.TryParse(trackNumberTag, out trackNumber))
    {
        tagDetails.TrackNumber = trackNumber;
    }

    // Bug fix: fall back to the artist when the album-artist tag is null as
    // well as when it is empty.
    if (string.IsNullOrEmpty(tagDetails.AlbumArtist))
    {
        tagDetails.AlbumArtist = tagDetails.Artist;
    }

    // Titles in the form "Artist / Title" carry the artist in the title tag.
    // Bug fix: guard against a null title before calling Contains.
    if (tagDetails.Title != null && tagDetails.Title.Contains("/"))
    {
        var data = tagDetails.Title.Split('/').ToList();
        tagDetails.Artist = data[0].Trim();
        tagDetails.Title = data[1].Trim();
    }

    lock (_tagDetails)
    {
        // Another thread may have populated the cache while we read the tags.
        if (!_tagDetails.ContainsKey(filename))
        {
            _tagDetails.Add(filename, tagDetails);
        }
    }

    return tagDetails;
}
/// <summary>
/// Saves a portion of an audio file as a wave, optionally tempo-adjusted to a
/// target BPM and optionally re-ordered around an offset position.
/// </summary>
/// <param name="inFilename">The input filename.</param>
/// <param name="outFilename">The output filename.</param>
/// <param name="start">The start position in seconds.</param>
/// <param name="length">The length in seconds.</param>
/// <param name="offset">The offset position in seconds (0 = none).</param>
/// <param name="gain">The gain.</param>
/// <param name="bpm">The BPM of the source audio (0 = derive from length).</param>
/// <param name="targetBpm">The target BPM (0 = no tempo change).</param>
public static void SavePartialAsWave(string inFilename, string outFilename, double start, double length, double offset = 0, float gain = 0, decimal bpm = 0, decimal targetBpm = 0)
{
    // DebugHelper.WriteLine("Saving portion of track as wave with offset - " + inFilename);
    var audioStream = new Sample
    {
        Filename = inFilename,
        Description = inFilename,
        Gain = gain,
        Bpm = bpm
    };
    AudioStreamHelper.LoadAudio(audioStream);

    if (targetBpm != 0)
    {
        if (bpm == 0)
        {
            bpm = BpmHelper.GetBpmFromLoopLength(length);
        }
        // Apply the tempo change to the channel, then stretch the requested
        // length by the same ratio so the exported section stays complete.
        var percentChange = BpmHelper.GetAdjustedBpmPercentChange(bpm, targetBpm) / 100;
        AudioStreamHelper.SetTempoToMatchBpm(audioStream.ChannelId, bpm, targetBpm);
        length = length * (double)(1 + percentChange);
    }

    // Encode raw PCM from the channel into the output file.
    const BASSEncode flags = BASSEncode.BASS_ENCODE_PCM;
    BassEnc.BASS_Encode_Start(audioStream.ChannelId, outFilename, flags, null, IntPtr.Zero);

    var startByte = Bass.BASS_ChannelSeconds2Bytes(audioStream.ChannelId, start);
    var endByte = Bass.BASS_ChannelSeconds2Bytes(audioStream.ChannelId, start + length);

    if (offset == 0 || offset == start)
    {
        // No offset: export the [start, start+length) section in order.
        TransferBytes(audioStream.ChannelId, startByte, endByte);
    }
    else
    {
        // With an offset, export [offset, start+length) first and then
        // [start, offset) — i.e. the output begins at the offset position and
        // wraps around. NOTE(review): presumably used to rotate a loop so it
        // starts on a different beat — confirm against callers.
        startByte = Bass.BASS_ChannelSeconds2Bytes(audioStream.ChannelId, offset);
        TransferBytes(audioStream.ChannelId, startByte, endByte);
        startByte = Bass.BASS_ChannelSeconds2Bytes(audioStream.ChannelId, start);
        endByte = Bass.BASS_ChannelSeconds2Bytes(audioStream.ChannelId, offset);
        TransferBytes(audioStream.ChannelId, startByte, endByte);
    }

    // Stop the encoder before freeing the stream and unloading the audio.
    BassEnc.BASS_Encode_Stop(audioStream.ChannelId);
    Bass.BASS_StreamFree(audioStream.ChannelId);
    AudioStreamHelper.UnloadAudio(audioStream);
}
/// <summary>
/// Loads any attributes stored in the track comment tag onto the track.
/// </summary>
/// <param name="track">The track to populate; ignored when null or when it has no artist/title.</param>
public static void LoadExtendedAttributes(Track track)
{
    if (track == null)
    {
        return;
    }
    if (track.Artist == "" || track.Title == "")
    {
        return;
    }

    var attributes = GetExtendedAttributes(track.Description);

    // Idiom fix: single lookups via TryGetValue instead of ContainsKey +
    // indexer. Assumes GetExtendedAttributes returns an
    // IDictionary<string, string> — TODO confirm.
    string value;

    // Fade positions are stored in seconds and converted to sample offsets.
    if (attributes.TryGetValue("FadeIn", out value))
    {
        track.FadeInStart = track.SecondsToSamples(ConversionHelper.ToDouble(value));
    }
    if (attributes.TryGetValue("FadeOut", out value))
    {
        track.FadeOutStart = track.SecondsToSamples(ConversionHelper.ToDouble(value));
    }
    if (attributes.TryGetValue("BPMAdjust", out value))
    {
        track.BpmAdjustmentRatio = ConversionHelper.ToDecimal(value);
    }

    // Fade lengths are relative to the fade starts, so these must run after
    // the FadeIn/FadeOut assignments above.
    if (attributes.TryGetValue("FadeInLengthInSeconds", out value))
    {
        track.FadeInEnd = track.FadeInStart + track.SecondsToSamples(ConversionHelper.ToDouble(value));
    }
    if (attributes.TryGetValue("FadeOutLengthInSeconds", out value))
    {
        track.FadeOutEnd = track.FadeOutStart + track.SecondsToSamples(ConversionHelper.ToDouble(value));
    }

    // Pre-fade-in: volume is stored as a percentage; any of these attributes
    // enables the pre-fade-in.
    if (attributes.TryGetValue("PreFadeInStartVolume", out value))
    {
        track.PreFadeInStartVolume = ConversionHelper.ToFloat(value) / 100;
        track.UsePreFadeIn = true;
    }
    if (attributes.TryGetValue("PreFadeInPosition", out value))
    {
        track.PreFadeInStart = track.SecondsToSamples(ConversionHelper.ToDouble(value));
        track.UsePreFadeIn = true;
    }
    // NOTE: "PreFadeInStart" deliberately overrides "PreFadeInPosition" when
    // both are present (order preserved from the original).
    if (attributes.TryGetValue("PreFadeInStart", out value))
    {
        track.PreFadeInStart = track.SecondsToSamples(ConversionHelper.ToDouble(value));
        track.UsePreFadeIn = true;
    }

    if (attributes.TryGetValue("StartBPM", out value))
    {
        track.StartBpm = BpmHelper.NormaliseBpm(ConversionHelper.ToDecimal(value));
    }
    if (attributes.TryGetValue("EndBPM", out value))
    {
        track.EndBpm = BpmHelper.NormaliseBpm(ConversionHelper.ToDecimal(value));
    }

    // Duration (seconds) only fills in a missing length; it never overwrites
    // a length that is already known.
    if (attributes.TryGetValue("Duration", out value))
    {
        if (track.Length == 0)
        {
            track.Length = (long)(ConversionHelper.ToDouble(value) * 1000);
        }
    }

    if (attributes.TryGetValue("PowerDown", out value))
    {
        track.PowerDownOnEnd = ConversionHelper.ToBoolean(value);
        track.PowerDownOnEndOriginal = track.PowerDownOnEnd;
    }
    if (attributes.TryGetValue("StartLoopCount", out value))
    {
        track.StartLoopCount = ConversionHelper.ToInt(value);
    }
    if (attributes.TryGetValue("EndLoopCount", out value))
    {
        track.EndLoopCount = ConversionHelper.ToInt(value);
    }

    if (attributes.TryGetValue("SkipStart", out value))
    {
        track.SkipStart = track.SecondsToSamples(ConversionHelper.ToDouble(value));
    }
    // Skip length is relative to SkipStart, so this must run after the
    // SkipStart assignment above.
    if (attributes.TryGetValue("SkipLengthInSeconds", out value))
    {
        track.SkipEnd = track.SkipStart + track.SecondsToSamples(ConversionHelper.ToDouble(value));
    }

    if (attributes.TryGetValue("Rank", out value))
    {
        track.Rank = ConversionHelper.ToInt(value, 1);
    }
    if (attributes.TryGetValue("Key", out value))
    {
        track.Key = value;
    }
}