/// <summary>
/// Constructs a new <see cref="AudioTrackBass"/> from provided audio data.
/// </summary>
/// <param name="data">The sample data stream.</param>
/// <param name="quick">If true, the track is only partially loaded and should only be used for preview purposes. Defaults to false.</param>
/// <exception cref="ArgumentNullException">If <paramref name="data"/> is null.</exception>
public AudioTrackBass(Stream data, bool quick = false)
{
    // Validate before doing any other work. Note: ArgumentNullException's string argument
    // is the parameter *name*, not a message — the previous code passed a message here.
    if (data == null)
        throw new ArgumentNullException(nameof(data));

    Preview = quick;

    // Preview tracks are played directly; full tracks are decode streams (prescanned for accurate length/seeking).
    BassFlags flags = Preview ? 0 : BassFlags.Decode | BassFlags.Prescan;

    //encapsulate incoming stream with async buffer if it isn't already.
    dataStream = data as AsyncBufferStream ?? new AsyncBufferStream(data, quick ? 8 : -1);

    procs = new DataStreamFileProcedures(dataStream);

    audioStreamPrefilter = Bass.CreateStream(StreamSystem.NoBuffer, flags, procs.BassProcedures, IntPtr.Zero);

    if (Preview)
        activeStream = audioStreamPrefilter;
    else
    {
        // Chain tempo and reverse FX onto the decode stream so rate/direction can be adjusted during playback.
        activeStream = BassFx.TempoCreate(audioStreamPrefilter, BassFlags.Decode);
        activeStream = BassFx.ReverseCreate(activeStream, 5f, BassFlags.Default);

        Bass.ChannelSetAttribute(activeStream, ChannelAttribute.TempoUseQuickAlgorithm, 1);
        Bass.ChannelSetAttribute(activeStream, ChannelAttribute.TempoOverlapMilliseconds, 4);
        Bass.ChannelSetAttribute(activeStream, ChannelAttribute.TempoSequenceMilliseconds, 30);
    }

    // Track length in milliseconds.
    Length = Bass.ChannelBytes2Seconds(activeStream, Bass.ChannelGetLength(activeStream)) * 1000;
    Bass.ChannelGetAttribute(activeStream, ChannelAttribute.Frequency, out initialFrequency);
}
/// <summary>
/// Constructs a new <see cref="TrackBass"/> from provided audio data.
/// </summary>
/// <param name="data">The sample data stream.</param>
/// <param name="quick">If true, the track will not be fully loaded, and should only be used for preview purposes. Defaults to false.</param>
/// <exception cref="ArgumentNullException">If <paramref name="data"/> is null (thrown when the queued action runs).</exception>
public TrackBass(Stream data, bool quick = false)
{
    EnqueueAction(() =>
    {
        Preview = quick;

        if (data == null)
            throw new ArgumentNullException(nameof(data));

        //encapsulate incoming stream with async buffer if it isn't already.
        dataStream = data as AsyncBufferStream ?? new AsyncBufferStream(data, quick ? 8 : -1);

        procedures = CreateDataStreamFileProcedures(dataStream);

        // Platforms without IL support can't marshal the callback holder automatically,
        // so it must stay pinned for the lifetime of the native stream.
        if (!RuntimeInfo.SupportsIL)
            pinnedProcedures = GCHandle.Alloc(procedures, GCHandleType.Pinned);

        BassFlags flags = Preview ? 0 : BassFlags.Decode | BassFlags.Prescan | BassFlags.Float;
        activeStream = Bass.CreateStream(StreamSystem.NoBuffer, flags, procedures.BassProcedures, RuntimeInfo.SupportsIL ? IntPtr.Zero : GCHandle.ToIntPtr(pinnedProcedures));

        if (!Preview)
        {
            // We assign the BassFlags.Decode streams to the device "bass_nodevice" to prevent them from getting
            // cleaned up during a Bass.Free call. This is necessary for seamless switching between audio devices.
            // Further, we provide the flag BassFlags.FxFreeSource such that freeing the activeStream also frees
            // all parent decoding streams.
            const int bass_nodevice = 0x20000;

            Bass.ChannelSetDevice(activeStream, bass_nodevice);
            tempoAdjustStream = BassFx.TempoCreate(activeStream, BassFlags.Decode | BassFlags.FxFreeSource);
            // The tempo stream is itself a decode stream, so it must likewise be moved off the real device.
            // (Previously this call re-targeted activeStream a second time, leaving tempoAdjustStream behind.)
            Bass.ChannelSetDevice(tempoAdjustStream, bass_nodevice);
            activeStream = BassFx.ReverseCreate(tempoAdjustStream, 5f, BassFlags.Default | BassFlags.FxFreeSource);

            Bass.ChannelSetAttribute(activeStream, ChannelAttribute.TempoUseQuickAlgorithm, 1);
            Bass.ChannelSetAttribute(activeStream, ChannelAttribute.TempoOverlapMilliseconds, 4);
            Bass.ChannelSetAttribute(activeStream, ChannelAttribute.TempoSequenceMilliseconds, 30);
        }

        // will be -1 in case of an error
        double seconds = Bass.ChannelBytes2Seconds(activeStream, Bass.ChannelGetLength(activeStream));

        bool success = seconds >= 0;

        if (success)
        {
            Length = seconds * 1000;

            Bass.ChannelGetAttribute(activeStream, ChannelAttribute.Frequency, out float frequency);
            initialFrequency = frequency;
            bitrate = (int)Bass.ChannelGetAttribute(activeStream, ChannelAttribute.Bitrate);

            isLoaded = true;
        }
    });

    InvalidateState();
}
/// <summary>
/// Constructs a new <see cref="Waveform"/> from provided audio data.
/// </summary>
/// <param name="data">The sample data stream. If null, an empty waveform is constructed.</param>
public Waveform(Stream data = null)
{
    if (data == null)
        return;

    readTask = Task.Run(() =>
    {
        var procs = new DataStreamFileProcedures(data);

        int decodeStream = Bass.CreateStream(StreamSystem.NoBuffer, BassFlags.Decode | BassFlags.Float, procs.BassProcedures, IntPtr.Zero);

        Bass.ChannelGetInfo(decodeStream, out ChannelInfo info);

        // ChannelGetLength returns a BYTE count.
        long length = Bass.ChannelGetLength(decodeStream);

        // Each "point" is generated from a number of samples, each sample contains a number of channels
        int samplesPerPoint = (int)(info.Frequency * resolution * info.Channels);

        // The capacity must be derived from bytes-per-point, not samples-per-point:
        // dividing the byte length by the sample count over-allocated by a factor of bytes_per_sample.
        int bytesPerPoint = samplesPerPoint * bytes_per_sample;
        points.Capacity = (int)(length / bytesPerPoint);

        // Each iteration pulls in several points worth of data.
        int bytesPerIteration = bytesPerPoint * points_per_iteration;
        var dataBuffer = new float[bytesPerIteration / bytes_per_sample];

        while (length > 0)
        {
            length = Bass.ChannelGetData(decodeStream, dataBuffer, bytesPerIteration);
            int samplesRead = (int)(length / bytes_per_sample);

            // Process a sequence of samples for each point
            for (int i = 0; i < samplesRead; i += samplesPerPoint)
            {
                // Process each sample in the sequence
                var point = new WaveformPoint(info.Channels);

                // Clamp a partial final point to samplesRead so we never read stale samples
                // left over from the previous buffer fill.
                int pointEnd = Math.Min(samplesRead, i + samplesPerPoint);

                for (int j = i; j < pointEnd; j += info.Channels)
                {
                    // Process each channel in the sample
                    for (int c = 0; c < info.Channels; c++)
                        point.Amplitude[c] = Math.Max(point.Amplitude[c], Math.Abs(dataBuffer[j + c]));
                }

                // BASS may provide unclipped samples, so clip them ourselves.
                for (int c = 0; c < info.Channels; c++)
                    point.Amplitude[c] = Math.Min(1, point.Amplitude[c]);

                points.Add(point);
            }
        }

        channels = info.Channels;
    }, cancelSource.Token);
}
/// <summary>
/// Constructs a new <see cref="TrackBass"/> from provided audio data.
/// </summary>
/// <param name="data">The sample data stream.</param>
/// <param name="quick">If true, the track will not be fully loaded, and should only be used for preview purposes. Defaults to false.</param>
/// <exception cref="ArgumentNullException">If <paramref name="data"/> is null (thrown when the queued action runs).</exception>
public TrackBass(Stream data, bool quick = false)
{
    PendingActions.Enqueue(() =>
    {
        Preview = quick;

        if (data == null)
            throw new ArgumentNullException(nameof(data));

        //encapsulate incoming stream with async buffer if it isn't already.
        dataStream = data as AsyncBufferStream ?? new AsyncBufferStream(data, quick ? 8 : -1);

        var procs = new DataStreamFileProcedures(dataStream);

        BassFlags flags = Preview ? 0 : BassFlags.Decode | BassFlags.Prescan;
        activeStream = Bass.CreateStream(StreamSystem.NoBuffer, flags, procs.BassProcedures, IntPtr.Zero);

        if (!Preview)
        {
            // We assign the BassFlags.Decode streams to the device "bass_nodevice" to prevent them from getting
            // cleaned up during a Bass.Free call. This is necessary for seamless switching between audio devices.
            // Further, we provide the flag BassFlags.FxFreeSource such that freeing the activeStream also frees
            // all parent decoding streams.
            const int bass_nodevice = 0x20000;

            Bass.ChannelSetDevice(activeStream, bass_nodevice);
            tempoAdjustStream = BassFx.TempoCreate(activeStream, BassFlags.Decode | BassFlags.FxFreeSource);
            // The tempo stream is itself a decode stream, so it must likewise be moved off the real device.
            // (Previously this call re-targeted activeStream a second time, leaving tempoAdjustStream behind.)
            Bass.ChannelSetDevice(tempoAdjustStream, bass_nodevice);
            activeStream = BassFx.ReverseCreate(tempoAdjustStream, 5f, BassFlags.Default | BassFlags.FxFreeSource);

            Bass.ChannelSetAttribute(activeStream, ChannelAttribute.TempoUseQuickAlgorithm, 1);
            Bass.ChannelSetAttribute(activeStream, ChannelAttribute.TempoOverlapMilliseconds, 4);
            Bass.ChannelSetAttribute(activeStream, ChannelAttribute.TempoSequenceMilliseconds, 30);
        }

        // Length in milliseconds.
        Length = Bass.ChannelBytes2Seconds(activeStream, Bass.ChannelGetLength(activeStream)) * 1000;

        Bass.ChannelGetAttribute(activeStream, ChannelAttribute.Frequency, out float frequency);
        initialFrequency = frequency;
        bitrate = (int)Bass.ChannelGetAttribute(activeStream, ChannelAttribute.Bitrate);

        isLoaded = true;
        OnLoaded?.Invoke(this);
    });

    InvalidateState();
}
/// <summary>
/// Releases resources held by this object: cancels the in-flight read (if any),
/// drops the computed points, and unpins the file-procedure callback holder.
/// Safe to call multiple times; subsequent calls are no-ops.
/// </summary>
/// <param name="disposing">Whether this call originates from an explicit Dispose call.</param>
protected virtual void Dispose(bool disposing)
{
    if (isDisposed)
        return;

    isDisposed = true;

    if (cancelSource != null)
    {
        cancelSource.Cancel();
        cancelSource.Dispose();
    }

    points = null;

    // Release the GCHandle only if it was ever allocated (pinning is conditional).
    if (pinnedProcedures.IsAllocated)
        pinnedProcedures.Free();

    procedures = null;
}
/// <summary>
/// Stops playback and frees the active BASS stream, disposes the buffered data
/// stream and unpins the file-procedure callback holder, then defers to the base
/// implementation.
/// </summary>
/// <param name="disposing">Whether this call originates from an explicit Dispose call.</param>
protected override void Dispose(bool disposing)
{
    if (activeStream != 0)
    {
        isRunning = false;
        Bass.ChannelStop(activeStream);
        Bass.StreamFree(activeStream);
    }

    activeStream = 0;

    if (dataStream != null)
        dataStream.Dispose();
    dataStream = null;

    // Release the GCHandle only if it was ever allocated (pinning is conditional).
    if (pinnedProcedures.IsAllocated)
        pinnedProcedures.Free();

    procedures = null;

    base.Dispose(disposing);
}
/// <summary>
/// Constructs a new <see cref="Waveform"/> from provided audio data.
/// </summary>
/// <param name="data">The sample data stream. If null, an empty waveform is constructed.</param>
public Waveform(Stream data)
{
    if (data == null)
        return;

    readTask = Task.Run(() =>
    {
        // for the time being, this code cannot run if there is no bass device available.
        if (Bass.CurrentDevice <= 0)
            return;

        procedures = CreateDataStreamFileProcedures(data);

        // Platforms without IL support can't marshal the callback holder automatically,
        // so it must stay pinned while the native stream uses it.
        if (!RuntimeInfo.SupportsIL)
            pinnedProcedures = GCHandle.Alloc(procedures, GCHandleType.Pinned);

        int decodeStream = Bass.CreateStream(StreamSystem.NoBuffer, BassFlags.Decode | BassFlags.Float, procedures.BassProcedures, RuntimeInfo.SupportsIL ? IntPtr.Zero : GCHandle.ToIntPtr(pinnedProcedures));

        Bass.ChannelGetInfo(decodeStream, out ChannelInfo info);

        // ChannelGetLength returns a BYTE count.
        long length = Bass.ChannelGetLength(decodeStream);

        // Each "point" is generated from a number of samples, each sample contains a number of channels
        int samplesPerPoint = (int)(info.Frequency * resolution * info.Channels);

        int bytesPerPoint = samplesPerPoint * bytes_per_sample;

        points.Capacity = (int)(length / bytesPerPoint);

        // Each iteration pulls in several points worth of samples
        int bytesPerIteration = bytesPerPoint * points_per_iteration;
        var sampleBuffer = new float[bytesPerIteration / bytes_per_sample];

        // Read sample data
        while (length > 0)
        {
            length = Bass.ChannelGetData(decodeStream, sampleBuffer, bytesPerIteration);
            int samplesRead = (int)(length / bytes_per_sample);

            // Each point is composed of multiple samples
            for (int i = 0; i < samplesRead; i += samplesPerPoint)
            {
                // Channels are interleaved in the sample data (data[0] -> channel0, data[1] -> channel1, data[2] -> channel0, etc)
                // samplesPerPoint assumes this interleaving behaviour
                var point = new WaveformPoint(info.Channels);

                // A partial final point must not index past samplesRead — doing so would read
                // stale samples left over from the previous buffer fill.
                int pointEnd = Math.Min(samplesRead, i + samplesPerPoint);

                for (int j = i; j < pointEnd; j += info.Channels)
                {
                    // Find the maximum amplitude for each channel in the point
                    for (int c = 0; c < info.Channels; c++)
                        point.Amplitude[c] = Math.Max(point.Amplitude[c], Math.Abs(sampleBuffer[j + c]));
                }

                // BASS may provide unclipped samples, so clip them ourselves
                for (int c = 0; c < info.Channels; c++)
                    point.Amplitude[c] = Math.Min(1, point.Amplitude[c]);

                points.Add(point);
            }
        }

        // Rewind and scan the stream a second time to compute per-point frequency-band intensities.
        Bass.ChannelSetPosition(decodeStream, 0);
        length = Bass.ChannelGetLength(decodeStream);

        // Read FFT data
        float[] bins = new float[fft_bins];
        int currentPoint = 0;
        long currentByte = 0;

        while (length > 0)
        {
            length = Bass.ChannelGetData(decodeStream, bins, (int)fft_samples);
            currentByte += length;

            double lowIntensity = computeIntensity(info, bins, low_min, mid_min);
            double midIntensity = computeIntensity(info, bins, mid_min, high_min);
            double highIntensity = computeIntensity(info, bins, high_min, high_max);

            // In general, the FFT function will read more data than the amount of data we have in one point
            // so we'll be setting intensities for all points whose data fits into the amount read by the FFT
            // We know that each data point required bytesPerPoint amount of data
            for (; currentPoint < points.Count && currentPoint * bytesPerPoint < currentByte; currentPoint++)
            {
                points[currentPoint].LowIntensity = lowIntensity;
                points[currentPoint].MidIntensity = midIntensity;
                points[currentPoint].HighIntensity = highIntensity;
            }
        }

        channels = info.Channels;
    }, cancelSource.Token);
}