public AudioInfo Decode(System.IO.Stream stream, float targetBitrate, string fileExt)
{
    var length = stream.Length;
    byte[] source = new byte[length];

    // read the whole file into the buffer; Stream.Read may return fewer bytes
    // than requested, so keep reading until the buffer is full
    int offset = 0;
    while (offset < length)
    {
        int read = stream.Read(source, offset, (int)length - offset);
        if (read == 0)
            break;
        offset += read;
    }

    // pin the buffer so the Garbage Collector cannot move it while native code reads it
    var hGCFile = GCHandle.Alloc(source, GCHandleType.Pinned);
    try
    {
        var buffer = ReadMonoFromStream(hGCFile.AddrOfPinnedObject(), source.Length, (int)targetBitrate, -1, 0);
        var result = new AudioInfo();
        result.Samples = new Samples { Values = buffer, Bitrate = (int)targetBitrate };
        return result;
    }
    catch (Exception ex)
    {
        Logger.WarnException("Audio decoding exception.", ex);
        return null;
    }
    finally
    {
        hGCFile.Free();
    }
}
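// A minimal usage sketch (an illustration, not part of the original pipeline): decode a
// file stream through any IAudioDecoder implementation exposing the Decode method above.
// The file name and the 5512 Hz target rate are assumptions chosen for the example.
public void DecodeExample(IAudioDecoder decoder)
{
    using (var stream = System.IO.File.OpenRead("track.mp3"))
    {
        AudioInfo info = decoder.Decode(stream, 5512f, ".mp3");
        if (info != null)
        {
            // the decoder returns mono samples at the requested rate
            Console.WriteLine("Decoded {0} samples at {1} Hz",
                info.Samples.Values.Length, info.Samples.Bitrate);
        }
    }
}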
public void Process(Audio item, AudioInfo info)
{
    if (item == null)
    {
        throw new ArgumentNullException("item");
    }

    // compute a SHA-1 hash descriptor for the audio source and attach it to the item
    var descriptor = new SHA1HashDescriptor(item);
    item.Data.Add(descriptor);
}
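// SHA1HashDescriptor itself is not shown in this listing. The sketch below is an
// assumption of what such a descriptor typically computes: a SHA-1 hash over the raw
// source bytes, using the framework SHA1 class. The class and property names here
// are illustrative only.
public class Sha1HashSketch
{
    public byte[] Hash { get; private set; }

    public Sha1HashSketch(Audio item)
    {
        using (var sha1 = System.Security.Cryptography.SHA1.Create())
        using (var stream = item.GetSourceStream())
        {
            // hashing the undecoded source bytes lets identical files be detected cheaply
            Hash = sha1.ComputeHash(stream);
        }
    }
}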
public void Process(Audio item, AudioInfo info)
{
    var tempogram = new Tempogram();

    // build a low-rate amplitude envelope of the samples
    var s = new EnvelopeProcessor(factory).Build(info.Samples, 32, false);

    // s is overwritten with the half-wave rectified envelope derivative (amplitude rises only),
    // s2 keeps the raw derivative; sharp rises also drive the intensity counter
    var s2 = new Samples { Values = new float[s.Values.Length], Bitrate = s.Bitrate };
    var intensity = 0;
    for (int i = 0; i < s.Values.Length - 1; i++)
    {
        var d = s.Values[i + 1] - s.Values[i];
        s.Values[i] = d > 0 ? d : 0;
        s2.Values[i] = d;
        if (d > minAmplitudeChangeForIntensityRate)
            intensity++;
    }
    s.Values[s.Values.Length - 1] = 0;
    s2.Values[s2.Values.Length - 1] = 0;

    // duration of the sound in seconds (float division to keep the fractional part)
    var time = s.Values.Length / (float)s.Bitrate;

    // autocorrelate both signals over lags up to the longest rhythm period of interest
    var maxShift = (int)(s.Values.Length * (maxRithmDuration / time));
    var autoCorr1 = AutoCorr(s.Values, maxShift, 5);
    var autoCorr2 = AutoCorr(s2.Values, maxShift, 2);

    // map the lag axis onto a logarithmic [0, 1] scale before building the tempograms
    var l = (float)autoCorr1.Length;
    var k = Math.Log(2);
    var list1 = new List<KeyValuePair<float, float>>();
    var list2 = new List<KeyValuePair<float, float>>();
    for (int i = 0; i < autoCorr1.Length; i++)
    {
        var j = (float)(Math.Log(i / l + 1) / k);
        list1.Add(new KeyValuePair<float, float>(j, autoCorr1[i]));
        var v = autoCorr2[i];
        list2.Add(new KeyValuePair<float, float>(j, v > 0 ? v : 0));
    }

    tempogram.LongTempogram.Build(list1);
    tempogram.ShortTempogram.Build(list2);
    tempogram.Intensity = intensity / time;
    CalcTempo(tempogram);

    // save to audio item
    item.Data.Add(tempogram);
}
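// AutoCorr is not shown in this listing. A minimal sketch, assuming it computes a
// normalized autocorrelation for lags 1..maxShift and that the third argument is a
// smoothing radius over the lag axis (that interpretation is an assumption, not the
// original code):
private static float[] AutoCorrSketch(float[] values, int maxShift, int smoothRadius)
{
    var result = new float[maxShift];

    // normalize by the zero-lag energy so values stay roughly in [-1, 1]
    double norm = 0;
    for (int i = 0; i < values.Length; i++)
        norm += values[i] * values[i];
    if (norm == 0)
        return result;

    for (int shift = 1; shift <= maxShift; shift++)
    {
        double sum = 0;
        for (int i = 0; i + shift < values.Length; i++)
            sum += values[i] * values[i + shift];
        result[shift - 1] = (float)(sum / norm);
    }

    // simple moving-average smoothing over the lag axis
    var smoothed = new float[maxShift];
    for (int i = 0; i < maxShift; i++)
    {
        double acc = 0;
        int count = 0;
        for (int j = Math.Max(0, i - smoothRadius); j <= Math.Min(maxShift - 1, i + smoothRadius); j++)
        {
            acc += result[j];
            count++;
        }
        smoothed[i] = (float)(acc / count);
    }
    return smoothed;
}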
public virtual void Process(Audio item, AudioInfo info)
{
    // build the amplitude envelope of the decoded samples
    var s = Build(info.Samples);

    // resample the envelope down to a fixed length of EnvelopeLength points
    var resampler = Factory.CreateResampler();
    var resampled = resampler.Resample(s, info.Samples.Bitrate * ((float)EnvelopeLength / info.Samples.Values.Length));

    // pack the resampled envelope and save it into the audio item
    var envelope = new Envelope(resampled);
    item.Data.Add(envelope);

    // build the volume descriptor from the full-resolution envelope
    var volDesc = new VolumeDescriptor();
    volDesc.Build(s.Values);
    item.Data.Add(volDesc);
}
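// The resampler implementation is not part of this listing. Below is a minimal
// linear-interpolation sketch of the kind of resampling the call above relies on;
// the method name and the exact semantics of the rate argument are assumptions.
private static Samples ResampleSketch(Samples input, float targetBitrate)
{
    int targetLength = (int)Math.Round(input.Values.Length * (targetBitrate / input.Bitrate));
    var output = new float[Math.Max(1, targetLength)];
    for (int i = 0; i < output.Length; i++)
    {
        // map the output index back onto the input time axis and interpolate
        float pos = i * (input.Values.Length - 1) / (float)Math.Max(1, output.Length - 1);
        int left = (int)pos;
        int right = Math.Min(left + 1, input.Values.Length - 1);
        float frac = pos - left;
        output[i] = input.Values[left] * (1 - frac) + input.Values[right] * frac;
    }
    return new Samples { Values = output, Bitrate = (int)targetBitrate };
}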
/// <summary>
/// Gets audio items from the queue and processes them.
/// </summary>
protected virtual void Process(IAudioDecoder decoder)
{
    Audio item;
    while ((item = GetItemFromQueue()) != null)
    {
        try
        {
            // decode the audio source into samples and extract the MP3 tags
            AudioInfo info = null;
            using (var stream = item.GetSourceStream())
                info = decoder.Decode(stream, TargetBitrate, item.GetSourceExtension());

            // the decoder returns null on failure; mark the item as bad and move on
            if (info == null)
            {
                item.State = AudioState.Bad;
                continue;
            }

            // normalize the volume level
            info.Samples.Normalize();

            // run every sample processor; a failing processor must not stop the pipeline
            foreach (var processor in factory.CreateSampleProcessors())
            {
                try
                {
                    processor.Process(item, info);
                }
                catch (Exception ex)
                {
                    Logger.WarnException("Audio processor exception.", ex);
                }
            }

            OnProgress(new ProgressChangedEventArgs(100 * (itemsCount - sourceQueue.Count) / itemsCount, null));
            item.State = AudioState.Processed;
        }
        catch (Exception ex)
        {
            Logger.ErrorException("Audio processing failed.", ex);
            item.State = AudioState.Bad;
        }
    }
}
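// Samples.Normalize is not part of this listing. A minimal sketch of the peak
// normalization the call above presumably performs; the helper name is illustrative
// and the scaling strategy is an assumption.
private static void NormalizeSketch(Samples samples)
{
    float peak = 0;
    foreach (var v in samples.Values)
        peak = Math.Max(peak, Math.Abs(v));
    if (peak <= 0)
        return; // silence: nothing to scale

    // scale so the loudest sample reaches full scale (1.0)
    float gain = 1f / peak;
    for (int i = 0; i < samples.Values.Length; i++)
        samples.Values[i] *= gain;
}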