public FileTransfer.FileStreamAssembler MergeAsStereoWavAssembler(AudioStream other)
        {
            //Merges this audio stream with another stream into a single stereo WAV
            //assembler. The two streams are aligned on a common time base: the
            //stream whose first sample has the earlier wall-clock timestamp starts
            //at sample 0 and the later stream is shifted accordingly.
            uint sampleRate = 8000;

            FileTransfer.WavFileAssembler mergedAssembler = new FileTransfer.WavFileAssembler("MergedAudioStreams-" + FiveTuple.GetHashCode() + ".wav", this.fileStreamAssemblerList, this.FiveTuple, FileTransfer.FileStreamTypes.RTP, this.initialFrameNumber, this.StartTime, sampleRate);

            //100-nanosecond ticks per sample; 8000 Hz => 1250
            double nanosecondHundredsPerSample = 10000000.0 / sampleRate;

            //Back-calculate the wall-clock timestamp of each stream's first sample
            //from its minimum-jitter reference chunk.
            //NOTE(review): the (int) casts below can wrap for large tick values --
            //presumably intended for 32-bit RTP timestamp wrap-around, but the
            //offset calculations in the if/else further down subtract SampleTick
            //without the cast; confirm this inconsistency is intentional.
            SampleChunkInfo thisTimeReference        = this.GetMinJitterTimeReference(sampleRate);
            TimeSpan        thisTicksReferenceOffset = new TimeSpan((long)(nanosecondHundredsPerSample * ((int)thisTimeReference.SampleTick - this.sampleInfo[0].SampleTick)));
            DateTime        thisFirstSampleTimestamp = thisTimeReference.Timestamp.Subtract(thisTicksReferenceOffset);

            SampleChunkInfo otherTimeReference        = other.GetMinJitterTimeReference(sampleRate);
            TimeSpan        otherTicksReferenceOffset = new TimeSpan((long)(nanosecondHundredsPerSample * ((int)otherTimeReference.SampleTick - other.sampleInfo[0].SampleTick)));
            DateTime        otherFirstSampleTimestamp = otherTimeReference.Timestamp.Subtract(otherTicksReferenceOffset);

            //The stream that starts first keeps its own first sample tick as its
            //offset; the later stream's offset is moved back by the time difference
            //(converted to sample ticks) so both streams share the same origin.
            long thisSampleTicksOffset, otherSampleTicksOffset;

            if (thisFirstSampleTimestamp < otherFirstSampleTimestamp)
            {
                thisSampleTicksOffset  = this.sampleInfo[0].SampleTick;
                otherSampleTicksOffset = otherTimeReference.SampleTick - (long)(otherTimeReference.Timestamp.Subtract(thisFirstSampleTimestamp).Ticks / nanosecondHundredsPerSample);
            }
            else
            {
                thisSampleTicksOffset  = thisTimeReference.SampleTick - (long)(thisTimeReference.Timestamp.Subtract(otherFirstSampleTimestamp).Ticks / nanosecondHundredsPerSample);
                otherSampleTicksOffset = other.sampleInfo[0].SampleTick;
            }

            //TODO(review): TryActivate's return value is ignored here, while
            //AssembleAsWavFileNative skips writing when activation fails -- confirm
            //whether the write below should also be conditional.
            mergedAssembler.TryActivate();

            //true => insert silence for missing data so the channels stay in sync
            Pcm16BitSampleStream thisStream  = new Pcm16BitSampleStream(this, thisSampleTicksOffset, true);
            Pcm16BitSampleStream otherStream = new Pcm16BitSampleStream(other, otherSampleTicksOffset, true);

            uint nSamples = mergedAssembler.CountSamplesInStreams(thisStream, otherStream);

            //counting consumed the streams, so rewind before writing
            thisStream.Position  = 0;
            otherStream.Position = 0;

            mergedAssembler.WriteSampleStreamToFile(nSamples, thisStream, otherStream);

            return mergedAssembler;
        }
//"Exemple #2" / "0" -- scraping artifact from a code-example site, commented out so the file compiles
        public FileTransfer.FileStreamAssembler AssembleAsWavFileNative()
        {
            //Extracts this audio stream to disk in its native (undecoded) encoding:
            //G.722 is written to an AU file (the only container handled for G.722),
            //while G.711 A-law/u-law and G.729 are written to a WAV file.
            //Throws NotImplementedException for any other RTP payload format.
            uint sampleRate = 8000;

            FileTransfer.WavFileAssembler.AudioFormat outFormat;

            if (this.Format == PacketHandlers.RtpPacketHandler.RtpPayloadType.G722)
            {
                //G722 sample rate is 16.000 samples/s according to RFC 3551
                //https://tools.ietf.org/html/rfc3551
                sampleRate = 16000;
                //only AU format handles G722
                FileTransfer.AuFileAssembler auAssembler = new FileTransfer.AuFileAssembler("AudioStream-" + this.FiveTuple.GetHashCode().ToString() + "-" + this.Format.ToString() + ".au", this.fileStreamAssemblerList, this.FiveTuple, FileTransfer.FileStreamTypes.RTP, this.initialFrameNumber, this.StartTime, FileTransfer.AuFileAssembler.Encoding.G722, sampleRate);
                if (auAssembler.TryActivate())
                {
                    auAssembler.AssembleAsWavFileNative(this.tempFileStream);
                }
                return auAssembler;
            }
            else if (this.Format == PacketHandlers.RtpPacketHandler.RtpPayloadType.G729)
            {
                outFormat = FileTransfer.WavFileAssembler.AudioFormat.WAVE_FORMAT_G729;
            }
            else if (this.Format == PacketHandlers.RtpPacketHandler.RtpPayloadType.G711_PCM_A)
            {
                outFormat = FileTransfer.WavFileAssembler.AudioFormat.WAVE_FORMAT_ALAW;
            }
            else if (this.Format == PacketHandlers.RtpPacketHandler.RtpPayloadType.G711_PCM_U)
            {
                outFormat = FileTransfer.WavFileAssembler.AudioFormat.WAVE_FORMAT_MULAW;
            }
            else
            {
                throw new NotImplementedException("WAV extraction of " + this.Format.ToString() + " format is not implemented");
            }

            //NOTE: a sample count (tempFileStream.Length * 8 / bitsPerSample) was
            //previously computed here but never used; the assembler derives the
            //length from the stream itself.
            FileTransfer.WavFileAssembler assembler = new FileTransfer.WavFileAssembler("AudioStream-" + this.FiveTuple.GetHashCode().ToString() + "-" + this.Format.ToString() + ".wav", this.fileStreamAssemblerList, this.FiveTuple, FileTransfer.FileStreamTypes.RTP, this.initialFrameNumber, this.StartTime, sampleRate);
            if (assembler.TryActivate())
            {
                assembler.AssembleAsWavFileNative(this.tempFileStream, outFormat);
            }

            return assembler;
        }
        public FileTransfer.FileStreamAssembler AssembleAsWavFile(short[] decompressionTable, bool insertSilenceOnMissingData = false)
        {
            //Writes this stream's samples to a PCM WAV file via a Pcm16BitSampleStream.
            //NOTE(review): decompressionTable is not referenced in this method body;
            //presumably decoding happens inside Pcm16BitSampleStream -- confirm before
            //removing the parameter (callers may still pass it).
            const uint sampleRate      = 8000;
            const byte bitsPerSampleIn = 8;

            //total input samples = total input bits / bits per sample
            uint sampleCount = (uint)((this.tempFileStream.Length * 8) / bitsPerSampleIn);

            FileTransfer.WavFileAssembler wavAssembler = new FileTransfer.WavFileAssembler("AudioStream-" + this.FiveTuple.GetHashCode().ToString() + ".wav", this.fileStreamAssemblerList, this.FiveTuple, FileTransfer.FileStreamTypes.RTP, this.initialFrameNumber, this.StartTime, sampleRate);
            wavAssembler.TryActivate();

            //start reading at the first recorded sample tick
            Pcm16BitSampleStream pcmStream = new Pcm16BitSampleStream(this, this.sampleInfo[0].SampleTick, insertSilenceOnMissingData);
            wavAssembler.WriteSampleStreamToFile(sampleCount, pcmStream);

            return wavAssembler;
        }