        void IMicrophoneSubscriber.ReceiveMicrophoneData(ArraySegment <float> data, [NotNull] WaveFormat format)
        {
            if (data.Array == null)
            {
                throw new ArgumentNullException("data");
            }

            // ReSharper disable once InconsistentlySynchronizedField (Justification: `_resamplerInput` is itself thread safe, so accessing it here without extra synchronisation is fine)
            if (!format.Equals(_resamplerInput.WaveFormat))
            {
                throw new ArgumentException("Incorrect format supplied to preprocessor", "format");
            }

            lock (_inputWriteLock)
            {
                // Write as much data into the buffer as possible
                var written = _resamplerInput.Write(data);

                // If not everything was written it means the input buffer is full! The only thing to do is throw
                // away the excess audio and to keep track of exactly how much was lost. The lost samples will be injected
                // as silence, to keep everything in sync.
                if (written < data.Count)
                {
                    var lost = data.Count - written;

                    Interlocked.Add(ref _droppedSamples, lost);
                    Log.Warn("Lost {0} samples in the preprocessor (buffer full), injecting silence to compensate", lost);
                }
            }

            //Wake up the processing thread
            _threadEvent.Set();
        }
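
A minimal, standalone sketch of the same format guard (not taken from the source above; the 48 kHz mono IEEE-float format and the NAudio WaveFormat type are assumptions):

        // Sketch only: reject audio whose WaveFormat does not match the format the pipeline expects.
        // Requires: using System; using NAudio.Wave;
        static void EnsureExpectedFormat(WaveFormat incoming)
        {
            var expected = WaveFormat.CreateIeeeFloatWaveFormat(48000, 1);

            if (!incoming.Equals(expected))
            {
                throw new ArgumentException(
                    string.Format("Samples expected in format {0}, but supplied with format {1}", expected, incoming),
                    "incoming");
            }
        }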
Example #2
        void IMicrophoneSubscriber.ReceiveMicrophoneData(ArraySegment <float> data, [NotNull] WaveFormat format)
        {
            if (data.Array == null)
            {
                throw new ArgumentNullException("data");
            }
            if (!format.Equals(_resamplerInput.WaveFormat))
            {
                throw new ArgumentException("Incorrect format supplied to preprocessor", "format");
            }

            lock (_inputWriteLock)
            {
                //Write data into input buffer
                var written = _resamplerInput.Write(data);
                if (written < data.Count)
                {
                    //We didn't write everything, so try to write as much of the remainder as will fit (fill the buffer) and then count the rest of the samples as lost
                    //Remaining space in the input buffer (assumes the buffer exposes a Count of currently buffered samples)
                    var remainingSpace = _resamplerInput.Capacity - _resamplerInput.Count;
                    var written2       = _resamplerInput.Write(new ArraySegment <float>(data.Array, data.Offset + written, Math.Min(data.Count - written, remainingSpace)));

                    //Increase the count of lost samples, so we can inject the appropriate amount of silence to compensate later
                    var totalWritten = written + written2;
                    var lost         = data.Count - totalWritten;
                    if (lost > 0)
                    {
                        Interlocked.Add(ref _droppedSamples, lost);
                        Log.Warn("Lost {0} samples in the preprocessor (buffer full), injecting silence to compensate", lost);
                    }
                }
            }

            //Wake up the processing thread
            _threadEvent.Set();
        }
Example #3
        public void Handle(ArraySegment <float> inputSamples, WaveFormat format)
        {
            if (_resetRequired)
            {
                Log.Trace("Resetting encoder pipeline");

                _resampler.Reset();
                _input.Reset();
                _output.Reset();

                _resetRequired = false;
            }

            if (!format.Equals(_inputFormat))
            {
                throw new ArgumentException(string.Format("Samples expected in format {0}, but supplied with format {1}", _inputFormat, format), "format");
            }

            if (_microphoneDiagnosticOutput != null)
            {
                _microphoneDiagnosticOutput.WriteSamples(inputSamples);
            }

            //Write samples to the pipeline (keep a running total of how many we have sent)
            //Keep sending until we've sent all of these samples
            var offset = 0;

            while (offset != inputSamples.Count)
            {
                offset += _input.Write(inputSamples.Array, offset + inputSamples.Offset, inputSamples.Count - offset);

                //Drain some of those samples just written, encode them and send them off
                EncodeFrames();
            }
        }
Example #4
        private static byte[] ResamplePcm(ref byte[] toResample, ref int sourceLength, WaveFormat sourceFormat, WaveFormat destPcmFormat, out int resultLength)
        {
            Debug.Assert(destPcmFormat.Encoding == WaveFormatEncoding.Pcm, "Codec format must be PCM");

            if (resampleRateStream != null && (!lastResampleSourceFormat.Equals(sourceFormat) || !lastResampleDestFormat.Equals(destPcmFormat)))
            {
                resampleRateStream.Dispose();
                resampleRateStream = null;
            }
            if (resampleRateStream == null)
            {
                WaveFormat sourceRateFormat = new WaveFormat(sourceFormat.SampleRate, sourceFormat.BitsPerSample, destPcmFormat.Channels);
                resampleRateStream = new AcmStream(sourceRateFormat, destPcmFormat);
                if (sourceFormat.Channels != destPcmFormat.Channels)
                {
                    WaveFormat destChanFormat = new WaveFormat(sourceFormat.SampleRate, sourceFormat.BitsPerSample, destPcmFormat.Channels);
                    if (resampleChannelStream != null)
                    {
                        resampleChannelStream.Dispose();
                    }
                    resampleChannelStream = new AcmStream(sourceFormat, destChanFormat);
                }
                lastResampleSourceFormat = sourceFormat;
                lastResampleDestFormat   = destPcmFormat;
            }

            int bytesConverted;

            if (sourceFormat.Channels != destPcmFormat.Channels)
            {
                if (destPcmFormat.Channels == 1 && sourceFormat.Channels == 2)
                {
                    toResample   = MixStereoToMono(toResample, sourceLength);
                    sourceLength = toResample.Length;
                }
                else
                {
                    Buffer.BlockCopy(toResample, 0, resampleChannelStream.SourceBuffer, 0, sourceLength);
                    sourceLength = resampleChannelStream.Convert(sourceLength, out bytesConverted);
                    if (bytesConverted >> 1 != sourceLength)
                    {
                        Console.WriteLine("WARNING: Not all input bytes were converted.");
                    }
                    toResample = resampleChannelStream.DestBuffer;
                }
            }

            Buffer.BlockCopy(toResample, 0, resampleRateStream.SourceBuffer, 0, sourceLength);
            resultLength = resampleRateStream.Convert(sourceLength, out bytesConverted);
            if (bytesConverted != sourceLength)
            {
                Console.WriteLine("WARNING: Not all input bytes were converted.");
                return(null);
            }

            return(resampleRateStream.DestBuffer);
        }
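
The method above drives NAudio's AcmStream by hand: copy source bytes into SourceBuffer, call Convert, then read the converted bytes out of DestBuffer. A minimal one-shot sketch of that pattern (the 44.1 kHz and 16 kHz mono PCM formats are assumptions, not taken from the source above):

        // Sketch only: convert one block of 16-bit mono PCM from 44.1 kHz to 16 kHz with AcmStream.
        // Requires: using System; using NAudio.Wave; using NAudio.Wave.Compression;
        static byte[] ConvertPcmBlock(byte[] source, int sourceLength)
        {
            var sourceFormat = new WaveFormat(44100, 16, 1);
            var destFormat   = new WaveFormat(16000, 16, 1);

            using (var acm = new AcmStream(sourceFormat, destFormat))
            {
                if (sourceLength > acm.SourceBuffer.Length)
                {
                    throw new ArgumentException("Block is too large for the ACM source buffer", "sourceLength");
                }

                Buffer.BlockCopy(source, 0, acm.SourceBuffer, 0, sourceLength);

                int sourceBytesConverted;
                int convertedLength = acm.Convert(sourceLength, out sourceBytesConverted);
                if (sourceBytesConverted != sourceLength)
                {
                    Console.WriteLine("WARNING: Not all input bytes were converted.");
                }

                var result = new byte[convertedLength];
                Buffer.BlockCopy(acm.DestBuffer, 0, result, 0, convertedLength);
                return result;
            }
        }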
Example #5
        public void ReceiveMicrophoneData(ArraySegment <float> inputSamples, [NotNull] WaveFormat format)
        {
            if (format == null)
            {
                throw new ArgumentNullException("format");
            }
            if (!format.Equals(_inputFormat))
            {
                throw new ArgumentException(string.Format("Samples expected in format {0}, but supplied with format {1}", _inputFormat, format), "format");
            }

            using (var encoderLock = _encoder.Lock())
            {
                var encoder = encoderLock.Value;

                //Early exit if we have been disposed on the main thread
                if (_disposed)
                {
                    return;
                }

                //Early exit if we've sent the last frame of this stream
                if (_stopped)
                {
                    return;
                }

                //Propagate the loss value on to the encoder
                encoder.PacketLoss = TransmissionPacketLoss;

                //Write samples to the pipeline (keep a running total of how many we have sent)
                //Keep sending until we've sent all of these samples
                var offset = 0;
                while (offset != inputSamples.Count)
                {
                    // ReSharper disable once AssignNullToNotNullAttribute (Justification: Array segment cannot be null)
                    offset += _input.Write(new ArraySegment <float>(inputSamples.Array, inputSamples.Offset + offset, inputSamples.Count - offset));

                    //Drain some of those samples just written, encode them and send them off
                    //If we're shutting down send a maximum of 1 packet
                    var encodedFrames = EncodeFrames(encoder, _stopping ? 1 : int.MaxValue);

                    //Don't encode any more frames if we've sent the one final frame
                    if (encodedFrames > 0 && _stopping)
                    {
                        _stopped = true;
                        Log.Debug("Encoder stopped");
                        break;
                    }
                }
            }
        }
Example #6
        public ProcessorWaveProvider(string sourceName, IWaveProvider sourceWaveProvider, string waveFilePath, WaveFormat outFormat, Common.ProcessRadioSignalingItemDelegate sigDelegate, Action <bool> hasPropertyChanged, bool recordEnabled, Common.SignalRecordingType recordType, int recordKickTime, Common.NoiseFloor noiseFloor, int customNoiseFloor, bool removeNoise, bool decodeMDC1200, bool decodeGEStar, bool decodeFleetSync, bool decodeP25)
            : base(sourceWaveProvider, waveFilePath)
        {
            LastValidStreamTitle = string.Empty;
            _sourceName          = sourceName;
            _sourceFormat        = sourceWaveProvider.WaveFormat;
            _outFormat           = outFormat;
            _hasPropertyChanged  = hasPropertyChanged;

            _silenceHelper = new SilenceHelper(outFormat.AverageBytesPerSecond / (outFormat.BitsPerSample / 8), noiseFloor, removeNoise, customNoiseFloor);

            if (outFormat.Equals(sourceWaveProvider.WaveFormat))
            {
                _resampleStream = null;
                _useResampler   = false;
            }
            else
            {
                if (Common.AppSettings.Instance.DiagnosticMode)
                {
                    Common.ConsoleHelper.ColorWriteLine(ConsoleColor.Magenta, "{0}: Source Format <> Out Format [{1}] <> [{2}]", sourceName, sourceWaveProvider.WaveFormat, outFormat);
                }
                _resampleStream = new NAudio.Wave.Compression.AcmStream(sourceWaveProvider.WaveFormat, outFormat);
                _useResampler   = true;
            }
            if (decodeMDC1200)
            {
                _mdc = new Decoders.MDC1200(outFormat.SampleRate, ProcessMDC1200, sourceName);
            }
            else
            {
                _mdc = null;
            }
            if (decodeGEStar)
            {
                _star = new Decoders.STAR(outFormat.SampleRate, ProcessSTAR, Decoders.STAR.star_format.star_format_1_16383, sourceName);
            }
            else
            {
                _star = null;
            }
            _rootDecoder = new Decoders.RootDecoder(outFormat.SampleRate, decodeFleetSync, decodeP25, ProcessRootDecoder);

            _recorder       = new AudioRecorder(sourceName, recordType, recordKickTime, outFormat, AudioProcessingGlobals.DefaultSaveFileWaveFormat, recordEnabled);
            _bytesPerSample = outFormat.BitsPerSample / 8;
            _encoding       = outFormat.Encoding;
            _sigDelegate    = sigDelegate;
        }
Example #7
        public static byte[] Resample(this WaveFormat waveFormat, byte[] data, WaveFormat outputFormat)
        {
            if (waveFormat.Equals(outputFormat))
            {
                return(data);
            }

            if (outputFormat.Encoding != WaveFormatEncoding.IeeeFloat)
            {
                throw new NotSupportedException("Only float supported.");
            }
            if (outputFormat.Channels > 1)
            {
                throw new NotSupportedException("Only mono supported.");
            }

            var sampleProvider = waveFormat.GetSampleProvider(data);

            if (waveFormat.Channels > 1)
            {
                sampleProvider = new StereoToMonoSampleProvider(sampleProvider);
            }

            var numSamples = waveFormat.SampleRate;
            var buffer     = new float[numSamples];

            var samples     = new List <float>();
            var samplesRead = sampleProvider.Read(buffer, 0, buffer.Length);

            while (samplesRead > 0)
            {
                var floats = samplesRead == buffer.Length ? buffer : buffer.Take(samplesRead).ToArray();
                samples.AddRange(floats);
                samplesRead = sampleProvider.Read(buffer, 0, buffer.Length);
            }

            buffer = samples.Resampled(waveFormat.SampleRate, outputFormat.SampleRate);

            var output     = new byte[buffer.Length * sizeof(float)];
            var waveBuffer = new WaveBuffer(output);

            for (var i = 0; i < buffer.Length; i++)
            {
                waveBuffer.FloatBuffer[i] = buffer[i];
            }
            return(output);
        }
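
A short usage sketch for the Resample extension above (the formats and the input bytes are placeholders, not from the original source):

        // Sketch only: resample a block of 44.1 kHz 16-bit stereo PCM to 16 kHz mono IEEE float.
        // Requires: using System.IO; using NAudio.Wave;
        var sourceFormat = new WaveFormat(44100, 16, 2);
        var outputFormat = WaveFormat.CreateIeeeFloatWaveFormat(16000, 1);

        byte[] sourceBytes = File.ReadAllBytes("capture.raw"); // placeholder path: raw PCM in sourceFormat
        byte[] floatBytes  = sourceFormat.Resample(sourceBytes, outputFormat);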
Example #8
        public bool Equals(FrameFormat other)
        {
            if (Codec != other.Codec)
            {
                return(false);
            }

            if (FrameSize != other.FrameSize)
            {
                return(false);
            }

            if (!WaveFormat.Equals(other.WaveFormat))
            {
                return(false);
            }

            return(true);
        }
Example #9
        public AudioRecorder(string streamSourceName, Common.SignalRecordingType recordingType, int recordingKickTime, WaveFormat sourceWaveFormat, WaveFormat fileWaveFormat, bool recordingEnabled)
        {
            _recordingType = recordingType;
            switch (_recordingType)
            {
            case SignalRecordingType.Fixed:
            {
                RecordingKickTimeTicks = TimeSpan.FromMinutes(recordingKickTime).Ticks;
                break;
            }

            default:
            {
                RecordingKickTimeTicks = TimeSpan.FromSeconds(recordingKickTime).Ticks;
                break;
            }
            }
            RecordingPrefix   = RadioSignalLogger.MakeSourceFilePrefix(streamSourceName);
            _sourceWaveFormat = sourceWaveFormat;
            if (fileWaveFormat == null)
            {
                _fileWaveFormat = sourceWaveFormat;
            }
            else
            {
                _fileWaveFormat = fileWaveFormat;
            }
            _recordingEnabled = recordingEnabled;

            if (_sourceWaveFormat.Equals(_fileWaveFormat))
            {
                _resampleStream = null;
                _useResampler   = false;
            }
            else
            {
                _resampleStream = new NAudio.Wave.Compression.AcmStream(_sourceWaveFormat, _fileWaveFormat);
                _useResampler   = true;
            }
        }
Example #10
0
        /// <summary>
        /// Combine two stereo files to one quad file
        /// </summary>
        /// <param name="filePathLeft">file path to the left stereo file</param>
        /// <param name="filePathRight">file path to the right stereo file</param>
        /// <param name="combinedFileNamePath">file path to the combined quad file</param>
        /// <returns></returns>
        public static bool CombineStereoToQuad(string filePathLeft, string filePathRight, string combinedFileNamePath)
        {
            WaveFormat waveFormatLeft  = GetWaveFormat(filePathLeft);
            WaveFormat waveFormatRight = GetWaveFormat(filePathRight);

            if (!waveFormatLeft.Equals(waveFormatRight))
            {
                Console.Out.WriteLine("The two files to combine must have the same format");
                return(false);
            }
            if (waveFormatLeft.Channels != 2 || waveFormatRight.Channels != 2)
            {
                Console.Out.WriteLine("The two files to combine must be stereo");
                return(false);
            }

            int sampleRate = waveFormatLeft.SampleRate;

            float[] channel1;
            float[] channel2;
            float[] channel3;
            float[] channel4;
            SplitStereoWaveFileToMono(filePathLeft, out channel1, out channel2);
            SplitStereoWaveFileToMono(filePathRight, out channel3, out channel4);

            // find the length of the longest channel
            int maxLength = Math.Max(channel1.Length, channel3.Length);

            using (WaveFileWriter wavWriter = new WaveFileWriter(combinedFileNamePath, WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, 4)))
            {
                // write the samples one by one (interleaved), pad with silence if necessary
                for (int i = 0; i < maxLength; i++)
                {
                    if (i < channel1.Length)
                    {
                        wavWriter.WriteSample(channel1[i]);
                    }
                    else
                    {
                        wavWriter.WriteSample(0.0f);
                    }
                    if (i < channel2.Length)
                    {
                        wavWriter.WriteSample(channel2[i]);
                    }
                    else
                    {
                        wavWriter.WriteSample(0.0f);
                    }
                    if (i < channel3.Length)
                    {
                        wavWriter.WriteSample(channel3[i]);
                    }
                    else
                    {
                        wavWriter.WriteSample(0.0f);
                    }
                    if (i < channel4.Length)
                    {
                        wavWriter.WriteSample(channel4[i]);
                    }
                    else
                    {
                        wavWriter.WriteSample(0.0f);
                    }
                }
            }
            return(true);
        }
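
A short usage sketch for CombineStereoToQuad (the file paths are placeholders):

        // Sketch only: combine two stereo WAV files into a single 4-channel IEEE-float WAV file.
        bool combined = CombineStereoToQuad(@"C:\audio\left.wav", @"C:\audio\right.wav", @"C:\audio\quad.wav");
        if (!combined)
        {
            Console.Out.WriteLine("Combining the two stereo files failed.");
        }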
Example #11
        public static void Export(String epubProjPath, String outputPath, ProgressUpdater progressUpdater)
        {
            ProjectInfo projInfo = new ProjectInfo(epubProjPath);

            File.Copy(projInfo.EpubPath, outputPath, true);
            Epub epubFile = projInfo.EpubFile;

            ClearDirectory(projInfo.TempPath);
            using (ZipArchive archive = ZipFile.Open(outputPath, ZipArchiveMode.Update))
            {
                long totalBytes = 0;

                XNamespace smilXns = @"http://www.w3.org/ns/SMIL";
                XNamespace epubXns = @"http://www.idpf.org/2007/ops";

                XDocument  xOpf    = XDocument.Parse(epubFile.OpfFile.Content);
                XElement   rootOpf = xOpf.Root;
                XNamespace xnsOpf  = rootOpf.Attribute("xmlns") != null ? rootOpf.Attribute("xmlns").Value : XNamespace.None;

                XElement metadataOpf = rootOpf.Element(xnsOpf + "metadata");
                XElement manifestOpf = rootOpf.Element(xnsOpf + "manifest");

                List <XAttribute> delAttr = new List <XAttribute>();
                foreach (XAttribute attr in metadataOpf.Attributes())
                {
                    if (attr.IsNamespaceDeclaration)
                    {
                        if (attr.Value == rootOpf.GetDefaultNamespace())
                        {
                            delAttr.Add(attr);
                        }
                    }
                }
                foreach (XAttribute attr in delAttr)
                {
                    attr.Remove();
                }

                int total = 4 * projInfo.Contents.Count + 1 + projInfo.TotalSentences;
                progressUpdater.Initialize(total);
                foreach (Content content in projInfo.Contents)
                {
                    long   contentBytes = 0;
                    String smilID       = GetSmilID(content.ID);
                    String audioID      = GetAudioID(content.ID);

                    #region Overwrite XHTML files
                    {
                        XElement        xContent       = new XElement(content.Root);
                        List <XText>    xContentBlocks = new List <XText>();
                        List <XElement> xImageBlocks   = new List <XElement>();
                        GetBlocks(content.Xns, xContent.Element(content.Xns + "body"), xContentBlocks, xImageBlocks);
                        int count = 0;
                        foreach (XText xText in xContentBlocks)
                        {
                            int id = Int32.Parse(xText.Value.Split('-')[1]);
                            if (id != count)
                            {
                                throw new Exception("Wrong ordering: ContentBlocks");
                            }

                            XElement parent = xText.Parent;
                            foreach (Sentence sentence in content.ContentBlocks[id].Sentences)
                            {
                                XElement xSentence = new XElement(content.Xns + "span");
                                xSentence.Add(new XAttribute("id", GetXhtmlID(sentence)));
                                xSentence.Add(new XText(sentence.OriginalText));
                                parent.Add(xSentence);
                            }
                            xText.Remove();
                            count++;
                        }
                        count = 0;
                        foreach (XElement xImage in xImageBlocks)
                        {
                            int id = Int32.Parse(xImage.Attribute("id").Value.Split('-')[1]);
                            if (id != count)
                            {
                                throw new Exception("Wrong ordering: ImageBlocks");
                            }
                            xImage.SetAttributeValue("alt", content.ImageBlocks[id].Text);
                            count++;
                        }
                        String          xhtmlZipPath = Path.Combine(projInfo.PackageName, content.Source);
                        ZipArchiveEntry xhtmlEntry   = archive.GetEntry(xhtmlZipPath.Replace('\\', '/'));
                        if (xhtmlEntry == null)
                        {
                            xhtmlEntry = archive.CreateEntry(xhtmlZipPath);
                        }
                        using (StreamWriter streamWriter = new StreamWriter(xhtmlEntry.Open()))
                        {
                            streamWriter.Write(xContent);
                            streamWriter.BaseStream.SetLength(streamWriter.BaseStream.Position);
                            streamWriter.Close();
                        }
                        xhtmlEntry.LastWriteTime = DateTimeOffset.UtcNow.LocalDateTime;
                    }

                    progressUpdater.Increment();
                    #endregion

                    #region Create SMIL files
                    {
                        String fileName = Path.GetFileName(content.Source);

                        XElement xContent = new XElement(smilXns + "smil");
                        xContent.Add(new XAttribute(XNamespace.Xmlns + "epub", epubXns));
                        xContent.Add(new XAttribute("version", "3.0"));
                        XElement xSeq = new XElement(smilXns + "seq");
                        xSeq.Add(new XAttribute("id", "seq1"));
                        xSeq.Add(new XAttribute(epubXns + "textref", fileName));
                        xSeq.Add(new XAttribute(epubXns + "type", "bodymatter chapter"));
                        xContent.Add(new XElement(smilXns + "body", xSeq));
                        int count = 0;
                        foreach (Block block in content.Blocks)
                        {
                            if (block is ImageBlock)
                            {
                                String begin = GetClockValue(contentBytes);
                                contentBytes += block.Bytes;
                                String end = GetClockValue(contentBytes);

                                XElement xText = new XElement(smilXns + "text");
                                xText.Add(new XAttribute("src", fileName + "#" + block.B_ID));
                                XElement xAudio = new XElement(smilXns + "audio");
                                xAudio.Add(new XAttribute("clipBegin", begin));
                                xAudio.Add(new XAttribute("clipEnd", end));
                                xAudio.Add(new XAttribute("src", fileName + ".mp3"));
                                XElement xPar = new XElement(smilXns + "par", xText, xAudio);
                                xPar.Add(new XAttribute("id", "par" + count));
                                xSeq.Add(xPar);
                                count++;
                                continue;
                            }
                            foreach (Sentence sentence in block.Sentences)
                            {
                                String begin = GetClockValue(contentBytes);
                                contentBytes += sentence.Bytes;
                                String end = GetClockValue(contentBytes);

                                XElement xText = new XElement(smilXns + "text");
                                xText.Add(new XAttribute("src", fileName + "#" + GetXhtmlID(sentence)));
                                XElement xAudio = new XElement(smilXns + "audio");
                                xAudio.Add(new XAttribute("clipBegin", begin));
                                xAudio.Add(new XAttribute("clipEnd", end));
                                xAudio.Add(new XAttribute("src", fileName + ".mp3"));
                                XElement xPar = new XElement(smilXns + "par", xText, xAudio);
                                xPar.Add(new XAttribute("id", "par" + count));
                                xSeq.Add(xPar);
                                count++;
                            }
                        }

                        String          smilZipPath = Path.Combine(projInfo.PackageName, content.Source) + ".smil";
                        ZipArchiveEntry smilEntry   = archive.CreateEntry(smilZipPath);
                        using (StreamWriter streamWriter = new StreamWriter(smilEntry.Open()))
                        {
                            streamWriter.Write(xContent);
                            streamWriter.BaseStream.SetLength(streamWriter.BaseStream.Position);
                            streamWriter.Close();
                        }
                        smilEntry.LastWriteTime = DateTimeOffset.UtcNow.LocalDateTime;
                    }

                    progressUpdater.Increment();
                    #endregion

                    #region Modify Opf file
                    {
                        XElement duration = new XElement(xnsOpf + "meta");
                        duration.Add(new XAttribute("property", "media:duration"));
                        duration.Add(new XAttribute("refines", "#" + smilID));
                        duration.Add(GetClockValue(contentBytes));
                        metadataOpf.Add(duration);
                        totalBytes += contentBytes;


                        XElement smil = new XElement(xnsOpf + "item");
                        smil.Add(new XAttribute("id", smilID));
                        smil.Add(new XAttribute("href", content.Source + ".smil"));
                        smil.Add(new XAttribute("media-type", "application/smil+xml"));

                        XElement audio = new XElement(xnsOpf + "item");
                        audio.Add(new XAttribute("id", audioID));
                        audio.Add(new XAttribute("href", content.Source + ".mp3"));
                        audio.Add(new XAttribute("media-type", "audio/mpeg"));

                        XElement xhtml = manifestOpf.Elements(xnsOpf + "item").FirstOrDefault(e => e.Attribute("href").Value.Equals(content.Source));
                        xhtml.Add(new XAttribute("media-overlay", smilID));
                        xhtml.AddAfterSelf(smil, audio);
                    }

                    progressUpdater.Increment();
                    #endregion

                    #region Merge Audio files
                    {
                        // Check whether every WAV file has the same wave format.
                        WaveFormat waveFormat = null;
                        foreach (String sourceFile in Directory.EnumerateFiles(content.ContentAudio, "*.wav"))
                        {
                            using (WaveFileReader waveFileReader = new WaveFileReader(sourceFile))
                            {
                                if (waveFormat == null)
                                {
                                    waveFormat = waveFileReader.WaveFormat;
                                }
                                else if (!waveFormat.Equals(waveFileReader.WaveFormat))
                                {
                                    throw new InvalidOperationException("Can't concatenate WAV Files that don't share the same format");
                                }
                            }
                        }

                        if (waveFormat != null)
                        {
                            byte[] buffer = new byte[1024];
                            int    read;
                            String outputWave = Path.Combine(projInfo.TempPath, content.CID + ".wav");
                            using (WaveFileWriter waveFileWriter = new WaveFileWriter(outputWave, waveFormat))
                            {
                                foreach (Block block in content.Blocks)
                                {
                                    foreach (Sentence sentence in block.Sentences)
                                    {
                                        String sourceFile = sentence.WavPath;
                                        using (WaveFileReader waveFileReader = new WaveFileReader(sourceFile))
                                        {
                                            while ((read = waveFileReader.Read(buffer, 0, buffer.Length)) > 0)
                                            {
                                                waveFileWriter.Write(buffer, 0, read);
                                            }
                                        }
                                        progressUpdater.Increment();
                                    }
                                }
                            }

                            String outputMP3 = Path.Combine(projInfo.TempPath, content.CID + ".mp3");
                            using (WaveFileReader waveReader = new WaveFileReader(outputWave))
                            {
                                using (MediaFoundationResampler resampled = new MediaFoundationResampler(waveReader, new WaveFormat(32000, 1)))
                                {
                                    int desiredBitRate = 0; // ask for lowest available bitrate
                                    MediaFoundationEncoder.EncodeToMp3(resampled, outputMP3, desiredBitRate);
                                }
                            }

                            String audioZipPath = Path.Combine(projInfo.PackageName, content.Source + ".mp3");
                            archive.CreateEntryFromFile(outputMP3, audioZipPath);
                        }
                    }

                    progressUpdater.Increment();
                    #endregion
                }

                XElement totalDuration = new XElement(xnsOpf + "meta");
                totalDuration.Add(new XAttribute("property", "media:duration"));
                totalDuration.Add(GetClockValue(totalBytes));
                metadataOpf.Add(totalDuration);

                String          opfPath  = epubFile.GetOpfPath();
                ZipArchiveEntry opfEntry = archive.GetEntry(opfPath.Replace('\\', '/'));
                if (opfEntry == null)
                {
                    opfEntry = archive.CreateEntry(opfPath);
                }
                using (StreamWriter streamWriter = new StreamWriter(opfEntry.Open()))
                {
                    streamWriter.Write(xOpf);
                    streamWriter.BaseStream.SetLength(streamWriter.BaseStream.Position);
                    streamWriter.Close();
                }
                opfEntry.LastWriteTime = DateTimeOffset.UtcNow.LocalDateTime;
            }

            ClearDirectory(projInfo.ExportPath);
            ZipFile.ExtractToDirectory(outputPath, projInfo.ExportPath);

            progressUpdater.Increment();
        }
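
This example (and Examples #12 and #13 below) repeats the same pre-flight check before concatenating WAV files: read each file's WaveFormat and require it to Equals the first one seen. A minimal standalone sketch of that check (the helper name and the directory parameter are assumptions):

        // Sketch only: verify that every WAV file in a directory shares one format before concatenation.
        // Requires: using System; using System.IO; using NAudio.Wave;
        static WaveFormat RequireCommonWaveFormat(string directory)
        {
            WaveFormat common = null;
            foreach (string path in Directory.EnumerateFiles(directory, "*.wav"))
            {
                using (var reader = new WaveFileReader(path))
                {
                    if (common == null)
                    {
                        common = reader.WaveFormat;
                    }
                    else if (!common.Equals(reader.WaveFormat))
                    {
                        throw new InvalidOperationException("Can't concatenate WAV files that don't share the same format");
                    }
                }
            }
            return common; // null if the directory contained no WAV files
        }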
Example #12
        // Returns the total number of bytes of the output WAV file.
        public long Synthesize(string inputText, string outputPath)
        {
            if (inputText == null || outputPath == null)
            {
                throw new Exception("Null inputText or outputPath.");
            }

            ClearTemp();

            TextIndexList = new List <int> {
                0
            };
            ByteIndexList = new List <long> {
                0
            };

            int           startIndex = 0;
            long          totalBytes = 0;
            WaveFormat    waveFormat = null;
            List <string> waveList   = new List <string>();

            // SPLIT INTO SENTENCES BY TYPE
            List <PairSI> sList = Split(inputText);

            if (sList.Count == 0)
            {
                sList.Add(new PairSI(" ", 6));
            }
            foreach (PairSI sPair in sList)
            {
                string text = sPair.Key;
                int    type = sPair.Value;
                //Console.WriteLine(text + "/" + type);

                List <string> phonemeList = new List <string>();
                string        longPhoneme = "";

                #region ----------- Gen Phoneme -----------
                List <PairSS> tList = g2p.GenTranscriptList(text, type);
                foreach (PairSS tPair in tList)
                {
                    string cutWord    = tPair.Key;
                    string transcript = tPair.Value;
                    string phoneme    = phonemeConverter.Convert(transcript, type);
                    //Console.WriteLine("Phon: " + phoneme + " Trans: " + transcript + " Word: " + cutWord);
                    if (type != 2 && type != 6)
                    {
                        phoneme = TrimPhoneme(phoneme);
                    }
                    phonemeList.Add(phoneme);

                    int index = inputText.IndexOf(cutWord, startIndex);
                    if (index < 0)
                    {
                        throw new Exception("WRONG TEXT INDEX: " + cutWord + " " + startIndex + " " + inputText);
                    }
                    startIndex = index + cutWord.Length;
                    TextIndexList.Add(startIndex);
                }
                if (type != 2)
                {
                    foreach (string phoneme in phonemeList)
                    {
                        longPhoneme += phoneme;
                    }
                    if (type != 6)
                    {
                        longPhoneme = silence + longPhoneme + silence;
                    }
                }
                else
                {
                    if (phonemeList.Count == 0)
                    {
                        longPhoneme = " ";
                    }
                    else
                    {
                        longPhoneme = phonemeList[0];
                    }
                }
                #endregion

                #region ------------ Gen Sound ------------
                string wavPath = SoundSynthesize(longPhoneme, type);
                string durPath = Path.ChangeExtension(wavPath, ".dur");

                // Check whether every WAV file has the same wave format.
                waveList.Add(wavPath);
                using (WaveFileReader waveFileReader = new WaveFileReader(wavPath))
                {
                    if (waveFormat == null)
                    {
                        waveFormat = waveFileReader.WaveFormat;
                    }
                    else if (!waveFormat.Equals(waveFileReader.WaveFormat))
                    {
                        throw new InvalidOperationException("Can't concatenate WAV Files that don't share the same format");
                    }
                }

                // Get Duration
                if (type != 2 && type != 6)
                {
                    using (StreamReader streamReader = new StreamReader(durPath))
                    {
                        streamReader.ReadLine(); // First SIL
                        foreach (string phoneme in phonemeList)
                        {
                            int sIndex = 0;
                            int tIndex = 0;
                            while ((tIndex = phoneme.IndexOf('|', sIndex)) >= 0)
                            {
                                string[] line    = streamReader.ReadLine().Split(' ');
                                string   durPhon = line[2].Split("-+".ToCharArray())[1];
                                if (phoneme.IndexOf(durPhon, sIndex) != sIndex)
                                {
                                    throw new Exception("Mismatch phoneme and duration");
                                }
                                sIndex = tIndex + 1;

                                if (tIndex == phoneme.Length - 1)
                                {
                                    ByteIndexList.Add(totalBytes + GetByteFromDur(line[1]));
                                }
                            }
                        }

                        string dur = streamReader.ReadLine().Split(' ')[1];
                        totalBytes += GetByteFromDur(dur);

                        if (!streamReader.EndOfStream)
                        {
                            throw new Exception("Invalid Duration.");
                        }
                        streamReader.Close();
                    }
                }
                else if (type == 6)
                {
                    using (StreamReader streamReader = new StreamReader(durPath))
                    {
                        foreach (string phoneme in phonemeList)
                        {
                            int sIndex = 0;
                            int tIndex = 0;
                            while ((tIndex = phoneme.IndexOf('|', sIndex)) >= 0)
                            {
                                string[] line    = streamReader.ReadLine().Split(' ');
                                string   durPhon = line[2].Split("-+".ToCharArray())[1];
                                if (phoneme.IndexOf(durPhon, sIndex) != sIndex)
                                {
                                    throw new Exception("Mismatch phoneme and duration");
                                }
                                sIndex = tIndex + 1;

                                if (tIndex == phoneme.Length - 1)
                                {
                                    ByteIndexList.Add(totalBytes + GetByteFromDur(line[1]));
                                    totalBytes += GetByteFromDur(line[1]);
                                }
                            }
                        }
                        if (!streamReader.EndOfStream)
                        {
                            throw new Exception("Invalid Duration.");
                        }
                        streamReader.Close();
                    }
                }
                else
                {
                    using (WaveFileReader waveFileReader = new WaveFileReader(wavPath))
                    {
                        totalBytes += waveFileReader.Length;
                        ByteIndexList.Add(totalBytes);
                    }
                }
                #endregion

                if (TextIndexList.Count != ByteIndexList.Count)
                {
                    throw new Exception("Wrong Index & Byte Calculation: " + TextIndexList.Count + ", " + ByteIndexList.Count);
                }
            }

            int bytes = ConcatWavFiles(waveList, waveFormat, outputPath);

            if (bytes != totalBytes)
            {
                throw new Exception("Miscalculating Total Bytes.");
            }
            return(totalBytes);
        }
Example #13
        public long Synthesize(List <List <string> > sentence, string outputPath)
        {
            //Console.WriteLine("-----------------------------------------");
            ByteIndexList = new List <long>()
            {
                0
            };
            List <List <PairSI> > wordPhonemes = new List <List <PairSI> >();

            foreach (List <string> word in sentence)
            {
                List <PairSI> subPhonemes = new List <PairSI>();
                foreach (string syllable in word)
                {
                    GetSubPhoneme(syllable, subPhonemes);
                }
                wordPhonemes.Add(subPhonemes);
            }

            List <PairSI> nPhonemes  = NormalizePhoneme(wordPhonemes);
            long          totalBytes = 0;
            int           curWord    = 0;
            int           curSubWord = 0;
            WaveFormat    waveFormat = null;
            List <string> waveList   = new List <string>();

            foreach (PairSI phoneme in nPhonemes)
            {
                int    type    = phoneme.Value;
                string wavPath = SoundSynthesize(phoneme.Key, phoneme.Value);
                string durPath = Path.ChangeExtension(wavPath, ".dur");

                // Check whether every WAV file has the same wave format.
                waveList.Add(wavPath);
                using (WaveFileReader waveFileReader = new WaveFileReader(wavPath))
                {
                    if (waveFormat == null)
                    {
                        waveFormat = waveFileReader.WaveFormat;
                    }
                    else if (!waveFormat.Equals(waveFileReader.WaveFormat))
                    {
                        throw new InvalidOperationException("Can't concatenate WAV Files that don't share the same format");
                    }
                }

                #region Get Duration
                if (type == 1)
                {
                    using (StreamReader streamReader = new StreamReader(durPath))
                    {
                        string lastDur = "0";
                        while (!streamReader.EndOfStream)
                        {
                            if (wordPhonemes[curWord][curSubWord].Value != type)
                            {
                                throw new Exception("MisAlignment");
                            }
                            string phon   = wordPhonemes[curWord][curSubWord].Key;
                            int    sIndex = 0;
                            int    tIndex = 0;
                            while ((tIndex = phon.IndexOf('|', sIndex)) >= 0)
                            {
                                string[] line    = streamReader.ReadLine().Split(' ');
                                string   durPhon = line[2].Split("-+".ToCharArray())[1];
                                if (phon.IndexOf(durPhon, sIndex) != sIndex)
                                {
                                    throw new Exception("Mismatch phoneme and duration");
                                }
                                sIndex  = tIndex + 1;
                                lastDur = line[1];
                            }
                            if (++curSubWord >= wordPhonemes[curWord].Count)
                            {
                                ByteIndexList.Add(totalBytes + GetByteFromDur(lastDur));
                                curWord++;
                                curSubWord = 0;
                            }
                        }
                        streamReader.Close();
                        totalBytes += GetByteFromDur(lastDur);
                    }
                }
                else
                {
                    using (WaveFileReader waveFileReader = new WaveFileReader(wavPath))
                    {
                        totalBytes += waveFileReader.Length;
                        waveFileReader.Close();
                    }
                    if (++curSubWord >= wordPhonemes[curWord].Count)
                    {
                        ByteIndexList.Add(totalBytes);
                        curWord++;
                        curSubWord = 0;
                    }
                }
                #endregion
            }

            int bytes = ConcatWavFiles(waveList, waveFormat, outputPath);

            if (bytes != totalBytes)
            {
                throw new Exception("Miscalculating Total Bytes.");
            }
            return(totalBytes);
        }
Example #14
        public bool Equals(FormatKey other)
        {
            return(ShareMode == other.ShareMode && Format.Equals(other.Format));
        }
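
When a type like FormatKey is used as a dictionary key, Equals is normally paired with a consistent GetHashCode override. The rest of FormatKey is not shown, so the following is only a sketch of the conventional companion, assuming ShareMode and Format are its only fields:

        // Sketch only: hash code consistent with the Equals implementation above.
        public override int GetHashCode()
        {
            unchecked
            {
                return (ShareMode.GetHashCode() * 397) ^ (Format != null ? Format.GetHashCode() : 0);
            }
        }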
Example #15
        static void Main(string[] args)
        {
            int           iSampleRate = 41000;
            int           iCh         = 2;
            int           iBits       = 16;
            int           iVol        = 100;
            WaveInType    waveInType  = WaveInType.WaveIn;
            bool          isHead      = true;
            bool          isHelp      = false;
            bool          isTest      = false;
            List <string> errorList   = new List <string>();

            //read args
            {
                if (args != null)
                {
                    string sw = null;
                    for (int i = 0; i < args.Length; i++)
                    {
                        string arg = args[i];
                        if (string.IsNullOrWhiteSpace(sw))
                        {
                            switch (arg)
                            {
                            case "-d":
                            case "-r":
                            case "-c":
                            case "-b":
                            case "-v":
                                sw = arg;
                                break;

                            case "-N":
                                isHead = false;
                                break;

                            case "-test":
                                isTest = true;
                                break;

                            case "--h":
                            case "--v":
                                isHelp = true;
                                break;

                            default:
                                errorList.Add(string.Format("arg[{0}] : illegal option \"{1}\"", new object[] { i, arg }));
                                break;
                            }
                        }
                        else
                        {
                            Action <Action> Exec = (action) =>
                            {
                                try
                                {
                                    action();
                                }
                                catch (Exception e)
                                {
                                    errorList.Add(string.Format("arg[{0}] : illegal param \"{2}\" at \"{1}\"", new object[] { i, sw, arg }));
                                    errorList.Add(e.ToString());
                                }
                            };
                            switch (sw)
                            {
                            case "-d":
                                switch (arg)
                                {
                                case "wasapiloopback":
                                    waveInType = WaveInType.WasapiLoppback;
                                    break;

                                case "wavein":
                                    waveInType = WaveInType.WaveIn;
                                    break;

                                default:
                                    errorList.Add(string.Format("arg[{0}] : illegal param \"{2}\" at \"{1}\"", new object[] { i, sw, arg }));
                                    break;
                                }
                                break;

                            case "-r":
                                Exec(() => iSampleRate = int.Parse(arg));
                                break;

                            case "-c":
                                Exec(() => iCh = int.Parse(arg));
                                break;

                            case "-b":
                                Exec(() => iBits = int.Parse(arg));
                                break;

                            case "-v":
                                Exec(() => iVol = int.Parse(arg));
                                break;
                            }
                            sw = null;
                        }
                    }
                }
            }

            if (isHead)
            {
                message("naucon v0.0.0.0.0.1");
                message("auther takumi.");
                message("copyright libraplanet.");
                message("license n/a");
                message("");
                if (!isHelp)
                {
                    message("parameter:");
                    message(string.Format("  sampling rale  {0} Hz", new object[] { iSampleRate }));
                    message(string.Format("  ch             {0} ch", new object[] { iCh }));
                    message(string.Format("  bits           {0} bit", new object[] { iBits }));
                    message(string.Format("  capture device {0}", new object[] { waveInType }));
                    message(string.Format("  vol            {0}", new object[] { iVol }));
                    message("");
                }
            }
            //start
            if (errorList.Count > 0)
            {
                foreach (string s in errorList)
                {
                    message(s);
                }
                message("");
            }
            else if (isHelp)
            {
                //help
                message("usage: naucon [[option] [param]]...");
                message("");
                message("options and pamrams");
                message("-d [wavein | wasapiloopback]  mode of capture device.");
                message("                              WaveIn or WASAPI Loopback.");
                message("-r [n]                        sampling rate.");
                message("                                e.g.) 441000");
                message("-c [n]                        channels.");
                message("                                e.g.) 2");
                message("-b [n]                        bits per sample.");
                message("                                e.g.) 16");
                message("-v [n]                        volume. 100 = 100%");
                message("                                e.g.) 16");
                message("-N                            no output head message.");
                message("-test                         argument test (no recording).");
                message("--h                           view help.");
                message("--v                           view version.");
                message("");
            }
            else
            {
                object     mutex    = new object();
                bool       isActive = true;
                IWaveIn    waveIn;
                WaveFormat outWaveFormat = new WaveFormat(iSampleRate, iBits, iCh);

                //init
                {
                    switch (waveInType)
                    {
                    case WaveInType.WasapiLoppback:
                        waveIn = new WasapiLoopbackCapture();
                        break;

                    case WaveInType.WaveIn:
                    default:
                        WaveCallbackInfo callback = WaveCallbackInfo.FunctionCallback();
                        waveIn            = new WaveIn(callback);
                        waveIn.WaveFormat = outWaveFormat;
                        break;
                    }
                }

                if (isHead)
                {
                    message("output format:");
                    message(string.Format("  sampling rale  {0} Hz", new object[] { outWaveFormat.SampleRate }));
                    message(string.Format("  ch             {0} ch", new object[] { outWaveFormat.Channels }));
                    message(string.Format("  bits           {0} bit", new object[] { outWaveFormat.BitsPerSample }));
                    message(string.Format("  encoding       {0}", new object[] { outWaveFormat.Encoding }));
                    message("");
                }

                //event
                {
                    waveIn.DataAvailable += (sender, e) =>
                    {
                        lock (mutex)
                        {
                            if (WaveFormat.Equals(waveIn.WaveFormat, outWaveFormat) && (iVol == 100))
                            {
                                using (Stream consoleStream = Console.OpenStandardOutput())
                                {
                                    consoleStream.Write(e.Buffer, 0, e.BytesRecorded);
                                }
                            }
                            else
                            {
                                byte[]    data;
                                AudioData audio = new AudioData(waveIn.WaveFormat, e.Buffer, e.BytesRecorded);

                                if (iVol != 100)
                                {
                                    audio.ChangeVolume(iVol / 100.0);
                                }

                                audio.Conver(outWaveFormat);
                                data = audio.ToBytes();
                                if ((data != null) && (data.Length > 0))
                                {
                                    using (Stream consoleStream = Console.OpenStandardOutput())
                                    {
                                        consoleStream.Write(data, 0, data.Length);
                                    }
                                }
                            }
                        }
                    };

                    waveIn.RecordingStopped += (sender, e) =>
                    {
                        lock (mutex)
                        {
                            isActive = false;
                        }
                    };
                }


                if (!isTest)
                {
                    waveIn.StartRecording();
                    while (true)
                    {
                        lock (mutex)
                        {
                            if (isActive)
                            {
                                Thread.Sleep(1);
                            }
                            else
                            {
                                return;
                            }
                        }
                    }
                }
            }
        }
Example #16
        /// <summary>
        /// Check waveform file consistency between the waveform files and the
        /// reference waveform files listed in the file map.
        /// </summary>
        /// <param name="fileMap">File list map listing the sentences to validate.</param>
        /// <param name="waveDir">Base directory of the waveform files.</param>
        /// <param name="refWaveDir">Directory of the reference waveform files.</param>
        /// <param name="refName">The name of the reference waveform directory.</param>
        /// <returns>Data error set found.</returns>
        public static DataErrorSet ValidateWaveAlignment(FileListMap fileMap, string waveDir,
            string refWaveDir, string refName)
        {
            if (fileMap == null)
            {
                throw new ArgumentNullException("fileMap");
            }

            if (fileMap.Map == null)
            {
                throw new ArgumentException("fileMap.Map is null");
            }

            if (fileMap.Map.Keys == null)
            {
                throw new ArgumentException("fileMap.Map.Keys is null");
            }

            if (string.IsNullOrEmpty(refName))
            {
                throw new ArgumentNullException("refName");
            }

            if (string.IsNullOrEmpty(refWaveDir))
            {
                throw new ArgumentNullException("refWaveDir");
            }

            DataErrorSet errorSet = new DataErrorSet();

            foreach (string sid in fileMap.Map.Keys)
            {
                try
                {
                    string refFile = Path.Combine(refWaveDir, fileMap.Map[sid] + ".wav");
                    string waveFile = Path.Combine(waveDir, fileMap.Map[sid] + ".wav");

                    int waveSampleCount = 0;
                    int refSampleCount = 0;
                    WaveFormat waveFormat = new WaveFormat();
                    WaveFormat refWaveFormat = new WaveFormat();

                    StringBuilder sb = new StringBuilder();

                    // validate reference file existence
                    if (!File.Exists(refFile))
                    {
                        sb.AppendFormat(CultureInfo.InvariantCulture,
                            "{0} file [{0}] does not exist.", refName, refFile);
                    }
                    else
                    {
                        refSampleCount = WaveFile.ReadSampleCount(refFile);
                        refWaveFormat = WaveFile.ReadFormat(refFile);
                    }

                    // validate waveform file existence
                    if (!File.Exists(waveFile))
                    {
                        sb.AppendFormat(CultureInfo.InvariantCulture,
                            "Wave file [{0}] does not exist.", waveFile);
                    }
                    else
                    {
                        waveSampleCount = WaveFile.ReadSampleCount(waveFile);
                        waveFormat = WaveFile.ReadFormat(waveFile);
                    }

                    // validate content consistency
                    if (waveSampleCount != 0 && refSampleCount != 0
                        && waveSampleCount != refSampleCount)
                    {
                        sb.AppendFormat(CultureInfo.InvariantCulture,
                            "The sample count is not the same between waveform file [{0}] and {1} file [{2}].",
                            waveFile, refName, refFile);
                    }

                    if (!waveFormat.Equals(refWaveFormat))
                    {
                        sb.AppendFormat(CultureInfo.InvariantCulture,
                            "The waveform format is not the same between waveform file [{0}] and {1} file [{2}].",
                            waveFile, refName, refFile);
                    }

                    if (sb.Length > 0)
                    {
                        errorSet.Errors.Add(new DataError(string.Empty, sb.ToString(), sid));
                    }
                }
                catch (InvalidDataException ide)
                {
                    string message = Helper.BuildExceptionMessage(ide);
                    errorSet.Errors.Add(new DataError(string.Empty, message, sid));
                }
            }

            return errorSet;
        }