Example #1
        /// <summary>
        /// Calculates the length of the silence.
        /// </summary>
        /// <param name="length">The length of silence, in seconds..</param>
        /// <param name="wave">The wave object, containing bitrate, channels, etc..</param>
        /// <returns></returns>
        /// <remarks>Documented by Dev02, 2008-03-30</remarks>
        private int CalculateSilenceLength(double length, WaveCat wave)
        {
            int samplecount = Convert.ToInt32(length * wave.samplerate);
            int buffersize  = samplecount * wave.channels * wave.BitsPerSample / 8;

            return buffersize;
        }
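Note that the return value is a byte count rather than a duration: the seconds are first converted to a sample count at the wave's sample rate, then scaled by the channel count and the bytes per sample. A quick sanity check, using hypothetical CD-quality parameters that are not taken from the project:

        // Illustrative arithmetic only, assuming hypothetical values of
        // 44100 Hz, 2 channels and 16 bits per sample, with 0.5 s of silence:
        //   samplecount = 0.5 * 44100          = 22050 samples
        //   buffersize  = 22050 * 2 * 16 / 8   = 88200 bytes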
Example #2
        /// <summary>
        /// Generates a wave buffer, consisting of silence.
        /// </summary>
        /// <param name="length">The length of silence, in seconds.</param>
        /// <param name="wave">The wave object, containing bitrate, channels, etc.</param>
        /// <returns>A zero-filled byte buffer of the required length.</returns>
        /// <remarks>Documented by Dev02, 2008-03-30</remarks>
        private byte[] GenerateSilence(double length, WaveCat wave)
        {
            int buffersize = CalculateSilenceLength(length, wave);

            //generate empty byte buffer with the required length
            byte[] data = new byte[buffersize];
            return data;
        }
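Since GenerateSilence only allocates a zero-filled buffer sized by CalculateSilenceLength, appending silence reduces to writing that buffer to the output stream. A minimal sketch of such a call site inside the same class, assuming the wave object already carries valid samplerate, channels and BitsPerSample values (the helper name and the 0.5-second duration are hypothetical):

        // Hypothetical helper, not part of the original class: append half a
        // second of silence to an already-open output writer. 'wa_out' must
        // have samplerate, channels and BitsPerSample filled in beforehand.
        private void AppendSilence(BinaryWriter bw, WaveCat wa_out)
        {
            byte[] silence = GenerateSilence(0.5, wa_out); // zeroed PCM bytes
            bw.Write(silence);
        }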
Example #3
        public void Merge(string[] files, string outfile)
        {
            WaveCat wa_IN  = new WaveCat();
            WaveCat wa_out = new WaveCat();

            wa_out.DataLength = 0;
            wa_out.length     = 0;


            //Gather header data
            foreach (string path in files)
            {
                wa_IN.WaveHeaderIN(@path);
                wa_out.DataLength += wa_IN.DataLength;
                wa_out.length     += wa_IN.length;
            }

            //Reconstruct new header
            wa_out.BitsPerSample = wa_IN.BitsPerSample;
            wa_out.channels      = wa_IN.channels;
            wa_out.samplerate    = wa_IN.samplerate;
            wa_out.WaveHeaderOUT(@outfile);

            foreach (string path in files)
            {
                FileStream fs      = new FileStream(@path, FileMode.Open, FileAccess.Read);
                byte[]     arrfile = new byte[fs.Length - 44];
                fs.Position = 44;
                fs.Read(arrfile, 0, arrfile.Length);
                fs.Close();

                FileStream   fo = new FileStream(@outfile, FileMode.Append, FileAccess.Write);
                BinaryWriter bw = new BinaryWriter(fo);
                bw.Write(arrfile);
                bw.Close();
                fo.Close();
            }
        }
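Merge copies everything after a fixed 44-byte header from each input straight into the output, so it only produces a valid file when all inputs share the same sample rate, channel count and bits per sample. A hypothetical call site (the file paths are placeholders):

        // Hypothetical call site; the file names are placeholders. All inputs
        // are assumed to use identical PCM parameters, since Merge performs no
        // format conversion and keeps only the last header it read.
        WaveCat cat = new WaveCat();
        cat.Merge(new string[] { @"C:\audio\part1.wav", @"C:\audio\part2.wav" },
                  @"C:\audio\merged.wav");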
Example #4
File: WaveCat.cs  Project: hmehr/OSS
        public void Merge(string[] files, string outfile)
        {
            WaveCat wa_IN = new WaveCat();
            WaveCat wa_out = new WaveCat();

            wa_out.DataLength = 0;
            wa_out.length = 0;

            //Gather header data
            foreach (string path in files)
            {
                wa_IN.WaveHeaderIN(@path);
                wa_out.DataLength += wa_IN.DataLength;
                wa_out.length += wa_IN.length;

            }

            //Reconstruct new header
            wa_out.BitsPerSample = wa_IN.BitsPerSample;
            wa_out.channels = wa_IN.channels;
            wa_out.samplerate = wa_IN.samplerate;
            wa_out.WaveHeaderOUT(@outfile);

            foreach (string path in files)
            {
                FileStream fs = new FileStream(@path, FileMode.Open, FileAccess.Read);
                byte[] arrfile = new byte[fs.Length - 44];
                fs.Position = 44;
                fs.Read(arrfile, 0, arrfile.Length);
                fs.Close();

                FileStream fo = new FileStream(@outfile, FileMode.Append, FileAccess.Write);
                BinaryWriter bw = new BinaryWriter(fo);
                bw.Write(arrfile);
                bw.Close();
                fo.Close();
            }
        }
Example #5
        /// <summary>
        /// Concatenates the specified files.
        /// </summary>
        /// <param name="files">The files.</param>
        /// <param name="outfile">The outfile.</param>
        /// <param name="stereo">if set to <c>true</c> [stereo].</param>
        /// <param name="worker">The worker.</param>
        /// <remarks>Documented by Dev02, 2008-03-30</remarks>
        public void Concatenate(List<MediaFieldFile> files, FileInfo outfile, bool stereo)
        {
            WaveCat wa_IN  = new WaveCat();
            WaveCat wa_out = new WaveCat();

            wa_out.DataLength    = 0;
            wa_out.length        = 0;
            wa_out.channels      = (short)(stereo ? 2 : 1);
            wa_out.BitsPerSample = 0;
            wa_out.samplerate    = 0;

            //gather header data for each audio file
            int count = files.Count;
            int index = 0;

            foreach (MediaFieldFile file in files)
            {
                if (file.mediafield.Type == MediaField.TypeEnum.AudioField && file.ContainsFile && file.Extension.ToLowerInvariant() == Resources.AUDIO_WAVE_EXTENSION.ToLowerInvariant()) //fix for [MLA-1271]: only able to concatenate wave files
                {
                    wa_IN.WaveHeaderIN(file.file.FullName);

                    //take over sampling rate from first input file
                    if (wa_out.samplerate < 1)
                    {
                        wa_out.samplerate = wa_IN.samplerate;
                    }

                    //take over bits per sample from first input file
                    if (wa_out.BitsPerSample < 1)
                    {
                        wa_out.BitsPerSample = wa_IN.BitsPerSample;
                    }

                    //check if input wave properties are conform to output wave properties
                    if (wa_IN.BitsPerSample != wa_out.BitsPerSample)
                    {
                        BusinessLayer.AddLog(string.Format("Warning: {0} Bits Per Sample instead of {1} in {2}", wa_IN.BitsPerSample, wa_out.BitsPerSample, file.file.Name));
                    }
                    if (wa_IN.samplerate != wa_out.samplerate)
                    {
                        BusinessLayer.AddLog(string.Format("Warning: {0} Samplingrate instead of {1} in {2}", wa_IN.samplerate, wa_out.samplerate, file.file.Name));
                    }

                    if (wa_IN.channels == wa_out.channels)
                    {
                        wa_out.DataLength += wa_IN.DataLength;
                        wa_out.length     += wa_IN.DataLength;
                    }
                    else
                    {
                        wa_out.DataLength += Convert.ToInt32(1.0 * wa_IN.length / wa_IN.channels * wa_out.channels);
                        wa_out.length     += Convert.ToInt32(1.0 * wa_IN.length / wa_IN.channels * wa_out.channels);
                    }
                }
                BusinessLayer.ReportProgress(index++, count);
            }

            //generate the new header out of last input file
            //wa_out.BitsPerSample = wa_IN.BitsPerSample;
            //wa_out.channels = wa_IN.channels;
            //wa_out.samplerate = wa_IN.samplerate;

            //check that sampling rate and bits per sample were detected properly
            if (wa_out.samplerate < 1 || wa_out.BitsPerSample < 1)
            {
                BusinessLayer.AddLog("Error: SamplingRate or BitsPerSample could not be detected properly.");
                return;
            }

            //modify header data length for each silence
            foreach (MediaFieldFile file in files)
            {
                if (file.mediafield.Type == MediaField.TypeEnum.Silence)
                {
                    //just add the silence data length to the header
                    int length = CalculateSilenceLength(file.mediafield.SilenceDuration, wa_out);
                    wa_out.length     += length;
                    wa_out.DataLength += length;
                }
            }

            //save the header
            wa_out.WaveHeaderOUT(outfile.FullName);

            //generate the wave data
            index = 0;
            foreach (MediaFieldFile file in files)
            {
                if (file.mediafield.Type == MediaField.TypeEnum.AudioField && file.ContainsFile)
                {
                    wa_IN.WaveHeaderIN(file.file.FullName);

                    FileStream fs      = new FileStream(file.file.FullName, FileMode.Open, FileAccess.Read);
                    byte[]     arrfile = new byte[fs.Length - 44];
                    fs.Position = 44;
                    fs.Read(arrfile, 0, arrfile.Length);
                    fs.Close();

                    if (wa_IN.channels != wa_out.channels)
                    {
                        //transform from mono to stereo
                        if (wa_IN.channels == 1 && wa_out.channels == 2)
                        {
                            arrfile = TransformMonoToStereo(arrfile);
                        }
                        //transform from stereo to mono
                        if (wa_IN.channels == 2 && wa_out.channels == 1)
                        {
                            arrfile = TransformStereoToMono(arrfile);
                        }
                    }

                    FileStream   fo = new FileStream(outfile.FullName, FileMode.Append, FileAccess.Write);
                    BinaryWriter bw = new BinaryWriter(fo);
                    bw.Write(arrfile);
                    bw.Close();
                    fo.Close();
                }
                else if (file.mediafield.Type == MediaField.TypeEnum.Silence)
                {
                    FileStream   fo = new FileStream(outfile.FullName, FileMode.Append, FileAccess.Write);
                    BinaryWriter bw = new BinaryWriter(fo);
                    bw.Write(GenerateSilence(file.mediafield.SilenceDuration, wa_out));
                    bw.Close();
                    fo.Close();
                }
                BusinessLayer.ReportProgress(index++, count);
            }
        }
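Unlike Merge, Concatenate normalizes the channel count (via TransformMonoToStereo/TransformStereoToMono) and can insert silence fields, but it still expects matching sample rates and bits per sample and only logs a warning otherwise. A hypothetical call site; how the MediaFieldFile list is assembled is application specific, so the helper below is only a placeholder:

        // Hypothetical call site from within the same class. The helper below
        // stands in for whatever application code assembles the audio and
        // silence fields; 'true' requests a stereo output file.
        List<MediaFieldFile> fields = CollectMediaFieldFiles(); // assumed helper
        Concatenate(fields, new FileInfo(@"C:\audio\out.wav"), true);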
Example #6
        /// <summary>
        /// Concatenates the specified files.
        /// </summary>
        /// <param name="files">The files.</param>
        /// <param name="outfile">The outfile.</param>
        /// <param name="stereo">if set to <c>true</c> [stereo].</param>
        /// <param name="worker">The worker.</param>
        /// <remarks>Documented by Dev02, 2008-03-30</remarks>
        public void Concatenate(List<MediaFieldFile> files, FileInfo outfile, bool stereo)
        {
            WaveCat wa_IN = new WaveCat();
            WaveCat wa_out = new WaveCat();

            wa_out.DataLength = 0;
            wa_out.length = 0;
            wa_out.channels = (short)(stereo ? 2 : 1);
            wa_out.BitsPerSample = 0;
            wa_out.samplerate = 0;

            //gather header data for each audio file
            int count = files.Count;
            int index = 0;
            foreach (MediaFieldFile file in files)
            {
                if (file.mediafield.Type == MediaField.TypeEnum.AudioField && file.ContainsFile && file.Extension.ToLowerInvariant() == Resources.AUDIO_WAVE_EXTENSION.ToLowerInvariant()) //fix for [MLA-1271]: only able to concatenate wave files
                {
                    wa_IN.WaveHeaderIN(file.file.FullName);

                    //take over sampling rate from first input file
                    if (wa_out.samplerate < 1)
                        wa_out.samplerate = wa_IN.samplerate;

                    //take over bits per sample from first input file
                    if (wa_out.BitsPerSample < 1)
                        wa_out.BitsPerSample = wa_IN.BitsPerSample;

                    //check if input wave properties are conform to output wave properties
                    if (wa_IN.BitsPerSample != wa_out.BitsPerSample)
                        BusinessLayer.AddLog(string.Format("Warning: {0} Bits Per Sample instead of {1} in {2}", wa_IN.BitsPerSample, wa_out.BitsPerSample, file.file.Name));
                    if (wa_IN.samplerate != wa_out.samplerate)
                        BusinessLayer.AddLog(string.Format("Warning: {0} Samplingrate instead of {1} in {2}", wa_IN.samplerate, wa_out.samplerate, file.file.Name));

                    if (wa_IN.channels == wa_out.channels)
                    {
                        wa_out.DataLength += wa_IN.DataLength;
                        wa_out.length += wa_IN.DataLength;
                    }
                    else
                    {
                        wa_out.DataLength += Convert.ToInt32(1.0 * wa_IN.length / wa_IN.channels * wa_out.channels);
                        wa_out.length += Convert.ToInt32(1.0 * wa_IN.length / wa_IN.channels * wa_out.channels);
                    }
                }
                BusinessLayer.ReportProgress(index++, count);
            }

            //generate the new header out of last input file
            //wa_out.BitsPerSample = wa_IN.BitsPerSample;
            //wa_out.channels = wa_IN.channels;
            //wa_out.samplerate = wa_IN.samplerate;

            //check that sampling rate and bits per sample were detected properly
            if (wa_out.samplerate < 1 || wa_out.BitsPerSample < 1)
            {
                BusinessLayer.AddLog("Error: SamplingRate or BitsPerSample could not be detected properly.");
                return;
            }

            //modify header data length for each silence
            foreach (MediaFieldFile file in files)
            {
                if (file.mediafield.Type == MediaField.TypeEnum.Silence)
                {
                    //just add the silence data length to the header
                    int length = CalculateSilenceLength(file.mediafield.SilenceDuration, wa_out);
                    wa_out.length += length;
                    wa_out.DataLength += length;
                }
            }

            //save the header
            wa_out.WaveHeaderOUT(outfile.FullName);

            //generate the wave data
            index = 0;
            foreach (MediaFieldFile file in files)
            {
                if (file.mediafield.Type == MediaField.TypeEnum.AudioField && file.ContainsFile)
                {
                    wa_IN.WaveHeaderIN(file.file.FullName);

                    FileStream fs = new FileStream(file.file.FullName, FileMode.Open, FileAccess.Read);
                    byte[] arrfile = new byte[fs.Length - 44];
                    fs.Position = 44;
                    fs.Read(arrfile, 0, arrfile.Length);
                    fs.Close();

                    if (wa_IN.channels != wa_out.channels)
                    {
                        //transform from mono to stereo
                        if (wa_IN.channels == 1 && wa_out.channels == 2)
                            arrfile = TransformMonoToStereo(arrfile);
                        //transform from stereo to mono
                        if (wa_IN.channels == 2 && wa_out.channels == 1)
                            arrfile = TransformStereoToMono(arrfile);
                    }

                    FileStream fo = new FileStream(outfile.FullName, FileMode.Append, FileAccess.Write);
                    BinaryWriter bw = new BinaryWriter(fo);
                    bw.Write(arrfile);
                    bw.Close();
                    fo.Close();
                }
                else if (file.mediafield.Type == MediaField.TypeEnum.Silence)
                {
                    FileStream fo = new FileStream(outfile.FullName, FileMode.Append, FileAccess.Write);
                    BinaryWriter bw = new BinaryWriter(fo);
                    bw.Write(GenerateSilence(file.mediafield.SilenceDuration, wa_out));
                    bw.Close();
                    fo.Close();
                }
                BusinessLayer.ReportProgress(index++, count);
            }
        }
Example #7
        /// <summary>
        /// Generates a wave buffer, consisting of silence.
        /// </summary>
        /// <param name="length">The length of silence, in seconds.</param>
        /// <param name="wave">The wave object, containing bitrate, channels, etc.</param>
        /// <returns>A zero-filled byte buffer of the required length.</returns>
        /// <remarks>Documented by Dev02, 2008-03-30</remarks>
        private byte[] GenerateSilence(double length, WaveCat wave)
        {
            int buffersize = CalculateSilenceLength(length, wave);

            //generate empty byte buffer with the required length
            byte[] data = new byte[buffersize];
            return data;
        }
Example #8
        /// <summary>
        /// Calculates the length of the silence.
        /// </summary>
        /// <param name="length">The length of silence, in seconds..</param>
        /// <param name="wave">The wave object, containing bitrate, channels, etc..</param>
        /// <returns></returns>
        /// <remarks>Documented by Dev02, 2008-03-30</remarks>
        private int CalculateSilenceLength(double length, WaveCat wave)
        {
            int samplecount = Convert.ToInt32(length * wave.samplerate);
            int buffersize = samplecount * wave.channels * wave.BitsPerSample / 8;

            return buffersize;
        }