Code Example #1
File: tRNG.cs Project: fernandolucasrodriguez/qit
        public WaveFormat CalculateMaxThroughputSoundFormat(int m_Device)
        {
            WaveFormat m_Format;

            // automatically select the combination of sample rate, depth and channels that yields the highest byte throughput
            int MaxThroughput = 0, MaxThroughputChannel = 0, MaxThroughputRate = 0, MaxThroughputDepth = 0;
            for (int r = 0; r < SampleRateList.Length; r++)
            {
                for (int c = 0; c < ChannelList.Length; c++)
                {
                    for (int d = 0; d < SampleDepthList.Length; d++)
                    {
                        m_Format = new WaveFormat(SampleRateList[r], SampleDepthList[d], ChannelList[c]);
                        m_Format.wFormatTag = (short)WaveFormats.Pcm;
                        IntPtr m_WaveIn;
                        int waveResult = WaveNative.waveInOpen(out m_WaveIn, m_Device, m_Format, null, 0, WaveNative.WAVE_FORMAT_DIRECT | WaveNative.WAVE_FORMAT_QUERY);
                        if (waveResult == WaveNative.MMSYSERR_NOERROR)
                        {
                            if (m_Format.nAvgBytesPerSec > MaxThroughput)
                            {
                                MaxThroughput = m_Format.nAvgBytesPerSec;
                                MaxThroughputChannel = m_Format.nChannels;
                                MaxThroughputRate = m_Format.nSamplesPerSec;
                                MaxThroughputDepth = m_Format.wBitsPerSample;
                            }
                        }
                    }
                }
            }

            m_Format = new WaveFormat(MaxThroughputRate, MaxThroughputDepth, MaxThroughputChannel);
            m_Format.wFormatTag = (short)WaveFormats.Pcm;

            return m_Format;
        }
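The loop above relies on the WAVE_FORMAT_QUERY flag of waveInOpen, which asks the driver whether it supports a format without actually opening the device (MMSYSERR_NOERROR means "supported"). For readers who do not have the project's WaveNative wrapper, here is a minimal, self-contained sketch of the same probe using raw P/Invoke; the struct layout and constants follow the Win32 headers, and the helper name IsPcmFormatSupported is only illustrative, not part of any project shown here.

using System;
using System.Runtime.InteropServices;

static class FormatProbe
{
    // WAVEFORMATEX as declared in mmeapi.h (2-byte packing).
    [StructLayout(LayoutKind.Sequential, Pack = 2)]
    struct WAVEFORMATEX
    {
        public short wFormatTag;
        public short nChannels;
        public int nSamplesPerSec;
        public int nAvgBytesPerSec;
        public short nBlockAlign;
        public short wBitsPerSample;
        public short cbSize;
    }

    const short WAVE_FORMAT_PCM = 1;
    const int WAVE_MAPPER = -1;          // "any suitable device"
    const int WAVE_FORMAT_QUERY = 0x0001;
    const int MMSYSERR_NOERROR = 0;

    [DllImport("winmm.dll")]
    static extern int waveInOpen(out IntPtr hWaveIn, int uDeviceID,
        ref WAVEFORMATEX lpFormat, IntPtr dwCallback, IntPtr dwInstance, int dwFlags);

    // Returns true if the capture device (e.g. WAVE_MAPPER) accepts the given PCM format.
    public static bool IsPcmFormatSupported(int device, int rate, int bits, int channels)
    {
        var fmt = new WAVEFORMATEX
        {
            wFormatTag = WAVE_FORMAT_PCM,
            nChannels = (short)channels,
            nSamplesPerSec = rate,
            wBitsPerSample = (short)bits,
            nBlockAlign = (short)(channels * bits / 8),
            nAvgBytesPerSec = rate * channels * bits / 8,
            cbSize = 0
        };
        IntPtr dummy;
        int result = waveInOpen(out dummy, device, ref fmt,
            IntPtr.Zero, IntPtr.Zero, WAVE_FORMAT_QUERY);
        return result == MMSYSERR_NOERROR; // query mode: the device is not actually opened
    }
}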
Code Example #2
File: WmaStream.cs Project: arangas/MediaPortal-1
    /// <summary>
    /// Create WmaStream with a specific format for the uncompressed audio data.
    /// </summary>
    /// <param name="FileName">Name of the ASF file</param>
    /// <param name="OutputFormat">WaveFormat that defines the desired audio data format</param>
    public WmaStream(string FileName, WaveFormat OutputFormat)
    {
      m_Reader = WM.CreateSyncReader(WMT_RIGHTS.WMT_RIGHT_NO_DRM);
      try
      {
        m_Reader.Open(FileName);
        Init(OutputFormat);
      }
      catch
      {
        try
        {
          m_Reader.Close();
        }
        catch {}
        m_Reader = null;
        throw;
      }
    }
Code Example #3
File: WmaWriter.cs Project: Boerlam001/MonoGame
    /// <summary>
    /// Create the writer, specifying metadata information
    /// </summary>
    /// <param name="output"><see cref="System.IO.Stream"/> where the resulting WMA stream will be written</param>
    /// <param name="format">PCM format of input data received in <see cref="WmaWriter.Write"/> method</param>
    /// <param name="profile">IWMProfile that describes the resulting compressed stream</param>
    /// <param name="MetadataAttributes">Array of <see cref="Yeti.WMFSdk.WM_Attr"/> structures describing the metadata that will be included in the resulting stream</param>
    public WmaWriter(Stream output, WaveFormat format, IWMProfile profile, WM_Attr[] MetadataAttributes)
      : base(output, format)
    {
      m_Writer = WM.CreateWriter();
      IWMWriterAdvanced wa = (IWMWriterAdvanced)m_Writer;
      wa.AddSink((IWMWriterSink)this);
      m_Writer.SetProfile(profile);
      uint inputs;
      m_Writer.GetInputCount(out inputs);
      if (inputs == 1)
      {
        IWMInputMediaProps InpProps;
        Guid type;
        m_Writer.GetInputProps(0, out InpProps);
        InpProps.GetType(out type);
        if (type == MediaTypes.WMMEDIATYPE_Audio)
        {
          WM_MEDIA_TYPE mt;
          mt.majortype = MediaTypes.WMMEDIATYPE_Audio;
          mt.subtype = MediaTypes.WMMEDIASUBTYPE_PCM;
          mt.bFixedSizeSamples = true;
          mt.bTemporalCompression = false;
          mt.lSampleSize = (uint)m_InputDataFormat.nBlockAlign;
          mt.formattype = MediaTypes.WMFORMAT_WaveFormatEx;
          mt.pUnk = IntPtr.Zero;
          mt.cbFormat = (uint)Marshal.SizeOf(m_InputDataFormat);

          GCHandle h = GCHandle.Alloc(m_InputDataFormat, GCHandleType.Pinned);
          try
          {
            mt.pbFormat = h.AddrOfPinnedObject();
            InpProps.SetMediaType(ref mt);
          }
          finally
          {
            h.Free();
          }
          m_Writer.SetInputProps(0, InpProps);
          if ( MetadataAttributes != null )
          {
            WMHeaderInfo info = new WMHeaderInfo((IWMHeaderInfo)m_Writer);
            foreach(WM_Attr attr in MetadataAttributes)
            {
              info.SetAttribute(attr);
            }
            info = null;
          }
          m_Writer.BeginWriting();
          m_Profile = profile;
        }
        else
        {
          throw new ArgumentException("Invalid profile", "profile");
        }
      }
      else
      {
        throw new ArgumentException("Invalid profile", "profile");
      }
    }
Code Example #4
File: WaveIn.cs Project: PCaponetti/PClapper
        public WaveInRecorder(
			int device, 
			WaveFormat format, 
			int bufferSize, 
			int bufferCount, 
			BufferDoneEventHandler doneProc)
        {
            m_DoneProc = doneProc;
            WaveInHelper.Try(
                WaveNative.waveInOpen(
                    out m_WaveIn,
                    device, format,
                    m_BufferProc,
                    0,
                    WaveNative.CALLBACK_FUNCTION));
            AllocateBuffers(bufferSize, bufferCount);
            for (int i = 0; i < bufferCount; i++)
            {
                SelectNextBuffer();
                m_CurrentBuffer.Record();
            }
            WaveInHelper.Try(WaveNative.waveInStart(m_WaveIn));
            m_Thread = new Thread(new ThreadStart(ThreadProc));
            m_Thread.Start();
        }
Code Example #5
File: tRNG.cs Project: fernandolucasrodriguez/qit
 public tRNG(int buffer_size)
 {
     BufferSize = buffer_size;
     SoundDevice = WaveNative.WAVE_MAPPER;
     SoundFormat = CalculateMaxThroughputSoundFormat(SoundDevice);
     SoundBuffer = new List<byte>();
 }
Code Example #6
File: WriterConfig.cs Project: Boerlam001/MonoGame
 /// <summary>
 /// A constructor with this signature must be implemented by descendants.
 /// See <see cref="System.Runtime.Serialization.ISerializable"/> for more information.
 /// </summary>
 /// <param name="info">The <see cref="System.Runtime.Serialization.SerializationInfo"/> that holds the serialized data.</param>
 /// <param name="context">The source (see <see cref="System.Runtime.Serialization.StreamingContext"/>) for this serialization.</param>
 protected AudioWriterConfig(SerializationInfo info, StreamingContext context)
 {
   int rate = info.GetInt32("Format.Rate");
   int bits = info.GetInt32("Format.Bits");
   int channels = info.GetInt32("Format.Channels");
   m_Format = new WaveFormat(rate, bits, channels);
 }
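For context, the ISerializable contract implies a matching GetObjectData that writes the same keys this constructor reads back. That method is not part of the snippet above; the following is only a plausible sketch, assuming m_Format exposes the WaveFormat fields used elsewhere on this page.

 // Hypothetical counterpart (not from the project): store the same keys the
 // deserialization constructor above expects to find.
 public virtual void GetObjectData(SerializationInfo info, StreamingContext context)
 {
   info.AddValue("Format.Rate", (int)m_Format.nSamplesPerSec);
   info.AddValue("Format.Bits", (int)m_Format.wBitsPerSample);
   info.AddValue("Format.Channels", (int)m_Format.nChannels);
 }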
Code Example #7
File: WaveWriter.cs Project: arangas/MediaPortal-1
    public WaveWriter(Stream Output, WaveFormat Format, uint AudioDataSize)
      : base(Output, Format)
    {
      m_AudioDataSize = AudioDataSize;
      WriteWaveHeader();
    }
Code Example #8
    /// <summary>
    /// Create an instance of WmaWriterConfig specifying the writer input format and the output profile
    /// </summary>
    /// <param name="format">Input data format</param>
    /// <param name="profile">Output profile</param>
    public WmaWriterConfig(WaveFormat format, IWMProfile profile)
      : base(format)
    {
      WMProfile prf = new WMProfile(profile);
      m_ProfileData = prf.ProfileData;
    }
Code Example #9
File: WaveOut.cs Project: fernandolucasrodriguez/qit
 public WaveOutPlayer(int device, WaveFormat format, int bufferSize, int bufferCount, BufferFillEventHandler fillProc)
 {
     m_zero = format.wBitsPerSample == 8 ? (byte)128 : (byte)0; // silence value: 8-bit PCM is unsigned (midpoint 128), wider samples use 0
     m_FillProc = fillProc;
     WaveOutHelper.Try(WaveNative.waveOutOpen(out m_WaveOut, device, format, m_BufferProc, 0, WaveNative.CALLBACK_FUNCTION));
     AllocateBuffers(bufferSize, bufferCount);
     m_Thread = new Thread(new ThreadStart(ThreadProc));
     m_Thread.Start();
 }
Code Example #10
File: WaveWriter.cs Project: GhostTap/MonoGame
 public WaveWriter(Stream Output, WaveFormat Format)
   :base(Output, Format)
 {
   if ( !OutStream.CanSeek )
   {
     throw new ArgumentException("The stream must support seeking when AudioDataSize is not specified", "Output");
   }
   // Reserve room for the RIFF/WAVE header, which is written once the final data size is known.
   OutStream.Seek(WaveHeaderSize+8, SeekOrigin.Current);
 }
Code Example #11
File: AudioPlayer.cs Project: tjhorner/gtaivtools
        public void Initialize(AudioFile file, AudioWave wave)
        {
            _file = file;
            _wave = wave;

            _format = new WaveFormat(_wave.SamplesPerSecond, 16, 1);

            _lastBlock = -1;
            _looped = false;

            _state = new DviAdpcmDecoder.AdpcmState();
            _leftOverBuffer = null;
        }
Code Example #12
File: Player.cs Project: fernandolucasrodriguez/qit
 public void SetInput(MemoryStream input, WaveFormat format)
 {
     if (this.IsRunning)
         this.Stop();
     try
     {
         m_Format = format;
         m_AudioStream = input;
     }
     catch (Exception e)
     {
         Debug.WriteLine(e.Message + e.StackTrace);
     }
 }
Code Example #13
File: WaveStream.cs Project: shin527/cspspemu
		private void ReadHeader()
		{
			BinaryReader Reader = new BinaryReader(m_Stream);
			if (ReadChunk(Reader) != "RIFF")
				throw new Exception("Invalid file format");

			Reader.ReadInt32(); // File length minus first 8 bytes of RIFF description, we don't use it

			if (ReadChunk(Reader) != "WAVE")
				throw new Exception("Invalid file format");

			if (ReadChunk(Reader) != "fmt ")
				throw new Exception("Invalid file format");

			int len = Reader.ReadInt32();
			if (len < 16) // bad format chunk length
				throw new Exception("Invalid file format");

			m_Format = new WaveFormat(22050, 16, 2); // initialize to any format
			m_Format.wFormatTag = Reader.ReadInt16();
			m_Format.nChannels = Reader.ReadInt16();
			m_Format.nSamplesPerSec = Reader.ReadInt32();
			m_Format.nAvgBytesPerSec = Reader.ReadInt32();
			m_Format.nBlockAlign = Reader.ReadInt16();
			m_Format.wBitsPerSample = Reader.ReadInt16(); 

			// advance in the stream to skip the wave format block 
			len -= 16; // minimum format size
			while (len > 0)
			{
				Reader.ReadByte();
				len--;
			}

			// assume the data chunk is aligned
			while(m_Stream.Position < m_Stream.Length && ReadChunk(Reader) != "data")
				;

			if (m_Stream.Position >= m_Stream.Length)
				throw new Exception("Invalid file format");

			m_Length = Reader.ReadInt32();
			m_DataPos = m_Stream.Position;

			Position = 0;
		}
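ReadChunk is referenced by both WaveStream.ReadHeader variants on this page but is not included in the snippets. Given the comparisons against four-character RIFF identifiers ("RIFF", "WAVE", "fmt ", "data"), it presumably looks something like the sketch below; the name matches the calls above, but the body and visibility are assumptions.

		// Assumed helper: read a four-character RIFF chunk identifier and return it as ASCII.
		private string ReadChunk(BinaryReader reader)
		{
			byte[] ch = reader.ReadBytes(4);
			return System.Text.Encoding.ASCII.GetString(ch);
		}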
Code Example #14
File: WavePlayer.cs Project: crazyender/FFMPEG.net
        public void PutSample(WaveDataType type)
        {
            int ret;
            int size = type.size;
            int rate = type.sample_rate == 0 ? 44100 : type.sample_rate;
            int bit = type.bit_per_sample == 0 ? 16 : type.bit_per_sample;
            int channel = type.channel == 0 ? 2 : type.channel;
            if (waveOut == IntPtr.Zero)
            {
                WaveLib.WaveFormat fmt = new WaveLib.WaveFormat(rate, bit, channel);
                ret = WaveNative.waveOutOpen(out waveOut, -1, fmt, null, 0, WaveNative.CALLBACK_NULL);
                if (ret != WaveNative.MMSYSERR_NOERROR)
                    throw new Exception("can not open wave device");
            }


            queue.Enqueue(type);
        }
Code Example #15
File: WaveStream.cs Project: GhostTap/MonoGame
		public void ReadHeader()
		{
            Position = 0;

			BinaryReader Reader = new BinaryReader(m_Stream);
            var chunkId = ReadChunk(Reader);
            if (chunkId != "RIFF")
				throw new Exception("Invalid file format");

			Reader.ReadInt32(); // File length minus first 8 bytes of RIFF description, we don't use it

			if (ReadChunk(Reader) != "WAVE")
				throw new Exception("Invalid file format");

			if (ReadChunk(Reader) != "fmt ")
				throw new Exception("Invalid file format");

			int FormatLength = Reader.ReadInt32();
			if (FormatLength < 16) // bad format chunk length
				throw new Exception("Invalid file format");

			m_Format = new WaveFormat(22050, 16, 2); // initialize to any format
			m_Format.wFormatTag = Reader.ReadInt16();
			m_Format.nChannels = Reader.ReadInt16();
			m_Format.nSamplesPerSec = Reader.ReadInt32();
			m_Format.nAvgBytesPerSec = Reader.ReadInt32();
			m_Format.nBlockAlign = Reader.ReadInt16();
			m_Format.wBitsPerSample = Reader.ReadInt16(); 
			if (FormatLength > 16)
			{
				// skip any extra format bytes (cbSize and codec-specific extension data)
				m_Stream.Position += (FormatLength - 16);
			}
			// assume the data chunk is aligned
			while(m_Stream.Position < m_Stream.Length && ReadChunk(Reader) != "data")
				;

			if (m_Stream.Position >= m_Stream.Length)
				throw new Exception("Invalid file format");

			m_Length = Reader.ReadInt32();
			m_DataPos = m_Stream.Position;

			Position = 0;
		}
Code Example #16
File: Form1.cs Project: Dahrkael/toys
        public Form1()
        {
            InitializeComponent();

            format = new WaveFormat(SampleRate, BitRate, Canales);
            osc1 = new Oscilador();
            osc2 = new Oscilador();
            osc3 = new Oscilador();

            Osc1_TrackBarAmplitud.Value = (int)osc1.Amplitud;
            Osc2_TrackBarAmplitud.Value = (int)osc2.Amplitud;
            Osc3_TrackBarAmplitud.Value = (int)osc3.Amplitud;

            Osc1_TrackBarFrecuencia.Value = (int)osc1.Frecuencia;
            Osc2_TrackBarFrecuencia.Value = (int)osc2.Frecuencia;
            Osc3_TrackBarFrecuencia.Value = (int)osc3.Frecuencia;

            trackBarVolumen.Value = (int)(Volumen * 100);
        }
Code Example #17
File: Mp3Writer.cs Project: Boerlam001/MonoGame
 /// <summary>
 /// Create an Mp3Writer with a specific MP3 format
 /// </summary>
 /// <param name="Output">Stream that will hold the resulting MP3 data</param>
 /// <param name="InputDataFormat">PCM format of the input data</param>
 /// <param name="Mp3Config">Desired MP3 configuration</param>
 public Mp3Writer(Stream Output, WaveFormat InputDataFormat, BE_CONFIG Mp3Config)
   :base(Output, InputDataFormat)
 {
   try
   {
     m_Mp3Config = Mp3Config;
     uint LameResult = Lame_encDll.beInitStream(m_Mp3Config, ref m_InputSamples, ref m_OutBufferSize, ref m_hLameStream);
     if ( LameResult != Lame_encDll.BE_ERR_SUCCESSFUL)
     {
       throw new ApplicationException(string.Format("Lame_encDll.beInitStream failed with the error code {0}", LameResult));
     }
     m_InBuffer = new byte[m_InputSamples*2]; //Input buffer is expected as short[]
     m_OutBuffer = new byte[m_OutBufferSize];
   }
   catch
   {
     base.Close();
     throw;
   }
 }
Code Example #18
        public void PutSample(WaveDataType type)
        {
            int ret;
            int size    = type.size;
            int rate    = type.sample_rate == 0 ? 44100 : type.sample_rate;
            int bit     = type.bit_per_sample == 0 ? 16 : type.bit_per_sample;
            int channel = type.channel == 0 ? 2 : type.channel;

            if (waveOut == IntPtr.Zero)
            {
                WaveLib.WaveFormat fmt = new WaveLib.WaveFormat(rate, bit, channel);
                ret = WaveNative.waveOutOpen(out waveOut, -1, fmt, null, 0, WaveNative.CALLBACK_NULL);
                if (ret != WaveNative.MMSYSERR_NOERROR)
                {
                    throw new Exception("can not open wave device");
                }
            }


            queue.Enqueue(type);
        }
Code Example #19
        private void PlayUsingWaveOut(AudioFrame frame)
        {
            var type = frame.WaveDate;
            int ret;
            int size    = type.size;
            int rate    = type.sample_rate == 0 ? 44100 : type.sample_rate;
            int bit     = type.bit_per_sample == 0 ? 16 : type.bit_per_sample;
            int channel = type.channel == 0 ? 2 : type.channel;

            if (waveOut == IntPtr.Zero)
            {
                WaveLib.WaveFormat fmt = new WaveLib.WaveFormat(rate, bit, channel);
                ret = WaveNative.waveOutOpen(out waveOut, -1, fmt, null, 0, WaveNative.CALLBACK_NULL);
                if (ret != WaveNative.MMSYSERR_NOERROR)
                {
                    throw new Exception("can not open wave device");
                }
            }


            queue.Enqueue(type);
            //ret = WriteWaveOut(frame);
        }
Code Example #20
File: Program.cs Project: Grawp/opentx
        void Go()
        {
            int samplerate = 16000;
            int bits = 16;  // 8 or 16
            int channels = 1;  // 1 or 2

            filewriter = new WavFileWriter("out.wav", samplerate, bits, channels);

            WaveFormat fmt = new WaveFormat(samplerate, bits, channels);

            // device number, wave format, buffer size, callback
            int buffersize = 16384;
            WaveInRecorder rec = new WaveInRecorder(-1, fmt, buffersize, this.DataArrived);
            tmparray = new byte[buffersize];

            Console.WriteLine("Recording - press Enter to end");
            Console.ReadLine();
            rec.Close();

            filewriter.Close();

            Console.WriteLine("Bye");
        }
Code Example #21
File: Recorder.cs Project: master20151121/mmoments
        static public string Stop()
        {
            WaveLib.WaveFormat m_Format = new WaveLib.WaveFormat(44100, 16, 2);
            if (m_Recorder != null)
            {
                try
                {
                    // RIFF chunk size: length of the wave data plus the 36 header bytes that follow this field.
                    long chunksize = RecorderOutputStream.Length + 36;

                    // writing wave header and data
                    System.IO.BinaryWriter bw = new BinaryWriter(fs);

                    WriteChars(bw, "RIFF");
                    bw.Write((int)chunksize);
                    WriteChars(bw, "WAVEfmt ");
                    bw.Write((int)16);
                    bw.Write(m_Format.wFormatTag);
                    bw.Write(m_Format.nChannels);
                    bw.Write(m_Format.nSamplesPerSec);
                    bw.Write(m_Format.nAvgBytesPerSec);
                    bw.Write(m_Format.nBlockAlign);
                    bw.Write(m_Format.wBitsPerSample);
                    WriteChars(bw, "data");
                    bw.Write((int)RecorderOutputStream.Length); // data chunk size must be written as a 32-bit value
                    bw.Write(RecorderOutputStream.ToArray());
                    bw.Close();
                    fs.Close();
                    m_Recorder.Dispose();
                }
                finally
                {
                    m_Recorder = null;
                }
            }
            return filename_;
        }
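WriteChars is used above but not shown in the snippet. It cannot simply be BinaryWriter.Write(string), which would prepend a length prefix; a plausible helper writes the raw ASCII bytes of the RIFF tags instead (the body below is an assumption):

        // Assumed helper: write the tag characters as raw ASCII bytes (no length prefix).
        static void WriteChars(BinaryWriter writer, string text)
        {
            foreach (char c in text)
                writer.Write((byte)c); // e.g. "RIFF", "WAVEfmt ", "data"
        }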
Code Example #22
File: WaveIn.cs Project: zaeem/FlexCollab
        public WaveInRecorder(int device, WaveFormat format, int bufferSize, int bufferCount, BufferDoneEventHandler doneProc)
        {
            try
            {
                EncoderWorking = false;
                m_DoneProc = doneProc;
                WaveInHelper.Try(WaveNative.waveInOpen(out m_WaveIn, device, format, m_BufferProc, 0, WaveNative.CALLBACK_FUNCTION));
                AllocateBuffers(bufferSize, bufferCount);
                for (int i = 0; i < bufferCount; i++)
                {
                    SelectNextBuffer();
                    m_CurrentBuffer.Record();
                }
                WaveInHelper.Try(WaveNative.waveInStart(m_WaveIn));
                m_Thread = new Thread(new ThreadStart(ThreadProc));
                m_Thread.Name = "WaveInRecorder Thread :  ThreadProc()";
                m_Thread.Start();
                EncoderWorking = true;
            }
            catch (Exception)
            {
                // any failure during open/start is swallowed; EncoderWorking remains false
            }
        }
Code Example #23
File: Lame.cs Project: arangas/MediaPortal-1
 public ushort nQuality; // Quality setting: the HIGH byte must be the bitwise NOT of the LOW byte, otherwise quality defaults to 5
 // FUTURE USE, SET TO 0, align structure to 331 bytes
 //[ MarshalAs( UnmanagedType.ByValArray, SizeConst=255-4*4-2 )]
 //public byte[]   btReserved;//[255-4*sizeof(DWORD) - sizeof( WORD )];
 public LHV1(WaveFormat format, uint MpeBitRate)
 {
   if (format.wFormatTag != (short)WaveFormats.Pcm)
   {
     throw new ArgumentOutOfRangeException("format", "Only PCM format supported");
   }
   if (format.wBitsPerSample != 16)
   {
     throw new ArgumentOutOfRangeException("format", "Only 16 bits samples supported");
   }
   dwStructVersion = 1;
   dwStructSize = (uint)Marshal.SizeOf(typeof (BE_CONFIG));
   switch (format.nSamplesPerSec)
   {
     case 16000:
     case 22050:
     case 24000:
       dwMpegVersion = MPEG2;
       break;
     case 32000:
     case 44100:
     case 48000:
       dwMpegVersion = MPEG1;
       break;
     default:
       throw new ArgumentOutOfRangeException("format", "Unsupported sample rate");
   }
   dwSampleRate = (uint)format.nSamplesPerSec; // INPUT FREQUENCY
   dwReSampleRate = 0; // DON'T RESAMPLE
   switch (format.nChannels)
   {
     case 1:
       nMode = MpegMode.MONO;
       break;
     case 2:
       nMode = MpegMode.STEREO;
       break;
     default:
       throw new ArgumentOutOfRangeException("format", "Invalid number of channels");
   }
   switch (MpeBitRate)
   {
     case 32:
     case 40:
     case 48:
     case 56:
     case 64:
     case 80:
     case 96:
     case 112:
     case 128:
     case 160: //Allowed bit rates in MPEG1 and MPEG2
       break;
     case 192:
     case 224:
     case 256:
     case 320: //Allowed only in MPEG1
       if (dwMpegVersion != MPEG1)
       {
          throw new ArgumentOutOfRangeException("MpeBitRate", "Bit rate not compatible with input format");
       }
       break;
     case 8:
     case 16:
     case 24:
     case 144: //Allowed only in MPEG2
       if (dwMpegVersion != MPEG2)
       {
          throw new ArgumentOutOfRangeException("MpeBitRate", "Bit rate not compatible with input format");
       }
       break;
     default:
        throw new ArgumentOutOfRangeException("MpeBitRate", "Unsupported bit rate");
   }
   dwBitrate = MpeBitRate; // MINIMUM BIT RATE
   nPreset = LAME_QUALITY_PRESET.LQP_NORMAL_QUALITY; // QUALITY PRESET SETTING
   dwPsyModel = 0; // USE DEFAULT PSYCHOACOUSTIC MODEL 
   dwEmphasis = 0; // NO EMPHASIS TURNED ON
   bOriginal = 1; // SET ORIGINAL FLAG
   bWriteVBRHeader = 0;
    bNoRes = 0; // No bit reservoir
   bCopyright = 0;
   bCRC = 0;
   bEnableVBR = 0;
   bPrivate = 0;
   bStrictIso = 0;
   dwMaxBitrate = 0;
   dwVbrAbr_bps = 0;
   nQuality = 0;
   nVbrMethod = VBRMETHOD.VBR_METHOD_NONE;
   nVBRQuality = 0;
 }
Code Example #24
 public static extern int waveOutOpen(out IntPtr hWaveOut, int uDeviceID, WaveFormat lpFormat, WaveDelegate dwCallback, int dwInstance, int dwFlags);
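This declaration is shown without its attribute; as an extern P/Invoke it needs a [DllImport], and waveOutOpen lives in winmm.dll. The full declaration presumably reads as follows (the attribute is inferred, not part of the snippet):

 [DllImport("winmm.dll")]
 public static extern int waveOutOpen(out IntPtr hWaveOut, int uDeviceID, WaveFormat lpFormat, WaveDelegate dwCallback, int dwInstance, int dwFlags);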
Code Example #25
File: WmaWriter.cs Project: Boerlam001/MonoGame
 /// <summary>
 /// Create the writer without metadata information
 /// </summary>
 /// <param name="output"><see cref="System.IO.Stream"/> where the resulting WMA stream will be written</param>
 /// <param name="format">PCM format of input data received in <see cref="WmaWriter.Write"/> method</param>
 /// <param name="profile">IWMProfile that describes the resulting compressed stream</param>
 public WmaWriter(Stream output, WaveFormat format, IWMProfile profile)
   :this(output, format, profile, null)
 {
 }
Code Example #26
File: Recorder.cs Project: master20151121/mmoments
        static public void Start()
        {
            Stop();
            try
            {
                String filename = "recording";
                String ending = ".wav";

                // iterate the filename
                while (File.Exists(filename + counter + ending))
                {
                    counter++;
                }

                filename_ = filename + counter.ToString() + ending;
                // create a new file and wave recorder
                fs = new FileStream(filename + counter + ending, System.IO.FileMode.Create);
                RecorderOutputStream = new MemoryStream();
                WaveLib.WaveFormat fmt = new WaveLib.WaveFormat(44100, 16, 2);
                m_Recorder = new WaveLib.WaveInRecorder(-1, fmt, 16384, 3, new WaveLib.BufferDoneEventHandler(DataArrived));

            }
            catch
            {
                Stop();
                throw;
            }
        }
Code Example #27
File: Lame.cs Project: arangas/MediaPortal-1
 public BE_CONFIG(WaveFormat format)
   : this(format, 128) {}
Code Example #28
File: Lame.cs Project: arangas/MediaPortal-1
 public BE_CONFIG(WaveFormat format, uint MpeBitRate)
 {
   this.dwConfig = BE_CONFIG_LAME;
   this.format = new Format(format, MpeBitRate);
 }
Code Example #29
File: Lame.cs Project: arangas/MediaPortal-1
 public Format(WaveFormat format, uint MpeBitRate)
 {
   lhv1 = new LHV1(format, MpeBitRate);
 }
Code Example #30
File: WaveNative.cs Project: zaeem/FlexCollab
 public static extern int waveOutOpen(out IntPtr hWaveOut, int uDeviceID, WaveFormat lpFormat, WaveDelegate dwCallback, int dwInstance, int dwFlags);
Code Example #31
File: Mp3WriterConfig.cs Project: GhostTap/MonoGame
 public Mp3WriterConfig(WaveFormat InFormat, uint outputBitRate)
     : this(InFormat, new Lame.BE_CONFIG(InFormat, outputBitRate))
 {
 }
Code Example #32
File: Mp3WriterConfig.cs Project: GhostTap/MonoGame
 public Mp3WriterConfig(WaveFormat InFormat)
   :this(InFormat, new Lame.BE_CONFIG(InFormat))
 {
 }
Code Example #33
File: Mp3WriterConfig.cs Project: GhostTap/MonoGame
 public Mp3WriterConfig(WaveFormat InFormat, Lame.BE_CONFIG beconfig)
   :base(InFormat)
 {
   m_BeConfig = beconfig;
 }