Example #1
 public unsafe int EnumAudioEndpointsNative(DataFlow dataFlow, DeviceState stateMask, out IntPtr collection)
 {
     IntPtr pcollection;
     int result = InteropCalls.CallI(_basePtr, unchecked(dataFlow), unchecked(stateMask), &pcollection, ((void**)(*(void**)_basePtr))[3]);
     collection = pcollection;
     return result;
 }
Example #2
        int IMMNotificationClient.OnDefaultDeviceChanged(DataFlow flow, Role role, string id)
        {
            if (DefaultDeviceChanged != null)
                DefaultDeviceChanged(this, new DefaultDeviceChangedEventArgs(id, flow, role));

            return (int)HResult.S_OK;
        }
Example #3
 /// <summary>
 /// Returns the default audio endpoint for the specified data-flow direction and role.
 /// </summary>
 /// <param name="dataFlow">The data-flow direction for the endpoint device.</param>
 /// <param name="role">The role of the endpoint device.</param>
 /// <returns><see cref="MMDevice"/> instance of the endpoint object for the default audio endpoint device.</returns>
 public static MMDevice DefaultAudioEndpoint(DataFlow dataFlow, Role role)
 {
     using (var enumerator = new MMDeviceEnumerator())
     {
         return enumerator.GetDefaultAudioEndpoint(dataFlow, role);
     }
 }
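A minimal usage sketch for the static helper above, assuming the CSCore-style MMDeviceEnumerator, MMDevice, DataFlow and Role types shown in these examples; the chosen flow and role are illustrative only:
 // Hedged sketch: resolve the default render endpoint and print its friendly name.
 using (MMDevice defaultRender = MMDeviceEnumerator.DefaultAudioEndpoint(DataFlow.Render, Role.Multimedia))
 {
     Console.WriteLine("Default render endpoint: " + defaultRender.FriendlyName);
 }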
Example #4
 /// <summary>
 /// Generates a collection of audio endpoint devices that meet the specified criteria.
 /// </summary>
 /// <param name="dataFlow">The data-flow direction for the endpoint device.</param>
 /// <param name="stateMask">The state or states of the endpoints that are to be included in the collection.</param>
 /// <returns><see cref="MMDeviceCollection"/> which contains the enumerated devices.</returns>
 public static MMDeviceCollection EnumerateDevices(DataFlow dataFlow, DeviceState stateMask)
 {
     using (var enumerator = new MMDeviceEnumerator())
     {
         return enumerator.EnumAudioEndpoints(dataFlow, stateMask);
     }
 }
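As a sketch of how the helper above is typically consumed (same CSCore-style assumptions as the previous sketch), enumerating all active capture endpoints could look like this:
 // Hedged sketch: list the friendly names of all active capture devices.
 foreach (MMDevice device in MMDeviceEnumerator.EnumerateDevices(DataFlow.Capture, DeviceState.Active))
 {
     Console.WriteLine(device.FriendlyName);
 }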
Example #5
 /// <summary>
 /// Generates a collection of audio endpoint devices that meet the specified criteria.
 /// </summary>
 /// <param name="dataFlow">The data-flow direction for the endpoint device.</param>
 /// <param name="stateMask">The state or states of the endpoints that are to be included in the collection.</param>
 /// <returns><see cref="MMDeviceCollection"/> which contains the enumerated devices.</returns>
 public MMDeviceCollection EnumAudioEndpoints(DataFlow dataFlow, DeviceState stateMask)
 {
     IntPtr pcollection;
     CoreAudioAPIException.Try(EnumAudioEndpointsNative(dataFlow, stateMask, out pcollection), InterfaceName,
         "EnumAudioEndpoints");
     return new MMDeviceCollection(pcollection);
 }
Example #6
 public unsafe int GetDefaultAudioEndpointNative(DataFlow dataFlow, Role role, out IntPtr device)
 {
     IntPtr pdevice;
     int result = InteropCalls.CallI(_basePtr, unchecked(dataFlow), unchecked(role), &pdevice, ((void**)(*(void**)_basePtr))[4]);
     device = pdevice;
     return result;
 }
Example #7
 private AudioSessionManager2 GetDefaultAudioSessionManager2(DataFlow dataFlow)
 {
     using (var enumerator = new MMDeviceEnumerator())
     {
         using (var device = enumerator.GetDefaultAudioEndpoint(dataFlow, Role.Multimedia))
         {
             var sessionManager = AudioSessionManager2.FromMMDevice(device);
             return sessionManager;
         }
     }
 }
Example #8
        private int SearchDeviceNumber(DataFlow dataFlow, string friendlyName)
        {
            int returnValue = 0;

            for (int i = 0; i < WaveIn.DeviceCount; i++)
            {
                if (WaveIn.GetCapabilities(i).ProductName.Equals(friendlyName, StringComparison.OrdinalIgnoreCase))
                {
                    returnValue = i;
                }
            }

            return returnValue;
        }
Example #9
 /// <summary>
 /// Check to see if a default audio end point exists without needing an exception.
 /// </summary>
 /// <param name="dataFlow">Data Flow</param>
 /// <param name="role">Role</param>
 /// <returns>True if one exists, and false if one does not exist.</returns>
 public bool HasDefaultAudioEndpoint(DataFlow dataFlow, Role role)
 {
     const int E_NOTFOUND = unchecked((int)0x80070490);
     IMMDevice device = null;
     int hresult = ((IMMDeviceEnumerator)realEnumerator).GetDefaultAudioEndpoint(dataFlow, role, out device);
     if (hresult == 0x0)
     {
         Marshal.ReleaseComObject(device);
         return true;
     }
     if (hresult == E_NOTFOUND)
     {
         return false;
     }
     Marshal.ThrowExceptionForHR(hresult);
     return false;
 }
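A hedged sketch of combining the guard above with GetDefaultAudioEndpoint (assuming the NAudio-style MMDeviceEnumerator wrapper used in these examples; the local enumerator variable is hypothetical):
 var enumerator = new MMDeviceEnumerator();
 if (enumerator.HasDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia))
 {
     var device = enumerator.GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
     Console.WriteLine("Default render device: " + device.FriendlyName);
 }
 else
 {
     Console.WriteLine("No default render device is present.");
 }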
Example #10
			public void Analyze(ControlFlowGraph graph)
			{				
				DBC.Pre(graph != null, "graph is null");

				Profile.Start("Splicing");
				var visited = new List<BasicBlock>();
				foreach (BasicBlock root in graph.Roots)
					DoSpliceHandlers(m_instructions, root, visited);

				visited.Clear();
				foreach (BasicBlock root in graph.Roots)
					DoSpliceNullCheck(m_instructions, root, visited);
				var data = new DataFlow<Lattice>(m_instructions, graph.Roots);
				m_skipped = data.Skipped;
				Profile.Stop("Splicing");
				
				var functions = new Lattice.Functions();
				Dictionary<BasicBlock, Lattice> lattices = data.Analyze(functions, m_initialState);
				
				Profile.Start("Post Transform");
				m_states = new State[m_instructions.Length];
				foreach (var entry in lattices)
				{
					BasicBlock block = entry.Key;
					if (block.Length > 0)
					{
						Lattice lattice = entry.Value;
						
						for (int index = block.First.Index; index <= block.Last.Index; ++index)	// it'd be nice to assert that every index was set, but methods often have dead code so it's a little difficult
						{
							m_states[index] = lattice.State;
							lattice = lattice.Transform(index);
						}
					}
				}
				Profile.Stop("Post Transform");
				
				for (int index = 0; index < m_instructions.Length; ++index)
				{
					Log.DebugLine(this, "{0:X2}: {1}", m_instructions[index].Untyped.Offset, m_states[index]);
				}
			}
Example #11
 /// <summary>Creates a new reference to the audio device with the given ID with the given flow direction.</summary>
 public AudioDeviceProxy(string deviceId, DataFlow flow)
 {
     Flow     = flow;
     DeviceId = deviceId ?? DEFAULT_DEVICE_ID;
     deviceEnumerator.RegisterEndpointNotificationCallback(this);
 }
Example #12
 public int GetDefaultDeviceNumber(DataFlow dataflow)
 {
      return SearchDeviceNumber(DataFlow.Capture, deviceEnum.GetDefaultAudioEndpoint(dataflow, Role.Multimedia).FriendlyName);
 }
Example #13
 public void OnDefaultDeviceChanged(DataFlow flow, Role role, string defaultDeviceId)
 {
     var handler = this.DefaultDeviceChanged;
     if (handler != null)
     {
         handler(this, new MMNotificationEventArgs(flow, role));
     }
 }
Example #14
 void IMMNotificationClient.OnDefaultDeviceChanged(DataFlow dataFlow, Role deviceRole, string defaultDeviceId)
 {
     if (deviceRole == Role && dataFlow == DataFlow)
         OnDefaultDeviceChanged();
 }
Example #15
 public DeviceInfo(string name, string id, DataFlow type)
 {
     Name = name;
     Id   = id;
     Type = type;
 }
Example #16
 void IMMNotificationClient.OnDefaultDeviceChanged(DataFlow flow, Role role, string defaultDeviceId)
 {
     ReloadDevices();
 }
Example #17
        private void LoadAudioDevices(DataFlow _dataFlow, DeviceState _deviceState)
        {
            ClearAudioDevices();
            try
            {
                // Instantiate an Enumerator to find audio devices
                MMDeviceEnumerator device_enumerator = new MMDeviceEnumerator();

                // Get list of devices based on parameters
                MMDeviceCollection device_collection = device_enumerator.EnumerateAudioEndPoints(_dataFlow, _deviceState);

                // Loop through devices
                foreach (MMDevice device in device_collection)
                {
                    try
                    {
                        // Creates a managed audio device and creates identifying strings for the device
                        AudioDevice managed_device = new AudioDevice(device);

                        // Create stack panel to hold other elements
                        StackPanel panel = new StackPanel();
                        panel.Orientation = Orientation.Horizontal;

                        // Create progress bar to show device volume
                        ProgressBar progbar = new ProgressBar();
                        progbar.Width = 150;

                        // Create volume label to show device volume
                        Label volume_label = new Label();
                        volume_label.Content = "0.00";

                        // Create friendly label for easy identification of device
                        Label friendly_label = new Label();
                        friendly_label.Content = device.FriendlyName;

                        // Add elements to StackPanel
                        panel.Children.Add(progbar);
                        panel.Children.Add(volume_label);
                        panel.Children.Add(friendly_label);

                        // Add StackPanel to main Devices StackPanel
                        Devices.Children.Add(panel);

                        // Register names
                        RegisterName(managed_device.md5PanelName, panel);
                        RegisterName(managed_device.md5ProgbarName, progbar);
                        RegisterName(managed_device.md5VolumeLabel, volume_label);
                        RegisterName(managed_device.md5FriendlyLabel, friendly_label);

                        // Add managed audio device to devices list
                        activeDevices.Add(managed_device);
                    }
                    catch (Exception ex)
                    {
                        // Do something with exception when an audio endpoint could not be loaded
                        System.Diagnostics.Debug.Print(device.FriendlyName + " could not be loaded: " + ex.Message);
                    }
                }

                // TODO : Use event driven code for this
                DispatcherTimer tmr = new DispatcherTimer();
                tmr.Tick += new EventHandler(UpdateDeviceVolume);
                tmr.Interval = new TimeSpan(0, 0, 0, 0, 75);
                tmr.Start();
            }
            catch (Exception ex)
            {
                // Something happened that prevents us enumerating through the device list
                System.Diagnostics.Debug.Print("Could not enumerate devices due to an exception: " + ex.Message);
            }
        }
Example #18
 public void GetDefaultAudioEndpoint(DataFlow dataFlow, Role role, out IMMDevice device)
 {
     _inner.GetDefaultAudioEndpoint(dataFlow, role, out device);
 }
Example #19
 public DeviceFullInfo(string name, string id, DataFlow type, string iconPath, DeviceState state, bool isUsb) : base(name, id, type, isUsb)
 {
     IconPath = iconPath;
     State    = state;
 }
Example #20
        /// <summary>
        /// Get MMDevices for this system
        /// </summary>
        /// <param name="df">DataFlow.Capture for Input (Microphones), DataFlow.Render for Output (Soundcards), DataFlow.All for all</param>
        /// <param name="ds">Which DeviceStates to show</param>
        /// <returns>List of <see cref="MMDevice"/> instances that match the given criteria</returns>
        public List<MMDevice> GetDevices(DataFlow df = DataFlow.All, DeviceState ds = DeviceState.Active)
        {
            List<MMDevice> devices = new List<MMDevice>();

            MMDeviceEnumerator enumerator = new MMDeviceEnumerator();
            foreach (MMDevice device in enumerator.EnumerateAudioEndPoints(df, ds))
            {
                devices.Add(device);
            }

            return devices;
        }
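A short usage sketch for the helper above; the snippet does not show its containing class, so the audioHelper instance below is hypothetical:
 // Hedged sketch: print every active output (render) device returned by GetDevices.
 foreach (MMDevice device in audioHelper.GetDevices(DataFlow.Render, DeviceState.Active))
 {
     Console.WriteLine(device.FriendlyName);
 }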
Example #21
 private AudioSessionManager2 GetDefaultAudioSessionManager2(DataFlow dataFlow)
 {
     using (var enumerator = new MMDeviceEnumerator())
     {
         using (var device = enumerator.GetDefaultAudioEndpoint(dataFlow, Role.Multimedia))
         {
             Debug.WriteLine("DefaultDevice: " + device.FriendlyName);
             var sessionManager = AudioSessionManager2.FromMMDevice(device);
             return sessionManager;
         }
     }
 }
Example #22
 private static AudioDevice GetDefaultDevice(DataFlow dataFlow)
 {
     var imde = MMDeviceEnumerator.Current;
     IMMDevice immd;
     imde.GetDefaultAudioEndpoint(dataFlow, Role.Multimedia, out immd);
     return new AudioDevice(immd);
 }
Example #23
        /// <summary>
        /// The OnDefaultDeviceChanged method notifies the client that the default audio endpoint
        /// device for a particular device role has changed.
        /// </summary>
        /// <param name="dataFlow">The data-flow direction of the endpoint device.</param>
        /// <param name="role">The device role of the audio endpoint device.</param>
        /// <param name="deviceId">The device id that identifies the audio endpoint device.</param>
        /// <returns>HRESULT</returns>
        void IMMNotificationClient.OnDefaultDeviceChanged(DataFlow dataFlow, Role role, string deviceId)
        {
            if (DefaultDeviceChanged != null)
                DefaultDeviceChanged(this, new DefaultDeviceChangedEventArgs(deviceId, dataFlow, role));

            //return (int) HResult.S_OK;
        }
Example #24
		/// <summary>
		/// Get Default Endpoint
		/// </summary>
		/// <param name="dataFlow">Data Flow</param>
		/// <param name="role">Role</param>
		/// <returns>Device</returns>
		public MMDevice GetDefaultAudioEndpoint(DataFlow dataFlow, Role role)
		{
			IMMDevice _Device = null;
			Marshal.ThrowExceptionForHR((_realEnumerator).GetDefaultAudioEndpoint(dataFlow, role, out _Device));
			return new MMDevice(_Device);
		}
Example #25
        public void Record(string deviceName, string audioFilePath = @"C:\Temp\output.wav")
        {
            _timer = new Stopwatch();
            _timer.Start();

            // choose the capture mode
            CaptureMode captureMode = CaptureMode.LoopbackCapture;
            DataFlow    dataFlow    = captureMode == CaptureMode.Capture ? DataFlow.Capture : DataFlow.Render;

            //select the device:
            var devices = MMDeviceEnumerator.EnumerateDevices(dataFlow, DeviceState.Active);

            if (!devices.Any())
            {
                Console.WriteLine("### No devices found.");
                return;
            }

            Console.WriteLine($"### Using device {deviceName}");
            var device = devices.First(d => d.FriendlyName.Equals(deviceName));

            //start recording
            //create a new soundIn instance
            _soundIn = captureMode == CaptureMode.Capture
                ? new WasapiCapture()
                : new WasapiLoopbackCapture();


            //optional: set some properties
            _soundIn.Device = device;


            //initialize the soundIn instance
            _soundIn.Initialize();

            //create a SoundSource around the soundIn instance
            //this SoundSource will provide data, captured by the soundIn instance
            SoundInSource soundInSource = new SoundInSource(_soundIn)
            {
                FillWithZeros = false
            };

            //create a source, that converts the data provided by the
            //soundInSource to any other format
            //in this case the "Fluent"-extension methods are being used
            _convertedSource = soundInSource
                               .ChangeSampleRate(SampleRate) // sample rate
                               .ToSampleSource()
                               .ToWaveSource(BitsPerSample); //bits per sample

            //channels...
            _convertedSource = _convertedSource.ToMono();

            //create a new wavefile
            _waveWriter = new WaveWriter(audioFilePath, _convertedSource.WaveFormat);

            //register an event handler for the DataAvailable event of
            //the soundInSource
            //Important: use the DataAvailable of the SoundInSource
            //If you use the DataAvailable event of the ISoundIn itself
            //the data recorded by that event might not be available at the
            //soundInSource yet
            soundInSource.DataAvailable += (s, e) =>
            {
                //read data from the convertedSource
                //important: don't use the e.Data here
                //the e.Data contains the raw data provided by the
                //soundInSource which won't have your target format
                byte[] buffer = new byte[_convertedSource.WaveFormat.BytesPerSecond / 2];
                int    read;

                //keep reading as long as we still get some data
                //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
                while ((read = _convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //write the read data to a file
                    // ReSharper disable once AccessToDisposedClosure
                    _waveWriter.Write(buffer, 0, read);
                }
            };

            //we've set everything we need -> start capturing data
            _soundIn.Start();
            Console.WriteLine($"### RECORDING {audioFilePath}");

            while (_timer.ElapsedMilliseconds / 1000 < 15 && _timer.IsRunning)
            {
                Thread.Sleep(500);
            }

            Console.WriteLine("### STOP RECORDING");
            _soundIn.Stop();
            _timer.Stop();

            _waveWriter.Dispose();
            _convertedSource.Dispose();
            _soundIn.Dispose();

            AudioFileCaptured?.Invoke(this, new AudioRecorderEventArgs()
            {
                AudioFilePath = audioFilePath
            });
        }
Example #26
        /// <summary>
        /// The OnDefaultDeviceChanged method notifies the client that the default audio endpoint
        /// device for a particular device role has changed.
        /// </summary>
        /// <param name="dataFlow">The data-flow direction of the endpoint device.</param>
        /// <param name="role">The device role of the audio endpoint device.</param>
        /// <param name="deviceId">The device id that identifies the audio endpoint device.</param>
        /// <returns>HRESULT</returns>
        void IMMNotificationClient.OnDefaultDeviceChanged(DataFlow dataFlow, Role role, string deviceId)
        {
            DefaultDeviceChanged?.Invoke(this, new DefaultDeviceChangedEventArgs(deviceId, dataFlow, role));

            //return (int) HResult.S_OK;
        }
Example #27
 // Audio Devices Events
 public void OnDefaultDeviceChanged(DataFlow flow, Role role, string defaultDeviceId)
 {
     Initialize(); Log($"OnDefaultDeviceChanged {defaultDeviceId}");
 }
Example #28
 internal static unsafe int CallI(void* _basePtr, DataFlow dataFlow, Role role, IntPtr* intPtr, void* p)
 {
     throw new NotImplementedException();
 }
Example #29
 private static IDataView LoadDprepFile(string pythonPath, string path)
 {
     DPrepSettings.Instance.PythonPath = pythonPath;
      return DataFlow.FromDPrepFile(path).ToDataView();
 }
Example #30
 internal static unsafe int Calli(void *_basePtr1, DataFlow dataFlow, DeviceState stateMask, IntPtr *intPtr,
                                  void *_basePtr2)
 {
     throw new NotImplementedException();
 }
Example #31
 /// <summary>
 /// Cycle the audio device
 /// </summary>
 /// <param name="type"></param>
 public bool CycleDevice(DataFlow type)
 {
      return _deviceCyclerFactory.Get(CurrentCycler).CycleAudioDevice(type);
 }
Example #32
 /// <summary>Creates a new reference to the audio device with the given ID with the given flow direction.</summary>
 public AudioDeviceProxy(string deviceId, DataFlow flow)
 {
     Flow     = flow;
     DeviceId = deviceId ?? DEFAULT_DEVICE_ID;
 }
Example #33
 public void Dispose()
 {
     DropTableTask.Drop(SqlConnection, ControlFlow.LogTable);
     ControlFlow.ClearSettings();
     DataFlow.ClearSettings();
 }
Example #34
 public DeviceListChanged(IEnumerable<DeviceInfo> seletedDevicesList, DataFlow type)
 {
     SeletedDevicesList = seletedDevicesList;
     Type = type;
 }
Example #35
 internal static unsafe int CallI(void* _basePtr1, DataFlow dataFlow, DeviceState stateMask, IntPtr* intPtr,
     void* _basePtr2)
 {
     throw new NotImplementedException();
 }
Example #36
        // ReSharper disable once UnusedParameter.Local
        static void Main(string[] args)
        {
            //choose the capture mode
            Console.WriteLine("Select capturing mode:");
            Console.WriteLine("- 1: Capture");
            Console.WriteLine("- 2: LoopbackCapture");

            CaptureMode captureMode = (CaptureMode)ReadInteger(1, 2);
            DataFlow    dataFlow    = captureMode == CaptureMode.Capture ? DataFlow.Capture : DataFlow.Render;

            //---

            //select the device:
            var devices = MMDeviceEnumerator.EnumerateDevices(dataFlow, DeviceState.Active);

            if (!devices.Any())
            {
                Console.WriteLine("No devices found.");
                return;
            }

            Console.WriteLine("Select device:");
            for (int i = 0; i < devices.Count; i++)
            {
                Console.WriteLine("- {0:#00}: {1}", i, devices[i].FriendlyName);
            }
            int selectedDeviceIndex = ReadInteger(Enumerable.Range(0, devices.Count).ToArray());
            var device = devices[selectedDeviceIndex];

            //--- choose format
            Console.WriteLine("Enter sample rate:");
            int sampleRate;

            do
            {
                sampleRate = ReadInteger();
                if (sampleRate >= 100 && sampleRate <= 200000)
                {
                    break;
                }
                Console.WriteLine("Must be between 1kHz and 200kHz.");
            } while (true);

            Console.WriteLine("Choose bits per sample (8, 16, 24 or 32):");
            int bitsPerSample = ReadInteger(8, 16, 24, 32);

            //note: this sample does not support multi channel formats like surround 5.1,...
            //if this is required, the DmoChannelResampler class can be used
            Console.WriteLine("Choose number of channels (1, 2):");
            int channels = ReadInteger(1, 2);

            //---

            //start recording

            //create a new soundIn instance
            using (WasapiCapture soundIn = captureMode == CaptureMode.Capture
                ? new WasapiCapture()
                : new WasapiLoopbackCapture())
            {
                //optional: set some properties
                soundIn.Device = device;
                //...

                //initialize the soundIn instance
                soundIn.Initialize();

                //create a SoundSource around the soundIn instance
                //this SoundSource will provide data, captured by the soundIn instance
                SoundInSource soundInSource = new SoundInSource(soundIn)
                {
                    FillWithZeros = false
                };

                //create a source, that converts the data provided by the
                //soundInSource to any other format
                //in this case the "Fluent"-extension methods are being used
                IWaveSource convertedSource = soundInSource
                                              .ChangeSampleRate(sampleRate) // sample rate
                                              .ToSampleSource()
                                              .ToWaveSource(bitsPerSample); //bits per sample

                //channels...
                using (convertedSource = channels == 1 ? convertedSource.ToMono() : convertedSource.ToStereo())
                {
                    //create a new wavefile
                    using (WaveWriter waveWriter = new WaveWriter("out.wav", convertedSource.WaveFormat))
                    {
                        //register an event handler for the DataAvailable event of
                        //the soundInSource
                        //Important: use the DataAvailable of the SoundInSource
                        //If you use the DataAvailable event of the ISoundIn itself
                        //the data recorded by that event might not be available at the
                        //soundInSource yet
                        soundInSource.DataAvailable += (s, e) =>
                        {
                            //read data from the convertedSource
                            //important: don't use the e.Data here
                            //the e.Data contains the raw data provided by the
                            //soundInSource which won't have your target format
                            byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                            int    read;

                            //keep reading as long as we still get some data
                            //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
                            while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                            {
                                //write the read data to a file
                                // ReSharper disable once AccessToDisposedClosure
                                waveWriter.Write(buffer, 0, read);
                            }
                        };

                        //we've set everything we need -> start capturing data
                        soundIn.Start();

                        Console.WriteLine("Capturing started ... press any key to stop.");
                        Console.ReadKey();

                        soundIn.Stop();
                    }
                }
            }

            Process.Start("out.wav");
        }
Example #37
 internal static unsafe int Calli(void *_basePtr, DataFlow dataFlow, Role role, IntPtr *intPtr, void *p)
 {
     throw new NotImplementedException();
 }
Example #38
 public RowCount AddComponent(DataFlow dataFlowTask)
 {
     base.AddComponent(dataFlowTask.SSISObject, "DTSTransform.RowCount.3");
     ComponentWrapper.SetComponentProperty("VariableName", Variable.QualifiedName);
      return this;
 }
Example #39
 /// <summary>
 /// Returns the default audio endpoint for the specified data-flow direction and role.
 /// </summary>
 /// <param name="dataFlow">The data-flow direction for the endpoint device.</param>
 /// <param name="role">The role of the endpoint device.</param>
 /// <returns><see cref="MMDevice"/> instance of the endpoint object for the default audio endpoint device.</returns>
 public MMDevice GetDefaultAudioEndpoint(DataFlow dataFlow, Role role)
 {
     IntPtr ptr;
     CoreAudioAPIException.Try(GetDefaultAudioEndpointNative(dataFlow, role, out ptr), InterfaceName,
         "GetDefaultAudioEndpoint");
     return new MMDevice(ptr);
 }
Example #40
 private static IEnumerable<string> GetFullDeviceNames(DataFlow flow)
 {
     return new MMDeviceEnumerator()
            .EnumerateAudioEndPoints(flow, DeviceState.Active)
            .Select((device, id) => device.FriendlyName);
 }
Example #41
 /// <summary>
 /// Generates a collection of all active audio endpoint devices that meet the specified criteria.
 /// </summary>
 /// <param name="dataFlow">The data-flow direction for the endpoint device.</param>
 /// <returns><see cref="MMDeviceCollection"/> which contains the enumerated devices.</returns>
 public static MMDeviceCollection EnumerateDevices(DataFlow dataFlow)
 {
     return EnumerateDevices(dataFlow, DeviceState.All);
 }
Example #42
 public IMMDevice GetDefaultAudioEndpoint(DataFlow dataFlow, Role role)
 {
     GetDefaultAudioEndpoint(dataFlow, role, out IMMDevice endPoint).CheckError();
      return endPoint;
 }
Example #43
 /// <summary>
 /// Returns the default audio endpoint for the specified data-flow direction and role. If no device is available the <see cref="TryGetDefaultAudioEndpoint"/> method returns null.
 /// </summary>
 /// <param name="dataFlow">The data-flow direction for the endpoint device.</param>
 /// <param name="role">The role of the endpoint device.</param>
 /// <returns><see cref="MMDevice"/> instance of the endpoint object for the default audio endpoint device. If no device is available the <see cref="TryGetDefaultAudioEndpoint"/> method returns null.</returns>
 public static MMDevice TryGetDefaultAudioEndpoint(DataFlow dataFlow, Role role)
 {
     try
     {
         return DefaultAudioEndpoint(dataFlow, role);
     }
     catch (CoreAudioAPIException exception)
     {
         if (exception.ErrorCode == (int)HResult.E_NOTFOUND)
         {
             return null;
         }
         throw;
     }
 }
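A brief sketch of the null-check pattern this helper enables (types and helpers as in the CSCore-style examples above):
 MMDevice device = MMDeviceEnumerator.TryGetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
 if (device == null)
 {
     Console.WriteLine("No default render endpoint is available.");
 }
 else
 {
     Console.WriteLine("Default render endpoint: " + device.FriendlyName);
 }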
Example #44
            private static int GetDefaultAudioEndpoint(IMMDeviceEnumerator self, DataFlow dataflow, Role role,
                out IntPtr ppendpoint)
            {
                var entryPoint = HookRuntimeInfo.Callback as EntryPoint;

                if (entryPoint == null || entryPoint.Interface == null)
                    return self.GetDefaultAudioEndpoint(dataflow, role, out ppendpoint);

                var remoteInterface = entryPoint.Interface;

                try
                {
                    var devId = remoteInterface.GetDefaultDevice(dataflow, role);
                    return self.GetDevice(devId, out ppendpoint);
                }
                catch (Exception ex)
                {
                    remoteInterface.ReportError(RemoteHooking.GetCurrentProcessId(), ex);
                    //Something failed so return the actual default device
                    return self.GetDefaultAudioEndpoint(dataflow, role, out ppendpoint);
                }
            }
Example #45
 /// <summary>Creates a new reference to the default audio device with the given flow direction.</summary>
 public AudioDeviceProxy(DataFlow flow) : this(DEFAULT_DEVICE_ID, flow)
 {
 }
Example #46
            public string GetDefaultDevice(DataFlow dataFlow, Role role)
            {
                if (SystemId == null)
                    return String.Empty;

                return SystemId(dataFlow, role);
            }
Example #47
		/// <summary>
		/// Enumerate Audio Endpoints
		/// </summary>
		/// <param name="dataFlow">Desired DataFlow</param>
		/// <param name="dwStateMask">State Mask</param>
		/// <returns>Device Collection</returns>
		public MMDeviceCollection EnumerateAudioEndPoints(DataFlow dataFlow, DeviceState dwStateMask)
		{
			IMMDeviceCollection result;
			Marshal.ThrowExceptionForHR(_realEnumerator.EnumAudioEndpoints(dataFlow, dwStateMask, out result));
			return new MMDeviceCollection(result);
		}
Example #48
        public void OnDefaultDeviceChanged(DataFlow flow, Role role, string defaultDeviceId)
        {
            var device = _enumerator.GetDevice(defaultDeviceId);

            DefaultDeviceChanged?.Invoke(this, new DeviceDefaultChangedEvent(device, (DeviceRole)role));
        }
Example #49
 public static DeviceFlow ToDeviceFlow(this DataFlow d)
 {
      return d == DataFlow.Capture ? DeviceFlow.Input : DeviceFlow.Output;
 }
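A one-line usage sketch of the extension above; DeviceFlow is the project-specific enum referenced by the snippet:
 DeviceFlow inputFlow  = DataFlow.Capture.ToDeviceFlow(); // DeviceFlow.Input
 DeviceFlow outputFlow = DataFlow.Render.ToDeviceFlow();  // DeviceFlow.Output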
Example #50
 public void OnDefaultDeviceChanged(DataFlow flow, Role role, string defaultDeviceId)
 {
     CacheList = null;
     InnerLogger.Info("OnDefaultDeviceChanged --> {0}", flow.ToString());
 }
Example #51
 public TestAudioDevice(Guid id, DataFlow dFlow, IDeviceEnumerator enumerator)
     : base(enumerator)
 {
 }
Example #52
        private void StartCapture(string fileName)
        {
            //Capture Mode
            CaptureMode = (CaptureMode)1;
            DataFlow dataFlow = CaptureMode == CaptureMode.Capture ? DataFlow.Capture : DataFlow.Render;
            //

            //Getting the audio devices from the enumerator
            var devices = MMDeviceEnumerator.EnumerateDevices(dataFlow, DeviceState.Active);

            if (!devices.Any())
            {
                MessageBox.Show("No devices found.");
                return;
            }

            int selectedDeviceIndex = 0;

            SelectedDevice = devices[selectedDeviceIndex];

            if (SelectedDevice == null)
            {
                return;
            }

            if (CaptureMode == CaptureMode.Capture)
            {
                _soundIn = new WasapiCapture();
            }
            else
            {
                _soundIn = new WasapiLoopbackCapture();
            }

            _soundIn.Device = SelectedDevice;

            //Sample rate of audio
            int sampleRate = 16000;
            //bits per sample
            int bitsPerSample = 16;
            //channels
            int channels = 1;


            //initialize the soundIn instance
            _soundIn.Initialize();

            //create a SoundSource around the soundIn instance
            //this SoundSource will provide data, captured by the soundIn instance
            var soundInSource = new SoundInSource(_soundIn)
            {
                FillWithZeros = false
            };

            //create a source, that converts the data provided by the
            //soundInSource to any other format
            //in this case the "Fluent"-extension methods are being used
            IWaveSource convertedSource = soundInSource
                                          .ChangeSampleRate(sampleRate) // sample rate
                                          .ToSampleSource()
                                          .ToWaveSource(bitsPerSample); //bits per sample

            //channels = 1, so we need to create mono audio
            convertedSource = convertedSource.ToMono();

            AudioToText audioToText = new AudioToText();

            audioToText.SetFolderPermission(_folderPath);

            //create a new wavefile
            waveWriter = new WaveWriter(fileName, convertedSource.WaveFormat);
            //register an event handler for the DataAvailable event of
            //the soundInSource
            //Important: use the DataAvailable of the SoundInSource
            //If you use the DataAvailable event of the ISoundIn itself
            //the data recorded by that event might not be available at the
            //soundInSource yet
            soundInSource.DataAvailable += (s, e) =>
            {
                //read data from the convertedSource
                //important: don't use the e.Data here
                //the e.Data contains the raw data provided by the
                //soundInSource which won't have your target format
                byte[] buffer = new byte[convertedSource.WaveFormat.BytesPerSecond / 2];
                int    read;

                //keep reading as long as we still get some data
                //if you're using such a loop, make sure that soundInSource.FillWithZeros is set to false
                while ((read = convertedSource.Read(buffer, 0, buffer.Length)) > 0)
                {
                    //write the read data to a file
                    // ReSharper disable once AccessToDisposedClosure
                    waveWriter.Write(buffer, 0, read);
                }
            };

            //we've set everything we need -> start capturing data
            objStopWatch.Start();
            _soundIn.Start();
        }
Example #53
 /// <summary>
 /// Initializes a new instance of the <see cref="DefaultDeviceChangedEventArgs"/> class.
 /// </summary>
 /// <param name="deviceId">The device id that identifies the audio endpoint device.</param>
 /// <param name="dataFlow">The data-flow direction of the endpoint device.</param>
 /// <param name="role">The device role of the audio endpoint device.</param>
 public DefaultDeviceChangedEventArgs(string deviceId, DataFlow dataFlow, Role role)
     : base(deviceId)
 {
     DataFlow = dataFlow;
     Role     = role;
 }
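As a sketch of how these event args are typically consumed in a handler; the DeviceId property is assumed to be exposed by the base event args class, which is an assumption here:
 private void OnDefaultDeviceChanged(object sender, DefaultDeviceChangedEventArgs e)
 {
     // DeviceId is assumed to come from the base notification event args.
     Console.WriteLine($"Default {e.DataFlow} device for role {e.Role} changed to {e.DeviceId}");
 }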
Example #54
 public PSDataFlow(DataFlow dataFlow)
 {
     this.Description = dataFlow?.Description;
     this.Annotations = dataFlow?.Annotations;
     this.Folder      = new PSDataFlowFolder(dataFlow?.Folder);
 }
Example #55
 public static void SetDataFlow(AvaloniaObject obj, DataFlow value)
 {
     obj.SetValue(DataFlowProperty, value);
 }
Example #56
 protected void SetProperties(DataFlow dataFlow)
 {
     dataFlow.Description = this.Description;
     dataFlow.Folder      = this.Folder?.ToSdkObject();
     this.Annotations?.ForEach(item => dataFlow.Annotations.Add(item));
 }
Example #57
 private int GetDefaultDeviceNumber(DataFlow dataFlow)
 {
     return SearchDeviceNumber(DataFlow.Capture,
         deviceEnumerator.GetDefaultAudioEndpoint(dataFlow, Role.Multimedia).FriendlyName);
 }
Example #58
        private static void RunGraphCore(EnvironmentBlock *penv, IHostEnvironment env, string graphStr, int cdata, DataSourceBlock **ppdata)
        {
            Contracts.AssertValue(env);

            var    args = new RunGraphArgs();
            string err  = null;

            if (!CmdParser.ParseArguments(env, graphStr, args, e => err = err ?? e))
            {
                throw env.Except(err);
            }

            int? maxThreadsAllowed = Math.Min(args.parallel > 0 ? args.parallel.Value : penv->maxThreadsAllowed, penv->maxThreadsAllowed);

            maxThreadsAllowed = penv->maxThreadsAllowed > 0 ? maxThreadsAllowed : args.parallel;
            var host = env.Register("RunGraph", args.randomSeed, null);

            JObject graph;

            try
            {
                graph = JObject.Parse(args.graph);
            }
            catch (JsonReaderException ex)
            {
                throw host.Except(ex, "Failed to parse experiment graph: {0}", ex.Message);
            }

            var runner = new GraphRunner(host, graph["nodes"] as JArray);

            var dvNative = new IDataView[cdata];

            try
            {
                for (int i = 0; i < cdata; i++)
                {
                    dvNative[i] = new NativeDataView(host, ppdata[i]);
                }

                // Setting inputs.
                var jInputs = graph["inputs"] as JObject;
                if (graph["inputs"] != null && jInputs == null)
                {
                    throw host.Except("Unexpected value for 'inputs': {0}", graph["inputs"]);
                }
                int iDv = 0;
                if (jInputs != null)
                {
                    foreach (var kvp in jInputs)
                    {
                        var pathValue = kvp.Value as JValue;
                        if (pathValue == null)
                        {
                            throw host.Except("Invalid value for input: {0}", kvp.Value);
                        }

                        var path    = pathValue.Value<string>();
                        var varName = kvp.Key;
                        var type    = runner.GetPortDataKind(varName);

                        switch (type)
                        {
                        case TlcModule.DataKind.FileHandle:
                            var fh = new SimpleFileHandle(host, path, false, false);
                            runner.SetInput(varName, fh);
                            break;

                        case TlcModule.DataKind.DataView:
                            IDataView dv;
                            if (!string.IsNullOrWhiteSpace(path))
                            {
                                var extension = Path.GetExtension(path);
                                if (extension == ".txt")
                                {
                                    dv = TextLoader.LoadFile(host, new TextLoader.Options(), new MultiFileSource(path));
                                }
                                else if (extension == ".dprep")
                                {
                                    dv = DataFlow.FromDPrepFile(path).ToDataView();
                                }
                                else
                                {
                                    dv = new BinaryLoader(host, new BinaryLoader.Arguments(), path);
                                }
                            }
                            else
                            {
                                Contracts.Assert(iDv < dvNative.Length);
                                // prefetch all columns
                                dv = dvNative[iDv++];
                                var prefetch = new int[dv.Schema.Count];
                                for (int i = 0; i < prefetch.Length; i++)
                                {
                                    prefetch[i] = i;
                                }
                                dv = new CacheDataView(host, dv, prefetch);
                            }
                            runner.SetInput(varName, dv);
                            break;

                        case TlcModule.DataKind.PredictorModel:
                            PredictorModel pm;
                            if (!string.IsNullOrWhiteSpace(path))
                            {
                                using (var fs = File.OpenRead(path))
                                    pm = new PredictorModelImpl(host, fs);
                            }
                            else
                            {
                                throw host.Except("Model must be loaded from a file");
                            }
                            runner.SetInput(varName, pm);
                            break;

                        case TlcModule.DataKind.TransformModel:
                            TransformModel tm;
                            if (!string.IsNullOrWhiteSpace(path))
                            {
                                using (var fs = File.OpenRead(path))
                                    tm = new TransformModelImpl(host, fs);
                            }
                            else
                            {
                                throw host.Except("Model must be loaded from a file");
                            }
                            runner.SetInput(varName, tm);
                            break;

                        default:
                            throw host.Except("Port type {0} not supported", type);
                        }
                    }
                }
                runner.RunAll();

                // Reading outputs.
                using (var ch = host.Start("Reading outputs"))
                {
                    var jOutputs = graph["outputs"] as JObject;
                    if (jOutputs != null)
                    {
                        foreach (var kvp in jOutputs)
                        {
                            var pathValue = kvp.Value as JValue;
                            if (pathValue == null)
                            {
                                throw host.Except("Invalid value for input: {0}", kvp.Value);
                            }
                            var path    = pathValue.Value<string>();
                            var varName = kvp.Key;
                            var type    = runner.GetPortDataKind(varName);

                            switch (type)
                            {
                            case TlcModule.DataKind.FileHandle:
                                var fh = runner.GetOutput<IFileHandle>(varName);
                                throw host.ExceptNotSupp("File handle outputs not yet supported.");

                            case TlcModule.DataKind.DataView:
                                var idv = runner.GetOutput<IDataView>(varName);
                                if (!string.IsNullOrWhiteSpace(path))
                                {
                                    SaveIdvToFile(idv, path, host);
                                }
                                else
                                {
                                    var infos = ProcessColumns(ref idv, args.maxSlots, host);
                                    SendViewToNative(ch, penv, idv, infos);
                                }
                                break;

                            case TlcModule.DataKind.PredictorModel:
                                var pm = runner.GetOutput<PredictorModel>(varName);
                                if (!string.IsNullOrWhiteSpace(path))
                                {
                                    SavePredictorModelToFile(pm, path, host);
                                }
                                else
                                {
                                    throw host.Except("Returning in-memory models is not supported");
                                }
                                break;

                            case TlcModule.DataKind.TransformModel:
                                var tm = runner.GetOutput<TransformModel>(varName);
                                if (!string.IsNullOrWhiteSpace(path))
                                {
                                    using (var fs = File.OpenWrite(path))
                                        tm.Save(host, fs);
                                }
                                else
                                {
                                    throw host.Except("Returning in-memory models is not supported");
                                }
                                break;

                            case TlcModule.DataKind.Array:
                                var objArray = runner.GetOutput<object[]>(varName);
                                if (objArray is PredictorModel[])
                                {
                                    var modelArray = (PredictorModel[])objArray;
                                    // Save each model separately
                                    for (var i = 0; i < modelArray.Length; i++)
                                    {
                                        var modelPath = string.Format(CultureInfo.InvariantCulture, path, i);
                                        SavePredictorModelToFile(modelArray[i], modelPath, host);
                                    }
                                }
                                else
                                {
                                    throw host.Except("DataKind.Array type {0} not supported", objArray.First().GetType());
                                }
                                break;

                            default:
                                throw host.Except("Port type {0} not supported", type);
                            }
                        }
                    }
                }
            }
            finally
            {
                // The raw data view is disposable so it lets go of unmanaged raw pointers before we return.
                for (int i = 0; i < dvNative.Length; i++)
                {
                    var view = dvNative[i];
                    if (view == null)
                    {
                        continue;
                    }
                    host.Assert(view is IDisposable);
                    var disp = (IDisposable)dvNative[i];
                    disp.Dispose();
                }
            }
        }
Example #59
 public DefaultDeviceChangedEventArgs(string deviceID, DataFlow dataFlow, Role role)
     : base(deviceID)
 {
     DataFlow = dataFlow;
     Role = role;
 }
Example #60
 public void EnumAudioEndpoints(DataFlow dataFlow, DeviceState deviceStates, out IMMDeviceCollection devices)
 {
     _inner.EnumAudioEndpoints(dataFlow, deviceStates, out devices);
 }