public override bool Init()
{
    if (!base.Init())
        return false;

    _Sources = new List<CSoundCardSource>();

    DeviceCollection devices = DirectSoundCapture.GetDevices();
    foreach (DeviceInformation dev in devices)
    {
        using (var ds = new DirectSoundCapture(dev.DriverGuid))
        {
            var device = new CRecordDevice(_Devices.Count, dev.DriverGuid.ToString(), dev.Description, ds.Capabilities.Channels);
            _Devices.Add(device);
        }
    }

    _Initialized = true;
    return true;
}
public void Start()
{
    if (_Running)
        throw new InvalidOperationException();

    if (_CaptureDevice == null)
        _CaptureDevice = new DirectSoundCapture(new Guid(_Guid));

    _WaveFormat.FormatTag = WaveFormatTag.Pcm; // Change to WaveFormatTag.IeeeFloat for float
    _WaveFormat.BitsPerSample = 16;            // Set this to 32 for float
    _WaveFormat.BlockAlignment = (short)(_Channels * (_WaveFormat.BitsPerSample / 8));
    _WaveFormat.Channels = _Channels;
    _WaveFormat.SamplesPerSecond = (int)(SampleRateKhz * 1000D);
    _WaveFormat.AverageBytesPerSecond = _WaveFormat.SamplesPerSecond * _WaveFormat.BlockAlignment;

    _BufferPortionCount = 2;

    _BufferDescription.BufferBytes = _BufferSize * sizeof(short) * _BufferPortionCount * _Channels;
    _BufferDescription.Format = _WaveFormat;
    _BufferDescription.WaveMapped = false;

    _CaptureBuffer = new CaptureBuffer(_CaptureDevice, _BufferDescription);
    _BufferPortionSize = _CaptureBuffer.SizeInBytes / _BufferPortionCount;

    _Notifications = new List<NotificationPosition>();
    for (int i = 0; i < _BufferPortionCount; i++)
    {
        var notification = new NotificationPosition
        {
            // Notify at the last byte of each buffer portion.
            // (The original used _BufferPortionCount - 1 here, which looks like a typo.)
            Offset = _BufferPortionSize - 1 + (_BufferPortionSize * i),
            Event = new AutoResetEvent(false)
        };
        _Notifications.Add(notification);
    }

    _CaptureBuffer.SetNotificationPositions(_Notifications.ToArray());

    _WaitHandles = new WaitHandle[_Notifications.Count];
    for (int i = 0; i < _Notifications.Count; i++)
        _WaitHandles[i] = _Notifications[i].Event;

    _CaptureThread = new Thread(_DoCapture)
    {
        Name = "DirectSoundCapture",
        IsBackground = true
    };

    _Running = true;
    _CaptureThread.Start();
}
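// The Start() method above only arms the two notification positions; the actual reads happen
// on the _DoCapture thread, which is not shown here. The following is a minimal hypothetical
// sketch of such a loop (an assumption, not the original _DoCapture), using the four-argument
// SlimDX-style CaptureBuffer.Read overload that appears in the worker-thread example later in
// this listing.
private void _DoCapture()
{
    var data = new short[_BufferPortionSize / sizeof(short)];

    _CaptureBuffer.Start(true); // capture in a loop until stopped

    while (_Running)
    {
        // WaitAny returns the index of the buffer portion that has just been filled
        int index = WaitHandle.WaitAny(_WaitHandles);
        _CaptureBuffer.Read(data, 0, data.Length, _BufferPortionSize * index);

        // hand the interleaved samples to the consumer here
    }

    _CaptureBuffer.Stop();
}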
/// <summary>
/// Returns an enumerator that iterates through the device collection.
/// </summary>
/// <returns>
/// A <see cref="T:System.Collections.Generic.IEnumerator`1"/> that
/// can be used to iterate through the collection.
/// </returns>
public IEnumerator<AudioDeviceInfo> GetEnumerator()
{
    DeviceCollection devices = (Category == AudioDeviceCategory.Capture)
        ? DirectSoundCapture.GetDevices()
        : DirectSound.GetDevices();

    foreach (DeviceInformation info in devices)
        yield return new AudioDeviceInfo(info);
}
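// Hypothetical usage of the enumerator above. The collection class name, its constructor, and
// the Description property on AudioDeviceInfo are assumptions; only GetEnumerator, Category,
// and AudioDeviceInfo come from the snippet itself.
var captureDevices = new AudioDeviceCollection(AudioDeviceCategory.Capture);
foreach (AudioDeviceInfo info in captureDevices)
{
    Console.WriteLine(info.Description); // list every capture device
}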
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        this.Stop();

        if (this.captureDevice != null)
        {
            this.captureDevice.Dispose();
            this.captureDevice = null;
        }
    }
}
// ReSharper disable InconsistentNaming
protected void Dispose(bool disposing)
// ReSharper restore InconsistentNaming
{
    if (disposing)
    {
        Stop();

        if (_CaptureDevice != null)
        {
            _CaptureDevice.Dispose();
            _CaptureDevice = null;
        }
    }
}
public SoundDriver()
{
    iMutex = new Mutex();
    iSourceList = new List<ISource>();
    iCapture = new DirectSoundCapture();

    DeviceCollection cdc = DirectSoundCapture.GetDevices();
    for (int i = 0; i < cdc.Count; i++)
    {
        DeviceInformation info = cdc[i];
        iSourceList.Add(new Source(info.Description, info.DriverGuid.ToString()));
    }
}
public AudioCapture()
{
    try
    {
        directSoundCapture = new DirectSoundCapture();
    }
    catch
    {
        throw new AudioCaptureException("Could not open recording device");
    }

    //var directSoundCaps = directSoundCapture.Capabilities;

    // Default 44.1kHz 16-bit stereo PCM
    waveFormat = new WaveFormat();

    // Set the buffer size.
    // Note that the buffer position will roll over to 0 when the buffer fills up,
    // so set the notification position's offset to one less than the buffer size.
    bufferSize = waveFormat.ConvertLatencyToByteSize(latency);
    numberOfSamples = bufferSize / waveFormat.BlockAlign;

    // Create audio capture buffer
    captureBufferDesc = new CaptureBufferDescription();
    captureBufferDesc.Format = waveFormat;
    captureBufferDesc.BufferBytes = bufferSize;
    captureBuffer = new CaptureBuffer(directSoundCapture, captureBufferDesc);

    // Wait events allow the thread to wait asynchronously for the buffer to fill
    var evt = new AutoResetEvent(false);
    fullEvent = new WaitHandle[] { evt };

    // Notify the thread when the buffer is full
    var nf = new NotificationPosition();
    nf.Offset = bufferSize - 1;
    nf.WaitHandle = fullEvent[0];
    var nfs = new NotificationPosition[] { nf };
    captureBuffer.SetNotificationPositions(nfs);

    // Start the processing thread
    thread = new Thread(new ThreadStart(Process));
    thread.IsBackground = true; // Allow application to exit
    thread.Start();
}
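// The constructor above only wires up the end-of-buffer notification; the Process thread it
// starts is not shown. A minimal sketch of such a thread might look like this (an assumption,
// not the original Process): wait for the notification, then drain the whole buffer with the
// same five-argument Read overload used elsewhere in this listing.
private void Process()
{
    var data = new byte[bufferSize];

    captureBuffer.Start(true); // loop capture continuously

    while (true) // the thread is IsBackground, so it ends with the application
    {
        WaitHandle.WaitAny(fullEvent); // fires once per filled buffer
        captureBuffer.Read(data, 0, data.Length, 0, LockFlags.None);

        // convert the PCM bytes into numberOfSamples samples and publish them here
    }
}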
private CaptureBuffer CreateCaptureBuffer(ISource aSource, int aBytes)
{
    var format = new WaveFormat
    {
        SamplesPerSecond = 44100,
        BitsPerSample = 16,
        Channels = 2,
        FormatTag = WaveFormatTag.Pcm,
        BlockAlignment = 4,
        AverageBytesPerSecond = 44100 * 4 // 2 channels, 2 bytes per sample
    };

    var desc = new CaptureBufferDescription
    {
        Format = format,
        BufferBytes = aBytes
    };

    DirectSoundCapture capture = new DirectSoundCapture();
    return new CaptureBuffer(capture, desc);
}
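// A hypothetical caller of the factory above; 44100 * 4 bytes is one second of the
// 44.1 kHz 16-bit stereo format it configures, and `source` stands in for whatever
// ISource instance the caller already holds.
CaptureBuffer capture = CreateCaptureBuffer(source, 44100 * 4);
capture.Start(true);   // begin looping capture
// ... read from the buffer on a timer or via notification positions, then:
capture.Stop();
capture.Dispose();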
public bool Init()
{
    DeviceCollection devices = DirectSoundCapture.GetDevices();

    _Devices = new List<SRecordDevice>();
    _Sources = new List<SoundCardSource>();

    int id = 0;
    foreach (DeviceInformation dev in devices)
    {
        DirectSoundCapture ds = new DirectSoundCapture(dev.DriverGuid);

        SRecordDevice device = new SRecordDevice();
        device.Driver = dev.DriverGuid.ToString();
        device.ID = id;
        device.Name = dev.Description;
        device.Inputs = new List<SInput>();

        SInput inp = new SInput();
        inp.Name = "Default";
        inp.Channels = ds.Capabilities.Channels;
        if (inp.Channels > 2)
            inp.Channels = 2; // more are not supported in vocaluxe

        device.Inputs.Add(inp);
        _Devices.Add(device);

        id++;
        ds.Dispose();
    }

    _DeviceConfig = _Devices.ToArray();
    _initialized = true;

    return true;
}
public override void Start()
{
    if (this.isCapturing)
        throw new InvalidOperationException("Capture is already in progress");

    this.isCapturing = true;
    this.bufferLength = this.Format.AverageBytesPerSecond * bufferSeconds;

    CaptureBufferDescription description = new CaptureBufferDescription();
    description.Format = this.Format;
    description.BufferBytes = bufferLength;

    this.captureDevice = new DirectSoundCapture();
    this.captureBuffer = new CaptureBuffer(captureDevice, description);

    int waitHandleCount = bufferSeconds * notifyPointsInSecond;
    NotificationPosition[] notificationPositions = new NotificationPosition[waitHandleCount];
    for (int i = 0; i < waitHandleCount; i++)
    {
        NotificationPosition position = new NotificationPosition();
        position.Offset = (i + 1) * bufferLength / notificationPositions.Length - 1;
        position.Event = positionEvent; // every notification point signals the same event
        notificationPositions[i] = position;
    }
    this.captureBuffer.SetNotificationPositions(notificationPositions);

    this.terminatedEvent.Reset();
    this.captureThread = new Thread(new ThreadStart(this.CaptureLoop));
    this.captureThread.Name = "Sound capture";
    this.captureThread.Start();
}
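// The CaptureLoop thread started above is not shown. A minimal hypothetical sketch follows
// (an assumption, not the original): because every notification point signals the same
// positionEvent, each wake-up reads the chunk that has just been completed. The four-argument
// Read overload matches the SlimDX worker-thread example later in this listing; a SharpDX
// build would append a LockFlags argument.
private void CaptureLoop()
{
    int chunkSize = bufferLength / (bufferSeconds * notifyPointsInSecond);
    var chunk = new byte[chunkSize];
    int readPosition = 0;

    captureBuffer.Start(true);

    while (this.isCapturing)
    {
        positionEvent.WaitOne(); // one of the notification points was reached
        captureBuffer.Read(chunk, 0, chunk.Length, readPosition);
        readPosition = (readPosition + chunkSize) % bufferLength;

        // append the chunk to the recording here
    }

    captureBuffer.Stop();
    terminatedEvent.Set(); // let Stop() know the thread has finished
}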
// bufferSize is a SAMPLE COUNT
// NOTE: we always capture 16 bits/sample
public WaveCapture(Guid deviceGuid, int Fs, int bufferSize, int timerInterval)
{
    CaptureBufferDescription desc = new CaptureBufferDescription();
    desc.BufferBytes = bufferSize * 2;
    desc.ControlEffects = false;
    desc.WaveMapped = true;
    desc.Format = new WaveFormat();
    desc.Format.FormatTag = SlimDX.WaveFormatTag.Pcm;
    desc.Format.SamplesPerSecond = Fs;
    desc.Format.Channels = 1;
    desc.Format.BitsPerSample = 16;
    desc.Format.BlockAlignment = 2;
    desc.Format.AverageBytesPerSecond = Fs * 2;

    buf = new byte[bufferSize * 2];

    capture = new DirectSoundCapture(deviceGuid);
    captureBuffer = new CaptureBuffer(capture, desc);

    timer = new Timer();
    timer.Interval = timerInterval;
    timer.Tick += new EventHandler(timer_Tick);
}
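// The timer_Tick handler referenced above is not shown. A minimal sketch (an assumption, not
// the original) simply snapshots the whole capture buffer into buf on each tick; a real
// handler would track the capture cursor and read only the newly written region. It assumes
// captureBuffer.Start(true) and timer.Start() are called elsewhere.
private void timer_Tick(object sender, EventArgs e)
{
    // SlimDX-style four-argument Read, as in the worker-thread example below
    captureBuffer.Read(buf, 0, buf.Length, 0);

    // buf now holds bufferSize 16-bit mono samples as little-endian byte pairs
}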
/// <summary>
/// Worker thread.
/// </summary>
private void WorkerThread()
{
    // Get the selected capture device
    DirectSoundCapture captureDevice = new DirectSoundCapture(device);

    // Set the capture format
    WaveFormat format = new WaveFormat();
    format.Channels = 1;
    format.SamplesPerSecond = sampleRate;
    format.FormatTag = sampleFormat.ToWaveFormat();
    format.BitsPerSample = (short)Signal.GetSampleSize(sampleFormat);
    format.BlockAlignment = (short)(format.BitsPerSample / 8);
    format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlignment;

    // Setup the capture buffer
    CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription();
    captureBufferDescription.Format = format;
    captureBufferDescription.BufferBytes = 2 * desiredCaptureSize * format.BlockAlignment;
    captureBufferDescription.WaveMapped = true;
    captureBufferDescription.ControlEffects = false;

    CaptureBuffer captureBuffer = null;
    NotificationPosition[] notifications = new NotificationPosition[2];

    try
    {
        captureBuffer = new CaptureBuffer(captureDevice, captureBufferDescription);

        // Setup the notification positions
        int bufferPortionSize = captureBuffer.SizeInBytes / 2;
        notifications[0] = new NotificationPosition();
        notifications[0].Offset = bufferPortionSize - 1;
        notifications[0].Event = new AutoResetEvent(false);
        notifications[1] = new NotificationPosition();
        notifications[1].Offset = bufferPortionSize - 1 + bufferPortionSize;
        notifications[1].Event = new AutoResetEvent(false);
        captureBuffer.SetNotificationPositions(notifications);

        // Make a copy of the wait handles
        WaitHandle[] waitHandles = new WaitHandle[notifications.Length];
        for (int i = 0; i < notifications.Length; i++)
        {
            waitHandles[i] = notifications[i].Event;
        }

        // Start capturing
        captureBuffer.Start(true);

        if (sampleFormat == SampleFormat.Format32BitIeeeFloat)
        {
            float[] currentSample = new float[desiredCaptureSize];

            while (!stopEvent.WaitOne(0, true))
            {
                int bufferPortionIndex = WaitHandle.WaitAny(waitHandles);
                captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex);
                OnNewFrame(currentSample);
            }
        }
        else if (sampleFormat == SampleFormat.Format16Bit)
        {
            short[] currentSample = new short[desiredCaptureSize];

            while (!stopEvent.WaitOne(0, true))
            {
                int bufferPortionIndex = WaitHandle.WaitAny(waitHandles);
                captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex);
                OnNewFrame(currentSample);
            }
        }
    }
    catch (Exception ex)
    {
        if (AudioSourceError != null)
        {
            AudioSourceError(this, new AudioSourceErrorEventArgs(ex.Message));
        }
        else
        {
            throw;
        }
    }
    finally
    {
        if (captureBuffer != null)
        {
            captureBuffer.Stop();
            captureBuffer.Dispose();
        }

        if (captureDevice != null)
        {
            captureDevice.Dispose();
        }

        for (int i = 0; i < notifications.Length; i++)
        {
            if (notifications[i].Event != null)
            {
                notifications[i].Event.Close();
            }
        }
    }
}
static void Main(string[] args)
{
    var DevList = DirectSoundCapture.GetDevices();
    for (int i = 0; i < DevList.Count; i++)
    {
        Console.WriteLine($"[{i}] {DevList[i].Description}");
    }

    Console.Write("> ");
    int answer = Convert.ToInt32(Console.ReadLine());
    if (answer < 0 || answer >= DevList.Count)
    {
        Console.WriteLine("Invalid device index.");
        return;
    }

    DeviceInformation device = DevList[answer];
    Console.WriteLine(device.DriverGuid);

    directSoundDeviceIn = new DirectSoundCapture(device.DriverGuid);

    captureBufferDescription.BufferBytes = 192000; // 2 Second Buffer
    captureBufferDescription.Format = waveFormat;
    //captureBufferDescription.ControlEffects = false;
    //captureBufferDescription.WaveMapped = true;
    captureBuffer = new CaptureBuffer(directSoundDeviceIn, captureBufferDescription);

    FourierTransformer = new MyMath.DFT(MyMath.DFT.DFTModes.FFTW, waveFormat.SampleRate);

    frequencies = new float[60];
    for (int i = 0; i < frequencies.Length; i++)
    {
        frequencies[i] = MyMath.DFT.FrequencyFromIndex(i, frequencies.Length, 1500);
    }

    Thread listenThread = new Thread(new ThreadStart(Listen)) { Name = "ListenThread", IsBackground = true };
    Thread infoThread = new Thread(new ThreadStart(DrawBufferInfo)) { Name = "DrawBufferInfoThread", IsBackground = true };
    Thread keyboardListenThread = new Thread(new ThreadStart(KeyboardListen)) { Name = "KeyboardListen", IsBackground = true };

    Console.Clear();
    listenThread.Start();
    keyboardListenThread.Start();
    infoThread.Start();

    listenThread.Join();
    keyboardListenThread.Join();
    infoThread.Join();
}
static void Main() { const int samples = 512; const int latency = 24; var devices = DirectSoundCapture.GetDevices(); //var capture = new DirectSoundCapture(devices.OrderByDescending(d => d.Description.Contains("Mic")).First().DriverGuid); var capture = new DirectSoundCapture(devices.OrderByDescending(d => d.Description.Contains("Mix")).First().DriverGuid); var audioFormat = new WaveFormat(); var audioBuffer = new CaptureBuffer(capture, new CaptureBufferDescription { BufferBytes = audioFormat.ConvertLatencyToByteSize(latency), Format = audioFormat }); audioBuffer.Start(true); using (var form = new Form()) using (var factory = new Factory4()) { form.Text = "AudioDX"; form.ClientSize = new System.Drawing.Size(1024, 768); form.StartPosition = FormStartPosition.CenterScreen; Device11 device; SwapChain swapChain; Device11.CreateWithSwapChain( DriverType.Hardware, DeviceCreationFlags.None, new SwapChainDescription { IsWindowed = true, BufferCount = 1, OutputHandle = form.Handle, SampleDescription = new SampleDescription(1, 0), ModeDescription = new ModeDescription(form.ClientSize.Width, form.ClientSize.Height, new Rational(60, 1), Format.B8G8R8A8_UNorm), Usage = Usage.RenderTargetOutput, SwapEffect = SwapEffect.Discard }, out device, out swapChain); var context = device.ImmediateContext; var backBuffer = swapChain.GetBackBuffer <Texture2D>(0); var backBufferView = new RenderTargetView(device, backBuffer); backBuffer.Dispose(); var depthBuffer = new Texture2D(device, new Texture2DDescription { Format = Format.D16_UNorm, ArraySize = 1, MipLevels = 1, Width = form.ClientSize.Width, Height = form.ClientSize.Height, SampleDescription = new SampleDescription(1, 0), BindFlags = BindFlags.DepthStencil }); var depthBufferView = new DepthStencilView(device, depthBuffer); depthBuffer.Dispose(); Shapes.Sphere.Load(device); Shapes.Cube.Load(device); Shapes.Billboard.Load(device); Shaders.Normal.Load(device); Shaders.Color.Load(device); var rasterizerStateDescription = RasterizerStateDescription.Default(); //rasterizerStateDescription.FillMode = FillMode.Wireframe; //rasterizerStateDescription.IsFrontCounterClockwise = true; //rasterizerStateDescription.CullMode = CullMode.Back; var rasterizerState = new RasterizerState(device, rasterizerStateDescription); var blendStateDescription = BlendStateDescription.Default(); //blendStateDescription.RenderTarget[0] = new RenderTargetBlendDescription(true, BlendOption.SourceAlpha, BlendOption.InverseSourceAlpha, BlendOperation.Add, BlendOption.SourceAlpha, BlendOption.DestinationAlpha, BlendOperation.Add, ColorWriteMaskFlags.All); var blendState = new BlendState(device, blendStateDescription); var depthStateDescription = DepthStencilStateDescription.Default(); depthStateDescription.DepthComparison = Comparison.LessEqual; depthStateDescription.IsDepthEnabled = true; depthStateDescription.IsStencilEnabled = false; var depthStencilState = new DepthStencilState(device, depthStateDescription); var samplerStateDescription = SamplerStateDescription.Default(); samplerStateDescription.Filter = Filter.MinMagMipLinear; samplerStateDescription.AddressU = TextureAddressMode.Wrap; samplerStateDescription.AddressV = TextureAddressMode.Wrap; var samplerState = new SamplerState(device, samplerStateDescription); var startTime = DateTime.Now; var frame = 0; var size = form.ClientSize; var audioData = new byte[audioFormat.ConvertLatencyToByteSize(latency)]; var audioIndex = 0; var leftWaveForm = new float[samples * 8]; var rightWaveForm = new float[samples * 8]; for (var sample = 0; sample 
< samples; sample++) { leftWaveForm[(sample * 8) + 0] = -1.0f + ((float)sample / (samples - 1) * 2.0f); rightWaveForm[(sample * 8) + 0] = -1.0f + ((float)sample / (samples - 1) * 2.0f); } var waveFormBufferDescription = new BufferDescription { BindFlags = BindFlags.VertexBuffer, SizeInBytes = leftWaveForm.Length * sizeof(float), CpuAccessFlags = CpuAccessFlags.Write, Usage = ResourceUsage.Dynamic }; //var leftWaveFormVertexBuffer = Buffer11.Create(device, leftWaveForm, waveFormBufferDescription); //var rightWaveFormVertexBuffer = Buffer11.Create(device, rightWaveForm, waveFormBufferDescription); //var leftWaveFormVertexBufferBinding = new VertexBufferBinding(leftWaveFormVertexBuffer, 8 * sizeof(float), 0); //var rightWaveFormVertexBufferBinding = new VertexBufferBinding(rightWaveFormVertexBuffer, 8 * sizeof(float), 0); var leftFrequencies = new float[samples]; var rightFrequencies = new float[samples]; //var rotation = 0.0f; RenderLoop.Run(form, () => { if (audioBuffer.CurrentCapturePosition != audioBuffer.CurrentRealPosition) { audioBuffer.Read(audioData, 0, audioData.Length, 0, LockFlags.None); //for (var sample = 0; sample < samples; sample++) //{ // leftWaveForm[(sample * 8) + 1] = -BitConverter.ToInt16(audioData, sample * 4) / (float)short.MinValue; // rightWaveForm[(sample * 8) + 1] = -BitConverter.ToInt16(audioData, (sample * 4) + 2) / (float)short.MinValue; //} //DataStream stream; //context.MapSubresource(leftWaveFormVertexBuffer, 0, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None, out stream); //stream.WriteRange(leftWaveForm); //context.UnmapSubresource(leftWaveFormVertexBuffer, 0); //stream.Dispose(); //context.MapSubresource(rightWaveFormVertexBuffer, 0, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None, out stream); //stream.WriteRange(rightWaveForm); //context.UnmapSubresource(rightWaveFormVertexBuffer, 0); //stream.Dispose(); for (var sample = 0; sample < samples; sample++) { leftFrequencies[sample] = 0.0f; rightFrequencies[sample] = 0.0f; for (var sample2 = 0; sample2 < samples; sample2++) { var theta = -2.0f * MathUtil.Pi * (float)sample2 * (float)sample / (samples << 1); var value = (float)Math.Cos(theta); leftFrequencies[sample] += value * (-BitConverter.ToInt16(audioData, sample2 * 4) / (float)short.MinValue); rightFrequencies[sample] += value * (-BitConverter.ToInt16(audioData, (sample2 * 4) + 2) / (float)short.MinValue); } } //for (var sample = 0; sample < samples; sample++) //{ // leftWaveForm[(sample * 8) + 1] = Math.Abs(leftFrequencies[sample]); // rightWaveForm[(sample * 8) + 1] = Math.Abs(rightFrequencies[sample]); //var angle = ((float)sample / (float)samples) * MathUtil.TwoPi; //var sin = (float)Math.Sin(angle); //var cos = (float)Math.Cos(angle); //leftWaveForm[(sample * 8) + 0] = (Math.Abs(leftFrequencies[sample]) + 10.0f) * sin * -0.01f; //leftWaveForm[(sample * 8) + 1] = (Math.Abs(leftFrequencies[sample]) + 10.0f) * cos * -0.01f; //rightWaveForm[(sample * 8) + 0] = (Math.Abs(rightFrequencies[sample]) + 10.0f) * sin * 0.01f; //rightWaveForm[(sample * 8) + 1] = (Math.Abs(rightFrequencies[sample]) + 10.0f) * cos * -0.01f; //} //context.MapSubresource(leftWaveFormVertexBuffer, 0, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None, out stream); //stream.WriteRange(leftWaveForm); //context.UnmapSubresource(leftWaveFormVertexBuffer, 0); //stream.Dispose(); //context.MapSubresource(rightWaveFormVertexBuffer, 0, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None, out stream); //stream.WriteRange(rightWaveForm); 
//context.UnmapSubresource(rightWaveFormVertexBuffer, 0); //stream.Dispose(); } if (form.ClientSize != size) { Utilities.Dispose(ref backBufferView); Utilities.Dispose(ref depthBufferView); if (form.ClientSize.Width != 0 && form.ClientSize.Height != 0) { swapChain.ResizeBuffers(1, form.ClientSize.Width, form.ClientSize.Height, Format.B8G8R8A8_UNorm, SwapChainFlags.None); backBuffer = swapChain.GetBackBuffer <Texture2D>(0); backBufferView = new RenderTargetView(device, backBuffer); backBuffer.Dispose(); depthBuffer = new Texture2D(device, new Texture2DDescription { Format = Format.D16_UNorm, ArraySize = 1, MipLevels = 1, Width = form.ClientSize.Width, Height = form.ClientSize.Height, SampleDescription = new SampleDescription(1, 0), BindFlags = BindFlags.DepthStencil }); depthBufferView = new DepthStencilView(device, depthBuffer); depthBuffer.Dispose(); } size = form.ClientSize; } var ratio = (float)form.ClientSize.Width / (float)form.ClientSize.Height; var projection = Matrix.PerspectiveFovRH(3.14f / 3.0f, ratio, 0.01f, 1000); var view = Matrix.LookAtRH(new Vector3(0, 2, 50), Vector3.Zero, Vector3.UnitY); //var world = Matrix.Scaling(1.0f + Math.Abs(((leftWaveForm[audioIndex + 1]) * 0.01f))) * Matrix.RotationY(Environment.TickCount / 2000.0f); //var world = Matrix.RotationY(rotation); //var world = Matrix.Scaling(1.0f + ((audioData[audioIndex] + audioData[audioIndex + 1] << 8) * 0.00001f)) * Matrix.RotationY(Environment.TickCount / 1000.0f); //audioIndex += 8; //if (audioIndex >= leftWaveForm.Length) // audioIndex = 0; //rotation += 0.01f; //var worldViewProjection = world * view * projection; //var diffuse = new Vector4(1, 0, 0, 0.5f); //Shaders.Color.WorldViewProjection(context, ref worldViewProjection); //Shaders.Color.Emissive(context, ref diffuse); context.Rasterizer.SetViewport(0, 0, form.ClientSize.Width, form.ClientSize.Height); context.OutputMerger.SetTargets(depthBufferView, backBufferView); context.ClearRenderTargetView(backBufferView, new RawColor4(0, 0, 0, 1)); context.ClearDepthStencilView(depthBufferView, DepthStencilClearFlags.Depth, 1.0f, 0); //Shaders.Color.Apply(context); //Shapes.Sphere.Begin(context); //Shapes.Cube.Begin(context); //Shapes.Billboard.Begin(context); context.Rasterizer.State = rasterizerState; context.OutputMerger.SetBlendState(blendState); context.OutputMerger.SetDepthStencilState(depthStencilState); context.PixelShader.SetSampler(0, samplerState); context.PixelShader.SetShaderResource(0, null); //Shapes.Sphere.Draw(context); //Shapes.Cube.Draw(context); //Shapes.Billboard.Draw(context); // Draw Waveforms //diffuse = new Vector4(0, 0, 1, 0.5f); //Shaders.Color.Apply(context); //worldViewProjection = Matrix.Scaling(1, 0.1f, 1) * Matrix.Translation(0, 0.1f, 0); //worldViewProjection = Matrix.Scaling(1, 1, 1) * Matrix.Translation(-0.5f, 0, 0); //Shaders.Color.WorldViewProjection(context, ref worldViewProjection); //Shaders.Color.Emissive(context, ref diffuse); //context.InputAssembler.PrimitiveTopology = PrimitiveTopology.LineStrip; //context.InputAssembler.SetVertexBuffers(0, leftWaveFormVertexBufferBinding); //context.Draw(samples, 0); //worldViewProjection = Matrix.Scaling(1, 0.1f, 1) * Matrix.Translation(0, -0.1f, 0); //Shaders.Color.WorldViewProjection(context, ref worldViewProjection); //context.InputAssembler.SetVertexBuffers(0, rightWaveFormVertexBufferBinding); //context.Draw(samples, 0); // Draw Frequencies Shapes.Billboard.Begin(context); Shaders.Color.Apply(context); var emissive = new Vector4(0.2f, 0.2f, 0.8f, 1); 
Shaders.Color.Emissive(context, ref emissive); for (var sample = 0; sample < samples; sample++) { var volume = 1 + (int)(Math.Abs(leftFrequencies[sample]) * 10.0f); for (var pixel = 0; pixel < volume; pixel++) { //var worldViewProjection = Matrix.Scaling(0.5f) * Matrix.Translation(-256.0f + sample, Math.Abs(leftFrequencies[sample]) * 10.0f, 0) * view * projection; var worldViewProjection = Matrix.Scaling(0.5f) * Matrix.Translation(-50.0f + sample, pixel, 0) * view * projection; Shaders.Color.WorldViewProjection(context, ref worldViewProjection); emissive = new Vector4(pixel * 0.06f, 0.0f, 0.8f - (pixel * 0.02f), 1); Shaders.Color.Emissive(context, ref emissive); Shapes.Billboard.Draw(context); } } swapChain.Present(1, PresentFlags.None); frame++; }); MessageBox.Show((frame / DateTime.Now.Subtract(startTime).TotalSeconds).ToString() + " FPS"); } }
/// <summary> /// Worker thread. /// </summary> /// private void WorkerThread() { needToStop = false; // Get the selected capture device DirectSoundCapture captureDevice = new DirectSoundCapture(device); // Set the capture format var bitsPerSample = Signal.GetSampleSize(sampleFormat); WaveFormat format = WaveFormat.CreateCustomFormat(sampleFormat.ToWaveFormat(), sampleRate, 1, sampleRate * bitsPerSample / 8, bitsPerSample / 8, bitsPerSample); // Setup the capture buffer CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription(); captureBufferDescription.Format = format; captureBufferDescription.BufferBytes = 2 * desiredCaptureSize * format.BlockAlign; captureBufferDescription.Flags |= CaptureBufferCapabilitiesFlags.WaveMapped; captureBufferDescription.Flags &= ~CaptureBufferCapabilitiesFlags.ControlEffects; CaptureBuffer captureBuffer = null; NotificationPosition[] notifications = new NotificationPosition[2]; try { captureBuffer = new CaptureBuffer(captureDevice, captureBufferDescription); // Setup the notification positions int bufferPortionSize = captureBuffer.Capabilities.BufferBytes / 2; notifications[0] = new NotificationPosition(); notifications[0].Offset = bufferPortionSize - 1; notifications[0].WaitHandle = new AutoResetEvent(false); notifications[1] = new NotificationPosition(); notifications[1].Offset = bufferPortionSize - 1 + bufferPortionSize; notifications[1].WaitHandle = new AutoResetEvent(false); captureBuffer.SetNotificationPositions(notifications); // Make a copy of the wait handles WaitHandle[] waitHandles = new WaitHandle[notifications.Length]; for (int i = 0; i < notifications.Length; i++) { waitHandles[i] = notifications[i].WaitHandle; } // Start capturing captureBuffer.Start(true); if (sampleFormat == SampleFormat.Format32BitIeeeFloat) { float[] currentSample = new float[desiredCaptureSize]; Signal signal = Signal.FromArray(currentSample, sampleRate, sampleFormat); while (!needToStop) { int bufferPortionIndex = WaitHandle.WaitAny(waitHandles); captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex, LockFlags.None); OnNewFrame(signal); } } else if (sampleFormat == SampleFormat.Format16Bit) { short[] currentSample = new short[desiredCaptureSize]; Signal signal = Signal.FromArray(currentSample, sampleRate, sampleFormat); while (!needToStop) { int bufferPortionIndex = WaitHandle.WaitAny(waitHandles); captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex, LockFlags.None); OnNewFrame(signal); } } } catch (Exception ex) { if (AudioSourceError == null) { throw; } AudioSourceError(this, new AudioSourceErrorEventArgs(ex)); } finally { if (captureBuffer != null) { captureBuffer.Stop(); captureBuffer.Dispose(); } if (captureDevice != null) { captureDevice.Dispose(); } #if !NETSTANDARD1_4 for (int i = 0; i < notifications.Length; i++) { if (notifications[i].WaitHandle != null) { notifications[i].WaitHandle.Close(); } } #endif } }
static void Main() { var initError = EVRInitError.None; system = OpenVR.Init(ref initError); if (initError != EVRInitError.None) { return; } compositor = OpenVR.Compositor; compositor.CompositorBringToFront(); compositor.FadeGrid(5.0f, false); count = OpenVR.k_unMaxTrackedDeviceCount; currentPoses = new TrackedDevicePose_t[count]; nextPoses = new TrackedDevicePose_t[count]; controllers = new List <uint>(); controllerModels = new RenderModel_t[count]; controllerTextures = new RenderModel_TextureMap_t[count]; controllerTextureViews = new ShaderResourceView[count]; controllerVertexBuffers = new SharpDX.Direct3D11.Buffer[count]; controllerIndexBuffers = new SharpDX.Direct3D11.Buffer[count]; controllerVertexBufferBindings = new VertexBufferBinding[count]; for (uint device = 0; device < count; device++) { var deviceClass = system.GetTrackedDeviceClass(device); switch (deviceClass) { case ETrackedDeviceClass.HMD: headset = device; break; case ETrackedDeviceClass.Controller: controllers.Add(device); break; } } uint width = 0; uint height = 0; system.GetRecommendedRenderTargetSize(ref width, ref height); headsetSize = new Size((int)width, (int)height); windowSize = new Size(960, 540); var leftEyeProjection = Convert(system.GetProjectionMatrix(EVREye.Eye_Left, 0.01f, 1000.0f)); var rightEyeProjection = Convert(system.GetProjectionMatrix(EVREye.Eye_Right, 0.01f, 1000.0f)); var leftEyeView = Convert(system.GetEyeToHeadTransform(EVREye.Eye_Left)); var rightEyeView = Convert(system.GetEyeToHeadTransform(EVREye.Eye_Right)); foreach (var controller in controllers) { var modelName = new StringBuilder(255, 255); var propertyError = ETrackedPropertyError.TrackedProp_Success; var length = system.GetStringTrackedDeviceProperty(controller, ETrackedDeviceProperty.Prop_RenderModelName_String, modelName, 255, ref propertyError); if (propertyError == ETrackedPropertyError.TrackedProp_Success) { var modelName2 = modelName.ToString(); while (true) { var pointer = IntPtr.Zero; var modelError = EVRRenderModelError.None; modelError = OpenVR.RenderModels.LoadRenderModel_Async(modelName2, ref pointer); if (modelError == EVRRenderModelError.Loading) { continue; } if (modelError == EVRRenderModelError.None) { var renderModel = System.Runtime.InteropServices.Marshal.PtrToStructure <RenderModel_t>(pointer); controllerModels[controller] = renderModel; break; } } while (true) { var pointer = IntPtr.Zero; var textureError = EVRRenderModelError.None; textureError = OpenVR.RenderModels.LoadTexture_Async(controllerModels[controller].diffuseTextureId, ref pointer); if (textureError == EVRRenderModelError.Loading) { continue; } if (textureError == EVRRenderModelError.None) { var texture = System.Runtime.InteropServices.Marshal.PtrToStructure <RenderModel_TextureMap_t>(pointer); controllerTextures[controller] = texture; break; } } } } int adapterIndex = 0; system.GetDXGIOutputInfo(ref adapterIndex); using (var form = new Form()) using (var factory = new Factory4()) { form.ClientSize = windowSize; var adapter = factory.GetAdapter(adapterIndex); var swapChainDescription = new SwapChainDescription { BufferCount = 1, Flags = SwapChainFlags.None, IsWindowed = true, ModeDescription = new ModeDescription { Format = Format.B8G8R8A8_UNorm, Width = form.ClientSize.Width, Height = form.ClientSize.Height, RefreshRate = new Rational(60, 1) }, OutputHandle = form.Handle, SampleDescription = new SampleDescription(1, 0), SwapEffect = SwapEffect.Discard, Usage = Usage.RenderTargetOutput }; SharpDX.Direct3D11.Device.CreateWithSwapChain(adapter, 
DeviceCreationFlags.None, swapChainDescription, out device, out swapChain); factory.MakeWindowAssociation(form.Handle, WindowAssociationFlags.None); context = device.ImmediateContext; using (var backBuffer = swapChain.GetBackBuffer <Texture2D>(0)) backBufferView = new RenderTargetView(device, backBuffer); var depthBufferDescription = new Texture2DDescription { Format = Format.D16_UNorm, ArraySize = 1, MipLevels = 1, Width = form.ClientSize.Width, Height = form.ClientSize.Height, SampleDescription = new SampleDescription(1, 0), Usage = ResourceUsage.Default, BindFlags = BindFlags.DepthStencil, CpuAccessFlags = CpuAccessFlags.None, OptionFlags = ResourceOptionFlags.None }; using (var depthBuffer = new Texture2D(device, depthBufferDescription)) depthStencilView = new DepthStencilView(device, depthBuffer); // Create Eye Textures var eyeTextureDescription = new Texture2DDescription { ArraySize = 1, BindFlags = BindFlags.RenderTarget, CpuAccessFlags = CpuAccessFlags.None, Format = Format.B8G8R8A8_UNorm, Width = headsetSize.Width, Height = headsetSize.Height, MipLevels = 1, OptionFlags = ResourceOptionFlags.None, SampleDescription = new SampleDescription(1, 0), Usage = ResourceUsage.Default }; var leftEyeTexture = new Texture2D(device, eyeTextureDescription); var rightEyeTexture = new Texture2D(device, eyeTextureDescription); var leftEyeTextureView = new RenderTargetView(device, leftEyeTexture); var rightEyeTextureView = new RenderTargetView(device, rightEyeTexture); // Create Eye Depth Buffer eyeTextureDescription.BindFlags = BindFlags.DepthStencil; eyeTextureDescription.Format = Format.D32_Float; var eyeDepth = new Texture2D(device, eyeTextureDescription); var eyeDepthView = new DepthStencilView(device, eyeDepth); Shapes.Cube.Load(device); Shapes.Sphere.Load(device); Shaders.Position.Load(device); Shaders.Normal.Load(device); Shaders.NormalTexture.Load(device); // Load Controller Models foreach (var controller in controllers) { var model = controllerModels[controller]; controllerVertexBuffers[controller] = new SharpDX.Direct3D11.Buffer(device, model.rVertexData, new BufferDescription { BindFlags = BindFlags.VertexBuffer, SizeInBytes = (int)model.unVertexCount * 32 }); controllerVertexBufferBindings[controller] = new VertexBufferBinding(controllerVertexBuffers[controller], 32, 0); controllerIndexBuffers[controller] = new SharpDX.Direct3D11.Buffer(device, model.rIndexData, new BufferDescription { BindFlags = BindFlags.IndexBuffer, SizeInBytes = (int)model.unTriangleCount * 3 * 2 }); var texture = controllerTextures[controller]; using (var texture2d = new Texture2D(device, new Texture2DDescription { ArraySize = 1, BindFlags = BindFlags.ShaderResource, Format = Format.R8G8B8A8_UNorm, Width = texture.unWidth, Height = texture.unHeight, MipLevels = 1, SampleDescription = new SampleDescription(1, 0) }, new DataRectangle(texture.rubTextureMapData, texture.unWidth * 4))) controllerTextureViews[controller] = new ShaderResourceView(device, texture2d); } worldViewProjectionBuffer = new SharpDX.Direct3D11.Buffer(device, Utilities.SizeOf <Matrix>(), ResourceUsage.Default, BindFlags.ConstantBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0); var rasterizerStateDescription = RasterizerStateDescription.Default(); //rasterizerStateDescription.FillMode = FillMode.Wireframe; rasterizerStateDescription.IsFrontCounterClockwise = true; //rasterizerStateDescription.CullMode = CullMode.None; rasterizerState = new RasterizerState(device, rasterizerStateDescription); var blendStateDescription = 
BlendStateDescription.Default(); blendStateDescription.RenderTarget[0].BlendOperation = BlendOperation.Add; blendStateDescription.RenderTarget[0].SourceBlend = BlendOption.SourceAlpha; blendStateDescription.RenderTarget[0].DestinationBlend = BlendOption.InverseSourceAlpha; blendStateDescription.RenderTarget[0].IsBlendEnabled = false; blendState = new BlendState(device, blendStateDescription); var depthStateDescription = DepthStencilStateDescription.Default(); depthStateDescription.DepthComparison = Comparison.LessEqual; depthStateDescription.IsDepthEnabled = true; depthStateDescription.IsStencilEnabled = false; depthStencilState = new DepthStencilState(device, depthStateDescription); var samplerStateDescription = SamplerStateDescription.Default(); samplerStateDescription.Filter = Filter.MinMagMipLinear; samplerStateDescription.AddressU = TextureAddressMode.Wrap; samplerStateDescription.AddressV = TextureAddressMode.Wrap; samplerState = new SamplerState(device, samplerStateDescription); startTime = DateTime.Now; frame = 0; windowSize = form.ClientSize; backgroundColor = new RawColor4(0.1f, 0.1f, 0.1f, 1); var vrEvent = new VREvent_t(); var eventSize = (uint)Utilities.SizeOf <VREvent_t>(); head = Matrix.Identity; // Initialize Audio var audioSamples = 1024; var audioDevices = DirectSoundCapture.GetDevices(); //var audioCapture = new DirectSoundCapture(devices.OrderByDescending(d => d.Description.Contains("Mic")).First().DriverGuid); var audioCapture = new DirectSoundCapture(audioDevices.OrderByDescending(d => d.Description.Contains("Mix")).First().DriverGuid); var audioFormat = new WaveFormat(); var audioLength = audioFormat.ConvertLatencyToByteSize(24); var audioData = new byte[audioLength]; var audioPosition = 0; var leftWaveForm = new float[1024 * 8]; var rightWaveForm = new float[1024 * 8]; for (var sample = 0; sample < 1024; sample++) { leftWaveForm[(sample * 8) + 0] = -1.0f + ((float)sample / 512.0f); rightWaveForm[(sample * 8) + 0] = -1.0f + ((float)sample / 512.0f); } var audioBuffer = new CaptureBuffer(audioCapture, new CaptureBufferDescription { BufferBytes = audioLength, Format = audioFormat }); audioBuffer.Start(true); var waveFormBufferDescription = new BufferDescription { BindFlags = BindFlags.VertexBuffer, SizeInBytes = leftWaveForm.Length * sizeof(float), CpuAccessFlags = CpuAccessFlags.Write, Usage = ResourceUsage.Dynamic }; var leftWaveFormVertexBuffer = SharpDX.Direct3D11.Buffer.Create(device, leftWaveForm, waveFormBufferDescription); var rightWaveFormVertexBuffer = SharpDX.Direct3D11.Buffer.Create(device, rightWaveForm, waveFormBufferDescription); var leftWaveFormVertexBufferBinding = new VertexBufferBinding(leftWaveFormVertexBuffer, 8 * sizeof(float), 0); var rightWaveFormVertexBufferBinding = new VertexBufferBinding(rightWaveFormVertexBuffer, 8 * sizeof(float), 0); RenderLoop.Run(form, () => { if (audioBuffer.CurrentCapturePosition != audioBuffer.CurrentRealPosition) { audioBuffer.Read(audioData, 0, audioData.Length, 0, LockFlags.None); for (var sample = 0; sample < 1024; sample++) { leftWaveForm[(sample * 8) + 1] = -BitConverter.ToInt16(audioData, sample * 4) / (float)short.MinValue; rightWaveForm[(sample * 8) + 1] = -BitConverter.ToInt16(audioData, (sample * 4) + 2) / (float)short.MinValue; } DataStream stream; context.MapSubresource(leftWaveFormVertexBuffer, 0, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None, out stream); stream.WriteRange(leftWaveForm); context.UnmapSubresource(leftWaveFormVertexBuffer, 0); stream.Dispose(); 
context.MapSubresource(rightWaveFormVertexBuffer, 0, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None, out stream); stream.WriteRange(rightWaveForm); context.UnmapSubresource(rightWaveFormVertexBuffer, 0); stream.Dispose(); } audioPosition += 8; if (audioPosition >= leftWaveForm.Length) { audioPosition = 0; } while (system.PollNextEvent(ref vrEvent, eventSize)) { switch ((EVREventType)vrEvent.eventType) { case EVREventType.VREvent_TrackedDeviceActivated: var controller = vrEvent.trackedDeviceIndex; controllers.Add(controller); var modelName = new StringBuilder(255, 255); var propertyError = ETrackedPropertyError.TrackedProp_Success; var length = system.GetStringTrackedDeviceProperty(controller, ETrackedDeviceProperty.Prop_RenderModelName_String, modelName, 255, ref propertyError); if (propertyError == ETrackedPropertyError.TrackedProp_Success) { var modelName2 = modelName.ToString(); while (true) { var pointer = IntPtr.Zero; var modelError = EVRRenderModelError.None; modelError = OpenVR.RenderModels.LoadRenderModel_Async(modelName2, ref pointer); if (modelError == EVRRenderModelError.Loading) { continue; } if (modelError == EVRRenderModelError.None) { var renderModel = System.Runtime.InteropServices.Marshal.PtrToStructure <RenderModel_t>(pointer); controllerModels[controller] = renderModel; // Load Controller Model var model = controllerModels[controller]; controllerVertexBuffers[controller] = new SharpDX.Direct3D11.Buffer(device, model.rVertexData, new BufferDescription { BindFlags = BindFlags.VertexBuffer, SizeInBytes = (int)model.unVertexCount * 32 }); controllerVertexBufferBindings[controller] = new VertexBufferBinding(controllerVertexBuffers[controller], 32, 0); controllerIndexBuffers[controller] = new SharpDX.Direct3D11.Buffer(device, model.rIndexData, new BufferDescription { BindFlags = BindFlags.IndexBuffer, SizeInBytes = (int)model.unTriangleCount * 3 * 2 }); break; } } while (true) { var pointer = IntPtr.Zero; var textureError = EVRRenderModelError.None; textureError = OpenVR.RenderModels.LoadTexture_Async(controllerModels[controller].diffuseTextureId, ref pointer); if (textureError == EVRRenderModelError.Loading) { continue; } if (textureError == EVRRenderModelError.None) { var textureMap = System.Runtime.InteropServices.Marshal.PtrToStructure <RenderModel_TextureMap_t>(pointer); controllerTextures[controller] = textureMap; using (var texture2d = new Texture2D(device, new Texture2DDescription { ArraySize = 1, BindFlags = BindFlags.ShaderResource, Format = Format.R8G8B8A8_UNorm, Width = textureMap.unWidth, Height = textureMap.unHeight, MipLevels = 1, SampleDescription = new SampleDescription(1, 0) }, new DataRectangle(textureMap.rubTextureMapData, textureMap.unWidth * 4))) controllerTextureViews[controller] = new ShaderResourceView(device, texture2d); break; } } } break; case EVREventType.VREvent_TrackedDeviceDeactivated: controllers.RemoveAll(c => c == vrEvent.trackedDeviceIndex); break; default: System.Diagnostics.Debug.WriteLine((EVREventType)vrEvent.eventType); break; } } if (form.ClientSize != windowSize) { Utilities.Dispose(ref backBufferView); if (form.ClientSize.Width != 0 && form.ClientSize.Height != 0) { swapChain.ResizeBuffers(1, form.ClientSize.Width, form.ClientSize.Height, Format.B8G8R8A8_UNorm, SwapChainFlags.None); using (var backBuffer = swapChain.GetBackBuffer <Texture2D>(0)) backBufferView = new RenderTargetView(device, backBuffer); } windowSize = form.ClientSize; } // Update Device Tracking compositor.WaitGetPoses(currentPoses, nextPoses); if 
(currentPoses[headset].bPoseIsValid) { Convert(ref currentPoses[headset].mDeviceToAbsoluteTracking, ref head); } foreach (var controller in controllers) { var controllerMatrix = Matrix.Identity; Convert(ref currentPoses[controller].mDeviceToAbsoluteTracking, ref controllerMatrix); } // Render Left Eye context.Rasterizer.SetViewport(0, 0, headsetSize.Width, headsetSize.Height); context.OutputMerger.SetTargets(eyeDepthView, leftEyeTextureView); context.OutputMerger.SetDepthStencilState(depthStencilState); context.ClearRenderTargetView(leftEyeTextureView, backgroundColor); context.ClearDepthStencilView(eyeDepthView, DepthStencilClearFlags.Depth, 1.0f, 0); Shaders.Normal.Apply(context); context.Rasterizer.State = rasterizerState; context.OutputMerger.SetBlendState(blendState); context.OutputMerger.SetDepthStencilState(depthStencilState); context.PixelShader.SetSampler(0, samplerState); var ratio = (float)headsetSize.Width / (float)headsetSize.Height; var projection = leftEyeProjection; var view = Matrix.Invert(leftEyeView * head); var world = Matrix.Scaling(1.0f + (Math.Abs(leftWaveForm[audioPosition + 1]) * 0.1f)) * Matrix.Translation(0, 1.0f, 0); worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer); //Shapes.Cube.Begin(context); //Shapes.Cube.Draw(context); Shapes.Sphere.Begin(context); Shapes.Sphere.Draw(context); // Draw Controllers context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList; Shaders.NormalTexture.Apply(context); context.PixelShader.SetSampler(0, samplerState); foreach (var controller in controllers) { context.InputAssembler.SetVertexBuffers(0, controllerVertexBufferBindings[controller]); context.InputAssembler.SetIndexBuffer(controllerIndexBuffers[controller], Format.R16_UInt, 0); context.PixelShader.SetShaderResource(0, controllerTextureViews[controller]); Convert(ref currentPoses[controller].mDeviceToAbsoluteTracking, ref world); world = Matrix.Scaling(1.0f + (Math.Abs(leftWaveForm[audioPosition + 1]) * 0.5f)) * world; worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer); context.DrawIndexed((int)controllerModels[controller].unTriangleCount * 3 * 4, 0, 0); } // Draw Waveforms Shaders.Position.Apply(context); world = Matrix.Scaling(100, 2.5f, 1) * Matrix.Translation(0, 1, 1); worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.InputAssembler.PrimitiveTopology = PrimitiveTopology.LineStrip; context.InputAssembler.SetVertexBuffers(0, leftWaveFormVertexBufferBinding); context.Draw(1024, 0); world = Matrix.Scaling(100, 2.5f, 1) * Matrix.Translation(0, 1, -1); worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.InputAssembler.SetVertexBuffers(0, rightWaveFormVertexBufferBinding); context.Draw(1024, 0); // Present Left Eye var texture = new Texture_t { eType = ETextureType.DirectX, eColorSpace = EColorSpace.Gamma, handle = leftEyeTextureView.Resource.NativePointer }; var bounds = new VRTextureBounds_t { uMin = 0.0f, uMax = 1.0f, vMin = 0.0f, vMax = 1.0f, }; var submitError = compositor.Submit(EVREye.Eye_Left, ref texture, ref bounds, EVRSubmitFlags.Submit_Default); if (submitError != EVRCompositorError.None) { 
System.Diagnostics.Debug.WriteLine(submitError); } // Render Right Eye context.Rasterizer.SetViewport(0, 0, headsetSize.Width, headsetSize.Height); context.OutputMerger.SetTargets(eyeDepthView, rightEyeTextureView); context.OutputMerger.SetDepthStencilState(depthStencilState); context.ClearRenderTargetView(rightEyeTextureView, backgroundColor); context.ClearDepthStencilView(eyeDepthView, DepthStencilClearFlags.Depth, 1.0f, 0); Shaders.Normal.Apply(context); context.Rasterizer.State = rasterizerState; context.OutputMerger.SetBlendState(blendState); context.OutputMerger.SetDepthStencilState(depthStencilState); context.PixelShader.SetSampler(0, samplerState); projection = rightEyeProjection; view = Matrix.Invert(rightEyeView * head); world = Matrix.Scaling(1.0f + (Math.Abs(leftWaveForm[audioPosition + 1]) * 0.1f)) * Matrix.Translation(0, 1.0f, 0); worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer); //Shapes.Cube.Begin(context); //Shapes.Cube.Draw(context); Shapes.Sphere.Begin(context); Shapes.Sphere.Draw(context); // Draw Controllers context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList; Shaders.NormalTexture.Apply(context); context.PixelShader.SetSampler(0, samplerState); foreach (var controller in controllers) { context.InputAssembler.SetVertexBuffers(0, controllerVertexBufferBindings[controller]); context.InputAssembler.SetIndexBuffer(controllerIndexBuffers[controller], Format.R16_UInt, 0); context.PixelShader.SetShaderResource(0, controllerTextureViews[controller]); Convert(ref currentPoses[controller].mDeviceToAbsoluteTracking, ref world); world = Matrix.Scaling(1.0f + (Math.Abs(leftWaveForm[audioPosition + 1]) * 0.5f)) * world; worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer); context.DrawIndexed((int)controllerModels[controller].unTriangleCount * 3 * 4, 0, 0); } // Draw Waveforms Shaders.Position.Apply(context); world = Matrix.Scaling(100, 2.5f, 1) * Matrix.Translation(0, 1, 1); worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.InputAssembler.PrimitiveTopology = PrimitiveTopology.LineStrip; context.InputAssembler.SetVertexBuffers(0, leftWaveFormVertexBufferBinding); context.Draw(1024, 0); world = Matrix.Scaling(100, 2.5f, 1) * Matrix.Translation(0, 1, -1); worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.InputAssembler.SetVertexBuffers(0, rightWaveFormVertexBufferBinding); context.Draw(1024, 0); // Present Right Eye texture.handle = rightEyeTextureView.Resource.NativePointer; submitError = compositor.Submit(EVREye.Eye_Right, ref texture, ref bounds, EVRSubmitFlags.Submit_Default); if (submitError != EVRCompositorError.None) { System.Diagnostics.Debug.WriteLine(submitError); } // Render Window context.Rasterizer.SetViewport(0, 0, windowSize.Width, windowSize.Height); context.OutputMerger.SetTargets(depthStencilView, backBufferView); context.OutputMerger.SetDepthStencilState(depthStencilState); context.ClearRenderTargetView(backBufferView, backgroundColor); context.ClearDepthStencilView(depthStencilView, DepthStencilClearFlags.Depth, 1.0f, 0); Shaders.Normal.Apply(context); context.Rasterizer.State = 
rasterizerState; context.OutputMerger.SetBlendState(blendState); context.OutputMerger.SetDepthStencilState(depthStencilState); context.PixelShader.SetSampler(0, samplerState); ratio = (float)form.ClientSize.Width / (float)form.ClientSize.Height; projection = Matrix.PerspectiveFovRH(3.14f / 3.0f, ratio, 0.01f, 1000); view = Matrix.Invert(head); world = Matrix.Scaling(1.0f + (Math.Abs(leftWaveForm[audioPosition + 1]) * 0.1f)) * Matrix.Translation(0, 1.0f, 0); worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer); //Shapes.Cube.Begin(context); //Shapes.Cube.Draw(context); Shapes.Sphere.Begin(context); Shapes.Sphere.Draw(context); // Draw Controllers context.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList; Shaders.NormalTexture.Apply(context); context.PixelShader.SetSampler(0, samplerState); foreach (var controller in controllers) { context.InputAssembler.SetVertexBuffers(0, controllerVertexBufferBindings[controller]); context.InputAssembler.SetIndexBuffer(controllerIndexBuffers[controller], Format.R16_UInt, 0); Convert(ref currentPoses[controller].mDeviceToAbsoluteTracking, ref world); world = Matrix.Scaling(1.0f + (Math.Abs(leftWaveForm[audioPosition + 1]) * 0.5f)) * world; worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.VertexShader.SetConstantBuffer(0, worldViewProjectionBuffer); context.DrawIndexed((int)controllerModels[controller].unTriangleCount * 3 * 4, 0, 0); } // Draw Waveforms Shaders.Position.Apply(context); world = Matrix.Scaling(100, 2.5f, 1) * Matrix.Translation(0, 1, 1); worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.InputAssembler.PrimitiveTopology = PrimitiveTopology.LineStrip; context.InputAssembler.SetVertexBuffers(0, leftWaveFormVertexBufferBinding); context.Draw(1024, 0); world = Matrix.Scaling(100, 2.5f, 1) * Matrix.Translation(0, 1, -1); worldViewProjection = world * view * projection; context.UpdateSubresource(ref worldViewProjection, worldViewProjectionBuffer); context.InputAssembler.SetVertexBuffers(0, rightWaveFormVertexBufferBinding); context.Draw(1024, 0); // Show Backbuffer swapChain.Present(0, PresentFlags.None); }); } }