/// <summary>
/// Adds the specified DSP unit as an input of this DSP unit.
/// </summary>
/// <param name="target">The DSP unit to attach as an input.</param>
/// <param name="connection">Receives the connection between the two units;
/// a new wrapper is allocated when the caller passes null.</param>
/// <returns>RESULT.OK on success, otherwise the FMOD error code
/// (ERR_INVALID_PARAM when the native call throws).</returns>
public RESULT addInput(DSP target, ref DSPConnection connection)
{
    IntPtr rawConnection = new IntPtr();
    RESULT result;
    try
    {
        result = FMOD_DSP_AddInput(dspraw, target.getRaw(), ref rawConnection);
    }
    catch
    {
        // Native interop failure is surfaced as an FMOD error code.
        result = RESULT.ERR_INVALID_PARAM;
    }
    if (result != RESULT.OK)
    {
        return result;
    }
    // Reuse the caller's wrapper when one was supplied; otherwise allocate one.
    if (connection != null)
    {
        connection.setRaw(rawConnection);
    }
    else
    {
        DSPConnection created = new DSPConnection();
        created.setRaw(rawConnection);
        connection = created;
    }
    return result;
}
/// <summary>
/// Wraps an existing FMOD <see cref="DSPConnection"/>.
/// </summary>
/// <param name="connection">The FMOD connection to wrap; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="connection"/> is null.</exception>
internal DspConnection(DSPConnection connection) : this()
{
    FmodDspConnection = connection ?? throw new ArgumentNullException(nameof(connection));
}
/// <summary>
/// Sets per-dimension attenuation values on a connection via an atomic command block.
/// </summary>
/// <param name="conn">The connection whose attenuation is updated.</param>
/// <param name="interpolationLength">Number of samples over which to interpolate to the new values.</param>
/// <param name="value">One attenuation value per dimension (at most 255 — the
/// underlying API takes the dimension count as a byte).</param>
/// <exception cref="ArgumentException">Thrown when no values are supplied.</exception>
/// <exception cref="ArgumentOutOfRangeException">Thrown when more than 255 values are supplied.</exception>
public void Attenuate(DSPConnection conn, int interpolationLength, params float[] value)
{
    // Guard: a null/empty params array would reach `fixed` and hand a null
    // pointer to the native side; >255 values would be silently truncated
    // by the (byte) cast below.
    if (value == null || value.Length == 0)
    {
        throw new ArgumentException("At least one attenuation value is required.", nameof(value));
    }
    if (value.Length > byte.MaxValue)
    {
        throw new ArgumentOutOfRangeException(nameof(value),
            "At most 255 attenuation values are supported (dimension count is a byte).");
    }
    using (var block = Graph.CreateCommandBlock())
    {
        unsafe
        {
            // Pin the managed array for the duration of the native call.
            fixed (float* valuePtr = value)
            {
                block.SetAttenuation(conn, valuePtr, (byte)value.Length, interpolationLength);
            }
        }
    }
}
/// <summary>
/// Creates a parametric-EQ DSP and attaches it to the channel at the given index.
/// </summary>
/// <param name="index">The FMOD channel index to attach the EQ to.</param>
public void ActivateDSP(int index)
{
    _dspIndex = index;
    var channel = new Channel();
    _dsp = new DSP();
    var dspc = new DSPConnection();
    CheckFMODErrors(_fmodSystem.getChannel(index, ref channel));
    CheckFMODErrors(_fmodSystem.createDSPByType(DSP_TYPE.PARAMEQ, ref _dsp));
    // Fix: setParameter returns a RESULT that was previously discarded while
    // every other FMOD call here is error-checked — check these too.
    CheckFMODErrors(_dsp.setParameter((int)DSP_PARAMEQ.CENTER, 70));
    CheckFMODErrors(_dsp.setParameter((int)DSP_PARAMEQ.BANDWIDTH, 1.5f));
    CheckFMODErrors(channel.addDSP(_dsp, ref dspc));
}
// Builds the DSPGraph, attaches it to the default audio output, registers a
// main-thread event handler, and creates/connects the clip-player node.
// NOTE(review): the command block is not wrapped in try/finally — if an
// exception occurs between CreateCommandBlock and Complete, the block leaks.
// This mirrors the Unity DSPGraph sample code; confirm acceptable here.
void Start()
{
    // Derive the graph's sound format / channel count from the project audio settings.
    var format = ChannelEnumConverter.GetSoundFormatFromSpeakerMode(AudioSettings.speakerMode);
    var channels = ChannelEnumConverter.GetChannelCountFromSoundFormat(format);
    AudioSettings.GetDSPBufferSize(out var bufferLength, out var numBuffers);
    var sampleRate = AudioSettings.outputSampleRate;
    m_Graph = DSPGraph.Create(format, channels, bufferLength, sampleRate);
    // Drive the graph from Unity's default audio output.
    var driver = new DefaultDSPGraphDriver { Graph = m_Graph };
    m_Output = driver.AttachToDefaultOutput();
    // Add an event handler delegate to the graph for ClipStopped. So we are notified
    // of when a clip is stopped in the node and can handle the resources on the main thread.
    m_HandlerID = m_Graph.AddNodeEventHandler<ClipStopped>((node, evt) =>
    {
        Debug.Log("Received ClipStopped event on main thread, cleaning resources");
    });
    // All async interaction with the graph must be done through a DSPCommandBlock.
    // Create it here and complete it once all commands are added.
    var block = m_Graph.CreateCommandBlock();
    m_Node = block.CreateDSPNode<PlayClipNode.Parameters, PlayClipNode.SampleProviders, PlayClipNode>();
    // Currently input and output ports are dynamic and added via this API to a node.
    // This will change to a static definition of nodes in the future.
    block.AddOutletPort(m_Node, 2, SoundFormat.Stereo);
    // Connect the node to the root of the graph.
    m_Connection = block.Connect(m_Node, 0, m_Graph.RootDSP, 0);
    // We are done, fire off the command block atomically to the mixer thread.
    block.Complete();
}
/// <summary>
/// Retrieves the DSP unit and connection at the given input index of this unit.
/// </summary>
/// <param name="index">Zero-based index of the input to query.</param>
/// <param name="input">Receives the input DSP; a new wrapper is allocated when null.</param>
/// <param name="inputconnection">Receives the connection to that input; a new
/// wrapper is allocated when null.</param>
/// <returns>RESULT.OK on success, otherwise the FMOD error code
/// (ERR_INVALID_PARAM when the native call throws).</returns>
public RESULT getInput (int index, ref DSP input, ref DSPConnection inputconnection)
{
    IntPtr rawInput = new IntPtr();
    IntPtr rawConnection = new IntPtr();
    RESULT result;
    try
    {
        result = FMOD_DSP_GetInput(dspraw, index, ref rawInput, ref rawConnection);
    }
    catch
    {
        // Native interop failure is surfaced as an FMOD error code.
        result = RESULT.ERR_INVALID_PARAM;
    }
    if (result != RESULT.OK)
    {
        return result;
    }
    // Reuse the caller's wrappers when supplied; otherwise allocate fresh ones.
    if (input != null)
    {
        input.setRaw(rawInput);
    }
    else
    {
        DSP createdInput = new DSP();
        createdInput.setRaw(rawInput);
        input = createdInput;
    }
    if (inputconnection != null)
    {
        inputconnection.setRaw(rawConnection);
    }
    else
    {
        DSPConnection createdConnection = new DSPConnection();
        createdConnection.setRaw(rawConnection);
        inputconnection = createdConnection;
    }
    return result;
}
/// <summary>
/// Attaches a DSP unit to this channel's DSP chain.
/// </summary>
/// <param name="dsp">The DSP unit to attach.</param>
/// <param name="connection">Receives the resulting connection; a new wrapper
/// is allocated when the caller passes null.</param>
/// <returns>RESULT.OK on success, otherwise the FMOD error code
/// (ERR_INVALID_PARAM when the native call throws).</returns>
public RESULT addDSP (DSP dsp, ref DSPConnection connection)
{
    IntPtr rawConnection = new IntPtr();
    RESULT result;
    try
    {
        result = FMOD_Channel_AddDSP(channelraw, dsp.getRaw(), ref rawConnection);
    }
    catch
    {
        // Native interop failure is surfaced as an FMOD error code.
        result = RESULT.ERR_INVALID_PARAM;
    }
    if (result != RESULT.OK)
    {
        return result;
    }
    // Reuse the caller's wrapper when one was supplied; otherwise allocate one.
    if (connection != null)
    {
        connection.setRaw(rawConnection);
    }
    else
    {
        DSPConnection created = new DSPConnection();
        created.setRaw(rawConnection);
        connection = created;
    }
    return result;
}
/// <summary>
/// Sets a single attenuation value on a connection via an atomic command block.
/// </summary>
/// <param name="conn">The connection whose attenuation is updated.</param>
/// <param name="value">The attenuation value to apply.</param>
/// <param name="interpolationLength">Number of samples over which to interpolate
/// to the new value; 0 applies it immediately.</param>
public void Attenuate(DSPConnection conn, float value, int interpolationLength = 0)
{
    // The command block is disposed (and thus flushed) when the scope exits.
    using (var commandBlock = Graph.CreateCommandBlock())
    {
        commandBlock.SetAttenuation(conn, value, interpolationLength);
    }
}
// System initialization: allocates native containers, builds the DSPGraph and
// driver from the project audio settings, wires the mix/ILD nodes, creates the
// entity queries, and pre-warms Burst kernels with a cancelled command block.
// Order matters: containers must exist before the graph is built because
// m_packedFrameCounterBufferId's pointer is handed to the ILD node kernel.
protected override void OnCreate()
{
    //Initialize containers first
    m_mixNodePortFreelist = new NativeList <int>(Allocator.Persistent);
    m_mixNodePortCount = new NativeReference <int>(Allocator.Persistent);
    m_ildNodePortCount = new NativeReference <int>(Allocator.Persistent);
    m_packedFrameCounterBufferId = new NativeReference <long>(Allocator.Persistent);
    m_audioFrame = new NativeReference <int>(Allocator.Persistent);
    m_lastReadBufferId = new NativeReference <int>(Allocator.Persistent);
    m_buffersInFlight = new List <ManagedIldBuffer>();
    // Seed default audio settings on the blackboard entity unless already present.
    worldBlackboardEntity.AddComponentDataIfMissing(new AudioSettings
    {
        audioFramesPerUpdate = 3,
        audioSubframesPerFrame = 1,
        logWarningIfBuffersAreStarved = false
    });
    //Create graph and driver
    var format = ChannelEnumConverter.GetSoundFormatFromSpeakerMode(UnityEngine.AudioSettings.speakerMode);
    var channels = ChannelEnumConverter.GetChannelCountFromSoundFormat(format);
    UnityEngine.AudioSettings.GetDSPBufferSize(out m_samplesPerSubframe, out _);
    m_sampleRate = UnityEngine.AudioSettings.outputSampleRate;
    m_graph = DSPGraph.Create(format, channels, m_samplesPerSubframe, m_sampleRate);
    m_driver = new LatiosDSPGraphDriver { Graph = m_graph };
    m_outputHandle = m_driver.AttachToDefaultOutput();
    // First command block: build the persistent node topology
    // (mix node -> graph root, plus the ILD reader node).
    var commandBlock = m_graph.CreateCommandBlock();
    m_mixNode = commandBlock.CreateDSPNode <MixStereoPortsNode.Parameters, MixStereoPortsNode.SampleProviders, MixStereoPortsNode>();
    commandBlock.AddOutletPort(m_mixNode, 2);
    m_mixToOutputConnection = commandBlock.Connect(m_mixNode, 0, m_graph.RootDSP, 0);
    m_ildNode = commandBlock.CreateDSPNode <ReadIldBuffersNode.Parameters, ReadIldBuffersNode.SampleProviders, ReadIldBuffersNode>();
    unsafe
    {
        // Hand the ILD node a raw pointer into the packed-frame-counter
        // NativeReference allocated above, so the kernel can read it directly.
        commandBlock.UpdateAudioKernel <SetReadIldBuffersNodePackedFrameBufferId, ReadIldBuffersNode.Parameters, ReadIldBuffersNode.SampleProviders, ReadIldBuffersNode>(
            new SetReadIldBuffersNodePackedFrameBufferId { ptr = (long *)m_packedFrameCounterBufferId.GetUnsafePtr() },
            m_ildNode);
    }
    commandBlock.Complete();
    //Create queries
    m_aliveListenersQuery = Fluent.WithAll <AudioListener>(true).Build();
    m_deadListenersQuery = Fluent.Without <AudioListener>().WithAll <ListenerGraphState>().Build();
    m_oneshotsToDestroyWhenFinishedQuery = Fluent.WithAll <AudioSourceOneShot>().WithAll <AudioSourceDestroyOneShotWhenFinished>(true).Build();
    m_oneshotsQuery = Fluent.WithAll <AudioSourceOneShot>().Build();
    m_loopedQuery = Fluent.WithAll <AudioSourceLooped>().Build();
    //Force initialization of Burst
    // Second command block: issue one of each kernel/update so Burst compiles
    // them now rather than on the first audio frame, then Cancel() so none of
    // these dummy commands actually mutate the graph.
    commandBlock = m_graph.CreateCommandBlock();
    var dummyNode = commandBlock.CreateDSPNode <MixPortsToStereoNode.Parameters, MixPortsToStereoNode.SampleProviders, MixPortsToStereoNode>();
    StateVariableFilterNode.Create(commandBlock, StateVariableFilterNode.FilterType.Bandpass, 0f, 0f, 0f, 1);
    commandBlock.UpdateAudioKernel <MixPortsToStereoNodeUpdate, MixPortsToStereoNode.Parameters, MixPortsToStereoNode.SampleProviders, MixPortsToStereoNode>(
        new MixPortsToStereoNodeUpdate { leftChannelCount = 0 },
        dummyNode);
    commandBlock.UpdateAudioKernel <ReadIldBuffersNodeUpdate, ReadIldBuffersNode.Parameters, ReadIldBuffersNode.SampleProviders, ReadIldBuffersNode>(new ReadIldBuffersNodeUpdate
    {
        ildBuffer = new IldBuffer(),
    }, m_ildNode);
    // Deliberate: Cancel, not Complete — the block exists only to warm Burst.
    commandBlock.Cancel();
}
/// <summary>
/// Captures the DSP node and its graph connection for an audio clip player.
/// </summary>
/// <param name="node">The DSP node playing the clip.</param>
/// <param name="connection">The connection attaching the node to the graph.</param>
public AudioClipPlayerSystemState(DSPNode node, DSPConnection connection)
{
    this.node = node;
    this.connection = connection;
}