/// <summary>
/// Creates a new local voice (outgoing audio stream) that encodes 'short' samples
/// while reading 'float' buffers from the given reader (converted on push).
/// </summary>
/// <param name="voiceInfo">Outgoing audio stream parameters.</param>
/// <param name="reader">Source of float audio buffers; converted to short before encoding.</param>
/// <returns>Outgoing stream handler.</returns>
public static Voice.LocalVoice CreateLocalVoiceShort(Voice.VoiceInfo voiceInfo, Voice.IBufferReader <float> reader)
{
    var voice = instance.client.VoiceClient.CreateLocalVoiceAudio <short>(voiceInfo);
    voice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPoolFloatToShort(voice, reader);
    return voice;
}
/// <summary>
/// Handles incoming remote voice info: warns on duplicates, finds the PhotonVoiceSpeaker
/// whose photonView id matches the voice's UserData, and links it if found.
/// </summary>
/// <param name="playerId">Owning player of the remote voice.</param>
/// <param name="voiceId">Id of the remote voice (unique per player).</param>
/// <param name="voiceInfo">Stream parameters; UserData carries the speaker's view id.</param>
public void OnRemoteVoiceInfo(int playerId, byte voiceId, Voice.VoiceInfo voiceInfo)
{
    var key = new VoiceIdPair(playerId, voiceId);
    if (this.voiceSpeakers.ContainsKey(key))
    {
        // Use the format overload for consistency with the logging style used elsewhere in this file.
        Debug.LogWarningFormat("PUNVoice: Info duplicate for voice #{0} of player {1}", voiceId, playerId);
    }

    PhotonVoiceSpeaker speaker = null;
    PhotonVoiceSpeaker[] speakers = GameObject.FindObjectsOfType <PhotonVoiceSpeaker>();
    foreach (var s in speakers)
    {
        if (s.photonView.viewID == (int)voiceInfo.UserData)
        {
            speaker = s;
            break;
        }
    }

    if (speaker == null)
    {
        // Speaker may be instantiated later; the voice stays unlinked until then.
        // Debug.LogWarning("PUNVoice: No PhotonVoiceMic found for info of voice #" + voiceId + " of player " + playerId);
    }
    else
    {
        this.linkVoice(playerId, voiceId, voiceInfo, speaker);
    }
}
/// <summary>
/// Creates new local voice (outgoing audio stream) of 'float' samples.
/// </summary>
/// <param name="voiceInfo">Outgoing audio stream parameters (should be set according to Opus encoder restrictions).</param>
/// <param name="reader">Source of float audio buffers, pushed into the voice via an async pool adapter.</param>
/// <returns>Outgoing stream handler.</returns>
/// <remarks>
/// The reader's sampling rate and voiceInfo.SamplingRate may not match. Automatic resampling will occur in this case.
/// </remarks>
public static Voice.LocalVoiceAudioFloat CreateLocalVoice(Voice.VoiceInfo voiceInfo, Voice.IBufferReader <float> reader) { var localVoice = (Voice.LocalVoiceAudioFloat)instance.client.VoiceClient.CreateLocalVoiceAudio <float>(voiceInfo); localVoice.LocalUserServiceable = new Voice.BufferReaderPushAdapterAsyncPool <float>(localVoice, reader); return(localVoice); }
/// <summary>
/// Creates the local outgoing audio voice for the given source, choosing the sample type
/// ('float' or 'short') from the source's interface and the 'forceShort' flag:
/// pusher sources get a callback wired via SetCallback, reader sources get a push adapter,
/// and a float reader is converted to short when forceShort is set.
/// Returns Voice.LocalVoiceAudio.Dummy (with an error log) when the source type is unsupported;
/// a float pusher with forceShort throws NotImplementedException.
/// </summary>
/// <param name="voiceInfo">Outgoing audio stream parameters.</param>
/// <param name="source">Audio source: IAudioPusher or IAudioReader, float or short samples.</param>
/// <returns>Outgoing stream handler (or Dummy on unsupported source).</returns>
protected virtual Voice.LocalVoice createLocalVoiceAudio(Voice.VoiceInfo voiceInfo, Voice.IAudioSource source) { if (source is Voice.IAudioPusher <float> ) { if (forceShort) { throw new NotImplementedException("Voice.IAudioPusher<float> at 'short' voice is not supported currently"); } else { var localVoice = PhotonVoiceNetwork.VoiceClient.CreateLocalVoiceAudio <float>(voiceInfo); ((Voice.IAudioPusher <float>)source).SetCallback(buf => localVoice.PushDataAsync(buf), localVoice); localVoice.Encrypt = PhotonVoiceSettings.Instance.Encrypt; return(localVoice); } } else if (source is Voice.IAudioPusher <short> ) { var localVoice = PhotonVoiceNetwork.VoiceClient.CreateLocalVoiceAudio <short>(voiceInfo); ((Voice.IAudioPusher <short>)source).SetCallback(buf => localVoice.PushDataAsync(buf), localVoice); localVoice.Encrypt = PhotonVoiceSettings.Instance.Encrypt; return(localVoice); } else if (source is Voice.IAudioReader <float> ) { if (forceShort) { if (PhotonVoiceSettings.Instance.DebugInfo) { Debug.LogFormat("PUNVoice: Creating local voice with source samples type conversion from float to short."); } var localVoice = PhotonVoiceNetwork.VoiceClient.CreateLocalVoiceAudio <short>(voiceInfo); localVoice.LocalUserServiceable = new Voice.BufferReaderPushAdapterAsyncPoolFloatToShort(localVoice, source as Voice.IAudioReader <float>); localVoice.Encrypt = PhotonVoiceSettings.Instance.Encrypt; return(localVoice); } else { var localVoice = PhotonVoiceNetwork.VoiceClient.CreateLocalVoiceAudio <float>(voiceInfo); localVoice.LocalUserServiceable = new Voice.BufferReaderPushAdapterAsyncPool <float>(localVoice, source as Voice.IAudioReader <float>); localVoice.Encrypt = PhotonVoiceSettings.Instance.Encrypt; return(localVoice); } } else if (source is Voice.IAudioReader <short> ) { var localVoice = PhotonVoiceNetwork.VoiceClient.CreateLocalVoiceAudio <short>(voiceInfo); localVoice.LocalUserServiceable = new Voice.BufferReaderPushAdapterAsyncPool <short>(localVoice, source as 
Voice.IAudioReader <short>); localVoice.Encrypt = PhotonVoiceSettings.Instance.Encrypt; return(localVoice); } else { Debug.LogErrorFormat("PUNVoice: PhotonVoiceRecorder createLocalVoiceAudio does not support Voice.IAudioReader of type {0}", source.GetType()); return(Voice.LocalVoiceAudio.Dummy); } }
/// <summary>
/// Sets up the outgoing voice for the locally-owned view: requests microphone permission,
/// wraps either a microphone device or the assigned AudioClip as the audio source,
/// builds Opus voice info (UserData = photonView.viewID so receivers can find the speaker),
/// creates the local voice, applies detection/transmit settings, and notifies listeners.
/// Runs in Start so users can change MicrophoneDevice in Awake().
/// </summary>
// give user a chance to change MicrophoneDevice in Awake() void Start() { if (photonView.isMine) { var pvs = PhotonVoiceSettings.Instance; Application.RequestUserAuthorization(UserAuthorization.Microphone); // put required sample rate into audio source and encoder - both adjust it if needed Voice.IBufferReader <float> audioStream; int channels = 0; int sourceSamplingRate = 0; if (AudioClip == null) { if (Microphone.devices.Length < 1) { // Error already logged in PhotonVoiceNetwork.Awake() return; } var micDev = this.MicrophoneDevice != null ? this.MicrophoneDevice : PhotonVoiceNetwork.MicrophoneDevice; if (PhotonVoiceSettings.Instance.DebugInfo) { Debug.LogFormat("PUNVoice: Setting recorder's microphone device to {0}", micDev); } var mic = new MicWrapper(micDev, (int)pvs.SamplingRate); sourceSamplingRate = mic.SourceSamplingRate; channels = mic.Channels; audioStream = mic; } else { audioStream = new AudioClipWrapper(AudioClip); sourceSamplingRate = AudioClip.frequency; channels = AudioClip.channels; if (this.LoopAudioClip) { ((AudioClipWrapper)audioStream).Loop = true; } } Voice.VoiceInfo voiceInfo = Voice.VoiceInfo.CreateAudioOpus(pvs.SamplingRate, sourceSamplingRate, channels, pvs.FrameDuration, pvs.Bitrate, photonView.viewID); this.voice = createLocalVoice(voiceInfo, audioStream); this.VoiceDetector.On = PhotonVoiceSettings.Instance.VoiceDetection; this.VoiceDetector.Threshold = PhotonVoiceSettings.Instance.VoiceDetectionThreshold; if (this.voice != Voice.LocalVoiceAudio.Dummy) { this.voice.Transmit = PhotonVoiceSettings.Instance.AutoTransmit; } else if (PhotonVoiceSettings.Instance.AutoTransmit) { Debug.LogWarning("PUNVoice: Cannot Transmit."); } sendVoiceCreatedMessage(voiceInfo); } }
/// <summary>
/// Sets up the outgoing voice for the locally-owned view (older variant):
/// falls back to the network-wide microphone device when none was set on this recorder,
/// wraps the microphone or the assigned AudioClip, creates the local voice through
/// PhotonVoiceNetwork, and applies voice-detection settings.
/// Runs in Start so users can change MicrophoneDevice in Awake().
/// </summary>
// give user a chance to change MicrophoneDevice in Awake() void Start() { if (photonView.isMine) { var pvs = PhotonVoiceSettings.Instance; if (!this.microphoneDeviceSet) { this.MicrophoneDevice = PhotonVoiceNetwork.MicrophoneDevice; } Application.RequestUserAuthorization(UserAuthorization.Microphone); // put required sample rate into audio source and encoder - both adjust it if needed Voice.IAudioStream audioStream; int channels = 0; if (AudioClip == null) { if (PhotonVoiceSettings.Instance.DebugInfo) { Debug.Log("PUNVoice: Setting recorder's microphone device to " + this.MicrophoneDevice); } var mic = new MicWrapper(this.MicrophoneDevice, (int)pvs.SamplingRate); this.microphoneDeviceUsed = true; channels = mic.Channels; audioStream = mic; } else { audioStream = new AudioClipWrapper(AudioClip); channels = AudioClip.channels; if (this.LoopAudioClip) { ((AudioClipWrapper)audioStream).Loop = true; } } Voice.VoiceInfo voiceInfo = new Voice.VoiceInfo((int)pvs.SamplingRate, channels, (int)pvs.Delay, pvs.Bitrate, photonView.viewID); this.voice = PhotonVoiceNetwork.CreateLocalVoice(audioStream, voiceInfo); this.VoiceDetector.On = PhotonVoiceSettings.Instance.VoiceDetection; this.VoiceDetector.Threshold = PhotonVoiceSettings.Instance.VoiceDetectionThreshold; } }
/// <summary>
/// Links a remote voice to a speaker: (re)initializes the speaker's playback from the voice
/// parameters and records the (player, voice) -> speaker mapping.
/// </summary>
/// <remarks>
/// NOTE(review): OnVoiceLinked is called before the duplicate check, so it runs again even when
/// the same speaker is already linked — presumably a harmless re-initialization; confirm.
/// </remarks>
private void linkVoice(int playerId, byte voiceId, Voice.VoiceInfo voiceInfo, PhotonVoiceSpeaker speaker) { speaker.OnVoiceLinked(voiceInfo.SamplingRate, voiceInfo.Channels, voiceInfo.FrameDurationSamples, PhotonVoiceSettings.Instance.PlayDelayMs); var key = new VoiceIdPair(playerId, voiceId); PhotonVoiceSpeaker oldSpeaker; if (this.voiceSpeakers.TryGetValue(key, out oldSpeaker)) { if (oldSpeaker == speaker) { return; } else { Debug.LogFormat("PUNVoice: Player {0} voice #{1} speaker replaced.", playerId, voiceId); } } else { Debug.LogFormat("PUNVoice: Player {0} voice #{1} speaker created.", playerId, voiceId); } this.voiceSpeakers[key] = speaker; }
/// <summary>
/// Handles incoming remote voice info (options variant): subscribes frame-decode and
/// voice-remove callbacks, warns on duplicate (player, voice) keys, finds the speaker whose
/// photonView id matches the voice's UserData and links it, then forwards the info to the
/// user-level OnRemoteVoiceInfoAction (without exposing the options object).
/// </summary>
public void OnRemoteVoiceInfo(int channelId, int playerId, byte voiceId, Voice.VoiceInfo voiceInfo, ref Voice.RemoteVoiceOptions options) { options.OnDecodedFrameFloatAction += (frame) => OnAudioFrame(playerId, voiceId, frame); options.OnRemoteVoiceRemoveAction += () => OnRemoteVoiceRemove(playerId, voiceId); var key = new VoiceIdPair(playerId, voiceId); if (this.voiceSpeakers.ContainsKey(key)) { Debug.LogWarningFormat("PUNVoice: Info duplicate for voice #{0} of player {1}", voiceId, playerId); } PhotonVoiceSpeaker speaker = null; PhotonVoiceSpeaker[] speakers = GameObject.FindObjectsOfType <PhotonVoiceSpeaker>(); foreach (var s in speakers) { if (s.photonView.viewID == (int)voiceInfo.UserData) { speaker = s; break; } } if (speaker == null) { // Debug.LogWarning("PUNVoice: No PhotonVoiceMic found for info of voice #" + voiceId + " of player " + playerId); } else { this.linkVoice(playerId, voiceId, voiceInfo, speaker); } // do not expose options to user code if (this.OnRemoteVoiceInfoAction != null) { this.OnRemoteVoiceInfoAction(playerId, voiceId, voiceInfo); } }
/// <summary>
/// Links a remote voice to a speaker: (re)initializes the speaker's playback from the voice
/// parameters and records the (player, voice) -> speaker mapping.
/// </summary>
/// <param name="playerId">Owning player of the remote voice.</param>
/// <param name="voiceId">Id of the remote voice (unique per player).</param>
/// <param name="voiceInfo">Stream parameters used to initialize playback.</param>
/// <param name="speaker">Speaker component that will play this voice.</param>
private void linkVoice(int playerId, byte voiceId, Voice.VoiceInfo voiceInfo, PhotonVoiceSpeaker speaker)
{
    // NOTE(review): linking happens before the duplicate check, so OnVoiceLinked runs again
    // even when the same speaker is already linked — presumably harmless re-initialization.
    speaker.OnVoiceLinked(voiceInfo.SamplingRate, voiceInfo.Channels, voiceInfo.EncoderDelay, PhotonVoiceSettings.Instance.PlayDelayMs);
    var key = new VoiceIdPair(playerId, voiceId);
    PhotonVoiceSpeaker oldSpeaker;
    if (this.voiceSpeakers.TryGetValue(key, out oldSpeaker))
    {
        if (oldSpeaker == speaker)
        {
            return;
        }
        // Use the format overload for consistency with the logging style used elsewhere in this file.
        Debug.LogFormat("PUNVoice: Player {0} voice #{1} speaker replaced", playerId, voiceId);
    }
    else
    {
        Debug.LogFormat("PUNVoice: Player {0} voice #{1} speaker created", playerId, voiceId);
    }
    this.voiceSpeakers[key] = speaker;
}
/// <summary>
/// Initializes the outgoing voice: requests microphone permission, resolves the microphone
/// device (recorder's own, then network-wide, then first available), or wraps the assigned
/// AudioClip, creates the local voice through PhotonVoiceNetwork, and applies voice-detection
/// settings. Finally re-assigns MicrophoneDevice to the resolved device.
/// </summary>
private void InitialVoiceSetUp() { var pvs = PhotonVoiceSettings.Instance; Application.RequestUserAuthorization(UserAuthorization.Microphone); // put required sample rate into audio source and encoder - both adjust it if needed Voice.IAudioStream audioStream; int channels = 0; if (AudioClip == null) { var micDev = microphoneDevice ?? PhotonVoiceNetwork.MicrophoneDevice ?? (Microphone.devices.Any() ? Microphone.devices.First() : null); microphoneDevice = micDev; if (PhotonVoiceSettings.Instance.DebugInfo) { Debug.LogFormat("PUNVoice: Setting recorder's microphone device to {0}", micDev); } var mic = new MicWrapper(micDev, (int)pvs.SamplingRate); channels = mic.Channels; audioStream = mic; } else { audioStream = new AudioClipWrapper(AudioClip); channels = AudioClip.channels; if (LoopAudioClip) { ((AudioClipWrapper)audioStream).Loop = true; } } Voice.VoiceInfo voiceInfo = new Voice.VoiceInfo((int)pvs.SamplingRate, channels, (int)pvs.Delay, pvs.Bitrate, photonView.viewID); voice = PhotonVoiceNetwork.CreateLocalVoice(audioStream, voiceInfo); VoiceDetector.On = PhotonVoiceSettings.Instance.VoiceDetection; VoiceDetector.Threshold = PhotonVoiceSettings.Instance.VoiceDetectionThreshold; MicrophoneDevice = microphoneDevice; }
/// <summary>
/// Factory for typed local audio voices. Dispatches on T ('float' or 'short') and validates
/// that the provided encoder (if any) implements IEncoderDataFlow for that sample type.
/// </summary>
/// <param name="encoder">Optional encoder; null selects the default one downstream.</param>
/// <exception cref="Exception">If the encoder does not implement IEncoderDataFlow for T.</exception>
/// <exception cref="UnsupportedSampleTypeException">If T is neither float nor short.</exception>
public static LocalVoiceAudio <T> Create <T>(VoiceClient voiceClient, byte voiceId, IEncoder encoder, VoiceInfo voiceInfo, int channelId)
{
    if (typeof(T) == typeof(float))
    {
        if (encoder == null || encoder is IEncoderDataFlow <float>)
        {
            return new LocalVoiceAudioFloat(voiceClient, encoder as IEncoderDataFlow <float>, voiceId, voiceInfo, channelId) as LocalVoiceAudio <T>;
        }
        else
        {
            // Fixed: message previously named IFrameStream/IBufferEncoder, which is not what is checked.
            throw new Exception("[PV] CreateLocalVoice: encoder for LocalVoiceAudio<float> is not IEncoderDataFlow<float>: " + encoder.GetType());
        }
    }
    else if (typeof(T) == typeof(short))
    {
        if (encoder == null || encoder is IEncoderDataFlow <short>)
        {
            return new LocalVoiceAudioShort(voiceClient, encoder as IEncoderDataFlow <short>, voiceId, voiceInfo, channelId) as LocalVoiceAudio <T>;
        }
        else
        {
            throw new Exception("[PV] CreateLocalVoice: encoder for LocalVoiceAudio<short> is not IEncoderDataFlow<short>: " + encoder.GetType());
        }
    }
    else
    {
        throw new UnsupportedSampleTypeException(typeof(T));
    }
}
/// <summary>
/// Selects and constructs the audio source per the Source setting (platform-specific Photon
/// microphone pusher, Unity microphone, AudioClip, or user factory), then builds Opus voice
/// info from the source's rate/channels and delegates to createLocalVoiceAudio.
/// Returns Voice.LocalVoiceAudio.Dummy (with an error log) on any unsupported/missing source.
/// </summary>
/// <remarks>
/// NOTE(review): when the Photon microphone type is not supported on the current platform
/// (the #else branch), execution falls through to the Unity microphone path — this appears
/// intentional (the log says so) but verify per platform.
/// </remarks>
private Voice.LocalVoice createLocalVoiceAudioAndSource() { var pvs = PhotonVoiceSettings.Instance; switch (Source) { case AudioSource.Microphone: { Application.RequestUserAuthorization(UserAuthorization.Microphone); if (this.MicrophoneType == MicAudioSourceType.Settings && pvs.MicrophoneType == PhotonVoiceSettings.MicAudioSourceType.Photon || this.MicrophoneType == MicAudioSourceType.Photon) { var hwMicDev = this.PhotonMicrophoneDeviceID != -1 ? this.PhotonMicrophoneDeviceID : PhotonVoiceNetwork.PhotonMicrophoneDeviceID; #if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN if (PhotonVoiceSettings.Instance.DebugInfo) { Debug.LogFormat("PUNVoice: Setting recorder's source to Photon microphone device {0}", hwMicDev); } audioSource = new WindowsAudioInPusher(hwMicDev); if (PhotonVoiceSettings.Instance.DebugInfo) { Debug.LogFormat("PUNVoice: Setting recorder's source to WindowsAudioInPusher"); } break; #elif UNITY_IOS && !UNITY_EDITOR || (UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX) audioSource = new AppleAudioInPusher(hwMicDev); if (PhotonVoiceSettings.Instance.DebugInfo) { Debug.LogFormat("PUNVoice: Setting recorder's source to AppleAudioInPusher"); } break; #elif UNITY_ANDROID && !UNITY_EDITOR audioSource = new AndroidAudioInAEC(); if (PhotonVoiceSettings.Instance.DebugInfo) { Debug.LogFormat("PUNVoice: Setting recorder's source to AndroidAudioInAEC"); } break; #else Debug.LogFormat("PUNVoice: Photon microphone type is not supported for the current platform. Using Unity microphone."); #endif } if (Microphone.devices.Length < 1) { // Error already logged in PhotonVoiceNetwork.Start() return(Voice.LocalVoiceAudio.Dummy); } var micDev = this.MicrophoneDevice != null ? 
this.MicrophoneDevice : PhotonVoiceNetwork.MicrophoneDevice; if (PhotonVoiceSettings.Instance.DebugInfo) { Debug.LogFormat("PUNVoice: Setting recorder's source to microphone device {0}", micDev); } // mic can ignore passed sampling rate and set it's own var mic = new MicWrapper(micDev, (int)pvs.SamplingRate); audioSource = mic; } break; case AudioSource.AudioClip: { if (AudioClip == null) { Debug.LogErrorFormat("PUNVoice: AudioClip property must be set for AudioClip audio source"); return(Voice.LocalVoiceAudio.Dummy); } audioSource = new AudioClipWrapper(AudioClip); if (this.LoopAudioClip) { ((AudioClipWrapper)audioSource).Loop = true; } } break; case AudioSource.Factory: { if (PhotonVoiceNetwork.AudioSourceFactory == null) { Debug.LogErrorFormat("PUNVoice: PhotonVoiceNetwork.AudioSourceFactory must be specified if PhotonVoiceRecorder.Source set to Factory"); return(Voice.LocalVoiceAudio.Dummy); } audioSource = PhotonVoiceNetwork.AudioSourceFactory(this); } break; default: Debug.LogErrorFormat("PUNVoice: unknown Source value {0}", Source); return(Voice.LocalVoiceAudio.Dummy); } Voice.VoiceInfo voiceInfo = Voice.VoiceInfo.CreateAudioOpus(pvs.SamplingRate, audioSource.SamplingRate, audioSource.Channels, pvs.FrameDuration, pvs.Bitrate, photonView.viewID); return(createLocalVoiceAudio(voiceInfo, audioSource)); }
/// <summary>
/// Local audio voice of 'short' samples: creates the level meter and voice detector
/// (sized by the voice's sampling rate and channel count) and wires them into the
/// built-in processor chain; they run after the resampler.
/// </summary>
internal LocalVoiceAudioShort(VoiceClient voiceClient, IEncoderDataFlow <short> encoder, byte id, VoiceInfo voiceInfo, int channelId) : base(voiceClient, encoder, id, voiceInfo, channelId) { // these 2 processors go after resampler this.levelMeter = new AudioUtil.LevelMeterShort(this.info.SamplingRate, this.info.Channels); //1/2 sec this.voiceDetector = new AudioUtil.VoiceDetectorShort(this.info.SamplingRate, this.info.Channels); initBuiltinProcessors(); }
/// <summary>
/// Creates outgoing video stream consuming sequence of image buffers.
/// </summary>
/// <param name="voiceInfo">Outgoing stream parameters. Set applicable fields to read them by encoder and by receiving client when voice created.</param>
/// <param name="channelId">Transport channel specific to frontend. Set to VoiceClient.ChannelAuto to let frontend automatically assign channel.</param>
/// <param name="encoder">Encoder compressing video data. Set to null to use default VP8 implementation.</param>
/// <returns>Outgoing stream handler.</returns>
public LocalVoiceVideo CreateLocalVoiceVideo(VoiceInfo voiceInfo, int channelId = ChannelAuto, IEncoder encoder = null)
{
    var created = createLocalVoice(voiceInfo, channelId, (vId, chId) => new LocalVoiceVideo(this, encoder, vId, voiceInfo, chId));
    return (LocalVoiceVideo)created;
}
/// <summary>
/// Creates the default decoder for the voice's codec: Opus for audio, VP8 for video
/// (only when PHOTON_VOICE_VIDEO_ENABLE is defined), or null for unknown codecs.
/// </summary>
/// <remarks>
/// NOTE(review): channelId, playerId and voiceId are currently unused here — presumably
/// kept for logging or future per-voice decoder selection; confirm before removing.
/// </remarks>
internal static IDecoder CreateDefaultDecoder(int channelId, int playerId, byte voiceId, VoiceInfo info) { switch (info.Codec) { case Codec.AudioOpus: return(new OpusCodec.Decoder()); #if PHOTON_VOICE_VIDEO_ENABLE case Codec.VideoVP8: return(new VPxCodec.Decoder()); #endif default: return(null); } }
/// <summary>
/// Creates the recorder's outgoing voice ('float' samples); overridable so subclasses
/// can choose a different sample type.
/// </summary>
protected virtual Voice.LocalVoice createLocalVoice(Voice.VoiceInfo voiceInfo, Voice.IBufferReader <float> reader)
{
    var voice = PhotonVoiceNetwork.CreateLocalVoice(voiceInfo, reader);
    return voice;
}
/// <summary>
/// Creates outgoing audio stream. Adds audio specific features (e.g. resampling, level meter) to processing pipeline and to returning stream handler.
/// </summary>
/// <typeparam name="T">Element type of audio array buffers.</typeparam>
/// <param name="voiceInfo">Outgoing audio stream parameters. Set applicable fields to read them by encoder and by receiving client when voice created.</param>
/// <param name="channelId">Transport channel specific to frontend. Set to VoiceClient.ChannelAuto to let frontend automatically assign channel.</param>
/// <param name="encoder">Audio encoder. Set to null to use default Opus encoder.</param>
/// <returns>Outgoing stream handler.</returns>
/// <remarks>
/// voiceInfo.SourceSamplingRate and voiceInfo.SamplingRate may not match. Automatic resampling will occur in this case.
/// </remarks>
public LocalVoiceAudio <T> CreateLocalVoiceAudio <T>(VoiceInfo voiceInfo, int channelId = ChannelAuto, IEncoderDataFlow <T> encoder = null) { return((LocalVoiceAudio <T>)createLocalVoice(voiceInfo, channelId, (vId, chId) => LocalVoiceAudio.Create <T>(this, vId, encoder, voiceInfo, chId))); }
/// <summary>
/// Remote (incoming) voice: creates an Opus decoder from the stream's sampling rate and
/// channel count, then stores identification and event-sequence state.
/// </summary>
internal RemoteVoice(VoiceClient client, int channelId, int playerId, byte voiceId, VoiceInfo info, byte lastEventNumber) { this.opusDecoder = new OpusDecoder((SamplingRate)info.SamplingRate, (Channels)info.Channels); this.voiceClient = client; this.channelId = channelId; this.playerId = playerId; this.voiceId = voiceId; this.Info = info; this.lastEvNumber = lastEventNumber; }
// Float-sample encoder; all construction work is done in the base class.
internal EncoderFloat(VoiceInfo i) : base(i) { }
// Short-sample encoder; all construction work is done in the base class.
internal EncoderShort(VoiceInfo i) : base(i) { }
/// <summary>
/// Outgoing voice: configures the Opus encoder from the voice parameters, derives the
/// source-side frame size from the ratio of source rate to the encoder's accepted rate,
/// and allocates frame buffers (shared when no resampling is needed, separate otherwise).
/// Also creates level meter and voice detector sized for the source sampling rate.
/// </summary>
internal LocalVoice(IVoiceFrontend client, byte id, IAudioStream audioStream, VoiceInfo voiceInfo, int channelId)
{
    this.info = voiceInfo;
    this.channelId = channelId;
    this.opusEncoder = new OpusEncoder((SamplingRate)voiceInfo.SamplingRate, (Channels)voiceInfo.Channels, voiceInfo.Bitrate, OpusApplicationType.Voip, (POpusCodec.Enums.Delay)(voiceInfo.FrameDurationUs * 2 / 1000));
    this.frontend = client;
    this.id = id;
    this.audioStream = audioStream;
    this.sourceSamplingRateHz = audioStream.SamplingRate;
    // Source frame size scaled by the ratio of source rate to the rate the encoder actually accepted.
    this.sourceFrameSize = this.info.FrameSize * this.sourceSamplingRateHz / (int)this.opusEncoder.InputSamplingRate;
    this.frameBuffer = new float[this.info.FrameSize];
    if (this.sourceFrameSize == this.info.FrameSize)
    {
        // No resampling needed: source and encoder share one buffer.
        this.sourceFrameBuffer = this.frameBuffer;
    }
    else
    {
        // Fixed: removed a redundant second assignment of sourceSamplingRateHz (already set above).
        this.sourceFrameBuffer = new float[this.sourceFrameSize];
        this.frontend.DebugReturn(DebugLevel.WARNING, "[PV] Local voice #" + this.id + " audio source frequency " + this.sourceSamplingRateHz + " and encoder sampling rate " + (int)this.opusEncoder.InputSamplingRate + " do not match. Resampling will occur before encoding.");
    }
    this.LevelMeter = new LevelMeter(this.sourceSamplingRateHz, this.info.Channels); //1/2 sec
    this.VoiceDetector = new VoiceDetector(this.sourceSamplingRateHz, this.info.Channels);
    // _debug_decoder = new OpusDecoder(this.InputSamplingRate, this.InputChannels);
}
/// <summary>
/// Framed outgoing voice: stores the constant frame length (in elements) that incoming
/// buffers are repacked into; everything else is handled by the base class.
/// </summary>
internal LocalVoiceFramed(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize) : base(voiceClient, encoder, id, voiceInfo, channelId) { this.FrameSize = frameSize; }
/// <summary>
/// Payload for the "VoiceCreated" message: the created outgoing voice and its parameters.
/// </summary>
public VoiceCreatedParams(Voice.LocalVoice voice, Voice.VoiceInfo info)
{
    this.Voice = voice;
    this.Info = info;
}
/// <summary>
/// Notifies components on this GameObject that the outgoing voice has been created,
/// via a "VoiceCreated" message carrying the voice and its parameters.
/// </summary>
protected virtual void sendVoiceCreatedMessage(Voice.VoiceInfo voiceInfo)
{
    var payload = new VoiceCreatedParams(this.voice, voiceInfo);
    gameObject.SendMessage("VoiceCreated", payload, SendMessageOptions.DontRequireReceiver);
}
/// <summary>
/// Creates new local voice (outgoing audio stream).
/// </summary>
/// <param name="audioClip">Object providing audio data for the outgoing stream.</param>
/// <param name="voiceInfo">Outgoing audio stream parameters (should be set according to Opus encoder restrictions).</param>
/// <returns>Outgoing stream handler.</returns>
/// <remarks>
/// audioClip.SamplingRate and voiceInfo.SamplingRate may not match. Automatic resampling will occur in this case.
/// </remarks>
public static Voice.LocalVoice CreateLocalVoice(Voice.IAudioStream audioClip, Voice.VoiceInfo voiceInfo) { return(instance.client.CreateLocalVoice(audioClip, voiceInfo)); }
/// <summary>
/// Overrides voice creation to encode as 'short' samples (with float-to-short conversion
/// of the reader's buffers).
/// </summary>
protected override Voice.LocalVoice createLocalVoice(Voice.VoiceInfo voiceInfo, Voice.IBufferReader <float> reader)
{
    var voice = PhotonVoiceNetwork.CreateLocalVoiceShort(voiceInfo, reader);
    return voice;
}
/// <summary>
/// Remote (incoming) voice with options: stores identification/state fields first (so later
/// logging has valid data), then either marks itself disposed when no decoder was supplied,
/// or starts a background decode thread (ThreadPool task on NETFX_CORE, named Thread elsewhere).
/// </summary>
internal RemoteVoice(VoiceClient client, RemoteVoiceOptions options, int channelId, int playerId, byte voiceId, VoiceInfo info, byte lastEventNumber) { this.options = options; this.voiceClient = client; this.channelId = channelId; this.playerId = playerId; this.voiceId = voiceId; this.Info = info; this.lastEvNumber = lastEventNumber; if (this.options.Decoder == null) // init fields first for proper logging { voiceClient.frontend.LogError(LogPrefix + ": decoder is null"); disposed = true; return; } #if NETFX_CORE ThreadPool.RunAsync((x) => { decodeThread(this.options.Decoder); }); #else var t = new Thread(() => decodeThread(this.options.Decoder)); t.Name = LogPrefix + " decode"; t.Start(); #endif }
// Assigns channel per known voice type reserving channel 0 for user events.
// For more control, set channel explicitly when creating a voice.
public int AssignChannel(VoiceInfo v)
{
    // Channel 0 is reserved for user events; codecs map to 1..N by enum declaration order.
    var codecs = Enum.GetValues(typeof(Codec));
    int index = Array.IndexOf(codecs, v.Codec);
    return 1 + index;
}
/// <summary>
/// Creates outgoing stream consuming sequence of values passed in array buffers of arbitrary length which repacked in frames of constant length for further processing and encoding.
/// </summary>
/// <typeparam name="T">Type of data consumed by outgoing stream (element type of array buffers).</typeparam>
/// <param name="voiceInfo">Outgoing stream parameters. Set applicable fields to read them by encoder and by receiving client when voice created.</param>
/// <param name="frameSize">Length (in elements of T) of the constant-size frames that input buffers are repacked into.</param>
/// <param name="channelId">Transport channel specific to frontend. Set to VoiceClient.ChannelAuto to let frontend automatically assign channel.</param>
/// <param name="encoder">Encoder compressing data stream in pipeline.</param>
/// <returns>Outgoing stream handler.</returns>
public LocalVoiceFramed <T> CreateLocalVoiceFramed <T>(VoiceInfo voiceInfo, int frameSize, int channelId = ChannelAuto, IEncoderDataFlow <T> encoder = null) { return((LocalVoiceFramed <T>)createLocalVoice(voiceInfo, channelId, (vId, chId) => new LocalVoiceFramed <T>(this, encoder, vId, voiceInfo, chId, frameSize))); }
/// <summary>
/// Immutable description of a remote voice: owning player, voice id and stream parameters.
/// </summary>
internal RemoteVoiceInfo(int playerId, byte voiceId, VoiceInfo info)
{
    this.Info = info;
    this.PlayerId = playerId;
    this.VoiceId = voiceId;
}