Example #1
        internal RemoteVoice(VoiceClient client, RemoteVoiceOptions options, int channelId, int playerId, byte voiceId, VoiceInfo info, byte lastEventNumber)
        {
            this.options      = options;
            this.voiceClient  = client;
            this.channelId    = channelId;
            this.playerId     = playerId;
            this.voiceId      = voiceId;
            this.Info         = info;
            this.lastEvNumber = lastEventNumber;
            if (this.options.Decoder == null) // init fields first for proper logging
            {
                voiceClient.transport.LogError(LogPrefix + ": decoder is null");
                disposed = true;
                return;
            }
#if NETFX_CORE
            ThreadPool.RunAsync((x) =>
            {
                decodeThread(this.options.Decoder);
            });
#else
            var t = new Thread(() => decodeThread(this.options.Decoder));
            t.Name = LogPrefix + " decode";
            t.Start();
#endif
        }
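
The constructor disposes the voice immediately when options.Decoder is null, so a decoder must be supplied before construction. Below is a minimal sketch of doing that from the OnRemoteVoiceInfoAction callback shown in Example #7 further down; MyDecoder is a hypothetical IDecoder implementation, and the exact delegate signature is an assumption inferred from that example:

            // Hypothetical sketch: supply a decoder through the ref options parameter
            // before VoiceClient constructs the RemoteVoice for this stream.
            voiceClient.OnRemoteVoiceInfoAction =
                (int channelId, int playerId, byte voiceId, VoiceInfo info, ref RemoteVoiceOptions options) =>
                {
                    options.Decoder = new MyDecoder(info); // leaving this null triggers the error branch above
                };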
 /// <summary>
 /// Creates an outgoing stream that consumes a sequence of values passed in array buffers of arbitrary length and repacks them into frames of constant length for further processing and encoding.
 /// </summary>
 /// <typeparam name="T">Type of data consumed by the outgoing stream (element type of the array buffers).</typeparam>
 /// <param name="voiceInfo">Outgoing stream parameters. Set the applicable fields so the encoder and the receiving client can read them when the voice is created.</param>
 /// <param name="frameSize">Length of the constant-size frames produced by the stream.</param>
 /// <param name="channelId">Transport-specific channel. Set to VoiceClient.ChannelAuto to let the transport assign a channel automatically.</param>
 /// <param name="encoder">Encoder compressing the data stream in the pipeline.</param>
 /// <returns>Outgoing stream handler.</returns>
 public LocalVoiceFramed <T> CreateLocalVoiceFramed <T>(VoiceInfo voiceInfo, int frameSize, int channelId = ChannelAuto, IEncoderDataFlow <T> encoder = null)
 {
     return((LocalVoiceFramed <T>)createLocalVoice(voiceInfo, channelId, (vId, chId) => new LocalVoiceFramed <T>(this, encoder, vId, voiceInfo, chId, frameSize)));
 }
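
A hedged usage sketch: buffers of arbitrary length go in, and the stream repacks them into frames of frameSize elements before encoding. PushDataAsync is borrowed from the audio examples below; the frame size and VoiceInfo value are illustrative:

     // Sketch: create a framed short-sample stream and push an odd-sized buffer;
     // the internal Framer regroups the data into 960-element frames.
     LocalVoiceFramed<short> voice = voiceClient.CreateLocalVoiceFramed<short>(voiceInfo, 960);
     short[] chunk = new short[700]; // any buffer length is accepted
     voice.PushDataAsync(chunk);     // repacked into constant-size frames internally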
 /// <summary>
 /// Creates an outgoing video stream consuming a sequence of image buffers.
 /// </summary>
 /// <param name="voiceInfo">Outgoing stream parameters. Set the applicable fields so the encoder and the receiving client can read them when the voice is created.</param>
 /// <param name="channelId">Transport-specific channel. Set to VoiceClient.ChannelAuto to let the transport assign a channel automatically.</param>
 /// <param name="encoder">Encoder compressing video data. Set to null to use the default VP8 implementation.</param>
 /// <returns>Outgoing stream handler.</returns>
 public LocalVoiceVideo CreateLocalVoiceVideo(VoiceInfo voiceInfo, int channelId = ChannelAuto, IEncoder encoder = null)
 {
     return((LocalVoiceVideo)createLocalVoice(voiceInfo, channelId, (vId, chId) => new LocalVoiceVideo(this, encoder, vId, voiceInfo, chId)));
 }
Example #4
 internal EncoderShort(VoiceInfo i, ILogger logger) : base(i, logger)
 {
 }
Example #5
 // Methods
 public static IEncoder Create <T>(VoiceInfo i, ILogger logger) => default;
Example #6
 /// <summary>
 /// Creates a basic outgoing stream without data processing support. The provided encoder should generate the output data stream.
 /// </summary>
 /// <param name="voiceInfo">Outgoing stream parameters. Set the applicable fields so the encoder and the receiving client can read them when the voice is created.</param>
 /// <param name="channelId">Transport-specific channel.</param>
 /// <param name="encoder">Encoder producing the stream.</param>
 /// <returns>Outgoing stream handler.</returns>
 public LocalVoice CreateLocalVoice(VoiceInfo voiceInfo, int channelId = 0, IEncoder encoder = null)
 {
     return((LocalVoice)createLocalVoice(voiceInfo, channelId, (vId, chId) => new LocalVoice(this, encoder, vId, voiceInfo, chId)));
 }
Example #7
        internal void onVoiceInfo(int channelId, int playerId, byte voiceId, byte eventNumber, VoiceInfo info)
        {
            Dictionary <byte, RemoteVoice> playerVoices = null;

            if (!remoteVoices.TryGetValue(playerId, out playerVoices))
            {
                playerVoices           = new Dictionary <byte, RemoteVoice>();
                remoteVoices[playerId] = playerVoices;
            }
            if (!playerVoices.ContainsKey(voiceId))
            {
                this.transport.LogInfo("[PV] ch#" + this.channelStr(channelId) + " p#" + this.playerStr(playerId) + " v#" + voiceId + " Info received: " + info.ToString() + " ev=" + eventNumber);
                RemoteVoiceOptions options = new RemoteVoiceOptions()
                {
                    OutputImageFormat = ImageFormat.Undefined, OutputImageFlip = Flip.Undefined
                };
                if (this.OnRemoteVoiceInfoAction != null)
                {
                    this.OnRemoteVoiceInfoAction(channelId, playerId, voiceId, info, ref options);
                }
                playerVoices[voiceId] = new RemoteVoice(this, options, channelId, playerId, voiceId, info, eventNumber);
            }
            else
            {
                if (!this.SuppressInfoDuplicateWarning)
                {
                    this.transport.LogWarning("[PV] Info duplicate for voice #" + voiceId + " of player " + this.playerStr(playerId) + " at channel " + this.channelStr(channelId));
                }
            }
        }
Example #8
        static public IVideoRecorder CreateDefaultVideoRecorder(ILogger logger, PreviewManager previewManager, VoiceInfo info, string camDevice, Action <IVideoRecorder> onReady)
        {
            // native platform-specific recorders
#if UNITY_ANDROID && !UNITY_EDITOR
            var ve = new Unity.UnityAndroidVideoEncoder(logger, previewManager, info);
            return(new Unity.UnityAndroidVideoRecorder(ve, ve.Preview, onReady));
#elif UNITY_IOS && !UNITY_EDITOR
            if (info.Codec == Codec.VideoH264)
            {
                var ve = new IOS.VideoEncoder(logger, info);
                return(new IOS.VideoRecorder(ve, ve.Preview, onReady));
            }
            throw new UnsupportedCodecException("Platform.CreateDefaultVideoRecorder", info.Codec, logger);
#elif WINDOWS_UWP || (UNITY_WSA && !UNITY_EDITOR)
            if (info.Codec == Codec.VideoH264)
            {
                var ve = new UWP.VideoEncoder(logger, info);
                return(new UWP.VideoRecorder(ve, ve.Preview, onReady));
            }
            throw new UnsupportedCodecException("Platform.CreateDefaultVideoRecorder", info.Codec, logger);
#else // multi-platform VideoRecorderUnity or generic VideoRecorder
            IEncoderDirectImage ve;
            switch (info.Codec)
            {
            case Codec.VideoVP8:
            case Codec.VideoVP9:
                ve = new VPxCodec.Encoder(logger, info);
                break;

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
            case Codec.VideoH264:
                //ve = new FFmpegCodec.Encoder(logger, info);
                ve = new Windows.MFTCodec.VideoEncoder(logger, info);
                break;
#elif UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
            case Codec.VideoH264:
                //ve = new FFmpegCodec.Encoder(logger, info);
                ve = new MacOS.VideoEncoder(logger, info);
                break;
#endif
            default:
                throw new UnsupportedCodecException("Platform.CreateDefaultVideoRecorder", info.Codec, logger);
            }
#if UNITY_5_3_OR_NEWER // #if UNITY
            return(new Unity.VideoRecorderUnity(ve, null, camDevice, info.Width, info.Height, info.FPS, onReady));
#else
            return(new VideoRecorder(ve, null));
#endif
#endif
        }
Example #9
        static public IVideoPlayer CreateDefaultVideoPlayer(ILogger logger, PreviewManager previewManager, VoiceInfo info)
        {
            // native platform-specific players
#if UNITY_ANDROID && !UNITY_EDITOR
            var vd = new Unity.UnityAndroidVideoDecoder(logger, previewManager, info.Codec);
            return(new VideoPlayer(vd, vd.Preview, info.Width, info.Height));
#elif UNITY_IOS && !UNITY_EDITOR
            if (info.Codec == Codec.VideoH264)
            {
                var vd = new IOS.VideoDecoder(logger);
                return(new VideoPlayer(vd, vd.Preview, info.Width, info.Height));
            }
            throw new UnsupportedCodecException("Platform.CreateDefaultVideoPlayer", info.Codec, logger);
#elif WINDOWS_UWP || (UNITY_WSA && !UNITY_EDITOR)
            if (info.Codec == Codec.VideoH264)
            {
                var vd = new UWP.VideoDecoder(logger, info);
                return(new VideoPlayer(vd, vd.Preview, info.Width, info.Height));
            }
            throw new UnsupportedCodecException("Platform.CreateDefaultVideoPlayer", info.Codec, logger);
#else  // multi-platform VideoPlayerUnity or generic VideoPlayer
            IDecoderQueuedOutputImageNative vd;
            switch (info.Codec)
            {
            case Codec.VideoVP8:
            case Codec.VideoVP9:
                vd = new VPxCodec.Decoder(logger);
                break;

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
            case Codec.VideoH264:
                //vd = new FFmpegCodec.Decoder(logger);
                vd = new Windows.MFTCodec.VideoDecoder(logger, info);
                break;
#elif UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
            case Codec.VideoH264:
                //vd = new FFmpegCodec.Decoder(logger);
                vd = new MacOS.VideoDecoder(logger, info);
                break;
#endif
            default:
                throw new UnsupportedCodecException("Platform.CreateDefaultVideoPlayer", info.Codec, logger);
            }
#if UNITY_5_3_OR_NEWER // #if UNITY
            var vp = new Unity.VideoPlayerUnity(vd);
            // assign Draw method copying Image to Unity texture as software decoder Output
            vd.Output = vp.Draw;
            return(vp);
#else
            return(new VideoPlayer(vd, null, 0, 0));
#endif
#endif
        }
Example #10
        /// <summary>
        /// Creates an outgoing audio stream of an automatically assigned type and adds procedures (callback or serviceable) for consuming the given audio source's data.
        /// Adds audio-specific features (e.g. resampling, level meter) to the processing pipeline and to the returned stream handler.
        /// </summary>
        /// <param name="voiceInfo">Outgoing audio stream parameters. Set the applicable fields so the encoder and the receiving client can read them when the voice is created.</param>
        /// <param name="source">Streaming audio source.</param>
        /// <param name="sampleType">The voice's audio sample type. If it does not match the source's audio sample type, conversion will occur.</param>
        /// <param name="encoder">Audio encoder. Set to null to use the default Opus encoder.</param>
        /// <param name="channelId">Transport-specific channel.</param>
        /// <returns>Outgoing stream handler.</returns>
        /// <remarks>
        /// source.SamplingRate and voiceInfo.SamplingRate may not match; automatic resampling will occur in that case.
        /// </remarks>
        public LocalVoice CreateLocalVoiceAudioFromSource(VoiceInfo voiceInfo, IAudioDesc source, AudioSampleType sampleType, IEncoder encoder = null, int channelId = 0)
        {
            // resolve AudioSampleType.Source to concrete type for encoder creation
            if (sampleType == AudioSampleType.Source)
            {
                if (source is IAudioPusher <float> || source is IAudioReader <float> )
                {
                    sampleType = AudioSampleType.Float;
                }
                else if (source is IAudioPusher <short> || source is IAudioReader <short> )
                {
                    sampleType = AudioSampleType.Short;
                }
            }

            if (encoder == null)
            {
                switch (sampleType)
                {
                case AudioSampleType.Float:
                    encoder = Platform.CreateDefaultAudioEncoder <float>(transport, voiceInfo);
                    break;

                case AudioSampleType.Short:
                    encoder = Platform.CreateDefaultAudioEncoder <short>(transport, voiceInfo);
                    break;
                }
            }

            if (source is IAudioPusher <float> )
            {
                if (sampleType == AudioSampleType.Short)
                {
                    transport.LogInfo("[PV] Creating local voice with source samples type conversion from IAudioPusher float to short.");
                    var localVoice = CreateLocalVoiceAudio <short>(voiceInfo, source, encoder, channelId);
                    // we can safely reuse the same buffer in callbacks from native code
                    var bufferFactory = new FactoryReusableArray <float>(0);
                    ((IAudioPusher <float>)source).SetCallback(buf => {
                        var shortBuf = localVoice.BufferFactory.New(buf.Length);
                        AudioUtil.Convert(buf, shortBuf, buf.Length);
                        localVoice.PushDataAsync(shortBuf);
                    }, bufferFactory);
                    return(localVoice);
                }
                else
                {
                    var localVoice = CreateLocalVoiceAudio <float>(voiceInfo, source, encoder, channelId);
                    ((IAudioPusher <float>)source).SetCallback(buf => localVoice.PushDataAsync(buf), localVoice.BufferFactory);
                    return(localVoice);
                }
            }
            else if (source is IAudioPusher <short> )
            {
                if (sampleType == AudioSampleType.Float)
                {
                    transport.LogInfo("[PV] Creating local voice with source samples type conversion from IAudioPusher short to float.");
                    var localVoice = CreateLocalVoiceAudio <float>(voiceInfo, source, encoder, channelId);
                    // we can safely reuse the same buffer in callbacks from native code
                    var bufferFactory = new FactoryReusableArray <short>(0);
                    ((IAudioPusher <short>)source).SetCallback(buf =>
                    {
                        var floatBuf = localVoice.BufferFactory.New(buf.Length);
                        AudioUtil.Convert(buf, floatBuf, buf.Length);
                        localVoice.PushDataAsync(floatBuf);
                    }, bufferFactory);
                    return(localVoice);
                }
                else
                {
                    var localVoice = CreateLocalVoiceAudio <short>(voiceInfo, source, encoder, channelId);
                    ((IAudioPusher <short>)source).SetCallback(buf => localVoice.PushDataAsync(buf), localVoice.BufferFactory);
                    return(localVoice);
                }
            }
            else if (source is IAudioReader <float> )
            {
                if (sampleType == AudioSampleType.Short)
                {
                    transport.LogInfo("[PV] Creating local voice with source samples type conversion from IAudioReader float to short.");
                    var localVoice = CreateLocalVoiceAudio <short>(voiceInfo, source, encoder, channelId);
                    localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPoolFloatToShort(localVoice, source as IAudioReader <float>);
                    return(localVoice);
                }
                else
                {
                    var localVoice = CreateLocalVoiceAudio <float>(voiceInfo, source, encoder, channelId);
                    localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPool <float>(localVoice, source as IAudioReader <float>);
                    return(localVoice);
                }
            }
            else if (source is IAudioReader <short> )
            {
                if (sampleType == AudioSampleType.Float)
                {
                    transport.LogInfo("[PV] Creating local voice with source samples type conversion from IAudioReader short to float.");
                    var localVoice = CreateLocalVoiceAudio <float>(voiceInfo, source, encoder, channelId);
                    localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPoolShortToFloat(localVoice, source as IAudioReader <short>);
                    return(localVoice);
                }
                else
                {
                    var localVoice = CreateLocalVoiceAudio <short>(voiceInfo, source, encoder, channelId);
                    localVoice.LocalUserServiceable = new BufferReaderPushAdapterAsyncPool <short>(localVoice, source as IAudioReader <short>);
                    return(localVoice);
                }
            }
            else
            {
                transport.LogError("[PV] CreateLocalVoiceAudioFromSource does not support Voice.IAudioDesc of type {0}", source.GetType());
                return(LocalVoiceAudioDummy.Dummy);
            }
        }
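
A usage sketch under stated assumptions: the member set of IAudioReader<float> (Read, SamplingRate, Channels, Error, Dispose) is inferred from its use above and may differ from the real interface. With AudioSampleType.Source, the dispatch above resolves the sample type to Float and services the reader through a BufferReaderPushAdapterAsyncPool<float>:

            // Hypothetical 440 Hz tone source; only the dispatch logic above is authoritative.
            class SineReader : IAudioReader<float>
            {
                public int SamplingRate => 48000;
                public int Channels => 1;
                public string Error => null;
                private double phase;

                public bool Read(float[] buf)
                {
                    for (int i = 0; i < buf.Length; i++, phase += 2 * Math.PI * 440 / 48000)
                    {
                        buf[i] = (float)Math.Sin(phase);
                    }
                    return true; // false would signal end of stream
                }

                public void Dispose() { }
            }

            var voice = voiceClient.CreateLocalVoiceAudioFromSource(voiceInfo, new SineReader(), AudioSampleType.Source);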
Example #11
        // Empty stub from a decompiled dump; the trailing comment records the method's address range.
        internal LocalVoiceAudioShort(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
        {
        } // 0x0000000180F1EE20-0x0000000180F1EFA0
Example #12
        static public IVideoRecorder CreateDefaultVideoRecorder(ILogger logger, PreviewManager previewManager, VoiceInfo info, string camDevice, Action <IVideoRecorder> onReady)
        {
            // native platform-specific recorders
#if UNITY_ANDROID && !UNITY_EDITOR
            var ve = new Unity.AndroidVideoEncoder(logger, previewManager, info);
            return(new Unity.AndroidVideoRecorder(ve, ve.Preview, onReady));
#elif UNITY_IOS && !UNITY_EDITOR
            if (info.Codec == Codec.VideoH264)
            {
                var ve = new IOS.VideoEncoder(logger, info);
                return(new IOS.VideoRecorder(ve, ve.Preview, onReady));
            }
            throw new UnsupportedCodecException("Platform.CreateDefaultVideoRecorder", info.Codec, logger);
#elif WINDOWS_UWP || (UNITY_WSA && !UNITY_EDITOR)
            if (info.Codec == Codec.VideoH264)
            {
                var ve = new UWP.VideoEncoder(logger, info);
                return(new UWP.VideoRecorder(ve, ve.Preview, onReady));
            }
            throw new UnsupportedCodecException("Platform.CreateDefaultVideoRecorder", info.Codec, logger);
#else // multi-platform VideoRecorderUnity or generic VideoRecorder
            var ve = CreateDefaultVideoEncoder(logger, info);
#if UNITY_5_3_OR_NEWER // #if UNITY
            return(new Unity.VideoRecorderUnity(ve, null, camDevice, info.Width, info.Height, info.FPS, onReady));
#else
            return(new VideoRecorder(ve, null));
#endif
#endif
        }
        internal LocalVoiceFramed(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
            : base(voiceClient, encoder, id, voiceInfo, channelId, frameSize)
        {
#if DUMP_TO_FILE
            file = File.Open("dump-" + fileCnt++ + ".raw", FileMode.Create);
#endif
            if (frameSize == 0)
            {
                throw new Exception(LogPrefix + ": non-zero frame size required for framed stream");
            }
            this.framer = new Framer <T>(FrameSize);

            this.bufferFactory = new FactoryPrimitiveArrayPool <T>(DATA_POOL_CAPACITY, Name + " Data", FrameSize);
        }
Example #14
 internal LocalVoiceFramed(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
     : base(voiceClient, encoder, id, voiceInfo, channelId, frameSize)
 {
     this.framer        = new Framer <T>(FrameSize);
     this.bufferFactory = new FactoryPrimitiveArrayPool <T>(DATA_POOL_CAPACITY, Name + " Data", FrameSize);
 }
Example #15
 internal LocalVoiceFramed(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
     : base(voiceClient, encoder, id, voiceInfo, channelId, frameSize)
 {
     if (frameSize == 0)
     {
          throw new Exception(LogPrefix + ": non-zero frame size required for framed stream");
     }
     this.framer        = new Framer <T>(FrameSize);
     this.bufferFactory = new FactoryPrimitiveArrayPool <T>(DATA_POOL_CAPACITY, Name + " Data", FrameSize);
 }
Example #16
 virtual public IEncoder CreateDefaultEncoder(VoiceInfo info)
 {
     throw new UnsupportedCodecException("LocalVoice.CreateDefaultEncoder", info.Codec, Logger);
 }
 public int AssignChannel(VoiceInfo v)
 {
     // 0 is for user events
     return(1 + Array.IndexOf(Enum.GetValues(typeof(Codec)), v.Codec));
 }
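
A self-contained illustration of the formula above: channel 0 is reserved for user events, so each codec gets 1 plus its position in the Codec enum. The enum members here are placeholders, not the library's actual values, but Array.IndexOf is positional, so the scheme is the same:

     using System;

     enum Codec { AudioOpus, VideoVP8, VideoVP9, VideoH264 } // placeholder members

     static class ChannelDemo
     {
         static int AssignChannel(Codec c) => 1 + Array.IndexOf(Enum.GetValues(typeof(Codec)), c);

         static void Main()
         {
             Console.WriteLine(AssignChannel(Codec.AudioOpus)); // 1 (first member, after the user-event channel)
             Console.WriteLine(AssignChannel(Codec.VideoVP9));  // 3 (third member)
         }
     }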
Example #18
 internal LocalVoiceAudio(VoiceClient voiceClient, IEncoderDataFlow <T> encoder, byte id, VoiceInfo voiceInfo, int channelId)
     : base(voiceClient, encoder, id, voiceInfo, channelId,
            voiceInfo.SamplingRate != 0 ? voiceInfo.FrameSize * voiceInfo.SourceSamplingRate / voiceInfo.SamplingRate : voiceInfo.FrameSize
            )
 {
     this.channels             = voiceInfo.Channels;
     this.sourceSamplingRateHz = voiceInfo.SourceSamplingRate;
     if (this.sourceSamplingRateHz != voiceInfo.SamplingRate)
     {
         this.resampleSource = true;
         this.voiceClient.transport.LogWarning("[PV] Local voice #" + this.id + " audio source frequency " + this.sourceSamplingRateHz + " and encoder sampling rate " + voiceInfo.SamplingRate + " do not match. Resampling will occur before encoding.");
     }
 }
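
A worked example of the frame-size scaling in the base call above, with illustrative numbers: the encoder consumes voiceInfo.FrameSize samples per frame at voiceInfo.SamplingRate, so the source-side frame must cover the same duration at voiceInfo.SourceSamplingRate:

     int frameSize          = 960;   // 20 ms per frame at the encoder rate
     int samplingRate       = 48000; // encoder sampling rate, Hz
     int sourceSamplingRate = 44100; // audio source sampling rate, Hz

     // 960 * 44100 / 48000 = 882 samples, which is also 20 ms, so one
     // resampled source frame fills exactly one encoder frame.
     int sourceFrameSize = frameSize * sourceSamplingRate / samplingRate;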
Example #19
 /// <summary>
 /// Creates an outgoing audio stream. Adds audio-specific features (e.g. resampling, level meter) to the processing pipeline and to the returned stream handler.
 /// </summary>
 /// <typeparam name="T">Element type of the audio array buffers.</typeparam>
 /// <param name="voiceInfo">Outgoing audio stream parameters. Set the applicable fields so the encoder and the receiving client can read them when the voice is created.</param>
 /// <param name="audioSourceDesc">Description of the audio source (e.g. sampling rate and channel count).</param>
 /// <param name="channelId">Transport-specific channel.</param>
 /// <param name="encoder">Audio encoder. Set to null to use the default Opus encoder.</param>
 /// <returns>Outgoing stream handler.</returns>
 /// <remarks>
 /// audioSourceDesc.SamplingRate and voiceInfo.SamplingRate may not match; automatic resampling will occur in that case.
 /// </remarks>
 public LocalVoiceAudio <T> CreateLocalVoiceAudio <T>(VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId = 0, IEncoder encoder = null)
 {
     return((LocalVoiceAudio <T>)createLocalVoice(voiceInfo, channelId, (vId, chId) => LocalVoiceAudio <T> .Create(this, vId, encoder, voiceInfo, audioSourceDesc, chId)));
 }
Example #20
 internal LocalVoiceAudioFloat(VoiceClient voiceClient, IEncoderDataFlow <float> encoder, byte id, VoiceInfo voiceInfo, int channelId)
     : base(voiceClient, encoder, id, voiceInfo, channelId)
 {
     // these 2 processors go after resampler
     this.levelMeter    = new AudioUtil.LevelMeterFloat(this.info.SamplingRate, this.info.Channels);
     this.voiceDetector = new AudioUtil.VoiceDetectorFloat(this.info.SamplingRate, this.info.Channels);
     initBuiltinProcessors();
 }
Example #21
 internal EncoderFloat(VoiceInfo i, ILogger logger) : base(i, logger)
 {
 }
Example #22
 /// <summary>Creates a new LocalVoiceAudio{T} instance.</summary>
 /// <param name="voiceClient">The VoiceClient to use for this outgoing stream.</param>
 /// <param name="voiceId">Numeric ID for this voice.</param>
 /// <param name="encoder">Encoder to use for this voice.</param>
 /// <param name="voiceInfo">Outgoing stream parameters.</param>
 /// <param name="channelId">Voice transport channel ID to use for this voice.</param>
 /// <returns>The new LocalVoiceAudio{T} instance.</returns>
 public static LocalVoiceAudio <T> Create(VoiceClient voiceClient, byte voiceId, IEncoder encoder, VoiceInfo voiceInfo, int channelId)
 {
     if (typeof(T) == typeof(float))
     {
         if (encoder == null || encoder is IEncoderDataFlow <float> )
         {
             return(new LocalVoiceAudioFloat(voiceClient, encoder as IEncoderDataFlow <float>, voiceId, voiceInfo, channelId) as LocalVoiceAudio <T>);
         }
         else
         {
             throw new Exception("[PV] CreateLocalVoice: encoder for LocalVoiceAudio<float> is not IEncoderDataFlow<float>: " + encoder.GetType());
         }
     }
     else if (typeof(T) == typeof(short))
     {
         if (encoder == null || encoder is IEncoderDataFlow <short> )
         {
             return(new LocalVoiceAudioShort(voiceClient, encoder as IEncoderDataFlow <short>, voiceId, voiceInfo, channelId) as LocalVoiceAudio <T>);
         }
         else
         {
             throw new Exception("[PV] CreateLocalVoice: encoder for LocalVoiceAudio<short> is not IEncoderDataFlow<short>: " + encoder.GetType());
         }
     }
     else
     {
         throw new UnsupportedSampleTypeException(typeof(T));
     }
 }
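
A brief usage sketch of the factory: T is resolved at runtime with typeof checks, so only float and short succeed, and a mismatched encoder type is rejected at creation rather than at encode time. The argument values are illustrative:

     // float voice with the default encoder (encoder == null passes the type check);
     // LocalVoiceAudio<byte>.Create(...) would throw UnsupportedSampleTypeException.
     var voice = LocalVoiceAudio<float>.Create(voiceClient, voiceId, null, voiceInfo, channelId);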
Example #23
        internal RemoteVoice(VoiceClient client, RemoteVoiceOptions options, int channelId, int playerId, byte voiceId, VoiceInfo info, byte lastEventNumber)
        {
            this.options      = options;
            this.voiceClient  = client;
            this.channelId    = channelId;
            this.playerId     = playerId;
            this.voiceId      = voiceId;
            this.Info         = info;
            this.lastEvNumber = lastEventNumber;

#if NETFX_CORE
            Windows.System.Threading.ThreadPool.RunAsync((x) =>
            {
                decodeThread();
            });
#else
            var t = new Thread(() => decodeThread());
            t.Name = LogPrefix + " decode";
            t.Start();
#endif
        }
Example #24
 /// <summary>Creates a new LocalVoiceAudio{T} instance.</summary>
 /// <param name="voiceClient">The VoiceClient to use for this outgoing stream.</param>
 /// <param name="voiceId">Numeric ID for this voice.</param>
 /// <param name="encoder">Encoder to use for this voice.</param>
 /// <param name="voiceInfo">Outgoing stream parameters.</param>
 /// <param name="audioSourceDesc">Description of the audio source.</param>
 /// <param name="channelId">Voice transport channel ID to use for this voice.</param>
 /// <returns>The new LocalVoiceAudio{T} instance.</returns>
 public static LocalVoiceAudio <T> Create(VoiceClient voiceClient, byte voiceId, IEncoder encoder, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
 {
     if (typeof(T) == typeof(float))
     {
         return(new LocalVoiceAudioFloat(voiceClient, encoder, voiceId, voiceInfo, audioSourceDesc, channelId) as LocalVoiceAudio <T>);
     }
     else if (typeof(T) == typeof(short))
     {
         return(new LocalVoiceAudioShort(voiceClient, encoder, voiceId, voiceInfo, audioSourceDesc, channelId) as LocalVoiceAudio <T>);
     }
     else
     {
         throw new UnsupportedSampleTypeException(typeof(T));
     }
 }
Example #25
            protected Encoder(VoiceInfo i, ILogger logger)
            {
            }
Example #26
 internal LocalVoiceAudioShort(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
     : base(voiceClient, encoder, id, voiceInfo, audioSourceDesc, channelId)
 {
     // these 2 processors go after resampler
     this.levelMeter    = new AudioUtil.LevelMeterShort(this.info.SamplingRate, this.info.Channels); //1/2 sec
     this.voiceDetector = new AudioUtil.VoiceDetectorShort(this.info.SamplingRate, this.info.Channels);
     initBuiltinProcessors();
 }
 /// <summary>
 /// Creates an outgoing audio stream. Adds audio-specific features (e.g. resampling, level meter) to the processing pipeline and to the returned stream handler.
 /// </summary>
 /// <typeparam name="T">Element type of the audio array buffers.</typeparam>
 /// <param name="voiceInfo">Outgoing audio stream parameters. Set the applicable fields so the encoder and the receiving client can read them when the voice is created.</param>
 /// <param name="channelId">Transport-specific channel. Set to VoiceClient.ChannelAuto to let the transport assign a channel automatically.</param>
 /// <param name="encoder">Audio encoder. Set to null to use the default Opus encoder.</param>
 /// <returns>Outgoing stream handler.</returns>
 /// <remarks>
 /// voiceInfo.SourceSamplingRate and voiceInfo.SamplingRate may not match; automatic resampling will occur in that case.
 /// </remarks>
 public LocalVoiceAudio <T> CreateLocalVoiceAudio <T>(VoiceInfo voiceInfo, int channelId = ChannelAuto, IEncoder encoder = null)
 {
     return((LocalVoiceAudio <T>)createLocalVoice(voiceInfo, channelId, (vId, chId) => LocalVoiceAudio <T> .Create(this, vId, encoder, voiceInfo, chId)));
 }
Example #28
 public void Open(VoiceInfo info)
 {
 }
Example #29
 internal LocalVoiceFramedBase(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
     : base(voiceClient, encoder, id, voiceInfo, channelId)
 {
     this.FrameSize = frameSize;
 }
        // Empty stub from a decompiled dump; the trailing comment records the method's address range.
        internal LocalVoiceFramedBase(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
        {
        } // 0x0000000180F1EFA0-0x0000000180F1F010