示例#1
0
 /// <summary>Creates a new LocalVoiceAudio&lt;T&gt; instance for the sample type T (float or short).</summary>
 /// <param name="voiceClient">The VoiceClient to use for this outgoing stream.</param>
 /// <param name="voiceId">Numeric ID for this voice.</param>
 /// <param name="encoder">Encoder to use for this voice; may be null, must implement IEncoderDataFlow&lt;T&gt; otherwise.</param>
 /// <param name="voiceInfo">Stream parameters for this voice.</param>
 /// <param name="channelId">Voice transport channel ID to use for this voice.</param>
 /// <returns>The new LocalVoiceAudio&lt;T&gt; instance.</returns>
 public static LocalVoiceAudio <T> Create(VoiceClient voiceClient, byte voiceId, IEncoder encoder, VoiceInfo voiceInfo, int channelId)
 {
     var sampleType = typeof(T);
     if (sampleType == typeof(float))
     {
         // A non-null encoder must be a float data-flow encoder to feed this voice.
         if (encoder != null && !(encoder is IEncoderDataFlow <float>))
         {
             throw new Exception("[PV] CreateLocalVoice: encoder for LocalVoiceAudio<float> is not IEncoderDataFlow<float>: " + encoder.GetType());
         }
         return new LocalVoiceAudioFloat(voiceClient, encoder as IEncoderDataFlow <float>, voiceId, voiceInfo, channelId) as LocalVoiceAudio <T>;
     }
     if (sampleType == typeof(short))
     {
         if (encoder != null && !(encoder is IEncoderDataFlow <short>))
         {
             throw new Exception("[PV] CreateLocalVoice: encoder for LocalVoiceAudio<short> is not IEncoderDataFlow<short>: " + encoder.GetType());
         }
         return new LocalVoiceAudioShort(voiceClient, encoder as IEncoderDataFlow <short>, voiceId, voiceInfo, channelId) as LocalVoiceAudio <T>;
     }
     // Only float and short sample buffers are supported.
     throw new UnsupportedSampleTypeException(sampleType);
 }
示例#2
0
 /// <summary>Constructs a float-sample local audio voice and wires up its built-in audio processors.</summary>
 internal LocalVoiceAudioFloat(VoiceClient voiceClient, IEncoderDataFlow <float> encoder, byte id, VoiceInfo voiceInfo, int channelId)
     : base(voiceClient, encoder, id, voiceInfo, channelId)
 {
     var samplingRate = this.info.SamplingRate;
     var channelCount = this.info.Channels;
     // Level meter and voice detector sit after the resampler in the processing chain.
     this.levelMeter    = new AudioUtil.LevelMeterFloat(samplingRate, channelCount);
     this.voiceDetector = new AudioUtil.VoiceDetectorFloat(samplingRate, channelCount);
     initBuiltinProcessors();
 }
示例#3
0
 /// <summary>Constructs a short-sample local audio voice and wires up its built-in audio processors.</summary>
 internal LocalVoiceAudioShort(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
     : base(voiceClient, encoder, id, voiceInfo, audioSourceDesc, channelId)
 {
     var samplingRate = this.info.SamplingRate;
     var channelCount = this.info.Channels;
     // Level meter and voice detector sit after the resampler in the processing chain.
     this.levelMeter    = new AudioUtil.LevelMeterShort(samplingRate, channelCount); //1/2 sec
     this.voiceDetector = new AudioUtil.VoiceDetectorShort(samplingRate, channelCount);
     initBuiltinProcessors();
 }
 /// <summary>Creates a local outgoing voice bound to a transport channel.</summary>
 /// <param name="voiceClient">Owning VoiceClient.</param>
 /// <param name="encoder">Encoder for this voice; when null, a default encoder is created from voiceInfo.</param>
 /// <param name="id">Numeric ID of this voice.</param>
 /// <param name="voiceInfo">Stream parameters for this voice.</param>
 /// <param name="channelId">Transport channel ID used by this voice.</param>
 internal LocalVoice(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId)
 {
     this.TransmitEnabled = true; // voices transmit by default
     this.info            = voiceInfo;
     this.channelId       = channelId;
     this.voiceClient     = voiceClient;
     this.id              = id;
     // Null-coalescing replaces the hard-to-read 'encoder == null?X : encoder' ternary.
     this.encoder = encoder ?? CreateDefaultEncoder(voiceInfo);
 }
示例#5
0
 /// <summary>Creates a framed local voice that splits the outgoing stream into fixed-size frames.</summary>
 /// <param name="frameSize">Number of samples per outgoing frame; must be non-zero.</param>
 internal LocalVoiceFramed(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
     : base(voiceClient, encoder, id, voiceInfo, channelId, frameSize)
 {
     if (frameSize == 0)
     {
         // A specific argument exception instead of bare Exception: callers can now
         // distinguish invalid configuration; existing catch (Exception) still works.
         throw new ArgumentOutOfRangeException("frameSize", LogPrefix + ": non 0 frame size required for framed stream");
     }
     this.framer        = new Framer <T>(FrameSize);
     // Pooled buffers avoid per-frame allocations on the capture path.
     this.bufferFactory = new FactoryPrimitiveArrayPool <T>(DATA_POOL_CAPACITY, Name + " Data", FrameSize);
 }
示例#6
0
 /// <summary>Creates a local audio voice; enables resampling when the source and encoder sampling rates differ.</summary>
 internal LocalVoiceAudio(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
     : base(voiceClient, encoder, id, voiceInfo, channelId,
            voiceInfo.SamplingRate != 0 ? voiceInfo.FrameSize * audioSourceDesc.SamplingRate / voiceInfo.SamplingRate : voiceInfo.FrameSize
            )
 {
     this.channels = voiceInfo.Channels;
     var sourceRate  = audioSourceDesc.SamplingRate;
     var encoderRate = voiceInfo.SamplingRate;
     if (sourceRate == encoderRate)
     {
         return; // rates match, no resampling required
     }
     this.resampleSource = true;
     this.voiceClient.transport.LogWarning("[PV] Local voice #" + this.id + " audio source frequency " + sourceRate + " and encoder sampling rate " + encoderRate + " do not match. Resampling will occur before encoding.");
 }
        /// <summary>
        /// Initializes a new <see cref="LoadBalancingFrontend"/>.
        /// </summary>
        /// <param name="connectionProtocol">Connection protocol (UDP or TCP). <see cref="ConnectionProtocol"></see></param>
        public LoadBalancingFrontend(ConnectionProtocol connectionProtocol = ConnectionProtocol.Udp) : base(connectionProtocol)
        {
            base.EventReceived += onEventActionVoiceClient;
            base.StateChanged  += onStateChangeVoiceClient;
            this.voiceClient    = new VoiceClient(this);
            // One transport channel per codec, plus channel 0 reserved for user events.
            int requiredChannels = Enum.GetValues(typeof(Codec)).Length + 1;
            if (this.LoadBalancingPeer.ChannelCount < requiredChannels)
            {
                this.LoadBalancingPeer.ChannelCount = (byte)requiredChannels;
            }
        }
        /// <summary>Creates a framed local voice that splits the outgoing stream into fixed-size frames.</summary>
        /// <param name="frameSize">Number of samples per outgoing frame; must be non-zero.</param>
        internal LocalVoiceFramed(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
            : base(voiceClient, encoder, id, voiceInfo, channelId, frameSize)
        {
            // Validate before acquiring any resources: the original opened the debug
            // dump file first, leaking an open FileStream when frameSize was invalid.
            if (frameSize == 0)
            {
                throw new Exception(LogPrefix + ": non 0 frame size required for framed stream");
            }
#if DUMP_TO_FILE
            file = File.Open("dump-" + fileCnt++ + ".raw", FileMode.Create);
#endif
            this.framer = new Framer <T>(FrameSize);

            // Pooled buffers avoid per-frame allocations on the capture path.
            this.bufferFactory = new FactoryPrimitiveArrayPool <T>(DATA_POOL_CAPACITY, Name + " Data", FrameSize);
        }
示例#9
0
 /// <summary>Creates a new LocalVoiceAudio&lt;T&gt; instance for the sample type T (float or short).</summary>
 /// <param name="voiceClient">The VoiceClient to use for this outgoing stream.</param>
 /// <param name="voiceId">Numeric ID for this voice.</param>
 /// <param name="encoder">Encoder to use for this voice.</param>
 /// <param name="voiceInfo">Stream parameters for this voice.</param>
 /// <param name="audioSourceDesc">Description of the audio source (sampling rate, channels).</param>
 /// <param name="channelId">Voice transport channel ID to use for this voice.</param>
 /// <returns>The new LocalVoiceAudio&lt;T&gt; instance.</returns>
 public static LocalVoiceAudio <T> Create(VoiceClient voiceClient, byte voiceId, IEncoder encoder, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
 {
     var sampleType = typeof(T);
     if (sampleType == typeof(float))
     {
         return new LocalVoiceAudioFloat(voiceClient, encoder, voiceId, voiceInfo, audioSourceDesc, channelId) as LocalVoiceAudio <T>;
     }
     if (sampleType == typeof(short))
     {
         return new LocalVoiceAudioShort(voiceClient, encoder, voiceId, voiceInfo, audioSourceDesc, channelId) as LocalVoiceAudio <T>;
     }
     // Only float and short sample buffers are supported.
     throw new UnsupportedSampleTypeException(sampleType);
 }
示例#10
0
 /// <summary>Creates a local outgoing voice; the encoder is required and its output is routed to sendFrame.</summary>
 internal LocalVoice(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId)
 {
     this.voiceClient = voiceClient;
     this.info        = voiceInfo;
     this.id          = id;
     this.channelId   = channelId;
     if (encoder == null)
     {
         // Log before throwing so the failure is visible in the voice client's log.
         var message = LogPrefix + ": encoder is null";
         voiceClient.logger.LogError(message);
         throw new ArgumentNullException("encoder");
     }
     this.encoder        = encoder;
     // Encoded frames flow straight into the outgoing send path.
     this.encoder.Output = sendFrame;
 }
示例#11
0
        /// <summary>
        /// Initializes a new <see cref="LoadBalancingTransport"/>.
        /// </summary>
        /// <param name="logger">ILogger instance. If null, this instance's LoadBalancingClient.DebugReturn implementation is used.<see cref="ConnectionProtocol"></see></param>
        /// <param name="connectionProtocol">Connection protocol (UDP or TCP). <see cref="ConnectionProtocol"></see></param>
        public LoadBalancingTransport(ILogger logger = null, ConnectionProtocol connectionProtocol = ConnectionProtocol.Udp) : base(connectionProtocol)
        {
            // Fall back to this client's own logging when no logger is supplied.
            logger = logger ?? this;
            base.EventReceived += onEventActionVoiceClient;
            base.StateChanged  += onStateChangeVoiceClient;
            this.voiceClient    = new VoiceClient(this, logger);
            // One transport channel per codec, plus channel 0 reserved for user events.
            int requiredChannels = Enum.GetValues(typeof(Codec)).Length + 1;
            if (this.LoadBalancingPeer.ChannelCount < requiredChannels)
            {
                this.LoadBalancingPeer.ChannelCount = (byte)requiredChannels;
            }
            this.protocol = new PhotonTransportProtocol(voiceClient, logger);
        }
 /// <summary>Creates a local outgoing voice; a default encoder is created when none is supplied.</summary>
 internal LocalVoice(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId)
 {
     this.voiceClient = voiceClient;
     this.info        = voiceInfo;
     this.id          = id;
     this.channelId   = channelId;
     if (encoder != null)
     {
         this.encoder = encoder;
     }
     else
     {
         // No encoder supplied: build one from the voice parameters.
         voiceClient.transport.LogInfo(LogPrefix + ": Creating default encoder");
         this.encoder = CreateDefaultEncoder(voiceInfo);
     }
     // Encoded frames flow straight into the outgoing send path.
     this.encoder.Output = sendFrame;
 }
示例#13
0
 /// <summary>Creates a local audio voice; enables resampling when the source and encoder sampling rates differ.</summary>
 internal LocalVoiceAudio(VoiceClient voiceClient, IEncoderDataFlow <T> encoder, byte id, VoiceInfo voiceInfo, int channelId)
     : base(voiceClient, encoder, id, voiceInfo, channelId,
            voiceInfo.SamplingRate != 0 ? voiceInfo.FrameSize * voiceInfo.SourceSamplingRate / voiceInfo.SamplingRate : voiceInfo.FrameSize
            )
 {
     // Base constructor may leave encoder unset; fall back to the codec default.
     if (this.encoder == null)
     {
         this.encoder = VoiceCodec.CreateDefaultEncoder(voiceInfo, this);
     }
     this.channels             = voiceInfo.Channels;
     this.sourceSamplingRateHz = voiceInfo.SourceSamplingRate;
     if (this.sourceSamplingRateHz == voiceInfo.SamplingRate)
     {
         return; // rates match, no resampling required
     }
     this.resampleSource = true;
     this.voiceClient.frontend.LogWarning("[PV] Local voice #" + this.id + " audio source frequency " + this.sourceSamplingRateHz + " and encoder sampling rate " + voiceInfo.SamplingRate + " do not match. Resampling will occur before encoding.");
 }
        /// <summary>Represents a remote incoming voice stream; stores its identifiers and immediately starts a dedicated decode loop.</summary>
        /// <param name="client">Owning VoiceClient.</param>
        /// <param name="options">Playback/decoding options for this remote voice.</param>
        /// <param name="channelId">Transport channel the voice arrives on.</param>
        /// <param name="playerId">ID of the remote player producing this voice.</param>
        /// <param name="voiceId">Numeric ID of the remote voice.</param>
        /// <param name="info">Stream parameters announced by the remote side.</param>
        /// <param name="lastEventNumber">Sequence number of the last received event, used as the starting point.</param>
        internal RemoteVoice(VoiceClient client, RemoteVoiceOptions options, int channelId, int playerId, byte voiceId, VoiceInfo info, byte lastEventNumber)
        {
            this.options      = options;
            this.voiceClient  = client;
            this.channelId    = channelId;
            this.playerId     = playerId;
            this.voiceId      = voiceId;
            this.Info         = info;
            this.lastEvNumber = lastEventNumber;
#if NETFX_CORE
            // UWP build: System.Threading.Thread is unavailable, run the decode loop on the thread pool.
            ThreadPool.RunAsync((x) =>
            {
                decodeThread();
            });
#else
            var t = new Thread(() => decodeThread());
            t.Name = LogPrefix + " decode";
            // NOTE(review): thread is not marked IsBackground, so it can keep the process
            // alive until decodeThread() returns — confirm the decode loop exits on disposal.
            t.Start();
#endif
        }
示例#15
0
        }                                        // Dummy constructor

        /// <summary>Empty stub — body stripped; the address-range comment suggests decompiler/metadata output. Not functional.</summary>
        internal LocalVoiceAudioShort(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
        {
        }                                                                                                                                                            // 0x0000000180F1EE20-0x0000000180F1EFA0
示例#16
0
 /// <summary>Base constructor for framed local voices; stores the outgoing frame size.</summary>
 /// <param name="frameSize">Number of samples per outgoing frame, as computed by the caller.</param>
 internal LocalVoiceFramedBase(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
     : base(voiceClient, encoder, id, voiceInfo, channelId)
 {
     this.FrameSize = frameSize;
 }
示例#17
0
 /// <summary>Creates a framed local voice that splits the outgoing stream into fixed-size frames.</summary>
 /// <param name="frameSize">Number of samples per outgoing frame; must be non-zero.</param>
 internal LocalVoiceFramed(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
     : base(voiceClient, encoder, id, voiceInfo, channelId, frameSize)
 {
     // Guard added for consistency with the sibling implementations of this
     // constructor: a framed stream cannot operate with a zero frame size.
     if (frameSize == 0)
     {
         throw new Exception(LogPrefix + ": non 0 frame size required for framed stream");
     }
     this.framer        = new Framer <T>(FrameSize);
     // Pooled buffers avoid per-frame allocations on the capture path.
     this.bufferFactory = new FactoryPrimitiveArrayPool <T>(DATA_POOL_CAPACITY, Name + " Data", FrameSize);
 }
示例#18
0
        }                                    // Dummy constructor

        /// <summary>Empty stub — body stripped; appears to be decompiler/metadata output. Not functional.</summary>
        internal LocalVoiceFramed(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
        {
        }
 /// <summary>Creates the transport protocol helper, keeping references to the voice client and logger.</summary>
 /// <param name="voiceClient">Voice client this protocol instance serves.</param>
 /// <param name="logger">Logger used for protocol diagnostics.</param>
 public PhotonTransportProtocol(VoiceClient voiceClient, ILogger logger)
 {
     this.logger      = logger;
     this.voiceClient = voiceClient;
 }
示例#20
0
        }                               // Dummy constructor

        /// <summary>Empty stub — body stripped; the address-range comment suggests decompiler/metadata output. Not functional.</summary>
        internal RemoteVoice(VoiceClient client, RemoteVoiceOptions options, int channelId, int playerId, byte voiceId, VoiceInfo info, byte lastEventNumber)
        {
        }                                                                                                                                                                // 0x0000000180F26060-0x0000000180F262E0
示例#21
0
        }                                           // Dummy constructor

        /// <summary>Empty stub — body stripped; the address-range comment suggests decompiler/metadata output. Not functional.</summary>
        public PhotonTransportProtocol(VoiceClient voiceClient, ILogger logger)
        {
        }                                                                                  // 0x0000000180294750-0x00000001802947A0
        }                                      // Dummy constructor

        /// <summary>Empty stub — body stripped; appears to be decompiler/metadata output. Not functional.</summary>
        internal LocalVoiceAudio(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId)
        {
        }
 // Methods
 /// <summary>Signature-only stub — returns default (null for a reference type); appears to be decompiler/metadata output.</summary>
 public static LocalVoiceAudio <T> Create(VoiceClient voiceClient, byte voiceId, IEncoder encoder, VoiceInfo voiceInfo, IAudioDesc audioSourceDesc, int channelId) => default;
        }                                        // Dummy constructor

        /// <summary>Empty stub — body stripped; the address-range comment suggests decompiler/metadata output. Not functional.</summary>
        internal LocalVoiceFramedBase(VoiceClient voiceClient, IEncoder encoder, byte id, VoiceInfo voiceInfo, int channelId, int frameSize)
        {
        }                                                                                                                                               // 0x0000000180F1EFA0-0x0000000180F1F010