Example #1
 private void Application_Startup(object sender, StartupEventArgs e)
 {
     try
     {
         TestGlobals.InitialPage = DataGlobals.GetStringConfigValue(e.InitParams, Constants.InitialPageReference, "");
         Page page = null;
         if (!string.IsNullOrEmpty(TestGlobals.InitialPage))
         {
             var assembly = Assembly.GetExecutingAssembly();
             page = (Page)assembly.CreateInstance(TestGlobals.InitialPage);
         }
         if (page == null)
         {
             page = new MainPage();
         }
         RootVisual = page;
         DataGlobals.LoadGlobalParams(e.InitParams);
         Uri uri = HtmlPage.Document.DocumentUri;
         DataGlobals.BaseServiceUri  = new Uri(uri.Scheme + "://" + uri.Host + ":" + uri.Port + "/");
         DataGlobals.MediaServerHost = uri.Host;
     }
     catch (Exception ex)
     {
         ClientLogger.Debug(ex.ToString());
     }
 }
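To make the URI composition above concrete: the base service URI is just the scheme, host, and port of the hosting page with a trailing slash. With a hypothetical document address (not taken from the original code), it works out as follows:

 // Hypothetical illustration of the BaseServiceUri construction above.
 // If the hosting page is http://media.example.com:8080/client/TestPage.aspx ...
 var docUri = new Uri("http://media.example.com:8080/client/TestPage.aspx");
 var baseServiceUri = new Uri(docUri.Scheme + "://" + docUri.Host + ":" + docUri.Port + "/");
 // ... then baseServiceUri is http://media.example.com:8080/ and the
 // media server host is media.example.com.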
Example #2
        protected override void OnSample(long sampleTime, long frameDuration, byte[] sampleData)
        {
            // Raise an event if we've managed to successfully capture data.
            try
            {
                if (!_dataReceived)
                {
                    _dataReceived = true;
                    if (CaptureSuccessful != null)
                    {
                        CaptureSuccessful(this, new EventArgs());
                    }
                }

                if (FrameShouldBeSubmitted())
                {
                    var resampledData = ResizeFrame(sampleData);
                    SubmitFrame(resampledData, _videoFormat.Stride);
                }
            }
            catch (Exception ex)
            {
                if (_errorCount++ % 100 == 0)
                {
                    ClientLogger.Debug("Error {0} submitting frame: {1}", _errorCount, ex);
                }
            }
        }
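The `_errorCount++ % 100 == 0` guard above is a throttling pattern that recurs throughout these examples: only the first and then every Nth occurrence is logged, so a failure that repeats on every frame cannot flood the log. A minimal stand-alone sketch of the same idea (the helper name and shape are illustrative, not from the original code base):

// Illustrative throttled logging helper: logs the first occurrence and then
// every Nth one after that, using the same modulo pattern as the example above.
public class ThrottledDebugLogger
{
    private readonly int _interval;
    private int _count;

    public ThrottledDebugLogger(int interval)
    {
        _interval = interval;
    }

    public void Log(string format, params object[] args)
    {
        if (_count++ % _interval == 0)
        {
            ClientLogger.Debug(format, args);
        }
    }
}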
Example #3
 private void HandleReceiveComplete(SocketAsyncEventArgs e)
 {
     try
     {
         if (e.BytesTransferred == 0)
         {
             ClientLogger.Debug("Remote socket closed for endpoint {0}", _endPoint);
             Disconnect();
         }
         else
         {
             receiveCompleteHandler(e.Buffer, e.Offset, e.BytesTransferred);
             if (logger != null)
             {
                 logger.LogDataReceived(e.BytesTransferred);
             }
             if (socket.Connected)
             {
                 socket.ReceiveAsync(receiveArgsRecycler.GetNext());
             }
         }
     }
     finally
     {
         receiveArgsRecycler.Recycle(e);
     }
 }
Example #4
 public void Disconnect()
 {
     try
     {
         if (socket != null)
         {
             // ks 12/12/11 - We've been getting some 10057 errors here. The only way I can think that they might be happening
             // is if two threads are trying to close the socket simultaneously. I've added in some locking
             // code to help protect against that. We'll see if it makes any difference.
             lock (socket)
             {
                 if (socket.Connected)
                 {
                     ClientLogger.Debug("Disconnecting from " + _endPoint);
                     socket.Shutdown(SocketShutdown.Both);
                     socket.Close();
                 }
             }
         }
     }
     catch (Exception ex)
     {
         // ks 05/11/12 - Switched from ErrorException to DebugException, as swallowing the exception seems to
         // work just fine.
         ClientLogger.DebugException(ex, "Disconnecting failed");
     }
 }
Example #5
        public void LogAudioFrameSet()
        {
            if (_firstAudioFrameSet == DateTime.MinValue)
            {
                _firstAudioFrameSet = DateTime.Now;
                _lastAudioFrameSet  = _firstAudioFrameSet;
            }
            if (++_audioFramesSet % 1000 == 0)
            {
                var recentElapsed     = DateTime.Now - _lastAudioFrameSet;
                var totalElapsed      = DateTime.Now - _firstAudioFrameSet;
                var averageRecentTime = recentElapsed.TotalMilliseconds / 1000d;
                var averageTotalTime  = totalElapsed.TotalMilliseconds / _audioFramesSet;
                ClientLogger.Debug(
                    "Total audio frames set={0}; recentElapsed={1}; time/entry={2:f3}; recent time/entry={3:f3}",
                    _audioFramesSet, recentElapsed.TotalMilliseconds, averageTotalTime, averageRecentTime);
                _lastAudioFrameSet = DateTime.Now;

                // If we're not getting frames fast enough, this may be an indication that the CPU is running hot,
                // and we need to back down the video quality.
                if (_videoQualityController == null)
                {
                    return;
                }
                _videoQualityController.LogGlitch((int)(averageRecentTime - 30) / 10);
            }
        }
Example #6
        /// <summary>
        /// Retrieves the next available instance of the targeted class, or creates one if none is available.
        /// </summary>
        /// <returns>An instance of the targeted class.</returns>
        public virtual T GetNext()
        {
            T obj;

            lock (stack)
            {
                if (stack.Count == 0)
                {
#if DEBUG
                    if (++objectsCreated % 100 == 0)
                    {
                        ClientLogger.Debug("Object Pool for type {0}: Created={1}; Reused={2}; Recycled={3}; Outstanding={4}",
                                           typeof(T).FullName, objectsCreated, objectsRecycled, objectsReused, (objectsCreated + objectsReused) - objectsRecycled);
                    }
#endif
                    return(NewFunction());
                }
#if DEBUG
                objectsReused++;
#endif
                obj = stack.Pop();
            }
            if (ResetAction != null)
            {
                ResetAction(obj);
            }
            return(obj);
        }
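For context, the pool shown here is constructed elsewhere in these examples with a factory delegate and an optional reset action, and callers return instances through a Recycle method (see Examples #16 and #24). A minimal usage sketch along those lines:

// Minimal usage sketch, mirroring how the pool is used in the other examples:
// construct with a factory and a reset action, borrow with GetNext(), and
// always return the instance with Recycle() so it can be reused.
var pool = new ObjectPool<ByteStream>(() => new ByteStream(VideoConstants.MaxPayloadSize), bs => bs.Reset());
var buffer = pool.GetNext();
try
{
    // ... fill and send the buffer ...
}
finally
{
    pool.Recycle(buffer);
}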
Example #7
        //private DateTime _firstSampleReportedAt = DateTime.MinValue;
        //private int _samplesReported;
        protected virtual void ReportSample(MemoryStream memoryStream)
        {
            try
            {
                //if (_firstSampleReportedAt == DateTime.MinValue)
                //{
                //    _firstSampleReportedAt = DateTime.Now;
                //}
                //if (++_samplesReported % 200 == 0)
                //{
                //    double averageSampleRequestTime = (DateTime.Now - _firstSampleRequestedAt).TotalMilliseconds/_samplesRequested;
                //    double averageSampleReportTime = (DateTime.Now - _firstSampleReportedAt).TotalMilliseconds/_samplesReported;
                //    ClientLogger.Debug("Samples requested:{0}; reported:{1}; avgRequestInterval:{2}; avgReportInterval:{3}", _samplesRequested, _samplesReported, averageSampleRequestTime, averageSampleReportTime);
                //}

                var sample = new MediaStreamSample(
                    _mediaStreamDescription,
                    memoryStream,
                    0,
                    memoryStream.Length,
                    (DateTime.Now - _startTime).Ticks,
                    _emptySampleDict);
                ReportGetSampleCompleted(sample);
            }
            catch (Exception ex)
            {
                ClientLogger.Debug(ex.ToString());
            }
        }
Example #8
 private void InitializeCaptureSource()
 {
     if (mCaptureSource == null)
     {
         // Set up the capture source (for recording audio and video).
         // Note: an audio capture device is assumed here as well, since the audio
         // format check below requires one; the parallel examples on this page use
         // the default audio capture device.
         mCaptureSource = new CaptureSource {
             VideoCaptureDevice = CaptureDeviceConfiguration.GetDefaultVideoCaptureDevice(),
             AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice()
         };
         if (mCaptureSource.VideoCaptureDevice != null)
         {
             MediaDeviceConfig.SelectBestVideoFormat(mCaptureSource.VideoCaptureDevice);
             if (mCaptureSource.AudioCaptureDevice.DesiredFormat != null)
             {
                 mCaptureSource.AudioCaptureDevice.AudioFrameSize = AudioConstants.MillisecondsPerFrame;                         // 20 milliseconds
                 mVideoSink = new VideoSinkAdapter(mCaptureSource, mRecorder, mVideoQualityController);
                 ClientLogger.Debug("CaptureSource initialized.");
             }
             else
             {
                 ClientLogger.Debug("No suitable audio format was found.");
             }
             panelWebcam.DataContext = mCaptureSource;
         }
         else
         {
             // Do something more here eventually, once we figure out what the user experience should be.
             ClientLogger.Debug("No audio capture device was found.");
         }
     }
 }
Example #9
        private void ConfigureVideoCaptureDevice(VideoCaptureDevice device)
        {
            // Configure the video capture device.
            // The weird thing about this is that sometimes (at least on a Macintosh), a video capture device
            // can have an empty device.SupportedFormats collection, but still be able to capture video.
            // Generally that format seems to work, but I don't think we can guarantee that.
            if (device != null)
            {
                MediaDeviceConfig.SelectBestVideoFormat(device);
                if (device.DesiredFormat == null)
                {
                    ClientLogger.Debug("No appropriate video format was found; current format = {0}.", device.DesiredFormat);

                    // ks 12/13/10 - Since limited testing has shown that some cameras on the Mac that work fine have an empty SupportedFormats collection,
                    // we'd rather not show this error on Mac platforms.  There may be other instances where we don't want to show an error,
                    // and there may also be a better way of handling this situation in general.  But good enough for now.
                    if (Environment.OSVersion.Platform != PlatformID.MacOSX)
                    {
                        ClientLogger.Error(CommonStrings.Media_NoVideoFormat);
                        MessageService.ShowErrorHint(CommonStrings.Media_NoVideoFormat);
                    }
                }
            }
            else
            {
                // Only show an error if there really is no webcam attached.
                var videoDevices = CaptureDeviceConfiguration.GetAvailableVideoCaptureDevices();
                if (videoDevices.Count == 0)
                {
                    ClientLogger.Debug(CommonStrings.Media_NoVideoDevice);
                    MessageService.ShowErrorHint(CommonStrings.Media_NoVideoDevice);
                }
            }
        }
Example #10
        private void InitializeCaptureSource()
        {
            if (captureSource != null)
            {
                captureSource.Stop();
            }
            captureSource = new CaptureSource();
            captureSource.AudioCaptureDevice = (AudioCaptureDevice)listBoxAudioSources.SelectedItem;

            MediaDeviceConfig.SelectBestAudioFormat(captureSource.AudioCaptureDevice);

            captureSource.AudioCaptureDevice.DesiredFormat = captureSource.AudioCaptureDevice.SupportedFormats
                                                             .First(format => format.BitsPerSample == AudioConstants.BitsPerSample &&
                                                                    format.WaveFormat == WaveFormatType.Pcm &&
                                                                    format.Channels == 1 &&
                                                                    format.SamplesPerSecond == sampleRate);
            captureSource.AudioCaptureDevice.AudioFrameSize = AudioFormat.Default.MillisecondsPerFrame;             // 20 milliseconds

            audioSink = new TestAudioSinkAdapter(captureSource, new NullAudioController());
            audioSink.RawFrameAvailable       += audioSink_RawFrameAvailable;
            audioSink.ProcessedFrameAvailable += audioSink_FrameArrived;

            ClientLogger.Debug("Checking device access.");
            if (CaptureDeviceConfiguration.AllowedDeviceAccess || CaptureDeviceConfiguration.RequestDeviceAccess())
            {
                savedFramesForDebug = new List <byte[]>();
                captureSource.Start();
                ClientLogger.Debug("CaptureSource started.");
            }
        }
Example #11
        protected override void GetSampleAsync(MediaStreamType mediaStreamType)
        {
            try
            {
                MemoryStream rawSampleStream;
                byte[]       rawSample;
                if ((rawSample = frameSource.GetNextFrame()) != null)
                {
                    rawSampleStream      = new MemoryStream(rawSample);
                    LastPulseSubmittedAt = DateTime.Now;
                }
                else
                {
                    rawSampleStream = new MemoryStream(emptyFrame);
                }

                MediaStreamSample sample = new MediaStreamSample(
                    mediaStreamDescription,
                    rawSampleStream,
                    0,
                    rawSampleStream.Length,
                    (DateTime.Now - startTime).Ticks,
                    emptySampleDict);
                ReportGetSampleCompleted(sample);
            }
            catch (Exception ex)
            {
                ClientLogger.Debug(ex.ToString());
            }
        }
Example #12
        protected override void OnSamples(long sampleTime, long sampleDuration, byte[] sampleData)
        {
            try
            {
                totalSamplesProvided++;
                totalSampleTime     += sampleTime;
                totalSampleDuration += sampleDuration;
                recentSamplesProvided++;
                recentSampleTime     += sampleTime;
                recentSampleDuration += sampleDuration;
                if (firstFrameArrivedAt == DateTime.MinValue)
                {
                    firstFrameArrivedAt = DateTime.Now;
                    lastResetAt         = DateTime.Now;
                }

                int scaledLength = (int)((sampleData.Length / (double)desiredSampleSizeInBytes) / scalingFactor) * desiredSampleSizeInBytes;
                ScaleSampleOntoBuffer(sampleData, scaledLength);
                PullFramesFromBuffer();
            }
            catch (Exception ex)
            {
                ClientLogger.Debug(ex.Message);
            }
        }
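To make the scaledLength arithmetic above concrete, here is a worked example with hypothetical values (none of these numbers come from the original code):

// Hypothetical values: a 3840-byte sample, a desired sample size of 640 bytes,
// and a scaling factor of 2.0.
// (3840 / 640.0) / 2.0 = 3.0, truncated to 3, times 640 = 1920.
// The result is scaled down and then snapped to a whole multiple of the
// desired sample size.
int scaledLength = (int)((3840 / (double)640) / 2.0) * 640;   // 1920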
Example #13
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     try
     {
         if (mediaStreamType == MediaStreamType.Video)
         {
             videoController.GetNextVideoFrame(ssrcId, frameStream =>
             {
                 if (frameStream != null)
                 {
                     // Send out the next sample
                     frameStream.Position = 0;
                     var msSamp           = new MediaStreamSample(
                         videoDesc,
                         frameStream,
                         0,
                         frameStream.Length,
                         (DateTime.Now - startTime).Ticks,
                         emptySampleDict);
                     ReportGetSampleCompleted(msSamp);
                 }
             });
         }
     }
     catch (Exception ex)
     {
         ClientLogger.Debug(ex.ToString());
     }
 }
Example #14
        public void DeleteUserMultipleTimes()
        {
            const int maxCalls    = 100;
            int       actualCalls = 0;
            var       client      = _testController.RoomService;

            OperationCallback <object> handler = (error, state) =>
            {
                var userState = (object[])state;
                var call      = (int)userState[0];
                var startTime = (DateTime)userState[1];
                var elapsed   = DateTime.Now - startTime;
                actualCalls++;
                ClientLogger.Debug("Processing delete user call #{0}; actual call = {1}, recentElapsed time = {2} ms", call, actualCalls, elapsed.TotalMilliseconds);
                Assert.IsNull(error);
            };

            // Kick everything off.
            JoinRoom(error =>
            {
                for (int i = 0; i < maxCalls; i++)
                {
                    ClientLogger.Debug("Queueing delete user call #{0}", i);
                    ThreadPool.QueueUserWorkItem(o =>
                    {
                        var callNumber = (int)o;
                        var startTime  = DateTime.Now;
                        ClientLogger.Debug("Executing delete user call #{0}", callNumber);
                        client.DeleteUser(Guid.NewGuid(), handler, new object[] { callNumber, startTime });
                    }, i);
                }
            });
            EnqueueConditional(() => actualCalls >= maxCalls);
            EnqueueTestComplete();
        }
Example #15
 // private DateTime _firstSampleRequestedAt = DateTime.MinValue;
 // private int _samplesRequested;
 protected override void GetSampleAsync(MediaStreamType mediaStreamType)
 {
     try
     {
         if (mediaStreamType != MediaStreamType.Audio)
         {
             return;
         }
         _logger.LogSampleRequested();
         if (AudioController == null)
         {
             ReportSample(new MemoryStream(0));
         }
         else
         {
             //if (_firstSampleRequestedAt == DateTime.MinValue)
             //{
             //    _firstSampleRequestedAt = DateTime.Now;
             //}
             //_samplesRequested++;
             AudioController.GetNextAudioFrame(ReportSample);
         }
     }
     catch (Exception ex)
     {
          ClientLogger.Debug(ex.ToString());
     }
 }
Example #16
        public void zzCodecPerformanceTest()
        {
            // Encode and decode a basic raster structure.
            var perf = new PerformanceMonitor("Encode/Decode", 1);
            var vqc  = new VideoQualityController(1);

            vqc.RemoteSessions = _remoteSessions;
            var codec = new JpegDiffVideoCodec(vqc);

            codec.Initialize(height, width, VideoConstants.MaxPayloadSize);
            var videoChunkPool = new ObjectPool <ByteStream>(() => new ByteStream(VideoConstants.MaxPayloadSize), bs => bs.Reset());

            perf.Start();
            const int iterations = 100;

            for (int i = 0; i < iterations; i++)
            {
                byte[] sample = GetRgba(i);
                codec.EncodeFrame(sample, 0);
                bool moreChunks = true;
                var  buffer     = videoChunkPool.GetNext();
                while (moreChunks)
                {
                    if (codec.GetNextChunk(buffer, out moreChunks))
                    {
                        codec.DecodeChunk(buffer, 2);
                    }
                }
                videoChunkPool.Recycle(buffer);
                codec.GetNextFrame();
            }
            perf.Stop();

            ClientLogger.Debug("Finished JpegEncoderDecoder performance test.");
        }
Example #17
 private void InitializeCaptureSource()
 {
     if (_captureSource == null)
     {
         // Setup the capture source (for recording audio)
         _captureSource = new CaptureSource();
         _captureSource.AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
         if (_captureSource.AudioCaptureDevice != null)
         {
             MediaDeviceConfig.SelectBestAudioFormat(_captureSource.AudioCaptureDevice);
             if (_captureSource.AudioCaptureDevice.DesiredFormat != null)
             {
                 _captureSource.AudioCaptureDevice.AudioFrameSize = AudioFormat.Default.MillisecondsPerFrame;                         // 20 milliseconds
                 _audioSink = new AudioSinkAdapter(_captureSource, null, MediaConfig.Default, new TestMediaEnvironment(), AudioFormat.Default);
                 _recorder  = new RecorderBase(_captureSource, _audioSink, speakersAudioVisualizer);
                 chkSynchronizeRecording.DataContext = _audioSink;
                 ClientLogger.Debug("CaptureSource initialized.");
             }
             else
             {
                 ClientLogger.Debug("No suitable audio format was found.");
             }
             panelMicrophone.DataContext = _captureSource;
         }
         else
         {
             // Do something more here eventually, once we figure out what the user experience should be.
             ClientLogger.Debug("No audio capture device was found.");
         }
     }
 }
Example #18
        private QuantizeDel EmitQuantize()
        {
            ClientLogger.Debug("Emitting quantize data.");
            Type[] args = { typeof(float[]) };

            var quantizeMethod = new DynamicMethod("Quantize",
                                                   null,  // no return type
                                                   args); // input array

            ILGenerator il = quantizeMethod.GetILGenerator();

            for (int i = 0; i < quantizationTable.Length; i++)
            {
                float mult = quantizationTable[i];

                // Sz Stack:
                il.Emit(OpCodes.Ldarg_0);                              // 1  {arr}
                il.Emit(OpCodes.Ldc_I4_S, (short)i);                   // 3  {arr,i}
                il.Emit(OpCodes.Ldarg_0);                              // 1  {arr,i,arr}
                il.Emit(OpCodes.Ldc_I4_S, (short)i);                   // 3  {arr,i,arr,i}
                il.Emit(OpCodes.Ldelem_R4);                            // 1  {arr,i,arr[i]}
                il.Emit(OpCodes.Ldc_R4, mult);                         // 5  {arr,i,arr[i],mult}
                il.Emit(OpCodes.Mul);                                  // 1  {arr,i,arr[i]*mult}
                il.Emit(OpCodes.Stelem_R4);                            // 1  {}
            }

            il.Emit(OpCodes.Ret);

            return((QuantizeDel)quantizeMethod.CreateDelegate(typeof(QuantizeDel)));
        }
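For readers who don't want to trace the IL by hand, the emitted delegate is equivalent to the ordinary loop below (a sketch, assuming QuantizeDel takes a float[] and returns void, as the DynamicMethod signature above implies). The point of generating it dynamically is that each multiplier is baked in as a constant.

// Managed equivalent of the emitted IL: multiply each element in place by its
// quantization factor. The dynamic version embeds quantizationTable[i] as an
// inline float constant (Ldc_R4) instead of reading the table at run time.
void Quantize(float[] arr)
{
    for (int i = 0; i < quantizationTable.Length; i++)
    {
        arr[i] = arr[i] * quantizationTable[i];
    }
}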
Example #19
        public void JpegEncoderDecoderTest()
        {
            // Encode and decode a basic raster structure.
            var colorModel = new ColorModel();

            colorModel.ColorSpace = ColorSpace.YCbCr;
            colorModel.Opaque     = true;
            byte[][][] originalRaster = GetRaster();
            var        image          = new Image(colorModel, originalRaster);
            var        stream         = new MemoryStream();
            var        encoder        = new JpegEncoder(image, 50, stream);

            encoder.Encode();
            stream.Seek(0, SeekOrigin.Begin);
            var         decoder      = new JpegDecoder(stream);
            DecodedJpeg decodedImage = decoder.Decode();

            // Check that the returned raster structure looks something like what we passed in.
            for (int i = 0; i < 3; i++)
            {
                for (int j = 0; j < width; j++)
                {
                    for (int k = 0; k < height; k++)
                    {
                        // Tune this.
                        int diff = Math.Abs(decodedImage.Image.Raster[i][j][k] - originalRaster[i][j][k]);
                        Assert.IsTrue(diff < 5);
                    }
                }
            }
            ClientLogger.Debug("Finished JpegEncoderDecoder test.");
        }
Example #20
        private void InitializeCaptureSource()
        {
            if (_captureSource != null)
            {
                return;
            }

            // Setup the capture source (for recording audio)
            _captureSource = new CaptureSource();
            _captureSource.AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
            if (_captureSource.AudioCaptureDevice != null)
            {
                MediaDeviceConfig.SelectBestAudioFormat(_captureSource.AudioCaptureDevice);
                if (_captureSource.AudioCaptureDevice.DesiredFormat != null)
                {
                    var mediaStats       = new MediaStatistics();
                    var mediaEnvironment = new MediaEnvironment(mediaStats);
                    _captureSource.AudioCaptureDevice.AudioFrameSize = AudioFormat.Default.MillisecondsPerFrame;                     // 20 milliseconds
                    _audioSinkAdapter  = new MultipleControllerAudioSinkAdapter(GetMediaConfig(), _captureSource, 2000);
                    _mediaStreamSource = new MultipleControllerAudioMediaStreamSource(2000);
                    ClientLogger.Debug("CaptureSource initialized.");
                }
                else
                {
                    ClientLogger.Debug("No suitable audio format was found.");
                }
            }
            else
            {
                // Do something more here eventually, once we figure out what the user experience should be.
                ClientLogger.Debug("No audio capture device was found.");
            }
        }
Example #21
        public void SendMultipleChatMessages()
        {
            const int maxMessages      = 100;
            int       receivedMessages = 0;

            int[] sentMessages = { 0 };

            // Initialize the chatMessageAdded event handler, which will tell us that the chat message was sent successfully.
            EventHandler <EventArgs <ChatMessage> > handleChatMessageReceived = (s, e) =>
            {
                receivedMessages++;
                ClientLogger.Debug("Message #{0} received.", receivedMessages);
            };

            _roomService.ChatMessageReceived += handleChatMessageReceived;
            // roomViewModel.ChatMessages.CollectionChanged += handleChatMessageAdded;

            // Kick everything off.
            JoinRoom(error =>
            {
                for (int i = 0; i < maxMessages; i++)
                {
                    _roomService.SendMessage(_roomVm.SessionId, Guid.NewGuid().ToString(), e1 =>
                    {
                        Assert.IsNull(e1);
                        sentMessages[0]++;
                    });
                }
            });
            EnqueueConditional(() => receivedMessages >= maxMessages);
            EnqueueConditional(() => sentMessages[0] == maxMessages);
            EnqueueTestComplete();
        }
Example #22
        public virtual void SendAudioPacket(short[] audioBuffer, int length, AudioCodecType codecTypeType, bool isSilent, int localProcessorLoad)
        {
            if (++_audioPacketsSent % 200 == 0)
            {
                ClientLogger.Debug("{0} audio packets sent through the LoopbackMediaConnection", _audioPacketsSent);
            }

            if (!isSilent && length == 0)
            {
                ClientLogger.Debug("Entry has zero datalength!");
            }

            var packet = new LoopbackMediaPacket
            {
                Payload        = audioBuffer,
                PayloadLength  = (ushort)(length * sizeof(short)),
                SsrcId         = _localSsrcId,
                ProcessorLoad  = 10,
                AudioCodecType = codecTypeType,
                IsSilent       = isSilent,
                SequenceNumber = _sequenceNumber++
            };

            AudioPacketHandler(packet);
        }
Example #23
        public static string AnalyzeAudioFrame(string source, short[] frame, int start, int length)
        {
            var sb     = new StringBuilder();
            int zeroes = 0;

            for (int i = start; i < start + length; i++)
            {
                if (frame[i] == 0)
                {
                    zeroes++;
                }
                if (i % 32 == 0)
                {
                    sb.AppendLine();
                    sb.Append(i + ":\t ");
                }
                sb.Append(frame[i] + "\t");
            }

            double zeroPercent = zeroes / (double)length;

            // Display the frame
            string results = string.Format("Frame stats: Source={0}; Length={1}; ZeroPercent={2}; Data={3}", source, length, zeroPercent, sb);

            ClientLogger.Debug(results);
            return(results);
        }
Example #24
        public void SendVideoPacket(ByteStream videoChunk)
        {
            ByteStream packetBuffer = null;

            try
            {
                packetBuffer = _packetBufferPool.GetNext();
                bool packetBuilt;
                lock (_rtpData)
                {
                    _rtpData.PayloadType   = RtpPayloadType.VideoFromClient;
                    _rtpData.Payload       = videoChunk.Data;
                    _rtpData.PayloadLength = (ushort)videoChunk.DataLength;
                    packetBuilt            = _rtpData.TryBuildPacket(packetBuffer);
                }
                if (packetBuilt)
                {
                    _rtpClient.Send(packetBuffer.Data, packetBuffer.DataOffset, packetBuffer.DataLength);
                }
                else
                {
                    ClientLogger.Debug("Error building video packetBuffer.");
                }
            }
            finally
            {
                _packetBufferPool.Recycle(packetBuffer);
            }
        }
Example #25
        public void TestInitialize()
        {
            InterceptUnhandledExceptions = true;
            bool initializingCompleted = false;

            ClientLogger.Debug(" -- Beginning test initialize.");
            EnqueueConditional(() => TestGlobals.Initialized);
            EnqueueCallback(() =>
            {
                _roomService                 = new RoomServiceAdapter();
                var messageService           = new TestMessageService();
                var viewLocator              = new ViewLocator();
                _viewModelFactory            = new ViewModelFactory(_roomService, messageService, viewLocator);
                _testController              = new TestController(TestGlobals.UserTag, Guid.NewGuid().ToString(), _viewModelFactory, new TestCompanyInfo());
                _contactData                 = _testController.ContactData;
                _companyVm                   = _viewModelFactory.GetViewModel <CompanyViewModel>();
                _companyVm.Model             = TestGlobals.Company;
                _authenticationGroupVm       = _viewModelFactory.GetViewModel <AuthenticationGroupViewModel>();
                _authenticationGroupVm.Model = TestGlobals.AuthenticationGroup;
                _roomVm      = _viewModelFactory.GetViewModel <RoomViewModel>();
                _localUserVm = _viewModelFactory.GetViewModel <LocalUserViewModel>();
                _roomService.CreateClient();

                TestInitializing(() => initializingCompleted = true);
            });
            EnqueueConditional(() => initializingCompleted);
            EnqueueTestComplete();
        }
Example #26
        private void InitializeCaptureSource()
        {
            if (captureSource == null)
            {
                mediaElement      = new MediaElement();
                audioStreamSource = new TestAudioStreamSource(this);
                mediaElement.SetSource(audioStreamSource);

                // Set the audio properties.
                captureSource = new CaptureSource();
                captureSource.AudioCaptureDevice = CaptureDeviceConfiguration.GetDefaultAudioCaptureDevice();
                if (captureSource.AudioCaptureDevice != null)
                {
                    MediaDeviceConfig.SelectBestAudioFormat(captureSource.AudioCaptureDevice);
                    if (captureSource.AudioCaptureDevice.DesiredFormat != null)
                    {
                        captureSource.AudioCaptureDevice.AudioFrameSize = AudioFormat.Default.MillisecondsPerFrame; // 20 milliseconds
                        audioSink = new TestAudioSinkAdapter(captureSource);
                        audioSink.ProcessedFrameAvailable += audioSink_FrameArrived;
                        ClientLogger.Debug("CaptureSource initialized.");
                    }
                    else
                    {
                        ClientLogger.Debug("No suitable audio format was found.");
                    }
                }
                else
                {
                    // Do something more here eventually, once we figure out what the user experience should be.
                    ClientLogger.Debug("No audio capture device was found.");
                }
            }
        }
Example #27
        protected virtual void ReportSample(MemoryStream frameStream)
        {
            try
            {
                if (frameStream != null)
                {
                    frameStream.Position = 0;                     // .Seek(0, SeekOrigin.Begin);

                    // Send out the next sample
                    var msSamp = new MediaStreamSample(
                        _videoDesc,
                        frameStream,
                        0,
                        frameStream.Length,
                        (DateTime.Now - _startTime).Ticks,
                        _emptySampleDict);

                    ReportGetSampleCompleted(msSamp);
                }
            }
            catch (Exception ex)
            {
                ClientLogger.Debug(ex.ToString());
            }
        }
Example #28
        internal void LogAudioFrameReceived(IMediaPacket packet)
        {
            if (packet.SequenceNumber == _lastSequenceNumber)
            {
                _recentPacketsDuplicated++;
                if (++_totalPacketsDuplicated % 10 == 0)
                {
                    ClientLogger.Debug("{0} packets duplicated. Bizarre.", _totalPacketsDuplicated);
                }
            }
            else if (packet.SequenceNumber < _lastSequenceNumber && _lastSequenceNumber < ushort.MaxValue)
            {
                _recentPacketsOutOfOrder++;
                if (++_totalPacketsOutOfOrder % 10 == 0)
                {
                    ClientLogger.Debug("{0} packets received out of order.", _totalPacketsOutOfOrder);
                }
            }
            _lastSequenceNumber = packet.SequenceNumber;

            if (++_totalFramesReceived % 200 == 0)
            {
                _duplicateSequenceNumbers.Update(_recentPacketsDuplicated);
                _recentPacketsDuplicated = 0;
                _recentPacketsOutOfOrder = 0;
            }
        }
Example #29
 public void LogAudioInputQueueStatus(Queue <ByteStream> queue)
 {
     if (queue.Count > 5)
     {
         ClientLogger.Debug(
             "The audio input queue has {0} members; this is larger than we should probably have.", queue.Count);
     }
 }
Example #30
 public void Connect(string roomId, Action <Exception> callback = null)
 {
     ClientLogger.Debug("TimingMediaConnection.Connect() called");
     IsConnected = true;
     if (callback != null)
     {
         callback(null);
     }
 }