/// <summary>
/// Closes the session.
/// </summary>
/// <param name="reason">Reason for the closure.</param>
public override void Close(string reason)
{
    if (!_isClosed)
    {
        _isClosed = true;

        base.OnRtpPacketReceived -= RtpPacketReceived;

        _waveOutEvent?.Stop();

        if (_waveInEvent != null)
        {
            _waveInEvent.DataAvailable -= LocalAudioSampleAvailable;
            _waveInEvent.StopRecording();
        }

        _audioStreamTimer?.Dispose();

        if (_testPatternVideoSource != null)
        {
            _testPatternVideoSource.SampleReady -= LocalVideoSampleAvailable;
            _testPatternVideoSource.Stop();
            _testPatternVideoSource.Dispose();
        }

        // The VPX encoder is a memory hog.
        _vpxDecoder?.Dispose();
        _imgConverter?.Dispose();
        _vpxEncoder?.Dispose();
        _imgEncConverter?.Dispose();

        base.Close(reason);
    }
}
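// A minimal sketch of a race-free variant of the close guard used above. The
// "if (!_isClosed)" test-then-set is not atomic, so two concurrent Close calls
// could both pass it and double-dispose the codecs. _closedFlag is a
// hypothetical int replacement for _isClosed (System.Threading.Interlocked
// cannot operate on a bool); the teardown body is assumed to stay as above.
private int _closedFlag; // 0 = open, 1 = closed.

public override void Close(string reason)
{
    // Only the first caller swaps 0 -> 1; later callers observe 1 and return.
    if (Interlocked.Exchange(ref _closedFlag, 1) == 0)
    {
        // ... detach event handlers and dispose the codecs exactly as above ...
        base.Close(reason);
    }
}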
private static void OnVideoResolutionChangedEvent(uint width, uint height, uint stride)
{
    try
    {
        if (_vpxEncoder == null ||
            _vpxEncoder.GetWidth() != width || _vpxEncoder.GetHeight() != height || _vpxEncoder.GetStride() != stride)
        {
            _vpxEncoderReady = false;

            _vpxEncoder?.Dispose();

            _vpxEncoder = new VpxEncoder();
            _vpxEncoder.InitEncoder(width, height, stride);

            logger.LogInformation($"VPX encoder initialised with width {width}, height {height} and stride {stride}.");

            _vpxEncoderReady = true;
        }
    }
    catch (Exception excp)
    {
        logger.LogWarning("Exception OnVideoResolutionChangedEvent. " + excp.Message);
    }
}
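// A speculative sketch of guarding the re-initialisation above with a lock
// rather than the _vpxEncoderReady flag alone. Disposing a native encoder
// while another thread is inside Encode can crash the process, and a bool
// flag does not fully close that window. _encoderLock and ReinitialiseEncoder
// are hypothetical; Encode callers would need to take the same lock, and the
// _vpxEncoderReady bookkeeping is omitted for brevity.
private static readonly object _encoderLock = new object();

private static void ReinitialiseEncoder(uint width, uint height, uint stride)
{
    lock (_encoderLock)
    {
        _vpxEncoder?.Dispose();
        _vpxEncoder = new VpxEncoder();
        _vpxEncoder.InitEncoder(width, height, stride);
    }
}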
private void OnDestroy()
{
    NetworkSender.DoIfActive((NetworkSender net) =>
    {
        // Unsubscribe the handler raised when settings arrive from the client.
        net.NetIO.GetResever<HvNetIOClientSetting>().OnSetting -= NetworkSender_OnSetting;
        net.NetIO.GetResever<HvNetIOClientInfo>().OnClientInfo -= VRVController_OnClientInfo;
        net.NetIO.OnDisconnected -= NetIO_OnDisconnected;
    });

    _encoder.Dispose();
}
protected virtual void Dispose(bool disposing)
{
    if (!_isDisposing)
    {
        _isDisposing = true;

        if (disposing)
        {
            _testPattern.Dispose();
        }

        lock (_vpxEncoder)
        {
            // Prevent the encoder being disposed of if it's in the middle of a sample.
            _vpxEncoder.Dispose();
            _vpxEncoder = null;
        }

        _colorConverter = null;
    }
}
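// A minimal sketch of the public entry point that conventionally accompanies
// the protected Dispose(bool) overload above, assuming the class follows the
// standard .NET dispose pattern and declares no finalizer of its own.
public void Dispose()
{
    Dispose(true);
    // A no-op without a finalizer, but keeps the pattern correct if one is added.
    GC.SuppressFinalize(this);
}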
private async Task ExpireConnections()
{
    try
    {
        logger.LogDebug("Starting expire connections thread.");

        byte[] encodedBuffer = null;

        if (File.Exists(_expiredImagePath))
        {
            Bitmap expiredImage = new Bitmap(_expiredImagePath);

            // Get the stride.
            Rectangle rect = new Rectangle(0, 0, expiredImage.Width, expiredImage.Height);
            System.Drawing.Imaging.BitmapData bmpData = expiredImage.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite, expiredImage.PixelFormat);
            int stride = bmpData.Stride;
            expiredImage.UnlockBits(bmpData);

            // Initialise the video codec and color converter.
            SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
            vpxEncoder.InitEncoder((uint)expiredImage.Width, (uint)expiredImage.Height, (uint)stride);

            SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

            byte[] sampleBuffer = BitmapToRGB24(expiredImage);

            unsafe
            {
                fixed (byte* p = sampleBuffer)
                {
                    byte[] convertedFrame = null;
                    colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, expiredImage.Width, expiredImage.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                    fixed (byte* q = convertedFrame)
                    {
                        int encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);
                        if (encodeResult != 0)
                        {
                            logger.LogWarning("VPX encode of expired image failed.");
                        }
                    }
                }
            }

            expiredImage.Dispose();
            vpxEncoder.Dispose();
        }

        while (!_exit)
        {
            foreach (var conn in _webRtcConnections.Where(x => DateTime.Now.Subtract(x.Value.CreatedAt).TotalSeconds > _connectionTimeLimitSeconds).Select(x => x.Value))
            {
                OnMp4MediaSampleReady -= conn.SendMedia;
                OnTestPatternSampleReady -= conn.SendMedia;

                if (conn.WebRtcSession.IsDtlsNegotiationComplete && !conn.WebRtcSession.IsClosed && encodedBuffer != null)
                {
                    // Send the expired frame 3 times as a crude attempt to cope with packet loss.
                    conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                    await Task.Delay(1);
                    conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                    await Task.Delay(1);
                    conn.SendMedia(SDPMediaTypesEnum.video, conn.LastVideoTimeStamp + VP8_TIMESTAMP_SPACING, encodedBuffer);
                }

                conn.WebRtcSession.Close("expired");
            }

            await Task.Delay(1000);
        }
    }
    catch (Exception excp)
    {
        logger.LogError("Exception ExpireConnections. " + excp);
    }
}
private async void SampleTestPattern()
{
    try
    {
        logger.LogDebug("Starting test pattern sampling thread.");

        _isTestPatternSampling = true;

        Bitmap testPattern = new Bitmap(_testPatternImagePath);

        // Get the stride.
        Rectangle rect = new Rectangle(0, 0, testPattern.Width, testPattern.Height);
        System.Drawing.Imaging.BitmapData bmpData = testPattern.LockBits(rect, System.Drawing.Imaging.ImageLockMode.ReadWrite, testPattern.PixelFormat);
        int stride = bmpData.Stride;
        testPattern.UnlockBits(bmpData);

        // Initialise the video codec and color converter.
        SIPSorceryMedia.VpxEncoder vpxEncoder = new VpxEncoder();
        vpxEncoder.InitEncoder((uint)testPattern.Width, (uint)testPattern.Height, (uint)stride);

        SIPSorceryMedia.ImageConvert colorConverter = new ImageConvert();

        byte[] sampleBuffer = null;
        byte[] encodedBuffer = null;
        int sampleCount = 0;
        uint rtpTimestamp = 0;

        while (!_exit)
        {
            if (OnTestPatternSampleReady == null)
            {
                logger.LogDebug("No active clients, test pattern sampling paused.");
                break;
            }
            else
            {
                var stampedTestPattern = testPattern.Clone() as System.Drawing.Image;
                AddTimeStampAndLocation(stampedTestPattern, DateTime.UtcNow.ToString("dd MMM yyyy HH:mm:ss:fff"), "Test Pattern");
                sampleBuffer = BitmapToRGB24(stampedTestPattern as System.Drawing.Bitmap);

                int encodeResult = 0;

                unsafe
                {
                    fixed (byte* p = sampleBuffer)
                    {
                        byte[] convertedFrame = null;
                        colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, testPattern.Width, testPattern.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

                        fixed (byte* q = convertedFrame)
                        {
                            encodeResult = vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer);
                        }
                    }
                }

                // Dispose the stamped clone before any early exit so a failed encode doesn't leak it.
                stampedTestPattern.Dispose();

                if (encodeResult != 0)
                {
                    logger.LogWarning("VPX encode of video sample failed.");
                    continue;
                }

                OnTestPatternSampleReady?.Invoke(SDPMediaTypesEnum.video, rtpTimestamp, encodedBuffer);

                sampleCount++;
                rtpTimestamp += VP8_TIMESTAMP_SPACING;

                await Task.Delay(30);
            }
        }

        testPattern.Dispose();
        vpxEncoder.Dispose();
    }
    catch (Exception excp)
    {
        logger.LogError("Exception SampleTestPattern. " + excp);
    }
    finally
    {
        logger.LogDebug("test pattern sampling thread stopped.");
        _isTestPatternSampling = false;
    }
}
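// A minimal sketch factoring out the bitmap -> BGR24 -> I420 -> VP8 pipeline
// that ExpireConnections and SampleTestPattern above both repeat inline.
// EncodeBitmap is a hypothetical helper; it assumes the caller owns the
// encoder and converter, that the encoder was initialised for the bitmap's
// dimensions, and that BitmapToRGB24 returns a BGR24 buffer matching stride.
private unsafe byte[] EncodeBitmap(Bitmap bitmap, int stride, VpxEncoder vpxEncoder, ImageConvert colorConverter)
{
    byte[] encodedBuffer = null;
    byte[] sampleBuffer = BitmapToRGB24(bitmap);

    fixed (byte* p = sampleBuffer)
    {
        byte[] convertedFrame = null;
        colorConverter.ConvertRGBtoYUV(p, VideoSubTypesEnum.BGR24, bitmap.Width, bitmap.Height, stride, VideoSubTypesEnum.I420, ref convertedFrame);

        fixed (byte* q = convertedFrame)
        {
            if (vpxEncoder.Encode(q, convertedFrame.Length, 1, ref encodedBuffer) != 0)
            {
                // Null signals an encode failure; callers log and skip the frame.
                return null;
            }
        }
    }

    return encodedBuffer;
}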
/// <summary>
/// Starts the Media Foundation sampling.
/// </summary>
private unsafe void SampleMp4Media()
{
    try
    {
        logger.LogDebug("Starting mp4 media sampling thread.");

        _isMp4Sampling = true;

        VpxEncoder vpxEncoder = null;
        uint vp8Timestamp = 0;
        uint mulawTimestamp = 0;

        while (!_exit)
        {
            if (OnMp4MediaSampleReady == null)
            {
                logger.LogDebug("No active clients, media sampling paused.");
                break;
            }
            else
            {
                byte[] sampleBuffer = null;
                var sample = _mediaSource.GetSample(ref sampleBuffer);

                if (sample != null && sample.HasVideoSample)
                {
                    // Re-initialise the encoder if the source resolution or stride changes mid-stream.
                    if (vpxEncoder == null ||
                        vpxEncoder.GetWidth() != sample.Width || vpxEncoder.GetHeight() != sample.Height || vpxEncoder.GetStride() != sample.Stride)
                    {
                        vpxEncoder?.Dispose();
                        vpxEncoder = InitialiseVpxEncoder((uint)sample.Width, (uint)sample.Height, (uint)sample.Stride);
                    }

                    byte[] vpxEncodedBuffer = null;

                    fixed (byte* p = sampleBuffer)
                    {
                        int encodeResult = vpxEncoder.Encode(p, sampleBuffer.Length, 1, ref vpxEncodedBuffer);
                        if (encodeResult != 0)
                        {
                            logger.LogWarning("VPX encode of video sample failed.");
                        }
                    }

                    OnMp4MediaSampleReady?.Invoke(SDPMediaTypesEnum.video, vp8Timestamp, vpxEncodedBuffer);
                    //Console.WriteLine($"Video SeqNum {videoSeqNum}, timestamp {videoTimestamp}, buffer length {vpxEncodedBuffer.Length}, frame count {sampleProps.FrameCount}.");

                    vp8Timestamp += VP8_TIMESTAMP_SPACING;
                }
                else if (sample != null && sample.HasAudioSample)
                {
                    // Each 16-bit PCM sample maps to a single mu-law byte, so the sample
                    // duration in RTP timestamp units is half the raw buffer length.
                    uint sampleDuration = (uint)(sampleBuffer.Length / 2);

                    byte[] mulawSample = new byte[sampleDuration];
                    int sampleIndex = 0;

                    for (int index = 0; index < sampleBuffer.Length; index += 2)
                    {
                        var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                        mulawSample[sampleIndex++] = ulawByte;
                    }

                    OnMp4MediaSampleReady?.Invoke(SDPMediaTypesEnum.audio, mulawTimestamp, mulawSample);
                    //Console.WriteLine($"Audio SeqNum {audioSeqNum}, timestamp {audioTimestamp}, buffer length {mulawSample.Length}.");

                    mulawTimestamp += sampleDuration;
                }
            }
        }

        // Guard against the encoder never having been initialised (e.g. no video samples arrived).
        vpxEncoder?.Dispose();
    }
    catch (Exception excp)
    {
        logger.LogWarning("Exception SampleMp4Media. " + excp.Message);
    }
    finally
    {
        logger.LogDebug("mp4 sampling thread stopped.");
        _isMp4Sampling = false;
    }
}
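// A minimal sketch of the PCM-to-mu-law loop from the audio branch above as a
// standalone helper. It assumes the input holds little-endian 16-bit signed
// PCM, so the mu-law output is exactly half the input length, matching the
// sampleDuration calculation in SampleMp4Media. PcmToMuLaw is a hypothetical
// name; MuLawEncoder.LinearToMuLawSample is NAudio's codec helper.
private static byte[] PcmToMuLaw(byte[] pcmBuffer)
{
    byte[] mulaw = new byte[pcmBuffer.Length / 2];

    for (int i = 0; i < mulaw.Length; i++)
    {
        // Each mu-law byte encodes one 16-bit sample.
        mulaw[i] = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(pcmBuffer, i * 2));
    }

    return mulaw;
}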
private void SampleWebCam(MFVideoSampler videoSampler, VideoMode videoMode, CancellationTokenSource cts)
{
    try
    {
        Thread.CurrentThread.Name = "vidsampler_" + videoMode.DeviceIndex + "_" + videoMode.Width + "_" + videoMode.Height;

        var vpxEncoder = new VpxEncoder();
        // TODO: The last parameter passed to the vpx encoder init needs to be the frame stride not the width.
        vpxEncoder.InitEncoder(Convert.ToUInt32(videoMode.Width), Convert.ToUInt32(videoMode.Height), Convert.ToUInt32(videoMode.Width));

        //var videoSampler = new MFVideoSampler();
        //videoSampler.Init(videoMode.DeviceIndex, videoMode.Width, videoMode.Height);
        //videoSampler.InitFromFile();

        while (!_stop && !cts.IsCancellationRequested)
        {
            byte[] videoSample = null;
            var sample = videoSampler.GetSample(ref videoSample);

            //if (result == NAudio.MediaFoundation.MediaFoundationErrors.MF_E_HW_MFT_FAILED_START_STREAMING)
            //{
            //    logger.Warn("A sample could not be acquired from the local webcam. Check that it is not already in use.");
            //    OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use.");
            //    break;
            //}
            //else if (result != 0)
            //{
            //    logger.Warn("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
            //    OnLocalVideoError("A sample could not be acquired from the local webcam. Check that it is not already in use. Error code: " + result);
            //    break;
            //}
            //else
            if (sample?.HasVideoSample == true)
            {
                // This event sends the raw bitmap to the WPF UI.
                OnLocalVideoSampleReady?.Invoke(videoSample, videoSampler.Width, videoSampler.Height);

                // This event encodes the sample and forwards it to the RTP manager for network transmission.
                if (OnLocalVideoEncodedSampleReady != null)
                {
                    IntPtr rawSamplePtr = Marshal.AllocHGlobal(videoSample.Length);
                    Marshal.Copy(videoSample, 0, rawSamplePtr, videoSample.Length);

                    byte[] yuv = null;

                    unsafe
                    {
                        // TODO: using width instead of stride.
                        _imageConverter.ConvertRGBtoYUV((byte*)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), Convert.ToInt32(videoMode.Width), VideoSubTypesEnum.I420, ref yuv);
                        //_imageConverter.ConvertToI420((byte*)rawSamplePtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), ref yuv);
                    }

                    Marshal.FreeHGlobal(rawSamplePtr);

                    IntPtr yuvPtr = Marshal.AllocHGlobal(yuv.Length);
                    Marshal.Copy(yuv, 0, yuvPtr, yuv.Length);

                    byte[] encodedBuffer = null;

                    unsafe
                    {
                        vpxEncoder.Encode((byte*)yuvPtr, yuv.Length, _encodingSample++, ref encodedBuffer);
                    }

                    Marshal.FreeHGlobal(yuvPtr);

                    OnLocalVideoEncodedSampleReady(encodedBuffer);
                }
            }
        }

        videoSampler.Stop();
        vpxEncoder.Dispose();
    }
    catch (Exception excp)
    {
        logger.LogError($"Exception SampleWebCam. {excp.Message}");
    }
}
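// A minimal sketch of the encode hand-off in SampleWebCam without the
// AllocHGlobal/Copy/FreeHGlobal round trips: pinning the managed buffers with
// fixed gives the native calls a stable pointer directly, and nothing leaks if
// ConvertRGBtoYUV or Encode throws. EncodeAndForwardSample is a hypothetical
// helper; the other identifiers match the method above, and the
// stride-for-width caveat from the TODO still applies.
private unsafe void EncodeAndForwardSample(byte[] videoSample, VideoMode videoMode, VpxEncoder vpxEncoder)
{
    fixed (byte* rawPtr = videoSample)
    {
        byte[] yuv = null;
        _imageConverter.ConvertRGBtoYUV(rawPtr, VideoSubTypesEnum.RGB24, Convert.ToInt32(videoMode.Width), Convert.ToInt32(videoMode.Height), Convert.ToInt32(videoMode.Width), VideoSubTypesEnum.I420, ref yuv);

        fixed (byte* yuvPtr = yuv)
        {
            byte[] encodedBuffer = null;
            vpxEncoder.Encode(yuvPtr, yuv.Length, _encodingSample++, ref encodedBuffer);
            OnLocalVideoEncodedSampleReady(encodedBuffer);
        }
    }
}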