/// <summary>
/// Produces a black-and-white copy of <paramref name="src"/> by computing a
/// per-pixel luma (BT.601 weights) and thresholding with Otsu's method.
/// </summary>
/// <param name="src">Source bitmap; plane 0 is read as BGRA8 (4 bytes per pixel).</param>
/// <returns>A new Bgra8 <see cref="SoftwareBitmap"/> where each pixel is either
/// 0xFFFFFFFF (above threshold) or opaque black (at/below threshold).</returns>
public static SoftwareBitmap Binarize(SoftwareBitmap src)
{
    int width = src.PixelWidth;
    int height = src.PixelHeight;
    int graySize = width * height;
    byte[] grayData = new byte[graySize];
    int pos = 0;

    // Pass 1: read the source pixels and build a flat grayscale buffer.
    unsafe
    {
        using (var buffer = src.LockBuffer(BitmapBufferAccessMode.Read))
        {
            uint capacity;
            byte* dataInBytes;
            using (var read = buffer.CreateReference())
            {
                ((IMemoryBufferByteAccess)read).GetBuffer(out dataInBytes, out capacity);
                BitmapPlaneDescription bufferLayout = buffer.GetPlaneDescription(0);
                for (int i = 0; i < bufferLayout.Height; i++)
                {
                    for (int j = 0; j < bufferLayout.Width; j++)
                    {
                        // Stride may exceed Width*4, so index via the plane description.
                        var offset = bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j;
                        // BGRA byte order: [0]=B, [1]=G, [2]=R. BT.601 luma weights.
                        byte grayScale = (byte)((dataInBytes[offset + 2] * 0.299) +
                                                (dataInBytes[offset + 1] * 0.587) +
                                                (dataInBytes[offset] * 0.114));
                        grayData[pos++] = grayScale;
                    }
                }
            }
        }
    }

    // Otsu's method picks the threshold that maximizes between-class variance.
    byte threshold = (byte)otsu_th(grayData, graySize);

    // Pass 2: write a binarized BGRA8 image (white above threshold, black otherwise).
    pos = 0;
    var dst = new SoftwareBitmap(BitmapPixelFormat.Bgra8, width, height);
    unsafe
    {
        uint capacity;
        byte* dstDataInBytes;
        using (var dstBuffer = dst.LockBuffer(BitmapBufferAccessMode.Write))
        using (var write = dstBuffer.CreateReference())
        {
            ((IMemoryBufferByteAccess)write).GetBuffer(out dstDataInBytes, out capacity);
            BitmapPlaneDescription wBufferLayout = dstBuffer.GetPlaneDescription(0);
            for (int i = 0; i < height; i++)
            {
                for (int j = 0; j < width; j++)
                {
                    byte d = grayData[pos++] > threshold ? (byte)0xFF : (byte)0;
                    int dstOffset = wBufferLayout.StartIndex + wBufferLayout.Stride * i + 4 * j;
                    dstDataInBytes[dstOffset] = d;
                    dstDataInBytes[dstOffset + 1] = d;
                    dstDataInBytes[dstOffset + 2] = d;
                    dstDataInBytes[dstOffset + 3] = 0xFF; // fully opaque alpha
                }
            }
        }
    }

    return dst;
}
/// <summary>
/// Builds a 360 x _numSoundPerGakki histogram of pixel hues over the central
/// region of <paramref name="bitmap"/> (rows/cols from roughly 1/4 to 3/4 of
/// each dimension). Each sufficiently saturated/bright pixel increments the
/// bucket indexed by its hue (degrees) and a brightness band.
/// </summary>
/// <param name="bitmap">BGRA8 bitmap to analyze.</param>
/// <param name="hues">Resulting histogram; null if the pixel buffer cannot be
/// accessed via <see cref="IMemoryBufferByteAccess"/>.</param>
private unsafe void GetHues(SoftwareBitmap bitmap, out double[,] hues)
{
    // In BGRA8 format, each pixel is defined by 4 bytes.
    const int BYTES_PER_PIXEL = 4;

    using (var buffer = bitmap.LockBuffer(BitmapBufferAccessMode.ReadWrite))
    using (var reference = buffer.CreateReference())
    {
        if (reference is IMemoryBufferByteAccess byteAccess)
        {
            // A newly allocated double[,] is already zero-initialized by the CLR.
            hues = new double[360, _numSoundPerGakki];

            // Get a pointer to the pixel buffer.
            byte* data;
            uint capacity;
            byteAccess.GetBuffer(out data, out capacity);

            // Get information about the BitmapBuffer.
            var desc = buffer.GetPlaneDescription(0);

            // Scan the central region only (~1/4 to 3/4 of each dimension).
            // NOTE(review): for Height/Width < 4 the "- 1" underflows the uint
            // and the loop body is simply skipped — confirm that is intended.
            var rowStart = (uint)((desc.Height >> 2) - 1);
            var rowEnd = (uint)((desc.Height * 3) >> 2);
            var colStart = (uint)((desc.Width >> 2) - 1);
            var colEnd = (uint)((desc.Width * 3) >> 2);

            int brightnessBands = _numSoundPerGakki;

            for (uint row = rowStart; row < rowEnd; row++)
            {
                for (uint col = colStart; col < colEnd; col++)
                {
                    // Index of the current pixel in the buffer (4 bytes, BGRA8).
                    var currPixel = desc.StartIndex + desc.Stride * row + BYTES_PER_PIXEL * col;

                    // Convert the pixel color to HSV.
                    HSVColor hsv = Models.ColorHelper.RGBtoHSV(
                        Color.FromArgb(
                            255,
                            data[currPixel + 2],  // Red
                            data[currPixel + 1],  // Green
                            data[currPixel + 0]   // Blue
                        )
                    );

                    double hue = hsv.H;
                    double brightness = hsv.V;

                    // Count only pixels with sufficient saturation or brightness.
                    if ((hsv.S > _saturationThreshold) || (hsv.V > _valueThreshold))
                    {
                        int hueIndex = (int)hue;
                        int bandIndex = (int)(brightness * 7.0 / 255.0);

                        // Explicit bounds guard: hue can reach 360 and the
                        // brightness band can exceed the histogram width.
                        // (Previously an empty catch silently swallowed the
                        // resulting IndexOutOfRangeException.)
                        if (hueIndex >= 0 && hueIndex < 360 &&
                            bandIndex >= 0 && bandIndex < brightnessBands)
                        {
                            hues[hueIndex, bandIndex] += 1.0;
                        }
                    }
                }
            }
        }
        else
        {
            hues = null;
        }
    }
}
/// <summary>
/// Event handler for video frames for the local video capture device.
/// Converts each frame to NV12, copies it to a managed buffer, and forwards it
/// to the encoded-sample and/or raw-sample subscribers.
/// </summary>
/// <param name="sender">The frame reader that produced the frame.</param>
/// <param name="e">Frame-arrived event arguments (unused; the latest frame is pulled from <paramref name="sender"/>).</param>
private async void FrameArrivedHandler(MediaFrameReader sender, MediaFrameArrivedEventArgs e)
{
    if (!_isClosed)
    {
        // Only do work if a format has been negotiated and someone is listening.
        if (!_videoFormatManager.SelectedFormat.IsEmpty() && (OnVideoSourceEncodedSample != null || OnVideoSourceRawSample != null))
        {
            using (var mediaFrameReference = sender.TryAcquireLatestFrame())
            {
                var videoMediaFrame = mediaFrameReference?.VideoMediaFrame;
                var softwareBitmap = videoMediaFrame?.SoftwareBitmap;
                if (softwareBitmap == null && videoMediaFrame != null)
                {
                    // No CPU-side bitmap available; copy the frame out of the GPU surface.
                    var videoFrame = videoMediaFrame.GetVideoFrame();
                    softwareBitmap = await SoftwareBitmap.CreateCopyFromSurfaceAsync(videoFrame.Direct3DSurface);
                }
                if (softwareBitmap != null)
                {
                    int width = softwareBitmap.PixelWidth;
                    int height = softwareBitmap.PixelHeight;
                    if (softwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Nv12)
                    {
                        // NOTE(review): Convert returns a new bitmap; the pre-conversion
                        // bitmap is not disposed here — confirm whether that is a leak.
                        softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Nv12, BitmapAlphaMode.Ignore);
                    }
                    // Swap the processed frame to _backBuffer and dispose of the unused image.
                    softwareBitmap = Interlocked.Exchange(ref _backBuffer, softwareBitmap);
                    using (BitmapBuffer buffer = _backBuffer.LockBuffer(BitmapBufferAccessMode.Read))
                    {
                        using (var reference = buffer.CreateReference())
                        {
                            unsafe
                            {
                                byte* dataInBytes;
                                uint capacity;
                                ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacity);
                                // Copy the NV12 pixel data into a managed array for the encoder/converter.
                                byte[] nv12Buffer = new byte[capacity];
                                Marshal.Copy((IntPtr)dataInBytes, nv12Buffer, 0, (int)capacity);

                                if (OnVideoSourceEncodedSample != null)
                                {
                                    // Serialize access to the encoder across frame callbacks.
                                    lock (_videoEncoder)
                                    {
                                        var encodedBuffer = _videoEncoder.EncodeVideo(width, height, nv12Buffer, EncoderInputFormat, _videoFormatManager.SelectedFormat.Codec);
                                        if (encodedBuffer != null)
                                        {
                                            // Derive the RTP timestamp duration from the capture frame rate,
                                            // falling back to the default when the rate is unknown.
                                            uint fps = (_fpsDenominator > 0 && _fpsNumerator > 0) ?
                                                _fpsNumerator / _fpsDenominator : DEFAULT_FRAMES_PER_SECOND;
                                            uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;
                                            OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
                                        }
                                        if (_forceKeyFrame)
                                        {
                                            // Key frame request is consumed once the encoder has seen a frame.
                                            _forceKeyFrame = false;
                                        }
                                    }
                                }

                                if (OnVideoSourceRawSample != null)
                                {
                                    // Milliseconds since the previous frame (0 for the first frame).
                                    uint frameSpacing = 0;
                                    if (_lastFrameAt != DateTime.MinValue)
                                    {
                                        frameSpacing = Convert.ToUInt32(DateTime.Now.Subtract(_lastFrameAt).TotalMilliseconds);
                                    }
                                    var bgrBuffer = PixelConverter.NV12toBGR(nv12Buffer, width, height, width * 3);
                                    OnVideoSourceRawSample(frameSpacing, width, height, bgrBuffer, VideoPixelFormatsEnum.Bgr);
                                }
                            }
                        }
                    }
                    // NOTE(review): _backBuffer is disposed here but the field still
                    // references the disposed bitmap until the next Exchange — confirm
                    // this matches the intended back-buffer lifecycle.
                    _backBuffer?.Dispose();
                    softwareBitmap?.Dispose();
                }
                _lastFrameAt = DateTime.Now;
            }
        }
    }
}