/// <summary>
/// Normalizes the DPI of the brush's bitmap image to 96 DPI, rebuilding the
/// pixel data unchanged so the brush renders at its true pixel size.
/// No-op when the brush does not hold a BitmapSource or is already 96 DPI.
/// </summary>
/// <param name="original">Brush whose ImageSource is rewritten in place.</param>
public static void ForceDpiOnBrush(ref ImageBrush original)
{
    BitmapSource originalBitmap = original.ImageSource as BitmapSource;

    // BUG FIX: the original only checked for null together with the DPI test,
    // then dereferenced originalBitmap unconditionally below, throwing a
    // NullReferenceException for non-BitmapSource sources. Bail out explicitly.
    if (originalBitmap == null)
    {
        return;
    }

    // we want to unify all source images to 96.0 dpi
    if (originalBitmap.DpiX == 96.0 && originalBitmap.DpiY == 96.0)
    {
        return;
    }

    // we are assuming BGRA 32 bit color below - convert if needed
    if (originalBitmap.Format != PixelFormats.Bgra32)
    {
        originalBitmap = new FormatConvertedBitmap(originalBitmap, PixelFormats.Bgra32, null, 0);
    }

    // get the raw bitmap from the original brush (4 bytes per BGRA pixel)
    int width = originalBitmap.PixelWidth;
    int height = originalBitmap.PixelHeight;
    byte[] bitmap = new byte[width * height * 4];
    originalBitmap.CopyPixels(bitmap, width * 4, 0);

    // Create image data for new brush, forcing 96 DPI
    BitmapSource image = BitmapSource.Create(
        width, height,
        96, 96, // FORCE DPI here
        PixelFormats.Bgra32,
        null,
        bitmap,
        width * 4);

    // reset imagedata
    original.ImageSource = image;
}
/// <summary>
/// Builds the Rgb24 test image: the blue channel encodes the row, the green
/// channel encodes the column, and the red channel is the fixed value _R.
/// </summary>
private void CreateImage( )
{
    Width = _Width;
    Height = _Height;

    byte[] bPixels = new byte[Math.Abs(_Stride) * _Height];

    // Walk the buffer 3 bytes (one Rgb24 pixel) at a time.
    for (int i = 0; i < bPixels.Length; i += 3)
    {
        bPixels[i + 2] = ( byte )(i / _Stride);       // row index
        bPixels[i + 1] = ( byte )((i % _Stride) / 3); // column index
        bPixels[i] = _R;                              // constant red
    }

    // BUG FIX: BitmapSource.Create requires positive DPI values; the original
    // passed 0, 0 which is rejected. Use the standard 96 DPI.
    _Image = BitmapSource.Create(_Width, _Height, 96, 96, PixelFormats.Rgb24, null, bPixels, _Stride);
}
// ColorStream event handler: copies the current Kinect camera frame into a
// BitmapSource and displays it in user_img.
void nui_ColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
{
    // BUG FIX: ColorImageFrame is IDisposable; dispose it so the sensor can
    // recycle its frame buffers instead of leaking them.
    using (ColorImageFrame ImageParam = e.OpenColorImageFrame())
    {
        // No frame available (frame dropped / nothing recognized) - nothing to show.
        if (ImageParam == null)
        {
            return;
        }

        // Copy this frame's pixels; frames arrive continuously (video, not a still image).
        byte[] ImageBits = new byte[ImageParam.PixelDataLength];
        ImageParam.CopyPixelDataTo(ImageBits);

        // Build the display bitmap (size, DPI, pixel format) and push it to the UI.
        BitmapSource src = BitmapSource.Create(ImageParam.Width, ImageParam.Height, 96, 96,
            PixelFormats.Bgr32, null, ImageBits, ImageParam.Width * ImageParam.BytesPerPixel);
        user_img.Source = src;
    }
}
// Wraps the raw pixel buffer of an IImage in a WPF Image control sized to the frame.
// NOTE(review): the previous value of _gcHandle is overwritten without being freed
// here - presumably it is released elsewhere; verify, otherwise pinned buffers leak.
// NOTE(review): BitmapSource.Create copies the pixel data, so the pin likely only
// needs to last for the duration of the call - TODO confirm before changing lifetime.
private Image CreateImage(IImage image)
{
    // Pin the managed buffer so its address can be handed to BitmapSource.Create.
    var pinnedArray = GCHandle.Alloc(image.Data, GCHandleType.Pinned);
    var source = BitmapSource.Create(image.Width, image.Height, 96.0, 96.0, PixelFormat(image), null,
        pinnedArray.AddrOfPinnedObject(), image.DataLen, image.Stride);
    // Keep the handle alive in a field (see lifetime note above).
    _gcHandle = pinnedArray;
    // Max* match the pixel size so the control never scales the image up.
    return(new Image { Source = source, Width = image.Width, Height = image.Height, MaxHeight = image.Height, MaxWidth = image.Width });
}
/// <summary>
/// Debugger-visualizer entry point: reconstructs the proxied BitmapSource,
/// converts it to a GDI+ Bitmap, and shows it in a WinForms dialog.
/// </summary>
protected override void Show(IDialogVisualizerService windowService, IVisualizerObjectProvider objectProvider)
{
    if (windowService == null) { throw new ArgumentNullException("windowService"); }
    if (objectProvider == null) { throw new ArgumentNullException("objectProvider"); }

    // Convert the BitmapSource to a Bitmap
    var data = (MyProxy)objectProvider.GetObject();
    OriginBitmapSource = BitmapSource.Create(data.Width, data.Height, 96, 96, PixelFormats.Bgra32, null, data.Pixels, data.Stride);
    OriginBitmap = new Bitmap(data.Width, data.Height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
    // Copy the WPF pixels straight into the locked GDI+ bits (same 32bpp BGRA layout).
    var bmpData = OriginBitmap.LockBits(new Rectangle(0, 0, OriginBitmap.Width, OriginBitmap.Height), ImageLockMode.ReadOnly, OriginBitmap.PixelFormat);
    OriginBitmapSource.CopyPixels(Int32Rect.Empty, bmpData.Scan0, bmpData.Height * bmpData.Stride, bmpData.Stride);
    OriginBitmap.UnlockBits(bmpData);
    //this.OriginBitmap = (Bitmap)objectProvider.GetObject();

    // Create and display the Form
    using (MyForm = new Form())
    {
        // Add the buttons / PictureBox to the form
        AddToolStrip();
        MyForm.Text = "BitmapSourceVisualizer";
        MyForm.BackColor = System.Drawing.Color.White;
        // Size the dialog to a quarter of the primary screen.
        MyForm.Width = Screen.PrimaryScreen.Bounds.Width / 2;
        MyForm.Height = Screen.PrimaryScreen.Bounds.Height / 2;
        windowService.ShowDialog(MyForm);
    }
    //this.OriginBitmap.Dispose();
}
/// <summary>
/// Encodes the given raw pixel buffer as a JPEG (quality 50) and writes it to
/// <paramref name="file"/>. Uses the shared _bitmap pixel format and _defaultDPI.
/// </summary>
/// <param name="pixs">Raw pixel data matching _bitmap.Format.</param>
/// <param name="rect">Image dimensions; only Width/Height are used.</param>
/// <param name="file">Destination path (overwritten if it exists).</param>
/// <param name="keyword">Currently unused; kept for interface compatibility
/// (JPEG keyword metadata support was removed).</param>
static protected void SaveAsJpeg(byte[] pixs, Rect rect, string file, string keyword)
{
    int pixCount = (int)(rect.Width * rect.Height);
    if (pixCount <= 0)
    {
        return; // degenerate rect - nothing to save
    }

    // Bytes per row; _dataBytePerPixel must agree with _bitmap.Format.
    // (Removed the unused 'dataPerPix' local computed from pixs.Length.)
    int stride = (int)rect.Width * _dataBytePerPixel;
    BitmapSource image = BitmapSource.Create((int)rect.Width, (int)rect.Height,
        _defaultDPI, _defaultDPI, _bitmap.Format, null, pixs, stride);

    JpegBitmapEncoder encoder = new JpegBitmapEncoder();
    encoder.QualityLevel = 50;
    encoder.Frames.Add(BitmapFrame.Create(image));

    using (FileStream fs = new FileStream(file, FileMode.Create))
    {
        encoder.Save(fs);
    }
}
/// <summary>
/// After the tool window is created, installs the window's Icon as the tab image,
/// provided the icon matches the required TabImageWidth x TabImageHeight size.
/// </summary>
public override void OnToolWindowCreated()
{
    base.OnToolWindowCreated();

    IVsWindowFrame windowFrame = base.Frame as IVsWindowFrame;
    if (Icon != null && windowFrame != null)
    {
        var icon = Icon;
        if (icon.PixelWidth == TabImageWidth && icon.PixelHeight == TabImageHeight)
        {
            // BUG FIX: CopyPixels and BitmapSource.Create take the stride in
            // BYTES per row; the original computed bits (BitsPerPixel * width),
            // allocating an 8x oversized buffer and passing a wrong stride.
            int stride = (icon.Format.BitsPerPixel * icon.PixelWidth + 7) / 8;
            byte[] pixels = new byte[stride * icon.PixelHeight];
            icon.CopyPixels(pixels, stride, 0);
            // Rebuild at 96 DPI using the verified tab dimensions.
            icon = BitmapSource.Create(TabImageWidth, TabImageHeight, 96.0, 96.0, icon.Format, null, pixels, stride);
            windowFrame.SetProperty((int)__VSFPROPID4.VSFPROPID_TabImage, icon);
        }
        else
        {
            Trace.WriteLine(string.Format("The icon for the {0} window could not be used because it was not {1}x{2}px.", Caption, TabImageWidth, TabImageHeight));
        }
    }
}
/// <summary>
/// Function to create the bitmap of the rotated image: interleaves the rotated
/// R/G/B planes into a packed Rgb24 buffer and wraps it in a BitmapSource.
/// </summary>
private void UpdatePaddedImage()
{
    const int bitsPerPixel = 24; // Rgb24
    int stride = (SquareWidth * bitsPerPixel + 7) / 8;

    // NOTE(review): the old buffer is about to be replaced, so this clear only
    // matters if another reference to it is held elsewhere - confirm.
    if (pixelsToWrite != null)
    {
        Array.Clear(pixelsToWrite, 0, pixelsToWrite.Length);
    }
    pixelsToWrite = new byte[stride * SquareHeight];

    // Interleave planar channel data into R, G, B byte triples.
    // FIX: use Length instead of LINQ Count() on the array.
    for (int i = 0; i < pixelsToWrite.Length; i += 3)
    {
        int i1 = i / 3; // planar (per-channel) pixel index
        pixelsToWrite[i] = Pixels8RotatedRed[i1];
        pixelsToWrite[i + 1] = Pixels8RotatedGreen[i1];
        pixelsToWrite[i + 2] = Pixels8RotatedBlue[i1];
    }

    RotatedImage = BitmapSource.Create(SquareWidth, SquareHeight, 96, 96,
        PixelFormats.Rgb24, null, pixelsToWrite, stride);
}
// Renders one frame: paints a diagnostic gradient into the BGRA back buffer,
// runs the ray caster over it, then publishes the frozen bitmap to the UI.
public void renderFrame()
{
    // Gradient test pattern: color channels ramp from 0 to 0xff across the
    // buffer, alpha bytes (every 4th) stay fully opaque.
    int length = frameBuffer.Length;
    for (int index = 0; index < length; index++)
    {
        if (index % 4 == 3)
        {
            frameBuffer[index] = 0xff; // alpha
        }
        else
        {
            float fraction = (float)index / length;
            frameBuffer[index] = (byte)(0xff * fraction);
        }
    }

    doRayCast();

    // Wrap the finished buffer in an immutable bitmap so it can cross threads.
    frame = BitmapSource.Create(frameWidth, frameHeight, 96, 96,
        PixelFormats.Bgra32, null, frameBuffer, frameStride);
    frame.Freeze();
    frameCount++;

    // Hand the frame over to the UI thread.
    viewPort.Dispatcher.Invoke(new rayCasterDelegate(setViewPort));
}
/// <summary>
/// Converts the planar Image buffer (all of channel 0, then channel 1, ...)
/// into an interleaved 96-DPI BitmapSource of the requested pixel format.
/// </summary>
public BitmapSource GetBitmapSource(int width, int height, int channels, PixelFormat pixelFormat)
{
    int pixelCount = width * height;
    // Bytes per row, rounded up to whole bytes; equals channels * width for 8-bit channels.
    int rawStride = (width * pixelFormat.BitsPerPixel + 7) / 8;
    byte[] interleaved = new byte[rawStride * height];

    // Scatter each source plane into its channel slot of the interleaved buffer.
    for (int channel = 0; channel < channels; channel++)
    {
        int planeOffset = channel * pixelCount;
        for (int pixel = 0; pixel < pixelCount; pixel++)
        {
            interleaved[(pixel * channels) + channel] = Image[planeOffset + pixel];
        }
    }

    BitmapSource result = BitmapSource.Create(width, height, 96, 96, pixelFormat, null, interleaved, rawStride);

    // Freeze for cross-thread use when possible.
    if (result.CanFreeze)
    {
        result.Freeze();
    }
    return result;
}
/// <summary>
/// Converts a GDI+ <see cref="Bitmap"/> into a WPF BitmapSource, carrying the
/// palette across so indexed pixel formats render correctly.
/// </summary>
private BitmapSource ConvertBitmapInBitmapSource(Bitmap bitmap)
{
    var bitmapData = bitmap.LockBits(
        new Rectangle(0, 0, bitmap.Width, bitmap.Height),
        ImageLockMode.ReadOnly,
        bitmap.PixelFormat);

    try
    {
        // Translate the GDI+ palette (if any) into a WPF BitmapPalette.
        BitmapPalette palette = null;
        if (bitmap.Palette != null && bitmap.Palette.Entries != null && bitmap.Palette.Entries.Length > 0)
        {
            var pal = bitmap.Palette.Entries.Select(e => Color.FromArgb(e.A, e.R, e.G, e.B)).ToArray();
            palette = new BitmapPalette(pal);
        }

        return BitmapSource.Create(
            bitmapData.Width,
            bitmapData.Height,
            96, 96,
            bitmap.PixelFormat.Convert(),
            palette,
            bitmapData.Scan0,
            bitmapData.Stride * bitmapData.Height,
            bitmapData.Stride);
    }
    finally
    {
        // FIX: always unlock the bits, even if palette creation or Create throws.
        bitmap.UnlockBits(bitmapData);
    }
}
// Builds an Image control from a test-result bitmap; degenerate (zero-sized)
// bitmaps yield an empty placeholder Image.
static Image createImage(TestResultBitmap bitmap)
{
    if (bitmap.Width <= 0 || bitmap.Height <= 0)
    {
        return new Image();
    }

    // Pbgra32 => 4 bytes per pixel, rows packed tightly.
    var source = BitmapSource.Create(
        bitmap.Width,
        bitmap.Height,
        96, 96,
        PixelFormats.Pbgra32,
        null,
        bitmap.Data,
        bitmap.Width * 4);

    return new Image { Source = source };
}
// Evaluates renderCode over every (x, y) cell of the field in parallel, derives
// the gradient field, and pushes both rendered Rgb24 images to the UI thread.
private void GetField(Action <int, int, double[]> renderCode)
{
    var field = new double[fieldWidth * fieldHeight];

    // One parallel task per column; renderCode writes directly into 'field'.
    Parallel.For(0, fieldWidth, column =>
    {
        for (int row = 0; row < fieldHeight; row++)
        {
            renderCode(column, row, field);
        }
    });

    var gradientField = GetGradient(field);
    var magnitudePixels = GetImage(field);
    var gradientPixels = GetImage(gradientField);

    // Marshal back to the UI thread; both images are Rgb24, 3 bytes per pixel.
    Dispatcher.Invoke(new Action(() =>
    {
        MagnitudeImage.Source = BitmapSource.Create(fieldWidth, fieldHeight, 20, 20,
            PixelFormats.Rgb24, null, magnitudePixels, fieldWidth * 3);
        GradientImage.Source = BitmapSource.Create(fieldWidth, fieldHeight, 20, 20,
            PixelFormats.Rgb24, null, gradientPixels, fieldWidth * 3);
    }));
}
/// <summary>
/// Replace the image with one whose pixels are given in the
/// nRows x nCols x 4 array of bytes called "pixelArray"; the data at pixels[r, c, *]
/// are the R, G, B, and A values for the pixel, stored as values from 0 to 255.
/// </summary>
public void SetPixelArray(byte[, ,] pixelArray, int nRows, int nCols)
{
    int stride = nCols * 4;
    byte[] flat = new byte[nRows * stride];

    for (int row = 0; row < nRows; ++row)
    {
        int rowBase = row * stride;
        for (int col = 0; col < nCols; ++col)
        {
            int pixelBase = rowBase + (col * 4);
            // Reverse the input's RGBA channel order into WPF's BGRA layout.
            flat[pixelBase + 0] = pixelArray[row, col, 2]; // B
            flat[pixelBase + 1] = pixelArray[row, col, 1]; // G
            flat[pixelBase + 2] = pixelArray[row, col, 0]; // R
            flat[pixelBase + 3] = pixelArray[row, col, 3]; // A
        }
    }

    bmpSource = BitmapSource.Create(nCols, nRows, dpi, dpi, PixelFormats.Bgra32, null, flat, stride);
    Source = bmpSource;
    this.Width = bmpSource.Width;
    this.Height = bmpSource.Height;
}
/// <summary>
/// Converts raw depth samples into a grayscale Bgr32 bitmap and enqueues it
/// for display on the main window.
/// </summary>
/// <param name="width">Frame width in pixels.</param>
/// <param name="height">Frame height in pixels.</param>
/// <param name="minDepth">Lowest depth (inclusive) to visualize; values outside the range render black.</param>
/// <param name="maxDepth">Highest depth (inclusive) to visualize.</param>
/// <param name="depthData">One depth sample per pixel, row-major.</param>
public void ToDepthBitmapQ(int width, int height, ushort minDepth, ushort maxDepth, ushort[] depthData)
{
    byte[] pixelData = new byte[width * height * (PixelFormats.Bgr32.BitsPerPixel + 7) / 8];
    int colorIndex = 0;
    for (int depthIndex = 0; depthIndex < depthData.Length; ++depthIndex)
    {
        ushort depth = depthData[depthIndex];
        // NOTE(review): the byte cast keeps only the low 8 bits of the depth, so
        // intensity wraps every 256 units instead of scaling across [min, max] -
        // confirm this banding effect is intended.
        byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0);
        pixelData[colorIndex++] = intensity; // Blue
        pixelData[colorIndex++] = intensity; // Green
        pixelData[colorIndex++] = intensity; // Red
        ++colorIndex; // skip the unused 4th byte of Bgr32
    }
    var format = PixelFormats.Bgr32;
    int stride = width * format.BitsPerPixel / 8;
    BitmapSource temp = BitmapSource.Create(width, height, 96, 96, format, null, pixelData, stride);
    // Freeze so the bitmap can be consumed from the UI thread.
    temp.Freeze();
    MainWindow.DepthConcurrentQueue.Enqueue(temp);
}
// Fills a 128x128 Pbgra32 buffer with a single solid color taken from the
// _blue/_green/_red/_alpha fields and assigns it to ImageSource.
public void LoadFromBufferRGBA()
{
    const double dpi = 96;
    const int width = 128;
    const int height = 128;
    const int bytesPerPixel = 4;

    byte[] pixelData = new byte[width * height * bytesPerPixel];

    // Pbgra32 stores channels as B, G, R, A; every pixel gets the same color.
    for (int offset = 0; offset < pixelData.Length; offset += bytesPerPixel)
    {
        pixelData[offset + 0] = (byte)_blue;
        pixelData[offset + 1] = (byte)_green;
        pixelData[offset + 2] = (byte)_red;
        pixelData[offset + 3] = (byte)_alpha;
    }

    ImageSource = BitmapSource.Create(width, height, dpi, dpi, PixelFormats.Pbgra32,
        null, pixelData, width * bytesPerPixel);
}
/// <summary>
/// Packs a 2-D color array into a 96-DPI Bgra32 BitmapSource. The first array
/// dimension is X (width), the second is Y (height).
/// </summary>
public static BitmapSource Convert(Color[,] inData)
{
    int width = inData.GetLength(0);
    int height = inData.GetLength(1);
    int stride = width * 4;
    byte[] buffer = new byte[height * stride];

    for (int y = 0; y < height; ++y)
    {
        int rowBase = y * stride;
        for (int x = 0; x < width; ++x)
        {
            int offset = rowBase + (4 * x);
            Color pixel = inData[x, y];
            // Using 32-bit BGRA pixel layout.
            buffer[offset + 0] = pixel.B;
            buffer[offset + 1] = pixel.G;
            buffer[offset + 2] = pixel.R;
            buffer[offset + 3] = pixel.A;
        }
    }

    return BitmapSource.Create(width, height, 96.0, 96.0, PixelFormats.Bgra32, null, buffer, stride);
}
/// <summary>
/// Saves an 8-bit grayscale image (rows x cols byte matrix) as an interlaced PNG
/// at <paramref name="savePath"/>.png using the Gray256Transparent palette.
/// </summary>
private void storeImage(string savePath, byte[,] imageData)
{
    int width = imageData.GetLength(1);
    int height = imageData.GetLength(0);

    // Flatten the [row, col] matrix into row-major scanlines (stride == width for Indexed8).
    byte[] pixels = new byte[width * height];
    for (int j = 0; j < height; j++)
    {
        for (int i = 0; i < width; i++)
        {
            pixels[(width * j) + i] = imageData[j, i];
        }
    }

    // Define the image palette
    BitmapPalette myPalette = BitmapPalettes.Gray256Transparent;

    // Creates a new image with the pre-defined palette
    BitmapSource image = BitmapSource.Create(
        width, height, 96, 96, PixelFormats.Indexed8, myPalette, pixels, width);

    PngBitmapEncoder encoder = new PngBitmapEncoder();
    encoder.Interlace = PngInterlaceOption.On;
    encoder.Frames.Add(BitmapFrame.Create(image));

    // FIX: 'using' guarantees the stream is closed even if Save throws
    // (the original only called Dispose on the success path).
    using (FileStream stream = new FileStream(savePath + ".png", FileMode.Create))
    {
        encoder.Save(stream);
    }
}
// Listing 3-7
// Maps raw Kinect depth samples to a Gray16 image: depths inside the threshold
// band are bit-inverted so nearer surfaces read lighter; everything else
// collapses to a near-black constant.
private void CreateLighterShadesOfGray(DepthImageFrame depthFrame, short[] pixelData)
{
    const int loThreshold = 1220;
    const int hiThreshold = 3048;

    short[] enhPixelData = new short[depthFrame.Width * depthFrame.Height];

    for (int i = 0; i < pixelData.Length; i++)
    {
        // Strip the player-index bits to recover the depth value.
        int depth = pixelData[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        enhPixelData[i] = (depth < loThreshold || depth > hiThreshold)
            ? (short)0xFF
            : (short)~pixelData[i];
    }

    EnhancedDepthImage.Source = BitmapSource.Create(depthFrame.Width, depthFrame.Height, 96, 96,
        PixelFormats.Gray16, null, enhPixelData, depthFrame.Width * depthFrame.BytesPerPixel);
}
/*
 * Event which triggers when all frames are ready from the kinect
 * (will operate at the speed of the slowest camera)
 **/
private void sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            // Render only the depth slice selected by the min/max sliders;
            // a narrower range shows only objects within that band.
            depthFrame.depthSlice(sliceBitmap, (int)minSlider.Value, (int)maxSlider.Value);
        }

        if (colorFrame == null)
        {
            return;
        }

        // Copy this frame's raw pixel data.
        byte[] colorPixels = new byte[colorFrame.PixelDataLength];
        colorFrame.CopyPixelDataTo(colorPixels);

        // Bgr32 => 4 bytes per pixel per row.
        int stride = colorFrame.Width * 4;

        // Show the frame in the colorDisplay image element.
        colorDisplay.Source = BitmapSource.Create(
            colorFrame.Width, colorFrame.Height, 96, 96,
            PixelFormats.Bgr32, null, colorPixels, stride);
    }
}
/// <summary>
/// Converts the Z (depth) component of a 2-D Float4 array into an 8-bit
/// grayscale bitmap: 0 maps to black, maxDepth maps to white, values are
/// scaled linearly in between and clamped outside that range.
/// </summary>
private BitmapSource Vector4ArrayToBitmapSource(Float4[,] source, float maxDepth)
{
    int width = source.GetLength(0), height = source.GetLength(1);
    byte[] target = new byte[width * height];
    unsafe
    {
        // Pin both arrays so they can be walked with raw pointers in lockstep.
        fixed(Float4 *sSource = &source[0, 0])
        fixed(byte *sTarget = &target[0])
        {
            Float4 *pSource = sSource;
            byte * pTarget = sTarget, eTarget = sTarget + width * height;
            float depth;
            while (pTarget < eTarget)
            {
                depth = pSource->Z;
                if (depth > maxDepth)
                {
                    *pTarget = 255; // beyond the far plane: saturate to white
                }
                else if (depth < 0)
                {
                    *pTarget = 0; // negative/invalid depth: black
                }
                else
                {
                    *pTarget = (byte)(depth * 255 / maxDepth); // linear scale into 0..255
                }
                pTarget++;
                pSource++;
            }
        }
    }
    // Gray8 with stride == width (one byte per pixel).
    return(BitmapSource.Create(width, height, 96d, 96d, PixelFormats.Gray8, BitmapPalettes.Gray256, target, width));
}
/// <summary>
/// Converts a GDI+ <see cref="Icon"/> to a WPF BitmapSource. When
/// <paramref name="small"/> is true the reported DPI is doubled, which halves
/// the rendered size in device-independent units.
/// </summary>
public static BitmapSource ToBitmapSource(this Icon icon, bool small = false)
{
    using (var bitmap = icon.ToBitmap())
    {
        var bitmapData = bitmap.LockBits
                         (
            new Rectangle(0, 0, bitmap.Width, bitmap.Height),
            ImageLockMode.ReadOnly,
            bitmap.PixelFormat
                         );

        try
        {
            var mediaPixelFormat = bitmap.PixelFormat.ToMediaPixelFormat();
            return BitmapSource.Create
                   (
                bitmapData.Width,
                bitmapData.Height,
                small ? bitmap.HorizontalResolution * 2 : bitmap.HorizontalResolution,
                small ? bitmap.VerticalResolution * 2 : bitmap.VerticalResolution,
                mediaPixelFormat,
                null,
                bitmapData.Scan0,
                bitmapData.Stride * bitmapData.Height,
                bitmapData.Stride
                   );
        }
        finally
        {
            // FIX: unlock the bitmap even when the format conversion or Create throws.
            bitmap.UnlockBits(bitmapData);
        }
    }
}
// Kinect color-frame handler: copies the frame's Bgr32 pixels into a
// BitmapSource and displays it in mostrarVideo.
void miKinect_ColorFrameReady(object sender, ColorImageFrameReadyEventArgs e)
{
    using (ColorImageFrame frameImagen = e.OpenColorImageFrame())
    {
        // Skip dropped frames.
        if (frameImagen == null)
        {
            return;
        }

        // Pull this frame's raw pixel data.
        byte[] datosColor = new byte[frameImagen.PixelDataLength];
        frameImagen.CopyPixelDataTo(datosColor);

        int stride = frameImagen.Width * frameImagen.BytesPerPixel;
        mostrarVideo.Source = BitmapSource.Create(
            frameImagen.Width,
            frameImagen.Height,
            96, 96,
            PixelFormats.Bgr32,
            null,
            datosColor,
            stride);
    }
}
/// <summary>
/// Loads an image file eagerly and normalizes it to 96 DPI so WPF renders it
/// at its true pixel size. Returns the loaded bitmap unchanged when it is
/// already (approximately) 96 DPI.
/// </summary>
private static BitmapSource GetImage(string fileName)
{
    var bitmap = new BitmapImage();
    bitmap.BeginInit();
    bitmap.CacheOption = BitmapCacheOption.OnLoad; // load fully, release the file
    bitmap.CreateOptions = BitmapCreateOptions.None;
    bitmap.UriSource = new Uri(fileName, UriKind.Absolute);
    bitmap.EndInit();
    bitmap.Freeze();

    if (Math.Abs(bitmap.DpiX - 96.0) < 0.1 && Math.Abs(bitmap.DpiY - 96.0) < 0.1)
    {
        return(bitmap);
    }

    // BUG FIX: the stride was hard-coded to 4 bytes per pixel, which corrupts
    // or crashes on non-32bpp images (e.g. Rgb24, Indexed8). Derive the byte
    // stride from the actual pixel format instead.
    int stride = (bitmap.PixelWidth * bitmap.Format.BitsPerPixel + 7) / 8;
    var pixels = new byte[stride * bitmap.PixelHeight];
    bitmap.CopyPixels(pixels, stride, 0);

    return(BitmapSource.Create(bitmap.PixelWidth, bitmap.PixelHeight, 96.0, 96.0, bitmap.Format, bitmap.Palette, pixels, stride));
}
// Copies a video frame's Rgb24 pixels into a managed buffer on the UI thread
// and displays it in the given Image control (300 DPI, as before).
private void UploadImage(Image img, VideoFrame frame)
{
    Dispatcher.Invoke(new Action(() =>
    {
        // Ignore empty frames.
        if (frame.Width == 0)
        {
            return;
        }

        var buffer = new byte[frame.Stride * frame.Height];
        frame.CopyTo(buffer);

        ImageSource source = BitmapSource.Create(frame.Width, frame.Height, 300, 300,
            PixelFormats.Rgb24, null, buffer, frame.Stride);
        img.Source = source;
    }));
}
// Loads the box-art thumbnail for the given label/database into picItem.
// Falls back to a 128x128 solid light-gray placeholder (1-bit indexed bitmap)
// when the file is missing and no custom art is in use.
private void loadingImage(string label, string database)
{
    string pathImage = Directory.GetCurrentDirectory() + "/thumbnails/" + database + "/Named_Boxarts/" + label + ".png";
    // NOTE(review): BeginInit/EndInit are called on picItem - presumably it
    // implements ISupportInitialize; confirm the pairing is actually required here.
    picItem.BeginInit();
    if (picItem.Source != null)
    {
        // Drop the previous thumbnail before loading a new one.
        picItem.Source = null;
    }
    if (Controller.isCustom == false)
    {
        if (File.Exists(pathImage))
        {
            picItem.Source = FileManagers.FileManager.LoadImage(pathImage);
        }
        else
        {
            // Placeholder: Indexed1 is 1 bit per pixel, so stride is width / 8
            // bytes; a zeroed buffer selects palette entry 0 (LightGray) everywhere.
            int width = 128;
            int height = width;
            int stride = width / 8;
            byte[] pixels = new byte[height * stride];
            picItem.Source = BitmapSource.Create(
                width, height, 96, 96, PixelFormats.Indexed1,
                new BitmapPalette(new List <Color>() { Colors.LightGray }), pixels, stride);
        }
    }
    picItem.EndInit();
}
/// <summary>
/// GrabAsync. It is for continuous grabbing: fires a software trigger, grabs a
/// frame asynchronously, demosaics it into a 24bpp RGB bitmap, and wraps it in
/// a CogImage8Grey.
/// </summary>
/// <returns>The grabbed frame as a CogImage8Grey.</returns>
public CogImage8Grey CamGrabAsync()
{
    // On unstable networks with frequent lost packets this approach can do more harm than good.
    //status = virtualFG40.IsOpenDevice(hDevice, ref pFlag);
    status = virtualFG40.SetCmdReg(hDevice, VirtualFG40Library.MCAM_TRIGGER_SOFTWARE);
    if (status != 0)
    {
        throw new Exception();
    }

    // The software trigger above is useless when trigger mode is OFF.
    status = virtualFG40.GrabImageAsync(hDevice, pImage, (UInt32)bufferSize, 0xFFFFFFFF);
    if (status != 0)
    {
        throw new Exception();
    }

    /*
     * With trigger mode OFF, Async pulls straight from memory, so the previous
     * image is delivered once. If you are not using trigger mode, do not use
     * Async with Grab Once.
     */
    // Demosaic the Bayer (GR) buffer into a 24bpp RGB bitmap via the driver.
    Bitmap bitmap = new Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format24bppRgb);
    var bitmapData = bitmap.LockBits(new System.Drawing.Rectangle(0, 0, width, height), System.Drawing.Imaging.ImageLockMode.ReadOnly, bitmap.PixelFormat);
    virtualFG40.CvtColor(pImage, bitmapData.Scan0, bitmap.Width, bitmap.Height, VirtualFG40Library.CV_BayerGR2RGB);
    // NOTE(review): bitmapSource is never used after creation - looks like dead
    // work; confirm it can be removed.
    var bitmapSource = BitmapSource.Create(width, height, bitmap.HorizontalResolution, bitmap.VerticalResolution, PixelFormats.Bgr24, null, bitmapData.Scan0, bitmapData.Stride * height, bitmapData.Stride);
    bitmap.UnlockBits(bitmapData);

    CogImage8Grey cogImage = new CogImage8Grey(bitmap);
    return(cogImage);
}
// Maps a float field to a Bgra32 bitmap: values are normalized between the
// lower/upper thresholds, run through the color palette, and written with a
// vertical flip so row 0 of the data appears at the bottom of the image.
private ImageSource DataToBitmap(float[] data, int2 dims)
{
    float lower = (float)ThresholdLower;
    float range = (float)(ThresholdUpper - ThresholdLower);
    if (range == 0)
    {
        range = 1e-5f; // avoid division by zero when both thresholds coincide
    }

    // Fall back to a black-to-white ramp when no palette is configured.
    ColorScale palette = ColorScale == null
        ? new ColorScale(new[] { new float4(0, 0, 0, 1), new float4(1, 1, 1, 1) })
        : ColorScale;

    byte[] pixels = new byte[data.Length * 4];
    for (int y = 0; y < dims.Y; y++)
    {
        int targetRow = dims.Y - 1 - y; // vertical flip
        for (int x = 0; x < dims.X; x++)
        {
            float normalized = (data[y * dims.X + x] - lower) / range;
            float4 color = palette.GetColor(normalized) * 255;

            int offset = (targetRow * dims.X + x) * 4;
            pixels[offset + 0] = (byte)color.Z; // B
            pixels[offset + 1] = (byte)color.Y; // G
            pixels[offset + 2] = (byte)color.X; // R
            pixels[offset + 3] = (byte)color.W; // A
        }
    }

    ImageSource result = BitmapSource.Create(dims.X, dims.Y, 96, 96, PixelFormats.Bgra32, null, pixels, dims.X * 4);
    result.Freeze();
    return(result);
}
// Converts a Kinect color frame into a Bgr32 BitmapSource for display.
private ImageSource ToBitmap(ColorFrame frame)
{
    int width = frame.FrameDescription.Width;
    int height = frame.FrameDescription.Height;
    PixelFormat format = PixelFormats.Bgr32; //why bgra? it results in faster performance

    int bytesPerPixel = (format.BitsPerPixel + 7) / 8;
    byte[] pixels = new byte[width * height * bytesPerPixel];

    // The sensor delivers Bgra natively; any other raw format is converted on copy.
    if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
    {
        frame.CopyRawFrameDataToArray(pixels);
    }
    else
    {
        frame.CopyConvertedFrameDataToArray(pixels, ColorImageFormat.Bgra);
    }

    int stride = width * format.BitsPerPixel / 8;
    return(BitmapSource.Create(width, height, 96, 96, format, null, pixels, stride));
}
// Converts a RealSense color frame to a WPF bitmap and shows it in ImageColor.
private void UpdateColorImage(PXCMImage colorFrame)
{
    // Acquire read access to the frame data
    PXCMImage.ImageData data;
    PXCMImage.ImageInfo info = colorFrame.QueryInfo();
    Width = info.width;
    Height = info.height;

    pxcmStatus ret = colorFrame.AcquireAccess(PXCMImage.Access.ACCESS_READ, PXCMImage.PixelFormat.PIXEL_FORMAT_RGB24, out data);
    if (ret < pxcmStatus.PXCM_STATUS_NO_ERROR)
    {
        return; // could not map the frame - skip this update
    }

    // Convert to a bitmap (3 bytes per pixel).
    // NOTE(review): the frame is requested as PIXEL_FORMAT_RGB24 but displayed as
    // Bgr24 - confirm the SDK's RGB24 is actually B,G,R order, otherwise red and
    // blue are swapped.
    var buffer = data.ToByteArray(0, info.width * info.height * 3);
    ImageColor.Source = BitmapSource.Create(info.width, info.height, 96, 96, PixelFormats.Bgr24, null, buffer, info.width * 3);

    // Release the frame data
    colorFrame.ReleaseAccess(data);
}