// konoa modified.
/// <summary>
/// Captures the device's mono back buffer and decodes it into a GDI+ bitmap.
/// </summary>
/// <returns>A Bitmap copy of the current back buffer; the caller owns it.</returns>
public Bitmap GetBitmap()
{
    using (Surface backbuffer = device.GetBackBuffer(0, 0, BackBufferType.Mono))
    using (GraphicsStream bmpStream = SurfaceLoader.SaveToStream(ImageFileFormat.Bmp, backbuffer))
    {
        // Decode the BMP-encoded stream before the using blocks release
        // the surface and stream.
        return (Bitmap)Bitmap.FromStream(bmpStream);
    }
}
/// <summary>
/// Takes a screenshot of the device's mono back buffer in the requested format.
/// </summary>
/// <param name="format">Encoding used when saving the surface to the stream.</param>
/// <returns>The captured image; the caller owns and must dispose it.</returns>
public Image Screenshot(ImageFileFormat format)
{
    // Fix: the original leaked the GraphicsStream (never disposed) and leaked
    // the back-buffer surface whenever SaveToStream/FromStream threw.
    using (Surface backbuffer = device.GetBackBuffer(0, 0, BackBufferType.Mono))
    using (System.IO.Stream s = SurfaceLoader.SaveToStream(format, backbuffer))
    using (Image decoded = Image.FromStream(s))
    {
        // Copy into a standalone Bitmap: GDI+ requires the source stream to
        // stay open for the lifetime of an Image created via FromStream, so
        // the decoded image must not be returned directly once s is disposed.
        return new Bitmap(decoded);
    }
}
/// <summary>
/// Handles a new light preview: copies the preview texture into the side
/// image preview (pictureBox7) and releases the texture.
/// </summary>
/// <param name="sender">The preview Texture; ownership transfers here and it is disposed.</param>
/// <param name="e">Unused.</param>
void lightPreviewControl2_OnNewPreview(object sender, EventArgs e)
{
    // put texture onto side image preview
    Texture tex = (Texture)sender;
    Image img;
    // Fix: the surface from GetSurfaceLevel(0) and the GraphicsStream were
    // never disposed in the original, leaking a surface + stream per preview.
    using (Surface level0 = tex.GetSurfaceLevel(0))
    using (GraphicsStream stream = SurfaceLoader.SaveToStream(ImageFileFormat.Bmp, level0))
    using (Image decoded = Bitmap.FromStream(stream))
    {
        // Copy into a standalone Bitmap so the stream can be closed safely
        // (GDI+ keeps a reference to the stream behind FromStream images).
        img = new Bitmap(decoded);
    }
    // Dispose the image we are replacing, otherwise each preview leaks the
    // previous GDI+ bitmap held by the PictureBox.
    Image previous = pictureBox7.Image;
    pictureBox7.SizeMode = PictureBoxSizeMode.StretchImage;
    pictureBox7.Image = img;
    previous?.Dispose();
    tex.Dispose();
}
/// <summary>
/// Copies the device's mono back buffer into the cached system-memory
/// surface (bufferSurface) and decodes it into a Bitmap.
/// </summary>
/// <param name="device">Device whose back buffer is captured.</param>
/// <returns>A Bitmap with the back-buffer contents; the caller disposes it.</returns>
public Bitmap SaveToBitmap(Device device)
{
    using (Surface source = device.GetBackBuffer(0, 0, BackBufferType.Mono))
    {
        // Lazily create the reusable capture surface from the back buffer's
        // description the first time through.
        if (bufferSurface == null)
        {
            CreateBufferSurface(device, source.Description);
        }
        device.GetRenderTargetData(source, bufferSurface);
    }

    // Encode the captured surface as BMP, then decode it into a managed Bitmap.
    using (GraphicsStream encoded = SurfaceLoader.SaveToStream(ImageFileFormat.Bmp, bufferSurface))
    {
        return (Bitmap)Bitmap.FromStream(encoded);
    }
}
/// <summary>
/// "Use GPU" menu handler: renders the loaded height map through effect
/// pass 2 into an offscreen render target, shows the result in pictureBox2
/// and uploads it as texture txn.
/// </summary>
/// <param name="sender">Menu item (unused).</param>
/// <param name="e">Unused.</param>
private void использоватьGPUToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Nothing to do until a source image has been loaded into pictureBox1.
    if (pictureBox1.Image == null)
    {
        return;
    }
    // Create an offscreen target sized like the height-map bitmap and make it
    // the active render target, remembering the previous one to restore later.
    Surface newbb = dr.CreateRenderTarget(bmhm.Width, bmhm.Height, Format.A8R8G8B8, MultiSampleType.None, 0, true);
    Surface oldbb = dr.GetRenderTarget(0);
    dr.SetRenderTarget(0, newbb);
    dr.BeginScene();
    dr.Clear(ClearFlags.Target, Color.Black, 1f, 0); // clear the background
    // Fixed world/view/projection matrices: full-quad render, no camera.
    eff.SetValue("w", Matrix.Scaling(2.0f, 2.0f, 2.0f));
    eff.SetValue("v", Matrix.RotationX(0));
    eff.SetValue("p", Matrix.RotationX(0));
    eff.SetValue("txh", txh);   // height-map texture
    eff.SetValue("h", himax);   // NOTE(review): presumably the height scale — confirm against the .fx file
    eff.Begin(FX.None);
    eff.BeginPass(2);           // NOTE(review): pass 2 appears to be the GPU conversion pass — confirm
    dr.VertexFormat = CustomVertex.PositionNormalTextured.Format;
    dr.DrawUserPrimitives(PrimitiveType.TriangleFan, 2, dp);
    eff.EndPass();
    eff.End();
    dr.EndScene();
    // Release the previous preview image and capture stream before reuse.
    if (pictureBox2.Image != null)
    {
        pictureBox2.Image.Dispose();
    }
    if (gstr != null)
    {
        gstr.Dispose();
    }
    // Encode the rendered target; gstr is kept as a field because the Bitmap
    // decoded below keeps a reference to its source stream.
    gstr = SurfaceLoader.SaveToStream(ImageFileFormat.Bmp, newbb);
    if (bmnm != null)
    {
        bmnm.Dispose();
    }
    bmnm = (Bitmap)Image.FromStream(gstr);
    pictureBox2.Image = (Image)bmnm;
    hdv = (float)bmnm.Width / (float)bmnm.Height; // aspect ratio of the result
    if (txn != null)
    {
        txn.Dispose();
    }
    // Upload the result as a managed texture for later rendering.
    txn = Texture.FromBitmap(dr, bmnm, Usage.None, Pool.Managed);
    // Restore the original render target and release the temporary surfaces.
    dr.SetRenderTarget(0, oldbb);
    newbb.Dispose();
    oldbb.Dispose();
}
/// <summary>
/// Grabs the next frame of video obtained
/// from the VMR9 and return it as an RGB image
/// </summary>
/// <returns>Returns null on failure or a Bitmap object</returns>
public Bitmap GetCurrentImage()
{
    try
    {
        //Log.Debug("GetCurrentImage called");
        lock (grabNotifier)
        {
            // Request a frame grab and wait (bounded) for the grabber thread
            // to signal completion.
            grabSucceeded = false;
            grabSample = true;
            if (!Monitor.Wait(grabNotifier, 500))
            {
                Log.Debug("FrameGrabber: Timed-out waiting for grabbed frame!");
                return null;
            }
            if (!grabSucceeded)
            {
                Log.Debug("FrameGrabber: Frame grab failed");
                return null;
            }
            using (GraphicsStream frameStream = SurfaceLoader.SaveToStream(ImageFileFormat.Bmp, rgbSurface))
            {
                // Copy into a new Bitmap so the result does not keep a
                // reference to the stream.
                Bitmap grabbed = new Bitmap(Image.FromStream(frameStream));
                // IMPORTANT: Closes and disposes the stream
                // If this is not done we get a memory leak!
                frameStream.Close();
                return grabbed;
            }
        }
    }
    catch (Exception e) // Can occur for example if the video device is lost
    {
        Log.Debug(e.ToString());
        return null;
    }
}
/// <summary>
/// Renders every frame in the selected range, captures the DirectX panel,
/// crops away the black background by scanning raw back-buffer pixels, and
/// appends each cropped frame (with offsets) to _library, saving at the end.
/// </summary>
private void ExportThread()
{
    //create timer so we can check how long it takes to create the whole file
    Stopwatch stopwatch = new Stopwatch();
    // Begin timing
    stopwatch.Start();
    //for (int i = 0; i < _refLibrary.Count; i++)
    for (int i = Convert.ToInt16(text_test_framemin.Text); i < Convert.ToInt16(text_test_framemax.Text); i++)
    {
        showRefImage = false; //turns off the ref image while we render the new library
        nud_frame.Value = i; //just setting this value and then calling Render() is enough to cycle through the frames
        backgroundColour = Color.Black; //set the background to black so the crop code can detect the blank areas and remove pure black pixels from the image, if set to any other colour the image will have a solid colour background and not become transparent
        UpdateTextBoxes(); //this will set the _selectedRefImage for the current frame
        Render();
        #region Save Snapshot of the Directx panel
        try
        {
            Surface backbuffer = device.GetBackBuffer(0, 0, BackBufferType.Mono);
            //SurfaceLoader.Save("Screenshot.bmp", ImageFileFormat.Bmp, backbuffer); //saves file (to test it is working)
            //if you continue to Render the DirectX panel while updating the GraphicsStream it causes the device to be lost and crashes
            //so I just call Render(); when I need to update the panel instead of using the infinite loop thread
            #region Graphics Lock
            GraphicsStream gs = backbuffer.LockRectangle(LockFlags.Discard);
            gs.Position = 0; //set start position
            int bytesPerPixel = 4;
            int currentPosition = 0;
            // NOTE(review): panel size is hard-coded to 250x250 here — confirm it matches the control.
            int heightInPixels = 250;
            int widthInPixels = 250;
            //Crop offsets
            int XMin = 249;
            int XMax = 0;
            int YMin = 249;
            int YMax = 0;
            for (int y = 0; y < heightInPixels; y++)
            {
                //couldn't use the Parallel.For loop as it would overwrite the GraphicsStream.Position because it was working on two or
                //more pixels at once causing a speckled effect as it misses pixels (well I think that is what was causing the errors I was having)
                //Parallel.For(0, heightInPixels, y =>
                //{
                //int currentLine = (y * ((widthInPixels * bytesPerPixel) + (4 *12))); //4*12 is how many pixels (12 for 500pixel image) was missing from each row (not sure why 12 but I guess the backbuffer extends beyond the screen)
                int currentLine = (y * (widthInPixels * bytesPerPixel) + (y * (4 * 6))); //4*6 is how many pixels (6 for 250pixel image) was missing from each row (not sure why 6 but I guess the backbuffer extends beyond the screen)
                for (int x = 0; x < widthInPixels; x++)
                {
                    byte[] bu = new byte[4]; //new byte to store current pixel data
                    currentPosition = currentLine + (x * bytesPerPixel); //calculate current position
                    gs.Position = currentPosition; //set pixel position in GraphicsStream
                    gs.Read(bu, 0, 4); //read image pixel data
                    //gets RGB values (BGRA byte order in the A8R8G8B8 back buffer)
                    int r = bu[2];
                    int g = bu[1];
                    int b = bu[0];
                    Color c = Color.FromArgb(r, g, b);
                    if (c.R != backgroundColour.R && c.G != backgroundColour.G && c.B != backgroundColour.B) //if not the same as backgroundColour set the min/max values
                    {
                        if (XMin > x) { XMin = x; }
                        if (XMax < x) { XMax = x; }
                        if (YMin > y) { YMin = y; }
                        if (YMax < y) { YMax = y; }
                    }
                    //if (y == YMin) //this is a way to show where the image is cropped
                    //{
                    //    bu[2] = 255;
                    //    bu[1] = 0;
                    //    bu[0] = 0;
                    //}
                    gs.Position = currentPosition; //sets the position back to the starting pixel
                    gs.Write(bu, 0, 4); //updates the GraphicsStream with the new changes
                }
                //}); //end of Parallel.For loop
            }
            //Shows the detected bounds of the image for testing
            //MessageBox.Show("XMin: " + XMin
            //    + Environment.NewLine + "XMax: " + XMax
            //    + Environment.NewLine + "YMin: " + YMin
            //    + Environment.NewLine + "YMax: " + YMax
            //    + Environment.NewLine + "Width: " + (XMax - XMin)
            //    + Environment.NewLine + "Height: " + (YMax - YMin));
            backbuffer.UnlockRectangle();
            gs.Dispose();
            #endregion
            // NOTE(review): the GraphicsStream returned by SaveToStream below is
            // never disposed — it leaks once per frame; consider a using block.
            Bitmap Preview = new Bitmap(SurfaceLoader.SaveToStream(ImageFileFormat.Bmp, backbuffer));
            #region crop
            //create target image and draw cropped part of the Preview image to the target image
            Bitmap target = new Bitmap((XMax - XMin) + 1, (YMax - YMin) + 1, PixelFormat.Format32bppArgb);
            using (Graphics g = Graphics.FromImage(target))
            {
                g.DrawImage(Preview, new RectangleF(0, 0, target.Width, target.Height), new RectangleF(XMin, YMin, (XMax - XMin) + 1, (YMax - YMin) + 1), GraphicsUnit.Pixel);
            }
            #endregion
            //Add Image and offsets to the _library
            _library.AddImage(target, (short)(XMin - characterGlobalOffsetX), (short)(YMin - characterGlobalOffsetY));
            //target.Save("Test1.PNG", ImageFormat.Png); //testing screen capture and crop image from directx works by saving to file
            target.Dispose();
            Preview.Dispose();
            backbuffer.Dispose();
        }
        catch (Direct3DXException ee) //Display error Messages with the DirectX code
        {
            MessageBox.Show("Message: " + ee.Message
                + Environment.NewLine + "ErrorString: " + ee.ErrorString
                + Environment.NewLine + "ErrorCode: " + ee.ErrorCode
                + Environment.NewLine + "StackTrace: " + ee.StackTrace );
        }
        #endregion
        toolStripProgressBar.Value = i + 1;
    }
    //save file as normal .lib (true = reference file, false = normal .lib)
    _library.Save(false);
    stopwatch.Stop();
    MessageBox.Show(string.Format("Time Taken: {0:n0} Seconds", stopwatch.Elapsed.TotalMilliseconds / 1000));
    // Restore UI state: progress bar, reference image and frame selector.
    toolStripProgressBar.Value = 0;
    showRefImage = true;
    nud_frame.Value = 0;
    Render();
}
/// <summary>
/// Grabs the next frame of video obtained
/// from the VMR9 and return it as an RGB image
/// </summary>
/// <returns>Returns null on failure or a Bitmap object</returns>
public Bitmap GetCurrentImage()
{
    try
    {
        //Log.Debug("GetCurrentImage called");
        // Branch 1: madVR without the D3D9 grabber — ask madVR for the
        // current frame bitmap directly.
        if (GUIGraphicsContext.VideoRenderer == GUIGraphicsContext.VideoRendererType.madVR &&
            GUIGraphicsContext.Vmr9Active && !FrameGrabberD3D9Enable)
        {
            lock (grabNotifier)
            {
                if (VMR9Util.g_vmr9 != null)
                {
                    VMR9Util.g_vmr9.MadVrGrabCurrentFrame();
                    try
                    {
                        // Release the previously grabbed frame before replacing it.
                        if (FrameResult != null)
                        {
                            FrameResult.SafeDispose();
                            FrameResult = null;
                        }
                        if (GUIGraphicsContext.madVRCurrentFrameBitmap != null)
                        {
                            // Copy so the caller does not share madVR's bitmap instance.
                            FrameResult = new Bitmap(GUIGraphicsContext.madVRCurrentFrameBitmap);
                            return(FrameResult);
                        }
                    }
                    catch
                    {
                        Log.Debug("FrameGrabber: Frame grab catch failed for madVR");
                        return(null);
                        // When Bitmap is not yet ready
                    }
                }
                //////// Part of code used for D3D9 setting in madVR
                //////lock (grabNotifier)
                //////{
                //////  grabSucceeded = false;
                //////  grabSample = true;
                //////  if (!Monitor.Wait(grabNotifier, 500))
                //////  {
                //////    Log.Debug("FrameGrabber: Timed-out waiting for grabbed frame!");
                //////    return null;
                //////  }
                //////  if (grabSucceeded)
                //////  {
                //////    try
                //////    {
                //////      if (FrameResult != null)
                //////      {
                //////        FrameResult.SafeDispose();
                //////        FrameResult = null;
                //////      }
                //////      if (GUIGraphicsContext.madVRFrameBitmap != null)
                //////      {
                //////        FrameResult = new Bitmap(GUIGraphicsContext.madVRFrameBitmap);
                //////        return FrameResult;
                //////      }
                //////    }
                //////    catch
                //////    {
                //////      Log.Debug("FrameGrabber: Frame grab catch failed for madVR");
                //////      return null;
                //////      // When Bitmap is not yet ready
                //////    }
                //////  }
                //////}
            }
            // Bitmap not ready return null
            Log.Debug("FrameGrabber: Frame grab failed for madVR");
            return(null);
        }
        // Branch 2: madVR with the D3D9 grabber enabled — read the back
        // buffer of the madVR D3D9 device.
        if (GUIGraphicsContext.VideoRenderer == GUIGraphicsContext.VideoRendererType.madVR &&
            GUIGraphicsContext.Vmr9Active)
        {
            Surface backbuffer = null;
            Bitmap b = null;
            try
            {
                backbuffer = GUIGraphicsContext.DX9DeviceMadVr.GetBackBuffer(0, 0, BackBufferType.Mono);
                using (var stream = SurfaceLoader.SaveToStream(ImageFileFormat.Bmp, backbuffer))
                {
                    b = new Bitmap(Image.FromStream(stream));
                    // IMPORTANT: Closes and disposes the stream
                    // If this is not done we get a memory leak!
                    stream.Close();
                    stream.Dispose();
                    backbuffer.Dispose();
                    return(b);
                }
            }
            catch (Exception)
            {
                // On failure release whatever was created; falls through to
                // the final "return(null)" at the bottom of the method.
                backbuffer?.Dispose();
                b?.Dispose();
                Log.Debug("FrameGrabber: Timed-out waiting for grabbed frame!");
            }
        }
        else
        {
            // Branch 3: non-madVR renderer — request a grab from the grabber
            // thread and wait (bounded) for the rgbSurface to be filled.
            lock (grabNotifier)
            {
                grabSucceeded = false;
                grabSample = true;
                if (!Monitor.Wait(grabNotifier, 500))
                {
                    Log.Debug("FrameGrabber: Timed-out waiting for grabbed frame!");
                    return(null);
                }
                if (grabSucceeded)
                {
                    using (GraphicsStream stream = SurfaceLoader.SaveToStream(ImageFileFormat.Bmp, rgbSurface))
                    {
                        Bitmap b = new Bitmap(Image.FromStream(stream));
                        // IMPORTANT: Closes and disposes the stream
                        // If this is not done we get a memory leak!
                        stream.Close();
                        return(b);
                    }
                }
                Log.Debug("FrameGrabber: Frame grab failed");
                return(null);
            }
        }
    }
    catch (Exception e) // Can occur for example if the video device is lost
    {
        Log.Debug(e.ToString());
        return(null);
    }
    // Not image grabbed
    return(null);
}
/// <summary>
/// Grabs the next frame of video obtained
/// from the VMR9 and return it as an RGB image
/// </summary>
/// <returns>Returns null on failure or a Bitmap object</returns>
public Bitmap GetCurrentImage()
{
    // Serialize whole-method access so concurrent callers cannot interleave
    // frame grabs.
    lock (grabFrame)
    {
        try
        {
            //Log.Debug("GetCurrentImage called");
            // madVR path: ask madVR for the current frame bitmap directly.
            if (GUIGraphicsContext.VideoRenderer == GUIGraphicsContext.VideoRendererType.madVR &&
                GUIGraphicsContext.Vmr9Active)
            {
                lock (grabNotifier)
                {
                    if (VMR9Util.g_vmr9?._syncRoot != null)
                    {
                        // Also hold the VMR9 sync root so the renderer cannot
                        // tear down while we copy the frame.
                        lock (VMR9Util.g_vmr9?._syncRoot)
                        {
                            if (VMR9Util.g_vmr9 != null && !VMR9Util.g_vmr9._exitThread)
                            {
                                try
                                {
                                    // Release the previously grabbed frame first.
                                    if (FrameResult != null)
                                    {
                                        FrameResult.SafeDispose();
                                        FrameResult = null;
                                    }
                                    // Grab frame
                                    //VMR9Util.g_vmr9.GrabCurrentFrame(); // Using C# WIP
                                    VMR9Util.g_vmr9.MadVrGrabCurrentFrame();
                                    if (GUIGraphicsContext.madVRCurrentFrameBitmap != null)
                                    {
#if DEBUG
                                        // Debug-only: build a screenshot path (the actual
                                        // save below is commented out).
                                        string directory = string.Format("{0}\\MediaPortal Screenshots\\{1:0000}-{2:00}-{3:00}",
                                            Environment.GetFolderPath(Environment.SpecialFolder.MyPictures),
                                            DateTime.Now.Year, DateTime.Now.Month, DateTime.Now.Day);
                                        if (!Directory.Exists(directory))
                                        {
                                            Log.Info("GetCurrentImage: Taking screenshot - Creating directory: {0}", directory);
                                            Directory.CreateDirectory(directory);
                                        }
                                        string fileName = string.Format("{0}\\madVR - {1:00}-{2:00}-{3:00}-{4:000}", directory,
                                            DateTime.Now.Hour, DateTime.Now.Minute, DateTime.Now.Second, DateTime.Now.Millisecond);
#endif
                                        // Copy so the caller does not share madVR's bitmap.
                                        FrameResult = new Bitmap(GUIGraphicsContext.madVRCurrentFrameBitmap);
#if DEBUG
                                        // Need to be commented out for saving screenshot frame
                                        //FrameResult.Save(fileName + ".jpg", ImageFormat.Jpeg);
#endif
                                        return(FrameResult);
                                    }
                                    // Bitmap not ready return null
                                    Log.Debug("FrameGrabber: Frame not ready for madVR");
                                    return(null);
                                }
                                catch
                                {
                                    Log.Debug("FrameGrabber: Frame grab catch failed for madVR");
                                    return(null);
                                    // When Bitmap is not yet ready
                                }
                            }
                        }
                    }
                    //////// Part of code used for D3D9 setting in madVR
                    //////lock (grabNotifier)
                    //////{
                    //////  grabSucceeded = false;
                    //////  grabSample = true;
                    //////  if (!Monitor.Wait(grabNotifier, 500))
                    //////  {
                    //////    Log.Debug("FrameGrabber: Timed-out waiting for grabbed frame!");
                    //////    return null;
                    //////  }
                    //////  if (grabSucceeded)
                    //////  {
                    //////    try
                    //////    {
                    //////      if (FrameResult != null)
                    //////      {
                    //////        FrameResult.SafeDispose();
                    //////        FrameResult = null;
                    //////      }
                    //////      if (GUIGraphicsContext.madVRFrameBitmap != null)
                    //////      {
                    //////        FrameResult = new Bitmap(GUIGraphicsContext.madVRFrameBitmap);
                    //////        return FrameResult;
                    //////      }
                    //////    }
                    //////    catch
                    //////    {
                    //////      Log.Debug("FrameGrabber: Frame grab catch failed for madVR");
                    //////      return null;
                    //////      // When Bitmap is not yet ready
                    //////    }
                    //////  }
                    //////}
                }
                // Bitmap not ready return null
                Log.Debug("FrameGrabber: Frame grab failed for madVR");
                return(null);
            }
            //// This code is used only for D3D9 so comment it for now
            //if (GUIGraphicsContext.VideoRenderer == GUIGraphicsContext.VideoRendererType.madVR &&
            //    GUIGraphicsContext.Vmr9Active && FrameGrabberD3D9Enable)
            //{
            //  Surface backbuffer = null;
            //  Bitmap b = null;
            //  try
            //  {
            //    backbuffer = GUIGraphicsContext.DX9DeviceMadVr.GetBackBuffer(0, 0, BackBufferType.Mono);
            //    using (var stream = SurfaceLoader.SaveToStream(ImageFileFormat.Bmp, backbuffer))
            //    {
            //      b = new Bitmap(Image.FromStream(stream));
            //      // IMPORTANT: Closes and disposes the stream
            //      // If this is not done we get a memory leak!
            //      stream.Close();
            //      stream.Dispose();
            //      backbuffer.Dispose();
            //      return b;
            //    }
            //  }
            //  catch (Exception)
            //  {
            //    backbuffer?.Dispose();
            //    b?.Dispose();
            //    Log.Debug("FrameGrabber: Timed-out waiting for grabbed frame!");
            //  }
            //}
            else if (GUIGraphicsContext.VideoRenderer != GUIGraphicsContext.VideoRendererType.madVR) // used for EVR
            {
                // EVR path: request a grab from the grabber thread and wait
                // (bounded) for rgbSurface to be filled.
                lock (grabNotifier)
                {
                    grabSucceeded = false;
                    grabSample = true;
                    if (!Monitor.Wait(grabNotifier, 500))
                    {
                        Log.Debug("FrameGrabber: Timed-out waiting for grabbed frame!");
                        return(null);
                    }
                    if (grabSucceeded)
                    {
                        using (GraphicsStream stream = SurfaceLoader.SaveToStream(ImageFileFormat.Bmp, rgbSurface))
                        {
                            Bitmap b = new Bitmap(Image.FromStream(stream));
                            // IMPORTANT: Closes and disposes the stream
                            // If this is not done we get a memory leak!
                            stream.Close();
                            return(b);
                        }
                    }
                    Log.Debug("FrameGrabber: Frame grab failed");
                    return(null);
                }
            }
            else
            {
                Log.Debug("FrameGrabber: Frame grab failed");
                return(null);
            }
        }
        catch (Exception e) // Can occur for example if the video device is lost
        {
            Log.Debug(e.ToString());
            return(null);
        }
        // Not image grabbed
        return(null);
    }
}