Beispiel #1
0
 /// <summary>
 /// Copies the frame of <paramref name="src"/> into <paramref name="dst"/>:
 /// a straight array copy when dimensions match, otherwise a scaling blit.
 /// </summary>
 public static unsafe void Copy(IVideoProvider src, IVideoProvider dst)
 {
     // Cache the buffers once: the original called GetVideoBuffer() three
     // times on the fast path, and implementations may do non-trivial work.
     int[] srcBuf = src.GetVideoBuffer();
     int[] dstBuf = dst.GetVideoBuffer();

     if (src.BufferWidth == dst.BufferWidth && src.BufferHeight == dst.BufferHeight)
     {
         // Same dimensions: no rescale needed.
         Array.Copy(srcBuf, dstBuf, srcBuf.Length);
     }
     else
     {
         // Differing dimensions: scale through the unsafe BMP blitter.
         fixed(int *srcp = srcBuf, dstp = dstBuf)
         {
             Blit_Any_NoFlip(new BMP
             {
                 Data   = srcp,
                 Width  = src.BufferWidth,
                 Height = src.BufferHeight
             },
                             new BMP
             {
                 Data   = dstp,
                 Width  = dst.BufferWidth,
                 Height = dst.BufferHeight
             });
         }
     }
 }
Beispiel #2
0
 /// <summary>
 /// Restores a framebuffer from a savestate stream: tries the quick-BMP
 /// format first, then falls back to raw sequential int32 pixels.
 /// </summary>
 private static void PopulateFramebuffer(BinaryReader br, IVideoProvider videoProvider, IQuickBmpFile quickBmpFile)
 {
     try
     {
         // Fast path: framebuffer stored as a quick BMP.
         using (new SimpleTime("Load Framebuffer"))
         {
             quickBmpFile.Load(videoProvider, br.BaseStream);
         }
     }
     catch
     {
         // Legacy fallback: pixels were written as consecutive int32 values.
         var pixels = videoProvider.GetVideoBuffer();
         try
         {
             int i = 0;
             while (i < pixels.Length)
             {
                 pixels[i++] = br.ReadInt32();
             }
         }
         catch (EndOfStreamException)
         {
             // Truncated data: keep whatever pixels were successfully read.
         }
     }
 }
Beispiel #3
0
        /// <summary>
        /// Pushes one frame from <paramref name="source"/> into the ffmpeg muxer,
        /// renegotiating dimensions first if the core's buffer size changed.
        /// </summary>
        public void AddFrame(IVideoProvider source)
        {
            bool sizeChanged = source.BufferWidth != width || source.BufferHeight != height;
            if (sizeChanged)
            {
                SetVideoParameters(source.BufferWidth, source.BufferHeight);
            }

            if (ffmpeg.HasExited)
            {
                throw new Exception("unexpected ffmpeg death:\n" + ffmpeg_geterror());
            }

            int[] video = source.GetVideoBuffer();

            try
            {
                muxer.WriteVideoFrame(video);
            }
            catch
            {
                // Surface ffmpeg's log before propagating the failure.
                System.Windows.Forms.MessageBox.Show("Exception! ffmpeg history:\n" + ffmpeg_geterror());
                throw;
            }

            // have to do binary write!
            //ffmpeg.StandardInput.BaseStream.Write(b, 0, b.Length);
        }
        /// <summary>
        /// Writes the current frame as a PNG named after the current frame number.
        /// </summary>
        public void AddFrame(IVideoProvider source)
        {
            // Wrap the frame pixels, then save "<frame path>.png".
            using var bb = new BitmapBuffer(source.BufferWidth, source.BufferHeight, source.GetVideoBuffer());
            var path = GetAndCreatePathForFrameNum(_mCurrFrame) + ".png";
            bb.ToSysdrawingBitmap().Save(path, ImageFormat.Png);
        }
 /// <summary>
 /// Writes one frame to the current segment, reconfiguring the writer
 /// first if the incoming frame size differs from the active parameters.
 /// </summary>
 public void AddFrame(IVideoProvider source)
 {
     bool resized = source.BufferHeight != height || source.BufferWidth != width;
     if (resized)
     {
         SetVideoParameters(source.BufferWidth, source.BufferHeight);
     }

     current.WriteVideoFrame(source.GetVideoBuffer());
 }
Beispiel #6
0
 /// <summary>
 /// Snapshots all presentation state of <paramref name="c"/> into this copy.
 /// </summary>
 public VideoCopy(IVideoProvider c)
 {
     BufferWidth     = c.BufferWidth;
     BufferHeight    = c.BufferHeight;
     VirtualWidth    = c.VirtualWidth;
     VirtualHeight   = c.VirtualHeight;
     BackgroundColor = c.BackgroundColor;

     // Clone so this snapshot stays valid after the core rewrites its buffer.
     vb = (int[])c.GetVideoBuffer().Clone();
 }
Beispiel #7
0
		/// <summary>
		/// Saves the current frame as a PNG named after the current frame number.
		/// </summary>
		public void AddFrame(IVideoProvider source)
		{
			// Wrap the frame pixels, then persist "<frame path>.png".
			using (var bb = new BitmapBuffer(source.BufferWidth, source.BufferHeight, source.GetVideoBuffer()))
			{
				var path = GetAndCreatePathForFrameNum(mCurrFrame) + ".png";
				bb.ToSysdrawingBitmap().Save(path, System.Drawing.Imaging.ImageFormat.Png);
			}
		}
Beispiel #8
0
        /// <summary>
        /// Sets the frame buffer to the given frame buffer
        /// Note: This sets the value returned by <see cref="IVideoProvider.GetVideoBuffer" />
        /// which relies on the core to send a reference to the frame buffer instead of a copy,
        /// in order to work
        /// </summary>
        /// <param name="videoProvider">provider whose live buffer is overwritten in place</param>
        /// <param name="frameBuffer">pixel data to copy from</param>
        public static void PopulateFromBuffer(this IVideoProvider videoProvider, int[] frameBuffer)
        {
            var dest = videoProvider.GetVideoBuffer();

            // Array.Copy replaces the original element-by-element loop; it is the
            // idiomatic (and vectorized) way to copy the overlapping prefix.
            Array.Copy(frameBuffer, dest, Math.Min(frameBuffer.Length, dest.Length));
        }
 /// <summary>
 /// Snapshots all presentation and timing state of <paramref name="c"/>.
 /// </summary>
 public VideoCopy(IVideoProvider c)
 {
     BufferWidth      = c.BufferWidth;
     BufferHeight     = c.BufferHeight;
     VirtualWidth     = c.VirtualWidth;
     VirtualHeight    = c.VirtualHeight;
     BackgroundColor  = c.BackgroundColor;
     VsyncNumerator   = c.VsyncNumerator;
     VsyncDenominator = c.VsyncDenominator;

     // Deep-copy the pixels so the snapshot survives the next emulated frame.
     _vb = (int[])c.GetVideoBuffer().Clone();
 }
Beispiel #10
0
        /// <summary>
        /// Loads an uncompressed 32bpp BMP (as produced by Save) from
        /// <paramref name="s"/> into the provider's buffer, scaling via Blit.
        /// Returns false if the headers don't match the expected format.
        /// </summary>
        public static unsafe bool Load(IVideoProvider v, Stream s)
        {
            var bf = BITMAPFILEHEADER.FromStream(s);
            var bi = BITMAPINFOHEADER.FromStream(s);

            // Only accept 'BM' magic, single-plane, 32bpp, uncompressed bitmaps
            // whose offset field agrees with what Save() writes.
            if (bf.bfType != 0x4d42 ||
                bf.bfOffBits != bf.bfSize + bi.biSize ||
                bi.biPlanes != 1 ||
                bi.biBitCount != 32 ||
                bi.biCompression != BitmapCompressionMode.BI_RGB)
            {
                return(false);
            }

            int inW = bi.biWidth;
            int inH = bi.biHeight;

            // BUGFIX: Stream.Read may return fewer bytes than requested (e.g. on
            // network or compressed streams); loop until the payload is complete.
            byte[] src  = new byte[inW * inH * 4];
            int    read = 0;
            while (read < src.Length)
            {
                int n = s.Read(src, read, src.Length - read);
                if (n <= 0)
                {
                    break; // truncated input; blit whatever arrived
                }
                read += n;
            }

            if (v is LoadedBMP l)
            {
                // A LoadedBMP is resizable: adopt the file's dimensions directly.
                l.BufferWidth  = inW;
                l.BufferHeight = inH;
                l.VideoBuffer  = new int[inW * inH];
            }

            int[] dst = v.GetVideoBuffer();

            fixed(byte *srcp = src)
            fixed(int *dstp = dst)
            {
                using (new SimpleTime("Blit"))
                {
                    // Scale/flip the file pixels into the provider's buffer.
                    Blit(new BMP
                    {
                        Data   = (int *)srcp,
                        Width  = inW,
                        Height = inH
                    },
                         new BMP
                    {
                        Data   = dstp,
                        Width  = v.BufferWidth,
                        Height = v.BufferHeight,
                    });
                }
            }

            return(true);
        }
Beispiel #11
0
		/// <summary>
		/// Saves the current frame as "&lt;basename&gt;_&lt;frame&gt;&lt;ext&gt;"
		/// next to BaseName; PNG or JPEG is chosen by BaseName's extension.
		/// </summary>
		public void AddFrame(IVideoProvider source)
		{
			string ext = Path.GetExtension(BaseName);
			string name = Path.GetFileNameWithoutExtension(BaseName) + "_" + Frame.ToString();
			name += ext;
			name = Path.Combine(Path.GetDirectoryName(BaseName), name);

			// BUGFIX: dispose the BitmapBuffer too (it was previously leaked), and
			// compare extensions with OrdinalIgnoreCase instead of culture-sensitive
			// ToUpper() (which breaks under e.g. the Turkish locale).
			using (var bb = new Bizware.BizwareGL.BitmapBuffer(source.BufferWidth, source.BufferHeight, source.GetVideoBuffer()))
			using (var bmp = bb.ToSysdrawingBitmap())
			{
				if (ext.Equals(".png", StringComparison.OrdinalIgnoreCase))
					bmp.Save(name, System.Drawing.Imaging.ImageFormat.Png);
				else if (ext.Equals(".jpg", StringComparison.OrdinalIgnoreCase))
					bmp.Save(name, System.Drawing.Imaging.ImageFormat.Jpeg);
			}
			Frame++;
		}
Beispiel #12
0
 /// <summary>
 /// Snapshots the provider's frame into a byte buffer, converting each
 /// xRGB int pixel to R,G,B,0 byte order.
 /// </summary>
 public VideoCopy(IVideoProvider c)
 {
     int[] vb = c.GetVideoBuffer();
     VideoBuffer = new byte[vb.Length * sizeof(int)];

     // we have to switch RGB ordering here
     int o = 0;
     foreach (int px in vb)
     {
         VideoBuffer[o++] = (byte)(px >> 16);
         VideoBuffer[o++] = (byte)(px >> 8);
         VideoBuffer[o++] = (byte)(px & 255);
         VideoBuffer[o++] = 0;
     }

     //Buffer.BlockCopy(vb, 0, VideoBuffer, 0, VideoBuffer.Length);
     BufferWidth  = c.BufferWidth;
     BufferHeight = c.BufferHeight;
 }
Beispiel #13
0
        /// <summary>
        /// Writes the provider's current frame to <paramref name="s"/> as an
        /// uncompressed 32bpp BMP of size <paramref name="w"/> x <paramref name="h"/>,
        /// scaling from the provider's native buffer size via Blit.
        /// </summary>
        public static unsafe void Save(IVideoProvider v, Stream s, int w, int h)
        {
            var bf = new BITMAPFILEHEADER();
            var bi = new BITMAPINFOHEADER();

            // 'BM' magic. NOTE(review): bfOffBits is derived from bfSize + biSize,
            // which is exactly what Load() validates against — presumably these
            // fields hold header sizes here, not the usual whole-file size; confirm
            // against the struct FromStream/GetBytes implementations.
            bf.bfType    = 0x4d42;
            bf.bfOffBits = bf.bfSize + bi.biSize;

            bi.biPlanes      = 1;
            bi.biBitCount    = 32;          // xrgb
            bi.biCompression = BitmapCompressionMode.BI_RGB;
            bi.biSizeImage   = (uint)(w * h * 4);
            bi.biWidth       = w;
            bi.biHeight      = h;

            byte[] bfb = GetBytes(bf);
            byte[] bib = GetBytes(bi);

            // Emit both headers, then the pixel payload.
            s.Write(bfb, 0, bfb.Length);
            s.Write(bib, 0, bib.Length);

            int[]  src = v.GetVideoBuffer();
            byte[] dst = new byte[4 * w * h];

            fixed(int *srcp = src)
            fixed(byte *dstp = dst)
            {
                using (new SimpleTime("Blit"))
                {
                    // Scale from the provider's native size to the requested w x h.
                    Blit(new BMP
                    {
                        Data   = srcp,
                        Width  = v.BufferWidth,
                        Height = v.BufferHeight
                    },
                         new BMP
                    {
                        Data   = (int *)dstp,
                        Width  = w,
                        Height = h,
                    });
                }
            }

            s.Write(dst, 0, dst.Length);
        }
Beispiel #14
0
        /// <summary>
        /// Appends one frame to the animated GIF, honoring the configured
        /// frameskip; the frame is quantized to 255 colors and its GIF-encoded
        /// bytes are spliced into the output at fixed offsets.
        /// </summary>
        public void AddFrame(IVideoProvider source)
        {
            // Frameskip gate: emit one frame, then skip token.frameskip frames.
            if (skipindex == token.frameskip)
            {
                skipindex = 0;
            }
            else
            {
                skipindex++;
                return;                 // skip this frame
            }



            using (var bmp = new Bitmap(source.BufferWidth, source.BufferHeight, System.Drawing.Imaging.PixelFormat.Format32bppArgb))
            {
                // Copy the provider's int pixels straight into the bitmap bits.
                var data = bmp.LockBits(new Rectangle(0, 0, bmp.Width, bmp.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                System.Runtime.InteropServices.Marshal.Copy(source.GetVideoBuffer(), 0, data.Scan0, bmp.Width * bmp.Height);
                bmp.UnlockBits(data);

                // Quantize to a 255-color palette so the frame fits GIF's 8bpp limit.
                using (var qBmp = new OctreeQuantizer(255, 8).Quantize(bmp))
                {
                    MemoryStream ms = new MemoryStream();
                    qBmp.Save(ms, System.Drawing.Imaging.ImageFormat.Gif);
                    byte[] b = ms.GetBuffer();
                    // NOTE(review): offsets 10/785/786/798 and the 13/768/781/799
                    // splice boundaries assume the exact GIF layout GDI+ emits
                    // (13-byte header, 768-byte palette, graphics-control block);
                    // verify if the encoder ever changes.
                    if (!firstdone)
                    {
                        firstdone = true;
                        b[10]     = (byte)(b[10] & 0x78);                     // no global color table
                        f.Write(b, 0, 13);
                        f.Write(GifAnimation, 0, GifAnimation.Length);
                    }
                    b[785] = Delay[0];
                    b[786] = Delay[1];
                    b[798] = (byte)(b[798] | 0x87);
                    f.Write(b, 781, 18);
                    f.Write(b, 13, 768);
                    f.Write(b, 799, (int)(ms.Length - 800));

                    // Remember the trailer byte; presumably rewritten on finalize.
                    lastbyte = b[ms.Length - 1];
                }
            }
        }
Beispiel #15
0
        /// <summary>
        /// Feeds one frame to the ffmpeg muxer, first renegotiating dimensions
        /// if the source buffer size changed.
        /// </summary>
        public void AddFrame(IVideoProvider source)
        {
            bool resized = source.BufferWidth != width || source.BufferHeight != height;
            if (resized)
            {
                SetVideoParameters(source.BufferWidth, source.BufferHeight);
            }

            if (ffmpeg.HasExited)
            {
                throw new Exception("unexpected ffmpeg death:\n" + ffmpeg_geterror());
            }

            int[] video = source.GetVideoBuffer();
            try
            {
                muxer.WriteVideoFrame(video);
            }
            catch
            {
                // Show ffmpeg's log so the user can see why the write failed.
                System.Windows.Forms.MessageBox.Show("Exception! ffmpeg history:\n" + ffmpeg_geterror());
                throw;
            }

            // have to do binary write!
            //ffmpeg.StandardInput.BaseStream.Write(b, 0, b.Length);
        }
Beispiel #16
0
        /// <summary>
        /// Appends one quantized frame to the animated GIF output, honoring the
        /// configured frameskip; GIF-encoded bytes are spliced at fixed offsets.
        /// </summary>
        public void AddFrame(IVideoProvider source)
        {
            // Frameskip gate: emit one frame, then skip token.frameskip frames.
            if (skipindex == token.frameskip)
                skipindex = 0;
            else
            {
                skipindex++;
                return; // skip this frame
            }

            using (var bmp = new Bitmap(source.BufferWidth, source.BufferHeight, System.Drawing.Imaging.PixelFormat.Format32bppArgb))
            {
                // Copy the provider's int pixels straight into the bitmap bits.
                var data = bmp.LockBits(new Rectangle(0, 0, bmp.Width, bmp.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                System.Runtime.InteropServices.Marshal.Copy(source.GetVideoBuffer(), 0, data.Scan0, bmp.Width * bmp.Height);
                bmp.UnlockBits(data);

                // Quantize to a 255-color palette so the frame fits GIF's 8bpp limit.
                using (var qBmp = new OctreeQuantizer(255, 8).Quantize(bmp))
                {
                    MemoryStream ms = new MemoryStream();
                    qBmp.Save(ms, System.Drawing.Imaging.ImageFormat.Gif);
                    byte[] b = ms.GetBuffer();
                    // NOTE(review): the offsets below (10, 785, 786, 798) and the
                    // splice boundaries (13, 768, 781, 799) assume the exact GIF
                    // layout GDI+ emits; verify if the encoder ever changes.
                    if (!firstdone)
                    {
                        firstdone = true;
                        b[10] = (byte)(b[10] & 0x78); // no global color table
                        f.Write(b, 0, 13);
                        f.Write(GifAnimation, 0, GifAnimation.Length);
                    }
                    b[785] = Delay[0];
                    b[786] = Delay[1];
                    b[798] = (byte)(b[798] | 0x87);
                    f.Write(b, 781, 18);
                    f.Write(b, 13, 768);
                    f.Write(b, 799, (int)(ms.Length - 800));

                    // Remember the trailer byte; presumably rewritten on finalize.
                    lastbyte = b[ms.Length - 1];
                }
            }
        }
        /// <summary>
        /// Builds (and, unless simulating, runs) the display filter chain for
        /// the job's video provider, returning the compiled FilterProgram.
        /// </summary>
        FilterProgram UpdateSourceInternal(JobInfo job)
        {
            _glManager.Activate(CR_GraphicsControl);

            IVideoProvider videoProvider = job.videoProvider;
            bool           simulate      = job.simulate;
            Size           chain_outsize = job.chain_outsize;

            // Virtual (aspect-corrected) size starts as the raw buffer size and
            // may be overridden by the aspect-ratio configuration below.
            int vw = videoProvider.BufferWidth;
            int vh = videoProvider.BufferHeight;

            if (Global.Config.DispFixAspectRatio)
            {
                if (Global.Config.DispManagerAR == Config.EDispManagerAR.System)
                {
                    vw = videoProvider.VirtualWidth;
                    vh = videoProvider.VirtualHeight;
                }
                if (Global.Config.DispManagerAR == Config.EDispManagerAR.Custom)
                {
                    vw = Global.Config.DispCustomUserARWidth;
                    vh = Global.Config.DispCustomUserARHeight;
                }
            }

            int[] videoBuffer = videoProvider.GetVideoBuffer();

            int  bufferWidth   = videoProvider.BufferWidth;
            int  bufferHeight  = videoProvider.BufferHeight;
            // Convention: a 1-element buffer means the core handed us a GL texture id.
            bool isGlTextureId = videoBuffer.Length == 1;

            //TODO - need to do some work here for GDI+ to repair gl texture ID importing
            BitmapBuffer bb           = null;
            Texture2d    videoTexture = null;

            if (!simulate)
            {
                if (isGlTextureId)
                {
                    videoTexture = GL.WrapGLTexture2d(new IntPtr(videoBuffer[0]), bufferWidth, bufferHeight);
                }
                else
                {
                    //wrap the videoprovider data in a BitmapBuffer (no point to refactoring that many IVideoProviders)
                    bb = new BitmapBuffer(bufferWidth, bufferHeight, videoBuffer);

                    //now, acquire the data sent from the videoProvider into a texture
                    videoTexture = VideoTextureFrugalizer.Get(bb);
                    GL.SetTextureWrapMode(videoTexture, true);
                }

                //TEST (to be removed once we have an actual example of bring in a texture ID from opengl emu core):
                //if (!isGlTextureId)
                //{
                //  videoBuffer = new int[1] { videoTexture.Id.ToInt32() };
                //  goto TESTEROO;
                //}
            }

            //record the size of what we received, since lua and stuff is gonna want to draw onto it
            currEmuWidth  = bufferWidth;
            currEmuHeight = bufferHeight;

            //build the default filter chain and set it up with services filters will need
            Size chain_insize = new Size(bufferWidth, bufferHeight);

            var filterProgram = BuildDefaultChain(chain_insize, chain_outsize, job.includeOSD);

            filterProgram.GuiRenderer = Renderer;
            filterProgram.GL          = GL;

            //setup the source image filter
            BizHawk.Client.EmuHawk.Filters.SourceImage fInput = filterProgram["input"] as BizHawk.Client.EmuHawk.Filters.SourceImage;
            fInput.Texture = videoTexture;

            //setup the final presentation filter
            BizHawk.Client.EmuHawk.Filters.FinalPresentation fPresent = filterProgram["presentation"] as BizHawk.Client.EmuHawk.Filters.FinalPresentation;
            fPresent.VirtualTextureSize = new Size(vw, vh);
            fPresent.TextureSize        = new Size(bufferWidth, bufferHeight);
            fPresent.BackgroundColor    = videoProvider.BackgroundColor;
            fPresent.GuiRenderer        = Renderer;
            fPresent.GL = GL;

            filterProgram.Compile("default", chain_insize, chain_outsize, !job.offscreen);

            if (simulate)
            {
            }
            else
            {
                // Only commit the program and draw when not in simulate mode.
                CurrentFilterProgram = filterProgram;
                UpdateSourceDrawingWork(job);
            }

            //cleanup:
            if (bb != null)
            {
                bb.Dispose();
            }

            return(filterProgram);
        }
Beispiel #18
0
        /// <summary>
        /// Builds (and, unless simulating, runs) the display filter chain for
        /// the job's video provider, returning the compiled FilterProgram, or
        /// null for a degenerate 0-sized viewport.
        /// </summary>
        private FilterProgram UpdateSourceInternal(JobInfo job)
        {
            //no drawing actually happens. it's important not to begin drawing on a control
            if (!job.Simulate && !job.Offscreen)
            {
                GLManager.Activate(CR_GraphicsControl);

                if (job.ChainOutsize.Width == 0 || job.ChainOutsize.Height == 0)
                {
                    // this has to be a NOP, because lots of stuff will malfunction on a 0-sized viewport
                    if (_currentFilterProgram != null)
                    {
                        UpdateSourceDrawingWork(job);                         //but we still need to do this, because of vsync
                    }

                    return(null);
                }
            }

            IVideoProvider videoProvider = job.VideoProvider;
            bool           simulate      = job.Simulate;
            Size           chainOutsize  = job.ChainOutsize;

            //simulate = true;

            int[] videoBuffer            = videoProvider.GetVideoBuffer();
            int   bufferWidth            = videoProvider.BufferWidth;
            int   bufferHeight           = videoProvider.BufferHeight;
            int   presenterTextureWidth  = bufferWidth;
            int   presenterTextureHeight = bufferHeight;
            // Convention: a 1-element buffer means the core handed us a GL texture id.
            bool  isGlTextureId          = videoBuffer.Length == 1;

            int vw = videoProvider.VirtualWidth;
            int vh = videoProvider.VirtualHeight;

            //TODO: it is bad that this is happening outside the filter chain
            //the filter chain has the ability to add padding...
            //for now, we have to have some hacks. this could be improved by refactoring the filter setup hacks to be in one place only though
            //could the PADDING be done as filters too? that would be nice.
            var fCoreScreenControl = CreateCoreScreenControl();

            if (fCoreScreenControl != null)
            {
                // A core screen-control filter (e.g. NDS dual screen) resizes the input.
                var sz = fCoreScreenControl.PresizeInput("default", new Size(bufferWidth, bufferHeight));
                presenterTextureWidth  = vw = sz.Width;
                presenterTextureHeight = vh = sz.Height;
            }

            if (Global.Config.DispFixAspectRatio)
            {
                if (Global.Config.DispManagerAR == EDispManagerAR.System)
                {
                    //Already set
                }
                if (Global.Config.DispManagerAR == EDispManagerAR.Custom)
                {
                    //not clear what any of these other options mean for "screen controlled" systems
                    vw = Global.Config.DispCustomUserARWidth;
                    vh = Global.Config.DispCustomUserARHeight;
                }
                if (Global.Config.DispManagerAR == EDispManagerAR.CustomRatio)
                {
                    //not clear what any of these other options mean for "screen controlled" systems
                    FixRatio(Global.Config.DispCustomUserArx, Global.Config.DispCustomUserAry, videoProvider.BufferWidth, videoProvider.BufferHeight, out vw, out vh);
                }
            }

            // Extra client-area padding grows the virtual size.
            var padding = CalculateCompleteContentPadding(true, false);

            vw += padding.Horizontal;
            vh += padding.Vertical;

            BitmapBuffer bb           = null;
            Texture2d    videoTexture = null;

            if (!simulate)
            {
                if (isGlTextureId)
                {
                    //FYI: this is a million years from happening on n64, since it's all geriatric non-FBO code
                    //is it workable for saturn?
                    videoTexture = GL.WrapGLTexture2d(new IntPtr(videoBuffer[0]), bufferWidth, bufferHeight);
                }
                else
                {
                    //wrap the VideoProvider data in a BitmapBuffer (no point to refactoring that many IVideoProviders)
                    bb = new BitmapBuffer(bufferWidth, bufferHeight, videoBuffer);
                    bb.DiscardAlpha();

                    //now, acquire the data sent from the videoProvider into a texture
                    videoTexture = VideoTextureFrugalizer.Get(bb);

                    // lets not use this. lets define BizwareGL to make clamp by default (TBD: check opengl)
                    //GL.SetTextureWrapMode(videoTexture, true);
                }
            }

            // record the size of what we received, since lua and stuff is gonna want to draw onto it
            currEmuWidth  = bufferWidth;
            currEmuHeight = bufferHeight;

            //build the default filter chain and set it up with services filters will need
            Size chainInsize = new Size(bufferWidth, bufferHeight);

            var filterProgram = BuildDefaultChain(chainInsize, chainOutsize, job.IncludeOSD, job.IncludeUserFilters);

            filterProgram.GuiRenderer = Renderer;
            filterProgram.GL          = GL;

            //setup the source image filter
            Filters.SourceImage fInput = filterProgram["input"] as Filters.SourceImage;
            fInput.Texture = videoTexture;

            //setup the final presentation filter
            Filters.FinalPresentation fPresent = filterProgram["presentation"] as Filters.FinalPresentation;
            fPresent.VirtualTextureSize = new Size(vw, vh);
            fPresent.TextureSize        = new Size(presenterTextureWidth, presenterTextureHeight);
            fPresent.BackgroundColor    = videoProvider.BackgroundColor;
            fPresent.GuiRenderer        = Renderer;
            // Wrapped GL textures are upside-down relative to CPU buffers.
            fPresent.Flip = isGlTextureId;
            fPresent.Config_FixAspectRatio  = Global.Config.DispFixAspectRatio;
            fPresent.Config_FixScaleInteger = Global.Config.DispFixScaleInteger;
            fPresent.Padding      = ClientExtraPadding;
            fPresent.AutoPrescale = Global.Config.DispAutoPrescale;

            fPresent.GL = GL;

            //POOPY. why are we delivering the GL context this way? such bad
            Filters.ScreenControlNDS fNDS = filterProgram["CoreScreenControl"] as Filters.ScreenControlNDS;
            if (fNDS != null)
            {
                fNDS.GuiRenderer = Renderer;
                fNDS.GL          = GL;
            }

            filterProgram.Compile("default", chainInsize, chainOutsize, !job.Offscreen);

            if (simulate)
            {
            }
            else
            {
                // Only commit the program and draw when not in simulate mode.
                _currentFilterProgram = filterProgram;
                UpdateSourceDrawingWork(job);
            }

            // cleanup:
            bb?.Dispose();

            return(filterProgram);
        }
Beispiel #19
0
		/// <summary>
		/// Writes one frame, reconfiguring the writer first when the incoming
		/// frame size differs from the active parameters.
		/// </summary>
		public void AddFrame(IVideoProvider source)
		{
			bool resized = source.BufferHeight != height || source.BufferWidth != width;
			if (resized)
			{
				SetVideoParameters(source.BufferWidth, source.BufferHeight);
			}

			current.WriteVideoFrame(source.GetVideoBuffer());
		}
Beispiel #20
0
			/// <summary>
			/// Snapshots the provider's frame into a byte buffer, converting each
			/// xRGB int pixel to R,G,B,0 byte order.
			/// </summary>
			public VideoCopy(IVideoProvider c)
			{
				int[] vb = c.GetVideoBuffer();
				VideoBuffer = new byte[vb.Length * sizeof(int)];

				// we have to switch RGB ordering here
				int o = 0;
				foreach (int px in vb)
				{
					VideoBuffer[o++] = (byte)(px >> 16);
					VideoBuffer[o++] = (byte)(px >> 8);
					VideoBuffer[o++] = (byte)(px & 255);
					VideoBuffer[o++] = 0;
				}

				//Buffer.BlockCopy(vb, 0, VideoBuffer, 0, VideoBuffer.Length);
				BufferWidth = c.BufferWidth;
				BufferHeight = c.BufferHeight;
			}
Beispiel #21
0
        /// <summary>
        /// Builds (and, unless simulating, runs) the display filter chain for
        /// the job's video provider, returning the compiled FilterProgram, or
        /// null for a degenerate 0-sized viewport.
        /// </summary>
        FilterProgram UpdateSourceInternal(JobInfo job)
        {
            if (job.chain_outsize.Width == 0 || job.chain_outsize.Height == 0)
            {
                //this has to be a NOP, because lots of stuff will malfunction on a 0-sized viewport
                return(null);
            }

            //no drawing actually happens. it's important not to begin drawing on a control
            if (!job.simulate && !job.offscreen)
            {
                GLManager.Activate(CR_GraphicsControl);
            }

            IVideoProvider videoProvider = job.videoProvider;
            bool           simulate      = job.simulate;
            Size           chain_outsize = job.chain_outsize;

            //simulate = true;

            // Virtual (aspect-corrected) size starts as the raw buffer size and
            // may be overridden by the aspect-ratio configuration below.
            int vw = videoProvider.BufferWidth;
            int vh = videoProvider.BufferHeight;

            if (Global.Config.DispFixAspectRatio)
            {
                if (Global.Config.DispManagerAR == Config.EDispManagerAR.System)
                {
                    vw = videoProvider.VirtualWidth;
                    vh = videoProvider.VirtualHeight;
                }
                if (Global.Config.DispManagerAR == Config.EDispManagerAR.Custom)
                {
                    vw = Global.Config.DispCustomUserARWidth;
                    vh = Global.Config.DispCustomUserARHeight;
                }
                if (Global.Config.DispManagerAR == Config.EDispManagerAR.CustomRatio)
                {
                    FixRatio(Global.Config.DispCustomUserARX, Global.Config.DispCustomUserARY, videoProvider.BufferWidth, videoProvider.BufferHeight, out vw, out vh);
                }
            }

            // Extra client-area padding grows the virtual size.
            var padding = CalculateCompleteContentPadding(true, false);

            vw += padding.Horizontal;
            vh += padding.Vertical;

            int[] videoBuffer = videoProvider.GetVideoBuffer();

            int  bufferWidth   = videoProvider.BufferWidth;
            int  bufferHeight  = videoProvider.BufferHeight;
            // Convention: a 1-element buffer means the core handed us a GL texture id.
            bool isGlTextureId = videoBuffer.Length == 1;

            BitmapBuffer bb           = null;
            Texture2d    videoTexture = null;

            if (!simulate)
            {
                if (isGlTextureId)
                {
                    //FYI: this is a million years from happening on n64, since it's all geriatric non-FBO code
                    //is it workable for saturn?
                    videoTexture = GL.WrapGLTexture2d(new IntPtr(videoBuffer[0]), bufferWidth, bufferHeight);
                }
                else
                {
                    //wrap the videoprovider data in a BitmapBuffer (no point to refactoring that many IVideoProviders)
                    bb = new BitmapBuffer(bufferWidth, bufferHeight, videoBuffer);
                    bb.DiscardAlpha();

                    //now, acquire the data sent from the videoProvider into a texture
                    videoTexture = VideoTextureFrugalizer.Get(bb);

                    //lets not use this. lets define BizwareGL to make clamp by default (TBD: check opengl)
                    //GL.SetTextureWrapMode(videoTexture, true);
                }
            }

            //record the size of what we received, since lua and stuff is gonna want to draw onto it
            currEmuWidth  = bufferWidth;
            currEmuHeight = bufferHeight;

            //build the default filter chain and set it up with services filters will need
            Size chain_insize = new Size(bufferWidth, bufferHeight);

            var filterProgram = BuildDefaultChain(chain_insize, chain_outsize, job.includeOSD);

            filterProgram.GuiRenderer = Renderer;
            filterProgram.GL          = GL;

            //setup the source image filter
            EmuHawk.Filters.SourceImage fInput = filterProgram["input"] as EmuHawk.Filters.SourceImage;
            fInput.Texture = videoTexture;

            //setup the final presentation filter
            EmuHawk.Filters.FinalPresentation fPresent = filterProgram["presentation"] as EmuHawk.Filters.FinalPresentation;
            fPresent.VirtualTextureSize = new Size(vw, vh);
            fPresent.TextureSize        = new Size(bufferWidth, bufferHeight);
            fPresent.BackgroundColor    = videoProvider.BackgroundColor;
            fPresent.GuiRenderer        = Renderer;
            // Wrapped GL textures are upside-down relative to CPU buffers.
            fPresent.Flip = isGlTextureId;
            fPresent.Config_FixAspectRatio  = Global.Config.DispFixAspectRatio;
            fPresent.Config_FixScaleInteger = Global.Config.DispFixScaleInteger;
            fPresent.Padding      = ClientExtraPadding;
            fPresent.AutoPrescale = Global.Config.DispAutoPrescale;

            fPresent.GL = GL;

            filterProgram.Compile("default", chain_insize, chain_outsize, !job.offscreen);

            if (simulate)
            {
            }
            else
            {
                // Only commit the program and draw when not in simulate mode.
                CurrentFilterProgram = filterProgram;
                UpdateSourceDrawingWork(job);
            }

            //cleanup:
            if (bb != null)
            {
                bb.Dispose();
            }

            return(filterProgram);
        }
Beispiel #22
0
            /// <summary>
            /// Converts the provider's xRGB frame to bottom-up BGR-ordered bytes
            /// (24bpp or 32bpp) in a static scratch buffer and writes it to the
            /// compressed AVI stream as a keyframe.
            /// </summary>
            public unsafe void AddFrame(IVideoProvider source)
            {
                // AVI stream parameters were fixed at start; a size change is fatal.
                if (parameters.width != source.BufferWidth ||
                    parameters.height != source.BufferHeight)
                {
                    throw new InvalidOperationException("video buffer changed between start and now");
                }

                int pitch_add = parameters.pitch_add;

                int todo = parameters.pitch * parameters.height;
                int w    = source.BufferWidth;
                int h    = source.BufferHeight;

                if (!bit32)
                {
                    IntPtr buf = GetStaticGlobalBuf(todo);

                    //TODO - would using a byte* be faster?
                    int[] buffer = source.GetVideoBuffer();
                    fixed(int *buffer_ptr = &buffer[0])
                    {
                        byte *bytes_ptr = (byte *)buf.ToPointer();
                        {
                            byte *bp = bytes_ptr;

                            // Walk rows bottom-up (idx starts at the last row and the
                            // "idx -= w * 2" below steps it back one row per pass),
                            // emitting 3 bytes per pixel plus row padding.
                            for (int idx = w * h - w, y = 0; y < h; y++)
                            {
                                for (int x = 0; x < w; x++, idx++)
                                {
                                    int r    = (buffer[idx] >> 0) & 0xFF;
                                    int g    = (buffer[idx] >> 8) & 0xFF;
                                    int b    = (buffer[idx] >> 16) & 0xFF;
                                    *   bp++ = (byte)r;
                                    *   bp++ = (byte)g;
                                    *   bp++ = (byte)b;
                                }
                                idx -= w * 2;
                                bp  += pitch_add;
                            }

                            int bytes_written;
                            int ret = Win32.AVIStreamWrite(pAviCompressedVideoStream, outStatus.video_frames, 1, new IntPtr(bytes_ptr), todo, Win32.AVIIF_KEYFRAME, IntPtr.Zero, out bytes_written);
                            outStatus.video_bytes += bytes_written;
                            outStatus.video_frames++;
                        }
                    }
                }
                else                 // 32 bit
                {
                    IntPtr buf    = GetStaticGlobalBuf(todo * 4);
                    int[]  buffer = source.GetVideoBuffer();
                    fixed(int *buffer_ptr = &buffer[0])
                    {
                        byte *bytes_ptr = (byte *)buf.ToPointer();
                        {
                            byte *bp = bytes_ptr;

                            // Same bottom-up walk, 4 bytes per pixel.
                            // NOTE(review): unlike the 24bpp branch there is no
                            // "bp += pitch_add" here, and AVIStreamWrite is passed
                            // "todo * 3" despite a todo*4-sized buffer — both look
                            // suspicious; confirm against how parameters.pitch is
                            // computed for the 32bpp case.
                            for (int idx = w * h - w, y = 0; y < h; y++)
                            {
                                for (int x = 0; x < w; x++, idx++)
                                {
                                    int r    = (buffer[idx] >> 0) & 0xFF;
                                    int g    = (buffer[idx] >> 8) & 0xFF;
                                    int b    = (buffer[idx] >> 16) & 0xFF;
                                    *   bp++ = (byte)r;
                                    *   bp++ = (byte)g;
                                    *   bp++ = (byte)b;
                                    *   bp++ = 0;
                                }
                                idx -= w * 2;
                            }

                            int bytes_written;
                            int ret = Win32.AVIStreamWrite(pAviCompressedVideoStream, outStatus.video_frames, 1, new IntPtr(bytes_ptr), todo * 3, Win32.AVIIF_KEYFRAME, IntPtr.Zero, out bytes_written);
                            outStatus.video_bytes += bytes_written;
                            outStatus.video_frames++;
                        }
                    }
                }
            }
Beispiel #23
0
        /// <summary>
        /// Writes a complete savestate to <paramref name="filename"/> as a zip of lumps:
        /// version info, core state (text or binary per <paramref name="config"/>), an
        /// optional framebuffer screenshot, and movie/user-data/lag-log lumps when present.
        /// </summary>
        public void Create(string filename, SaveStateConfig config)
        {
            // the old method of text savestate save is now gone.
            // a text savestate is just like a binary savestate, but with a different core lump
            using var saver = new ZipStateSaver(filename, config.CompressionLevelNormal);
            saver.PutVersionLumps();

            using (new SimpleTime("Save Core"))
            {
                // Text and binary states differ only in the lump name and serializer used.
                if (config.Type == SaveStateType.Text)
                {
                    saver.PutLump(BinaryStateLump.CorestateText, tw => _statable.SaveStateText(tw));
                }
                else
                {
                    saver.PutLump(BinaryStateLump.Corestate, bw => _statable.SaveStateBinary(bw));
                }
            }

            if (config.SaveScreenshot && _videoProvider != null)
            {
                var videoBuffer = _videoProvider.GetVideoBuffer();
                if (videoBuffer.Length == 1)
                {
                    // is a hacky opengl texture ID. can't handle this now!
                    // need to discuss options
                    // 1. cores must be able to provide a pixels VideoProvider in addition to a texture ID, on command (not very hard overall but interface changing and work per core)
                    // 2. SavestateManager must be setup with a mechanism for resolving texture IDs (even less work, but sloppy)
                    // There are additional problems with AVWriting. They depend on VideoProvider providing pixels.
                }
                else
                {
                    var outWidth  = _videoProvider.BufferWidth;
                    var outHeight = _videoProvider.BufferHeight;

                    // if buffer is too big, scale down screenshot
                    if (!config.NoLowResLargeScreenshots && videoBuffer.Length >= config.BigScreenshotSize)
                    {
                        outWidth  /= 2;
                        outHeight /= 2;
                    }

                    using (new SimpleTime("Save Framebuffer"))
                    {
                        saver.PutLump(BinaryStateLump.Framebuffer, s => _quickBmpFile.Save(_videoProvider, s, outWidth, outHeight));
                    }
                }
            }

            if (_movieSession.Movie.IsActive())
            {
                saver.PutLump(BinaryStateLump.Input, (TextWriter tw) =>
                {
                    // this never should have been a core's responsibility
                    tw.WriteLine("Frame {0}", _emulator.Frame);
                    _movieSession.HandleSaveState(tw);
                });
            }

            if (_userBag.Any())
            {
                saver.PutLump(BinaryStateLump.UserData, (TextWriter tw) =>
                {
                    var data = ConfigService.SaveWithType(_userBag);
                    tw.WriteLine(data);
                });
            }

            // Lag log only applies to TAS movies.
            if (_movieSession.Movie.IsActive() && _movieSession.Movie is ITasMovie tasMovie)
            {
                saver.PutLump(BinaryStateLump.LagLog, (TextWriter tw) => tasMovie.LagLog.Save(tw));
            }
        }
        /// <summary>
        /// Saves the current frame to an image file named "&lt;base&gt;_&lt;frame&gt;&lt;ext&gt;"
        /// next to <c>_baseName</c>. Only .png and .jpg extensions are written; any other
        /// extension silently produces no file. Increments <c>_frame</c> either way.
        /// </summary>
        public void AddFrame(IVideoProvider source)
        {
            string       ext  = Path.GetExtension(_baseName) ?? "";
            var          name = Path.Combine(Path.GetDirectoryName(_baseName) ?? "", $"{Path.GetFileNameWithoutExtension(_baseName)}_{_frame}{ext}");
            // NOTE(review): bb is not disposed here (the sibling PNG dumper disposes its
            // BitmapBuffer); left as-is in case bmp shares the buffer's memory — confirm.
            BitmapBuffer bb   = new BitmapBuffer(source.BufferWidth, source.BufferHeight, source.GetVideoBuffer());

            using var bmp = bb.ToSysdrawingBitmap();
            // Ordinal, case-insensitive compare instead of ToUpper(): ToUpper() is
            // culture-sensitive and mis-compares under e.g. the Turkish 'i'/'İ' mapping.
            if (ext.Equals(".png", StringComparison.OrdinalIgnoreCase))
            {
                bmp.Save(name, ImageFormat.Png);
            }
            else if (ext.Equals(".jpg", StringComparison.OrdinalIgnoreCase))
            {
                bmp.Save(name, ImageFormat.Jpeg);
            }

            _frame++;
        }
Beispiel #25
0
			/// <summary>
			/// Snapshots the given provider: deep-copies its pixel buffer and captures its
			/// dimension/color properties, so later changes to the source don't affect this copy.
			/// </summary>
			public VideoCopy(IVideoProvider c)
			{
				// Deep copy of the framebuffer — an int[] is a value array, so an
				// element-wise copy is equivalent to Clone() here.
				var pixels = c.GetVideoBuffer();
				vb = new int[pixels.Length];
				Array.Copy(pixels, vb, pixels.Length);
				BufferWidth = c.BufferWidth;
				BufferHeight = c.BufferHeight;
				BackgroundColor = c.BackgroundColor;
				VirtualWidth = c.VirtualWidth;
				VirtualHeight = c.VirtualHeight;
			}
Beispiel #26
0
			/// <summary>
			/// Writes one video frame from <paramref name="source"/> to the compressed AVI
			/// stream, repacking the int-per-pixel buffer into 24bpp or 32bpp bytes
			/// (bottom-up, DIB row order) in a reusable native buffer.
			/// </summary>
			/// <param name="source">Frame source; dimensions must match those captured at open.</param>
			/// <exception cref="InvalidOperationException">If the buffer dimensions changed since the writer was opened.</exception>
			public unsafe void AddFrame(IVideoProvider source)
			{
				if (parameters.width != source.BufferWidth
					|| parameters.height != source.BufferHeight)
					throw new InvalidOperationException("video buffer changed between start and now");

				// Pixel count for one frame.
				int todo = source.BufferHeight * source.BufferWidth;
				int w = source.BufferWidth;
				int h = source.BufferHeight;

				if (!bit32)
				{
					// 24bpp: 3 bytes per pixel.
					// NOTE(review): rows are not padded to dword alignment here (no pitch_add
					// as in the pitch-based writer variant); this is only correct when w*3 is
					// already a multiple of 4 — confirm against the stream's format setup.
					IntPtr buf = GetStaticGlobalBuf(todo * 3);

					int[] buffer = source.GetVideoBuffer();
					fixed (int* buffer_ptr = &buffer[0])
					{
						byte* bytes_ptr = (byte*)buf.ToPointer();
						{
							byte* bp = bytes_ptr;

							// DIBs are bottom-up: start at the first pixel of the LAST row,
							// scan each row forward, then step idx back two rows.
							for (int idx = w * h - w, y = 0; y < h; y++)
							{
								for (int x = 0; x < w; x++, idx++)
								{
									// Emit the packed int low byte first.
									// NOTE(review): names r/b assume 0x00BBGGRR pixels; if the
									// provider yields 0x00RRGGBB they are swapped — confirm.
									int r = (buffer[idx] >> 0) & 0xFF;
									int g = (buffer[idx] >> 8) & 0xFF;
									int b = (buffer[idx] >> 16) & 0xFF;
									*bp++ = (byte)r;
									*bp++ = (byte)g;
									*bp++ = (byte)b;
								}
								idx -= w * 2;
							}

							int bytes_written;
							int ret = Win32.AVIStreamWrite(pAviCompressedVideoStream, outStatus.video_frames, 1, new IntPtr(bytes_ptr), todo * 3, Win32.AVIIF_KEYFRAME, IntPtr.Zero, out bytes_written);
							outStatus.video_bytes += bytes_written;
							outStatus.video_frames++;
						}
					}
				}
				else // 32 bit
				{
					// 32bpp: 4 bytes per pixel, inherently dword-aligned rows.
					IntPtr buf = GetStaticGlobalBuf(todo * 4);
					int[] buffer = source.GetVideoBuffer();
					fixed (int* buffer_ptr = &buffer[0])
					{
						byte* bytes_ptr = (byte*)buf.ToPointer();
						{
							byte* bp = bytes_ptr;

							// Same bottom-up traversal as the 24bpp path, X channel = 0.
							for (int idx = w * h - w, y = 0; y < h; y++)
							{
								for (int x = 0; x < w; x++, idx++)
								{
									int r = (buffer[idx] >> 0) & 0xFF;
									int g = (buffer[idx] >> 8) & 0xFF;
									int b = (buffer[idx] >> 16) & 0xFF;
									*bp++ = (byte)r;
									*bp++ = (byte)g;
									*bp++ = (byte)b;
									*bp++ = 0;
								}
								idx -= w * 2;
							}

							int bytes_written;
							// FIX: the loop above packs 4 bytes per pixel (todo * 4 total),
							// but this write previously passed todo * 3, truncating the
							// bottom quarter of every 32bpp frame. The length must match
							// the bytes actually packed into the buffer.
							int ret = Win32.AVIStreamWrite(pAviCompressedVideoStream, outStatus.video_frames, 1, new IntPtr(bytes_ptr), todo * 4, Win32.AVIIF_KEYFRAME, IntPtr.Zero, out bytes_written);
							outStatus.video_bytes += bytes_written;
							outStatus.video_frames++;
						}
					}
				}
			}
Beispiel #27
0
        /// <summary>
        /// Saves the current frame to an image file named "&lt;base&gt;_&lt;frame&gt;&lt;ext&gt;"
        /// next to <c>BaseName</c>. Only .png and .jpg extensions are written; any other
        /// extension silently produces no file. Increments <c>Frame</c> either way.
        /// </summary>
        public void AddFrame(IVideoProvider source)
        {
            // ?? "" guards match the newer writer variant and avoid ArgumentNullException
            // from Path.Combine / NullReferenceException when BaseName lacks those parts.
            string ext  = Path.GetExtension(BaseName) ?? "";
            string name = Path.GetFileNameWithoutExtension(BaseName) + "_" + Frame.ToString();

            name += ext;
            name  = Path.Combine(Path.GetDirectoryName(BaseName) ?? "", name);
            BizHawk.Bizware.BizwareGL.BitmapBuffer bb = new Bizware.BizwareGL.BitmapBuffer(source.BufferWidth, source.BufferHeight, source.GetVideoBuffer());
            using (var bmp = bb.ToSysdrawingBitmap())
            {
                // Ordinal, case-insensitive compare instead of ToUpper(): ToUpper() is
                // culture-sensitive and mis-compares under e.g. the Turkish 'i'/'İ' mapping.
                if (ext.Equals(".png", StringComparison.OrdinalIgnoreCase))
                {
                    bmp.Save(name, System.Drawing.Imaging.ImageFormat.Png);
                }
                else if (ext.Equals(".jpg", StringComparison.OrdinalIgnoreCase))
                {
                    bmp.Save(name, System.Drawing.Imaging.ImageFormat.Jpeg);
                }
            }
            Frame++;
        }
Beispiel #28
0
 /// <summary>
 /// Dumps the current frame as a PNG at the per-frame path derived from
 /// <c>mCurrFrame</c> (directories created as needed by the path helper).
 /// </summary>
 public void AddFrame(IVideoProvider source)
 {
     using (var bb = new BitmapBuffer(source.BufferWidth, source.BufferHeight, source.GetVideoBuffer()))
     {
         string subpath = GetAndCreatePathForFrameNum(mCurrFrame);
         string path    = subpath + ".png";
         // FIX: ToSysdrawingBitmap() returns a GDI+ Bitmap that was previously never
         // disposed, leaking a native handle per frame; dispose it after saving.
         using (var bmp = bb.ToSysdrawingBitmap())
         {
             bmp.Save(path, System.Drawing.Imaging.ImageFormat.Png);
         }
     }
 }