Example #1
        public void AddFrame(IVideoProvider source)
        {
            if (source.BufferWidth != width || source.BufferHeight != height)
            {
                SetVideoParameters(source.BufferWidth, source.BufferHeight);
            }

            if (ffmpeg.HasExited)
            {
                throw new Exception("unexpected ffmpeg death:\n" + ffmpeg_geterror());
            }

            var video = source.GetVideoBuffer();

            try
            {
                muxer.WriteVideoFrame(video);
            }
            catch
            {
                System.Windows.Forms.MessageBox.Show("Exception! ffmpeg history:\n" + ffmpeg_geterror());
                throw;
            }

            // have to do binary write!
            //ffmpeg.StandardInput.BaseStream.Write(b, 0, b.Length);
        }
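Most snippets on this page touch only a few IVideoProvider members. For reference, here is a minimal stand-in provider, sketched on the assumption that the interface exposes exactly the members these examples use (buffer size, virtual size, background color, vsync rate, and the buffer itself):

    // Minimal stand-in; the member set is inferred from the snippets on this page.
    public sealed class StaticVideoProvider : IVideoProvider
    {
        private readonly int[] _buffer;

        public StaticVideoProvider(int width, int height)
        {
            BufferWidth  = width;
            BufferHeight = height;
            _buffer      = new int[width * height]; // one packed ARGB int per pixel
        }

        public int BufferWidth { get; }
        public int BufferHeight { get; }
        public int VirtualWidth => BufferWidth;   // no aspect-ratio correction
        public int VirtualHeight => BufferHeight;
        public int BackgroundColor => 0;
        public int VsyncNumerator => 60;          // nominal 60 Hz
        public int VsyncDenominator => 1;

        public int[] GetVideoBuffer() => _buffer;
    }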
Example #2
        public void Setup(string fileName)
        {
            Repository = new Repository();
            Repository.Load();
            _window = Substitute.For<IEmulatorWindow>();
            var settings      = Substitute.For<ISettings>();
            var recognition   = new TextRecognition();
            var loggerFactory = Substitute.For<ILoggerFactory>();

            _videoProvider = Substitute.For<IVideoProvider>();
            Game           = new GameImpl(_window, _videoProvider, Repository, loggerFactory, recognition, settings);
#if DEBUG
            Settings.SaveImages    = true;
            Settings.SaveRawImages = true;
#endif

            using var bitmap = new Bitmap(fileName);
            _window.Width.Returns(bitmap.Width);
            _window.Height.Returns(bitmap.Height);
            _window.GrabScreen(Arg.Any<int>(), Arg.Any<int>(), Arg.Any<int>(), Arg.Any<int>())
            .Returns(info =>
            {
                using var bitmapLocal = new Bitmap(fileName);
                return bitmapLocal.Crop(info.ArgAt<int>(0), info.ArgAt<int>(1), info.ArgAt<int>(2), info.ArgAt<int>(3));
            });
            _videoProvider.GetCurrentFrame().Returns(info => bitmap);
        }
Example #3
 private static void PopulateFramebuffer(BinaryReader br, IVideoProvider videoProvider, IQuickBmpFile quickBmpFile)
 {
     try
     {
         using (new SimpleTime("Load Framebuffer"))
         {
             quickBmpFile.Load(videoProvider, br.BaseStream);
         }
     }
     catch
     {
         var buff = videoProvider.GetVideoBuffer();
         try
         {
             for (int i = 0; i < buff.Length; i++)
             {
                 int j = br.ReadInt32();
                 buff[i] = j;
             }
         }
         catch (EndOfStreamException)
         {
             // stream ended early; keep whatever portion of the framebuffer was read
         }
     }
 }
Example #4
        private long _soundRemainder;         // audio timekeeping for video dumping

        /// <exception cref="InvalidOperationException">
        /// <paramref name="asyncSoundProvider"/>'s mode is not <see cref="SyncSoundMode.Async"/>, or
        /// A/V parameters haven't been set (need to call <see cref="AVStretcher.SetAudioParameters"/> and <see cref="AVStretcher.SetMovieParameters"/>)
        /// </exception>
        public void DumpAV(IVideoProvider v, ISoundProvider asyncSoundProvider, out short[] samples, out int samplesProvided)
        {
            // Sound refactor TODO: we could try set it here, but we want the client to be responsible for mode switching? There may be non-trivial complications with when to switch modes that we don't want this object worrying about
            if (asyncSoundProvider.SyncMode != SyncSoundMode.Async)
            {
                throw new InvalidOperationException("Only async mode is supported, set async mode before passing in the sound provider");
            }

            if (!ASet || !VSet)
            {
                throw new InvalidOperationException("Must set params first!");
            }

            long nSampNum = Samplerate * (long)FpsDen + _soundRemainder;
            long nsamp    = nSampNum / FpsNum;

            // exactly remember fractional parts of an audio sample
            _soundRemainder = nSampNum % FpsNum;

            samples = new short[nsamp * Channels];
            asyncSoundProvider.GetSamplesAsync(samples);
            samplesProvided = (int)nsamp;

            W.AddFrame(v);
            W.AddSamples(samples);
        }
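A sketch of the call order the exceptions above imply: set the A/V parameters, switch the provider to async mode, then dump once per frame. Only the method names come from the docs above; the parameter lists are assumptions.

    // Hypothetical driver; argument orders for the two setup calls are assumed.
    void DumpMovie(AVStretcher stretcher, IVideoProvider video, ISoundProvider sound, int frames)
    {
        stretcher.SetAudioParameters(44100, 2, 16); // sample rate, channels, bits (assumed order)
        stretcher.SetMovieParameters(60, 1);        // fps numerator / denominator (assumed order)
        sound.SetSyncMode(SyncSoundMode.Async);     // required, or DumpAV throws

        for (int i = 0; i < frames; i++)
        {
            stretcher.DumpAV(video, sound, out short[] samples, out int provided);
            // samples now holds `provided` samples per channel, already handed to the writer
        }
    }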
Example #5
        private IVideoProvider CreateSigleCaptureObject(XmlNode node, HardwareType hw)
        {
            if (node == null)
            {
                return null;
            }
            XmlNode item = node.SelectSingleNode(hw.ToString());

            if (item == null)
            {
                return null;
            }
            if (hwLib.ContainsKey(hw) && hwLib[hw] is IVideoProvider)
            {
                return (IVideoProvider)hwLib[hw];
            }
            try
            {
                IVideoProvider provider = Serialize.XmlDeSerialize(item) as IVideoProvider;
                return provider;
            }
            catch (Exception ex)
            {
                LogHelper.WriteDebugException(ex);
                return null;
            }
        }
Example #6
 public static unsafe void Copy(IVideoProvider src, IVideoProvider dst)
 {
     if (src.BufferWidth == dst.BufferWidth && src.BufferHeight == dst.BufferHeight)
     {
         Array.Copy(src.GetVideoBuffer(), dst.GetVideoBuffer(), src.GetVideoBuffer().Length);
     }
     else
     {
         fixed(int *srcp = src.GetVideoBuffer(), dstp = dst.GetVideoBuffer())
         {
             Blit_Any_NoFlip(new BMP
             {
                 Data   = srcp,
                 Width  = src.BufferWidth,
                 Height = src.BufferHeight
             },
                             new BMP
             {
                 Data   = dstp,
                 Width  = dst.BufferWidth,
                 Height = dst.BufferHeight
             });
         }
     }
 }
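When the two providers are the same size this is a plain Array.Copy; otherwise the pixels go through the internal blitter. A quick same-size sanity check, using the stand-in provider sketched after Example #1:

    var src = new StaticVideoProvider(320, 240);
    var dst = new StaticVideoProvider(320, 240);
    src.GetVideoBuffer()[0] = unchecked((int)0xFFFF0000); // one opaque red pixel
    Copy(src, dst);                                       // equal sizes: straight Array.Copy
    System.Diagnostics.Debug.Assert(dst.GetVideoBuffer()[0] == unchecked((int)0xFFFF0000));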
Example #7
        public void Init(
            IBrain brain,
            IVideoProvider videoProvider,
            IVideoRecorder videoRecorder,
            IAudioRecorder audioRecorder,
            IAudioPlayer audioPlayer,
            IAudioRepository audioRepository)
        {
            this.brain         = brain;
            this.videoProvider = videoProvider;
            this.videoRecorder = videoRecorder;
            this.audioRecorder = audioRecorder;

            var frameObserver           = new FrameObserver(brain);
            var frameRecognizedObserver = new FrameRecognizedObserver(audioPlayer, audioRepository);

            recognizeAvailableFrameSubscription = videoProvider.FrameAvailable.Subscribe(frameObserver);

            this.lifetimeStreams = new CompositeDisposable
            {
                brain.FrameRecognized.Subscribe(frameRecognizedObserver),
                videoRecorder.RecordingAvailable.Subscribe(new VideoPublisher(brain)),
                audioRecorder.RecordingAvailable.Subscribe(new AudioPublisher(audioRepository))
            };
        }
Example #8
 public WebSocketStudyRoomOnlineEventHandler(IHubContext<StudyRoomHub> hubContext,
                                             IVideoProvider videoProvider, TelemetryClient telemetryClient)
 {
     _hubContext      = hubContext;
     _videoProvider   = videoProvider;
     _telemetryClient = telemetryClient;
 }
Example #9
 public void AddFrame(IVideoProvider source)
 {
     if (source.BufferHeight != height || source.BufferWidth != width)
     {
         SetVideoParameters(source.BufferWidth, source.BufferHeight);
     }
     current.WriteVideoFrame(source.GetVideoBuffer());
 }
Example #10
        public void AddFrame(IVideoProvider source)
        {
            using var bb = new BitmapBuffer(source.BufferWidth, source.BufferHeight, source.GetVideoBuffer());
            string subPath = GetAndCreatePathForFrameNum(_mCurrFrame);
            string path    = $"{subPath}.png";

            bb.ToSysdrawingBitmap().Save(path, ImageFormat.Png);
        }
Example #11
            public bool success = false;             // indicates whether the last command was executed successfully

            public void Initialize()
            {
                if (currentVideoProvider == null)
                {
                    currentVideoProvider = Global.Emulator.AsVideoProviderOrDefault();
                }
                initialized = true;
            }
Example #12
 public BitmapBuffer MakeScreenShotImage()
 {
     if (currentVideoProvider == null)
     {
         currentVideoProvider = Global.Emulator.AsVideoProviderOrDefault();
     }
     return GlobalWin.DisplayManager.RenderVideoProvider(currentVideoProvider);
 }
Example #13
 public VideoCopy(IVideoProvider c)
 {
     vb              = (int[])c.GetVideoBuffer().Clone();
     BufferWidth     = c.BufferWidth;
     BufferHeight    = c.BufferHeight;
     BackgroundColor = c.BackgroundColor;
     VirtualWidth    = c.VirtualWidth;
     VirtualHeight   = c.VirtualHeight;
 }
Example #14
		public void AddFrame(IVideoProvider source)
		{
			using (var bb = new BitmapBuffer(source.BufferWidth, source.BufferHeight, source.GetVideoBuffer()))
			{
				string subpath = GetAndCreatePathForFrameNum(mCurrFrame);
				string path = subpath + ".png";
				bb.ToSysdrawingBitmap().Save(path, System.Drawing.Imaging.ImageFormat.Png);
			}
		}
Example #15
 /// <summary>
 /// adds a frame to the stream
 /// </summary>
 public void AddFrame(IVideoProvider source)
 {
     if (!workerT.IsAlive)
     {
         // signal some sort of error?
         return;
     }
     threadQ.Add(GzipFrameDelegate.BeginInvoke(new VideoCopy(source), null, null));
 }
Example #16
        /// <summary>
        /// Initializes a new instance of the <see cref="VideosViewModel"/> class.
        /// </summary>
        public VideosViewModel(IVideoProvider videoProvider)
        {
            VideoProvider = videoProvider;

            PlaySelectedVideo = new Command<object, object>(OnPlaySelectedVideoExecute, OnPlaySelectedVideoCanExecute);
            StopPlayingVideo = new Command<object>(OnStopPlayingVideoExecute);
            Refresh = new Command<object, object>(OnRefreshExecute, OnRefreshCanExecute);
            ViewInSoftware = new Command<object, object>(OnViewInSoftwareExecute, OnViewInSoftwareCanExecute);
        }
Example #17
 public void AddFrame(IVideoProvider source)
 {
     using (var bb = new BitmapBuffer(source.BufferWidth, source.BufferHeight, source.GetVideoBuffer()))
     {
         string subpath = GetAndCreatePathForFrameNum(mCurrFrame);
         string path    = subpath + ".png";
         bb.ToSysdrawingBitmap().Save(path, System.Drawing.Imaging.ImageFormat.Png);
     }
 }
Example #18
 /// <exception cref="Exception">worker thread died</exception>
 public void AddFrame(IVideoProvider source)
 {
     while (!_threadQ.TryAdd(new VideoCopy(source), 1000))
     {
         if (!_workerT.IsAlive)
         {
             throw new Exception("AVI Worker thread died!");
         }
     }
 }
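The TryAdd overload with a 1000 ms timeout matches System.Collections.Concurrent.BlockingCollection<T>; assuming that is _threadQ's type, the consumer side would look roughly like this:

    // Sketch of the worker half of the bounded queue; the capacity is illustrative.
    var threadQ = new System.Collections.Concurrent.BlockingCollection<VideoCopy>(boundedCapacity: 30);
    var workerT = new System.Threading.Thread(() =>
    {
        foreach (var frame in threadQ.GetConsumingEnumerable())
        {
            // compress / write the frame; if this thread dies, the producer's
            // TryAdd above keeps timing out and eventually throws
        }
    })
    { IsBackground = true };
    workerT.Start();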
Example #19
 public EmuClientApi(Action <string> logCallback, IMainFormForApi mainForm, DisplayManagerBase displayManager, Config config, IEmulator emulator, IGameInfo game)
 {
     _config         = config;
     _displayManager = displayManager;
     Emulator        = emulator;
     Game            = game;
     _logCallback    = logCallback;
     _mainForm       = mainForm;
     VideoProvider   = Emulator.AsVideoProviderOrDefault();
 }
Example #20
        public IwaraViewerViewModel(IDownloadProvider downloadProvider, IVideoProvider videoProvider)
        {
            ChangeAddressCommand = new RelayCommand<string>(ChangeAddress);
            DownloadCommand      = new RelayCommand<Video>(DownloadVideo);

            WebViewAdress = "https://ecchi.iwara.tv/";

            DownloadProvider = downloadProvider;
            _VideoProvider   = videoProvider;
        }
Example #21
 void AddFrameEx(IVideoProvider source)
 {
     SetVideoParameters(source.BufferWidth, source.BufferHeight);
     ConsiderLengthSegment();
     if (currSegment == null)
     {
         Segment();
     }
     currSegment.AddFrame(source);
 }
Example #22
        private XmlNode SaveSingleCaptureObject(HardwareType hwType, IVideoProvider item, XmlDocument document)
        {
            if (item == null)
            {
                return null;
            }
            XmlNode cap = document.CreateElement(hwType.ToString());

            Serialize.XmlSerialize(item, cap);
            return cap;
        }
Example #23
 public VideoCopy(IVideoProvider c)
 {
     _vb              = (int[])c.GetVideoBuffer().Clone();
     BufferWidth      = c.BufferWidth;
     BufferHeight     = c.BufferHeight;
     BackgroundColor  = c.BackgroundColor;
     VirtualWidth     = c.VirtualWidth;
     VirtualHeight    = c.VirtualHeight;
     VsyncNumerator   = c.VsyncNumerator;
     VsyncDenominator = c.VsyncDenominator;
 }
Example #24
        /// <summary>
        /// Copies the given frame buffer into the provider's current frame buffer.
        /// Note: this writes through the array returned by <see cref="IVideoProvider.GetVideoBuffer" />,
        /// so it only works when the core hands out a reference to its live frame buffer
        /// rather than a copy.
        /// </summary>
        public static void PopulateFromBuffer(this IVideoProvider videoProvider, int[] frameBuffer)
        {
            var b1  = frameBuffer;
            var b2  = videoProvider.GetVideoBuffer();
            int len = Math.Min(b1.Length, b2.Length);

            for (int i = 0; i < len; i++)
            {
                b2[i] = b1[i];
            }
        }
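A short round-trip, assuming the core really does hand out a live buffer reference as the remarks require:

    // Snapshot the current frame, let emulation run, then restore it.
    int[] saved = (int[])videoProvider.GetVideoBuffer().Clone();
    // ... frames advance, the live buffer changes ...
    videoProvider.PopulateFromBuffer(saved); // copies back into the live buffer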
Example #25
 public GameImpl(IEmulatorWindow window, IVideoProvider videoProvider, IUiRepository repository,
                 ILoggerFactory loggerFactory, ITextRecognition recognition, ISettings settings)
 {
     _window        = window;
     _videoProvider = videoProvider;
     _repository    = repository;
     _loggerFactory = loggerFactory;
     _recognition   = recognition;
     _settings      = settings;
     _logger        = _loggerFactory.CreateLogger(GetType());
 }
Example #26
        /// <summary>
        /// This will receive an emulated output frame from an IVideoProvider and run it through the complete frame processing pipeline,
        /// then stuff it into the bound PresentationPanel.
        /// ---
        /// If the int[] is size=1, then it contains an OpenGL texture ID (and the size should be as specified by videoProvider).
        /// Don't worry about the case where the frontend isn't using OpenGL; it isn't supported yet, and it will be my responsibility to deal with anyway.
        /// </summary>
        public void UpdateSource(IVideoProvider videoProvider)
        {
            var job = new JobInfo
            {
                videoProvider = videoProvider,
                simulate      = false,
                chain_outsize = GraphicsControl.Size,
                includeOSD    = true
            };

            UpdateSourceInternal(job);
        }
Example #27
        /// <summary>
        /// Raise when a rom is successfully Loaded
        /// </summary>
        public static void OnRomLoaded(IEmulator emu)
        {
            Emulator      = emu;
            VideoProvider = emu.AsVideoProviderOrDefault();
            RomLoaded?.Invoke(null, EventArgs.Empty);

            _allJoyPads = new List <Joypad>(RunningSystem.MaxControllers);
            for (int i = 1; i <= RunningSystem.MaxControllers; i++)
            {
                _allJoyPads.Add(new Joypad(RunningSystem, i));
            }
        }
Example #28
		/// <summary>
		/// This will receive an emulated output frame from an IVideoProvider and run it through the complete frame processing pipeline,
		/// then stuff it into the bound PresentationPanel.
		/// ---
		/// If the int[] is size=1, then it contains an OpenGL texture ID (and the size should be as specified by videoProvider).
		/// Don't worry about the case where the frontend isn't using OpenGL; DisplayManager deals with it.
		/// </summary>
		public void UpdateSource(IVideoProvider videoProvider)
		{
			bool displayNothing = Global.Config.DispSpeedupFeatures == 0;
			var job = new JobInfo
			{
				VideoProvider = videoProvider,
				Simulate = displayNothing,
				ChainOutsize = GraphicsControl.Size,
				IncludeOSD = true,
				IncludeUserFilters = true
			};
			UpdateSourceInternal(job);
		}
Example #29
        /// <summary>
        /// This will receive an emulated output frame from an IVideoProvider and run it through the complete frame processing pipeline,
        /// then stuff it into the bound PresentationPanel.
        /// ---
        /// If the int[] is size=1, then it contains an OpenGL texture ID (and the size should be as specified by videoProvider).
        /// Don't worry about the case where the frontend isn't using OpenGL; DisplayManager deals with it.
        /// </summary>
        public void UpdateSource(IVideoProvider videoProvider)
        {
            bool displayNothing = Global.Config.DispSpeedupFeatures == 0;
            var  job            = new JobInfo
            {
                videoProvider = videoProvider,
                simulate      = displayNothing,
                chain_outsize = GraphicsControl.Size,
                includeOSD    = true,
            };

            UpdateSourceInternal(job);
        }
Example #30
        public static unsafe bool Load(IVideoProvider v, Stream s)
        {
            var bf = BITMAPFILEHEADER.FromStream(s);
            var bi = BITMAPINFOHEADER.FromStream(s);

            if (bf.bfType != 0x4d42 ||
                bf.bfOffBits != bf.bfSize + bi.biSize ||
                bi.biPlanes != 1 ||
                bi.biBitCount != 32 ||
                bi.biCompression != BitmapCompressionMode.BI_RGB)
            {
                return false;
            }

            int inW = bi.biWidth;
            int inH = bi.biHeight;

            byte[] src = new byte[inW * inH * 4];
            s.Read(src, 0, src.Length);
            if (v is LoadedBMP l)
            {
                l.BufferWidth  = inW;
                l.BufferHeight = inH;
                l.VideoBuffer  = new int[inW * inH];
            }

            int[] dst = v.GetVideoBuffer();

            fixed(byte *srcp = src)
            fixed(int *dstp = dst)
            {
                using (new SimpleTime("Blit"))
                {
                    Blit(new BMP
                    {
                        Data   = (int *)srcp,
                        Width  = inW,
                        Height = inH
                    },
                         new BMP
                    {
                        Data   = dstp,
                        Width  = v.BufferWidth,
                        Height = v.BufferHeight,
                    });
                }
            }

            return true;
        }
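Hedged usage of the loader: it only accepts 32bpp uncompressed (BI_RGB) bitmaps, and passing a LoadedBMP lets it size the buffer to the file. The file name here is illustrative:

    // Assumes "framebuffer.bmp" was written as 32bpp BI_RGB (e.g. by a matching Save).
    using (var fs = File.OpenRead("framebuffer.bmp"))
    {
        var target = new LoadedBMP();
        if (!Load(target, fs))
            throw new InvalidDataException("not a 32bpp BI_RGB bitmap");
        Console.WriteLine($"{target.BufferWidth}x{target.BufferHeight}");
    }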
Example #31
        /// <summary>
        /// Does the entire display process to an offscreen buffer, suitable for a 'client' screenshot.
        /// </summary>
        public BitmapBuffer RenderOffscreen(IVideoProvider videoProvider, bool includeOSD)
        {
            var job = new JobInfo
            {
                videoProvider = videoProvider,
                simulate      = false,
                chain_outsize = GraphicsControl.Size,
                offscreen     = true,
                includeOSD    = includeOSD
            };

            UpdateSourceInternal(job);
            return job.offscreenBB;
        }
Example #32
        public EmulationState(Emulator emulator, Ppu ppu, Interrupts interrupts, Memory memory, Dma dma, Apu apu, Cpu cpu, Input input, IVideoProvider videoProvider)
        {
            this.emulator = emulator;

            this.ppu        = ppu;
            this.interrupts = interrupts;
            this.memory     = memory;
            this.dma        = dma;
            this.apu        = apu;
            this.cpu        = cpu;
            this.input      = input;

            this.view = videoProvider;
        }
Example #33
		public void AddFrame(IVideoProvider source)
		{
			string ext = Path.GetExtension(BaseName);
			string name = Path.GetFileNameWithoutExtension(BaseName) + "_" + Frame.ToString();
			name += ext;
			name = Path.Combine(Path.GetDirectoryName(BaseName), name);
			BizHawk.Bizware.BizwareGL.BitmapBuffer bb = new Bizware.BizwareGL.BitmapBuffer(source.BufferWidth, source.BufferHeight, source.GetVideoBuffer());
			using (var bmp = bb.ToSysdrawingBitmap())
			{
				if (ext.ToUpper() == ".PNG")
					bmp.Save(name, System.Drawing.Imaging.ImageFormat.Png);
				else if (ext.ToUpper() == ".JPG")
					bmp.Save(name, System.Drawing.Imaging.ImageFormat.Jpeg);
			}
			Frame++;
		}
Example #34
		private long _soundRemainder; // audio timekeeping for video dumping

		public void DumpAV(IVideoProvider v, ISoundProvider s, out short[] samples, out int samplesprovided)
		{
			if (!aset || !vset)
				throw new InvalidOperationException("Must set params first!");

			long nsampnum = samplerate * (long)fpsden + _soundRemainder;
			long nsamp = nsampnum / fpsnum;

			// exactly remember fractional parts of an audio sample
			_soundRemainder = nsampnum % fpsnum;

			samples = new short[nsamp * channels];
			s.GetSamples(samples);
			samplesprovided = (int)nsamp;

			w.AddFrame(v);
			w.AddSamples(samples);
		}
Example #35
		public void DumpAV(IVideoProvider v, ISyncSoundProvider s, out short[] samples, out int samplesprovided)
		{
			VerifyParams();
			s.GetSamples(out samples, out samplesprovided);
			exaudio_num += samplesprovided * (long)fpsnum;

			// todo: scan for duplicate frames (ie, video content exactly matches previous frame) and for them, skip the threshone step
			// this is a good idea, but expensive on time.  is it worth it?

			if (exaudio_num >= threshone)
			{
				// add frame once
				w.AddFrame(v);
				exaudio_num -= threshtotal;
			}
			else
			{
				Console.WriteLine("Dropped Frame!");
			}
			while (exaudio_num >= threshmore)
			{
				// add frame again!
				w.AddFrame(v);
				exaudio_num -= threshtotal;
				Console.WriteLine("Dupped Frame!");
			}

			// a bit hacky, because this API can't report the usable sample count
			// separately from the actual length of the buffer
			if (samples.Length == samplesprovided * channels)
			{
				w.AddSamples(samples);
			}
			else
			{
				if (_samples.Length != samplesprovided * channels)
					_samples = new short[samplesprovided * channels];

				Buffer.BlockCopy(samples, 0, _samples, 0, samplesprovided * channels * sizeof(short));
				w.AddSamples(_samples);
			}
		}
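The drop/dup bookkeeping above is an integer accumulator: each call deposits samplesprovided * fpsnum units, each emitted frame costs threshtotal units, and threshone/threshmore set the emit and duplicate points. A standalone sketch with illustrative threshold values (the real ones are defined elsewhere in this class):

    // Accumulator sketch only; all three threshold values are assumptions.
    long acc = 0;
    const long threshtotal = 735 * 60;         // one frame's worth of audio units (44100 Hz, 60 fps, fpsnum = 60)
    const long threshone   = threshtotal / 10; // emit a frame once at least this far ahead
    const long threshmore  = threshtotal;      // duplicate while a whole extra frame is owed
    foreach (long samplesProvided in new long[] { 735, 700, 1500, 0 })
    {
        acc += samplesProvided * 60;           // samplesprovided * fpsnum
        if (acc >= threshone) { acc -= threshtotal; /* AddFrame once */ }
        else { /* dropped frame */ }
        while (acc >= threshmore) { acc -= threshtotal; /* AddFrame again (dup) */ }
    }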
Example #36
        public void AddFrame(IVideoProvider source)
        {
            if (source.BufferWidth != width || source.BufferHeight != height)
                SetVideoParameters(source.BufferWidth, source.BufferHeight);

            if (ffmpeg.HasExited)
                throw new Exception("unexpected ffmpeg death:\n" + ffmpeg_geterror());

            var video = source.GetVideoBuffer();
            try
            {
                muxer.WriteVideoFrame(video);
            }
            catch
            {
                System.Windows.Forms.MessageBox.Show("Exception! ffmpeg history:\n" + ffmpeg_geterror());
                throw;
            }

            // have to do binary write!
            //ffmpeg.StandardInput.BaseStream.Write(b, 0, b.Length);
        }
Example #37
        public void AddFrame(IVideoProvider source)
        {
            if (skipindex == token.frameskip)
                skipindex = 0;
            else
            {
                skipindex++;
                return; // skip this frame
            }

            using (var bmp = new Bitmap(source.BufferWidth, source.BufferHeight, System.Drawing.Imaging.PixelFormat.Format32bppArgb))
            {
                var data = bmp.LockBits(new Rectangle(0, 0, bmp.Width, bmp.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                System.Runtime.InteropServices.Marshal.Copy(source.GetVideoBuffer(), 0, data.Scan0, bmp.Width * bmp.Height);
                bmp.UnlockBits(data);

                using (var qBmp = new OctreeQuantizer(255, 8).Quantize(bmp))
                {
                    MemoryStream ms = new MemoryStream();
                    qBmp.Save(ms, System.Drawing.Imaging.ImageFormat.Gif);
                    byte[] b = ms.GetBuffer();
                    if (!firstdone)
                    {
                        firstdone = true;
                        b[10] = (byte)(b[10] & 0x78); // clear the global color table flag in the logical screen descriptor
                        f.Write(b, 0, 13); // GIF header + logical screen descriptor
                        f.Write(GifAnimation, 0, GifAnimation.Length); // looping (animation) extension block
                    }
                    // with a 256-entry global palette at offset 13, the graphic control
                    // extension starts at byte 781; bytes 785-786 hold the frame delay
                    b[785] = Delay[0];
                    b[786] = Delay[1];
                    b[798] = (byte)(b[798] | 0x87); // image descriptor: flag a 256-entry local color table
                    f.Write(b, 781, 18); // graphic control extension + image descriptor
                    f.Write(b, 13, 768); // the palette, re-emitted as the local color table
                    f.Write(b, 799, (int)(ms.Length - 800)); // image data, excluding the trailer byte

                    lastbyte = b[ms.Length - 1];
                }
            }
        }
Example #38
		/// <summary>
		/// adds a frame to the stream
		/// </summary>
		public void AddFrame(IVideoProvider source)
		{
			if (!workerT.IsAlive)
				// signal some sort of error?
				return;
			threadQ.Add(GzipFrameDelegate.BeginInvoke(new VideoCopy(source), null, null));
		}
Example #39
        public BitmapBuffer RenderVideoProvider(IVideoProvider videoProvider)
        {
            //TODO - we might need to gather more Global.Config.DispXXX properties here, so they can be overridden
            var targetSize = new Size(videoProvider.BufferWidth, videoProvider.BufferHeight);
            var padding = CalculateCompleteContentPadding(true, true);
            targetSize.Width += padding.Horizontal;
            targetSize.Height += padding.Vertical;

            var job = new JobInfo
            {
                videoProvider = videoProvider,
                simulate = false,
                chain_outsize = targetSize,
                offscreen = true,
                includeOSD = false
            };
            UpdateSourceInternal(job);
            return job.offscreenBB;
        }
Example #40
 public static void SetupVideoRenderer(IVideoProvider video)
 {
     SetupVideoRenderer(video, false, IntPtr.Zero, 0, 0);
 }
Example #41
			public VideoCopy(IVideoProvider c)
			{
				int[] vb = c.GetVideoBuffer();
				VideoBuffer = new byte[vb.Length * sizeof(int)];
				// we have to switch to RGB byte ordering here: the ints are packed
				// ARGB, so a raw Buffer.BlockCopy would emit BGRA bytes instead
				for (int i = 0; i < vb.Length; i++)
				{
					VideoBuffer[i * 4 + 0] = (byte)(vb[i] >> 16);  // R
					VideoBuffer[i * 4 + 1] = (byte)(vb[i] >> 8);   // G
					VideoBuffer[i * 4 + 2] = (byte)(vb[i] & 255);  // B
					VideoBuffer[i * 4 + 3] = 0;                    // unused alpha
				}
				//Buffer.BlockCopy(vb, 0, VideoBuffer, 0, VideoBuffer.Length);
				BufferWidth = c.BufferWidth;
				BufferHeight = c.BufferHeight;
			}
Example #42
		public BitmapBuffer RenderOffscreen(IVideoProvider videoProvider, bool includeOSD)
		{
			var job = new JobInfo
			{
				videoProvider = videoProvider,
				simulate = false,
				chain_outsize = GraphicsControl.Size,
				offscreen = true,
				includeOSD = includeOSD
			};
			UpdateSourceInternal(job);
			return job.offscreenBB;
		}
Example #43
 public static void SetupVideoRenderer(IVideoProvider video, bool pointerMode, IntPtr scan0, int scanStart, int scanSize)
 {
     videoOut = video;
     if (pointerMode)
     {
         screenPointerMode = true;
         screenPointer = (int*)scan0;
         screenPointerSize = scanSize;
         screenPointerStart = scanStart;
     }
     else
     {
         screenPointerMode = false;
         screenPointer = null;
         screenPointerSize = 0;
         screenPointerStart = 0;
     }
 }
Example #44
			public VideoCopy(IVideoProvider c)
			{
				vb = (int[])c.GetVideoBuffer().Clone();
				BufferWidth = c.BufferWidth;
				BufferHeight = c.BufferHeight;
				BackgroundColor = c.BackgroundColor;
				VirtualWidth = c.VirtualWidth;
				VirtualHeight = c.VirtualHeight;
			}
Example #45
		public BitmapBuffer RenderVideoProvider(IVideoProvider videoProvider)
		{
			var job = new JobInfo
			{
				videoProvider = videoProvider,
				simulate = false,
				chain_outsize = new Size(videoProvider.BufferWidth, videoProvider.BufferHeight),
				offscreen = true,
				includeOSD = false
			};
			UpdateSourceInternal(job);
			return job.offscreenBB;
		}
Example #46
		void AddFrameEx(IVideoProvider source)
		{
			SetVideoParameters(source.BufferWidth, source.BufferHeight);
			ConsiderLengthSegment();
			if (currSegment == null) Segment();
			currSegment.AddFrame(source);
		}
Example #47
			public unsafe void AddFrame(IVideoProvider source)
			{
				if (parameters.width != source.BufferWidth
					|| parameters.height != source.BufferHeight)
					throw new InvalidOperationException("video buffer changed between start and now");

				int todo = source.BufferHeight * source.BufferWidth;
				int w = source.BufferWidth;
				int h = source.BufferHeight;

				if (!bit32)
				{
					IntPtr buf = GetStaticGlobalBuf(todo * 3);

					int[] buffer = source.GetVideoBuffer();
					fixed (int* buffer_ptr = &buffer[0])
					{
						byte* bytes_ptr = (byte*)buf.ToPointer();
						{
							byte* bp = bytes_ptr;

							// walk rows bottom-up: idx starts at the last row; the idx -= w * 2 below steps up one row per pass
							for (int idx = w * h - w, y = 0; y < h; y++)
							{
								for (int x = 0; x < w; x++, idx++)
								{
									int r = (buffer[idx] >> 0) & 0xFF;
									int g = (buffer[idx] >> 8) & 0xFF;
									int b = (buffer[idx] >> 16) & 0xFF;
									*bp++ = (byte)r;
									*bp++ = (byte)g;
									*bp++ = (byte)b;
								}
								idx -= w * 2;
							}

							int bytes_written;
							int ret = Win32.AVIStreamWrite(pAviCompressedVideoStream, outStatus.video_frames, 1, new IntPtr(bytes_ptr), todo * 3, Win32.AVIIF_KEYFRAME, IntPtr.Zero, out bytes_written);
							outStatus.video_bytes += bytes_written;
							outStatus.video_frames++;
						}
					}
				}
				else // 32 bit
				{
					IntPtr buf = GetStaticGlobalBuf(todo * 4);
					int[] buffer = source.GetVideoBuffer();
					fixed (int* buffer_ptr = &buffer[0])
					{
						byte* bytes_ptr = (byte*)buf.ToPointer();
						{
							byte* bp = bytes_ptr;

							// walk rows bottom-up: idx starts at the last row; the idx -= w * 2 below steps up one row per pass
							for (int idx = w * h - w, y = 0; y < h; y++)
							{
								for (int x = 0; x < w; x++, idx++)
								{
									int r = (buffer[idx] >> 0) & 0xFF;
									int g = (buffer[idx] >> 8) & 0xFF;
									int b = (buffer[idx] >> 16) & 0xFF;
									*bp++ = (byte)r;
									*bp++ = (byte)g;
									*bp++ = (byte)b;
									*bp++ = 0;
								}
								idx -= w * 2;
							}

							int bytes_written;
							int ret = Win32.AVIStreamWrite(pAviCompressedVideoStream, outStatus.video_frames, 1, new IntPtr(bytes_ptr), todo * 4, Win32.AVIIF_KEYFRAME, IntPtr.Zero, out bytes_written); // 4 bytes per pixel in this branch
							outStatus.video_bytes += bytes_written;
							outStatus.video_frames++;
						}
					}
				}
			}
Example #48
		public void AddFrame(IVideoProvider source)
		{
			while (!threadQ.TryAdd(new VideoCopy(source), 1000))
			{
				if (!workerT.IsAlive)
					throw new Exception("AVI Worker thread died!");
			}
		}
Example #49
		public void AddFrame(IVideoProvider source)
		{
			w.AddFrame(source);
		}
Example #50
		/// <summary>
		/// This will receive an emulated output frame from an IVideoProvider and run it through the complete frame processing pipeline,
		/// then stuff it into the bound PresentationPanel.
		/// ---
		/// If the int[] is size=1, then it contains an OpenGL texture ID (and the size should be as specified by videoProvider).
		/// Don't worry about the case where the frontend isn't using OpenGL; it isn't supported yet, and it will be my responsibility to deal with anyway.
		/// </summary>
		public void UpdateSource(IVideoProvider videoProvider)
		{
			bool displayNothing = Global.Config.DispSpeedupFeatures == 0;
			var job = new JobInfo
			{
				videoProvider = videoProvider,
				simulate = displayNothing,
				chain_outsize = GraphicsControl.Size,
				includeOSD = true
			};
			UpdateSourceInternal(job);
		}
Example #51
		public void AddFrame(IVideoProvider source)
		{
			if (source.BufferHeight != height || source.BufferWidth != width)
				SetVideoParameters(source.BufferWidth, source.BufferHeight);
			current.WriteVideoFrame(source.GetVideoBuffer());
		}
Example #52
		public new virtual void AddFrame(IVideoProvider source)
		{
			throw new InvalidOperationException("Must call AddAV()!");
		}
Example #53
 public MovieDataAccess()
 {
     movieInfo = new ImdbMovieInformation();
     videoProvider = new YoutubeVideoProvider();
 }
Example #54
		/// <summary>
		/// Attempts to calculate a good client size with the given zoom factor, considering the user's DisplayManager preferences
		/// </summary>
		public Size CalculateClientSize(IVideoProvider videoProvider, int zoom)
		{
			int bufferWidth = videoProvider.BufferWidth;
			int bufferHeight = videoProvider.BufferHeight;
			int virtualWidth = videoProvider.VirtualWidth;
			int virtualHeight = videoProvider.VirtualHeight;

			//test
			//Console.WriteLine("DISPZOOM " + zoom);

			//old stuff
			var fvp = new FakeVideoProvider();
			fvp.BufferWidth = bufferWidth;
			fvp.BufferHeight = bufferHeight;
			fvp.VirtualWidth = virtualWidth;
			fvp.VirtualHeight = virtualHeight;

			Size chain_outsize = new Size(fvp.BufferWidth * zoom, fvp.BufferHeight * zoom);

			bool ar_active = Global.Config.DispFixAspectRatio;
			bool ar_system = Global.Config.DispObeyAR;
			bool ar_unity = !ar_system;
			bool ar_integer = Global.Config.DispFixScaleInteger;

			if (ar_active)
			{
				if (ar_system)
				{
					if (ar_integer)
					{
						Vector2 VS = new Vector2(virtualWidth, virtualHeight);
						Vector2 BS = new Vector2(bufferWidth, bufferHeight);
						Vector2 AR = Vector2.Divide(VS, BS);
						float target_par = (AR.X / AR.Y);
						Vector2 PS = new Vector2(1, 1); //this would malfunction for AR <= 0.5 or AR >= 2.0

						//here's how we define zooming, in this case:
						//make sure each step is an increment of zoom for at least one of the dimensions (or maybe both of them)
						//look for the increment which helps the AR the best
						//TODO - this can't possibly support scale factors like 1.5x
						//TODO - also, this might be messing up zooms and stuff, we might need to run this on the output size of the filter chain
						for (int i = 1; i < zoom; i++)
						{
							//would not be good to run this per frame, but it seems to only run when the resolution changes, etc.
							Vector2[] trials = new [] {
								PS + new Vector2(1, 0),
								PS + new Vector2(0, 1),
								PS + new Vector2(1, 1)
							};
							int bestIndex = -1;
							float bestValue = 1000.0f;
							for (int t = 0; t < trials.Length; t++)
							{
								//I.
								float test_ar = trials[t].X / trials[t].Y;

								//II.
								//Vector2 calc = Vector2.Multiply(trials[t], VS);
								//float test_ar = calc.X / calc.Y;
								
								//not clear which approach is superior
								float deviation_linear = Math.Abs(test_ar - target_par);
								float deviation_geom = test_ar / target_par;
								if (deviation_geom < 1) deviation_geom = 1.0f / deviation_geom;

								float value = deviation_linear;
								if (value < bestValue)
								{
									bestIndex = t;
									bestValue = value;
								}
							}
							//is it possible to get here without selecting one? doubtful.
							PS = trials[bestIndex];
						}

						chain_outsize = new Size((int)(bufferWidth * PS.X), (int)(bufferHeight * PS.Y));
					}
					else
					{
						//obey the AR, but allow free scaling: just zoom the virtual size
						chain_outsize = new Size(virtualWidth * zoom, virtualHeight * zoom);
					}
				}
				else
				{
					//ar_unity:
					//just choose to zoom the buffer (make no effort to incorporate AR)
					chain_outsize = new Size(bufferWidth * zoom, bufferHeight * zoom);
				}
			}
			else
			{
				//!ar_active:
				//just choose to zoom the buffer (make no effort to incorporate AR)
				chain_outsize = new Size(bufferWidth * zoom, bufferHeight * zoom);
			}

			var job = new JobInfo
			{
				videoProvider = fvp,
				simulate = true,
				chain_outsize = chain_outsize,
			};
			var filterProgram = UpdateSourceInternal(job);

			var size = filterProgram.Filters[filterProgram.Filters.Count - 1].FindOutput().SurfaceFormat.Size;

			Console.WriteLine("Selecting size " + size.ToString());
			return size;
		}
Example #55
		/// <summary>
		/// This will receive an emulated output frame from an IVideoProvider and run it through the complete frame processing pipeline,
		/// then stuff it into the bound PresentationPanel.
		/// ---
		/// If the int[] is size=1, then it contains an OpenGL texture ID (and the size should be as specified by videoProvider).
		/// Don't worry about the case where the frontend isn't using OpenGL; it isn't supported yet, and it will be my responsibility to deal with anyway.
		/// </summary>
		public void UpdateSource(IVideoProvider videoProvider)
		{
			var job = new JobInfo
			{
				videoProvider = videoProvider,
				simulate = false,
				chain_outsize = GraphicsControl.Size,
				includeOSD = true
			};
			UpdateSourceInternal(job);
		}
Example #56
		/// <summary>
		/// Attempts to calculate a good client size with the given zoom factor, considering the user's DisplayManager preferences
		/// TODO - this needs to be redone with a concept different from zoom factor.
		/// Basically, each increment of a 'zoomlike' factor should definitely increase the viewable area somehow, even if it isn't strictly by an entire zoom level.
		/// </summary>
		public Size CalculateClientSize(IVideoProvider videoProvider, int zoom)
		{
			bool ar_active = Global.Config.DispFixAspectRatio;
			bool ar_system = Global.Config.DispManagerAR == Config.EDispManagerAR.System;
			bool ar_custom = Global.Config.DispManagerAR == Config.EDispManagerAR.Custom;
			bool ar_customRatio = Global.Config.DispManagerAR == Config.EDispManagerAR.CustomRatio;
			bool ar_correct = ar_system || ar_custom || ar_customRatio;
			bool ar_unity = !ar_correct;
			bool ar_integer = Global.Config.DispFixScaleInteger;

			int bufferWidth = videoProvider.BufferWidth;
			int bufferHeight = videoProvider.BufferHeight;
			int virtualWidth = videoProvider.VirtualWidth;
			int virtualHeight = videoProvider.VirtualHeight;

			if (ar_custom)
			{
				virtualWidth = Global.Config.DispCustomUserARWidth;
				virtualHeight = Global.Config.DispCustomUserARHeight;
			}
			
			if (ar_customRatio)
			{
				FixRatio(Global.Config.DispCustomUserARX, Global.Config.DispCustomUserARY, videoProvider.BufferWidth, videoProvider.BufferHeight, out virtualWidth, out virtualHeight);
			}

			var padding = CalculateCompleteContentPadding(true, false);
			virtualWidth += padding.Horizontal;
			virtualHeight += padding.Vertical;

			padding = CalculateCompleteContentPadding(true, true);
			bufferWidth += padding.Horizontal;
			bufferHeight += padding.Vertical;

			//Console.WriteLine("DISPZOOM " + zoom); //test

			//old stuff
			var fvp = new FakeVideoProvider();
			fvp.BufferWidth = bufferWidth;
			fvp.BufferHeight = bufferHeight;
			fvp.VirtualWidth = virtualWidth;
			fvp.VirtualHeight = virtualHeight;

			Size chain_outsize = new Size(fvp.BufferWidth * zoom, fvp.BufferHeight * zoom);

			if (ar_active)
			{
				if (ar_correct)
				{
					if (ar_integer)
					{
						Vector2 VS = new Vector2(virtualWidth, virtualHeight);
						Vector2 BS = new Vector2(bufferWidth, bufferHeight);
						Vector2 AR = Vector2.Divide(VS, BS);
						float target_par = (AR.X / AR.Y);

						//this would malfunction for AR <= 0.5 or AR >= 2.0
						//EDIT - in fact, we have AR like that coming from PSX, sometimes, so maybe we should solve this better
						Vector2 PS = new Vector2(1, 1); 

						//here's how we define zooming, in this case:
						//make sure each step is an increment of zoom for at least one of the dimensions (or maybe both of them)
						//look for the increment which helps the AR the best
						//TODO - this can't possibly support scale factors like 1.5x
						//TODO - also, this might be messing up zooms and stuff, we might need to run this on the output size of the filter chain
						for (int i = 1; i < zoom; i++)
						{
							//would not be good to run this per frame, but it seems to only run when the resolution changes, etc.
							Vector2[] trials = new [] {
								PS + new Vector2(1, 0),
								PS + new Vector2(0, 1),
								PS + new Vector2(1, 1)
							};
							int bestIndex = -1;
							float bestValue = 1000.0f;
							for (int t = 0; t < trials.Length; t++)
							{
								//I.
								float test_ar = trials[t].X / trials[t].Y;

								//II.
								//Vector2 calc = Vector2.Multiply(trials[t], VS);
								//float test_ar = calc.X / calc.Y;
								
								//not clear which approach is superior
								float deviation_linear = Math.Abs(test_ar - target_par);
								float deviation_geom = test_ar / target_par;
								if (deviation_geom < 1) deviation_geom = 1.0f / deviation_geom;

								float value = deviation_linear;
								if (value < bestValue)
								{
									bestIndex = t;
									bestValue = value;
								}
							}
							//is it possible to get here without selecting one? doubtful.
							//EDIT: YES IT IS. it happened with a 0,0 buffer size. of course, that was a mistake, but we shouldn't crash
							if (bestIndex != -1) //so, what now? well, this will result in 0,0 getting picked, so that's probably all we can do
								PS = trials[bestIndex];
						}

						chain_outsize = new Size((int)(bufferWidth * PS.X), (int)(bufferHeight * PS.Y));
					}
					else
					{
						//obey the AR, but allow free scaling: just zoom the virtual size
						chain_outsize = new Size(virtualWidth * zoom, virtualHeight * zoom);
					}
				}
				else
				{
					//ar_unity:
					//just choose to zoom the buffer (make no effort to incorporate AR)
					chain_outsize = new Size(bufferWidth * zoom, bufferHeight * zoom);
				}
			}
			else
			{
				//!ar_active:
				//just choose to zoom the buffer (make no effort to incorporate AR)
				chain_outsize = new Size(bufferWidth * zoom, bufferHeight * zoom);
			}

			chain_outsize.Width += ClientExtraPadding.Horizontal;
			chain_outsize.Height += ClientExtraPadding.Vertical;

			var job = new JobInfo
			{
				videoProvider = fvp,
				simulate = true,
				chain_outsize = chain_outsize,
			};
			var filterProgram = UpdateSourceInternal(job);

			var size = filterProgram.Filters[filterProgram.Filters.Count - 1].FindOutput().SurfaceFormat.Size;

			return size;
		}
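For intuition, one refinement step of the integer-scale search with illustrative NES-like numbers: a 256x224 buffer with a 293x224 virtual size gives a target PAR of roughly 1.145, and of the three candidates the (2, 2) scale's PAR of 1.0 lands closest, so the client size comes out at 512x448.

    // Standalone sketch of one trial round (needs using System.Linq).
    float targetPar = (293f / 224f) / (256f / 224f); // ~1.145
    var ps = (X: 1f, Y: 1f);
    var trials = new[] { (ps.X + 1, ps.Y), (ps.X, ps.Y + 1), (ps.X + 1, ps.Y + 1) };
    ps = trials.OrderBy(t => Math.Abs(t.Item1 / t.Item2 - targetPar))
               .Select(t => (X: t.Item1, Y: t.Item2))
               .First();
    Console.WriteLine(ps); // (2, 2) => 512x448 client size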