private void CompositionTarget_Rendering(object sender, EventArgs e)
        {
            if (HostVideo == null)
            {
                return;
            }

            IVideoFrame frame = null;
            int         frameTag;

            if (!HostVideo.GetCurrentFrame(out frame, out frameTag))
            {
                return;
            }

            if (frameTag != _lastFrameTag)
            {
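                // frameTag changes only when the host has produced a new frame,
                // so the bitmap is rewritten only when there is something new to show.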
                Int32Rect rect = new Int32Rect(0, 0, _bitmap.PixelWidth, _bitmap.PixelHeight);

                _bitmap.Lock();
                _bitmap.WritePixels(rect, frame.Image.Data, frame.Image.Stride * frame.Image.Height, frame.Image.Stride);
                _bitmap.AddDirtyRect(rect);
                _bitmap.Unlock();

                _lastFrameTag = frameTag;
            }
        }
Example #2
        public static void Run()
        {
            //ExStart:ExtractVideo
            // The path to the documents directory.
            string dataDir = RunExamples.GetDataDir_Slides_Presentations_Media();

            // Instantiate a Presentation object that represents a presentation file
            Presentation presentation = new Presentation(dataDir + "Video.pptx");

            foreach (ISlide slide in presentation.Slides)
            {
                foreach (IShape shape in slide.Shapes)
                {
                    if (shape is VideoFrame)
                    {
                        IVideoFrame vf   = shape as IVideoFrame;
                        String      type = vf.EmbeddedVideo.ContentType;
                        type = type.Remove(0, type.LastIndexOf('/') + 1); // keep only the subtype, e.g. "mp4"
                        Byte[] buffer = vf.EmbeddedVideo.BinaryData;
                        using (FileStream stream = new FileStream(dataDir + "NewVideo_out." + type, FileMode.Create, FileAccess.Write, FileShare.Read))
                        {
                            stream.Write(buffer, 0, buffer.Length);
                        }
                    }
                }
            }
            //ExEnd:ExtractVideo
        }
        public static void Run()
        {
            // The path to the documents directory.
            string dataDir = RunExamples.GetDataDir_Shapes();

            // Create directory if it is not already present.
            bool IsExists = System.IO.Directory.Exists(dataDir);

            if (!IsExists)
            {
                System.IO.Directory.CreateDirectory(dataDir);
            }

            // Instantiate Presentation class that represents the PPTX
            using (Presentation pres = new Presentation())
            {
                // Get the first slide
                ISlide sld = pres.Slides[0];

                // Add Video Frame
                IVideoFrame vf = sld.Shapes.AddVideoFrame(50, 150, 300, 150, dataDir + "video1.avi");

                // Set Play Mode and Volume of the Video
                vf.PlayMode = VideoPlayModePreset.Auto;
                vf.Volume   = AudioVolumeMode.Loud;

                //Write the PPTX file to disk
                pres.Save(dataDir + "VideoFrame_out.pptx", SaveFormat.Pptx);
            }
        }
Example #4
        private async Task <IVideoFrame> QueryFrameAsync()
        {
            IVideoFrame frame = null;

            switch (_state)
            {
            case State.Stop:
                break;

            case State.Refresh:
            {
                frame  = _currentFrame;
                _state = State.Stop;
            }
            break;

            case State.StopBy:
            {
                do
                {
                    frame = await _videoReader.QueryFrameAsync();
                }while (frame != null && frame.Timestamp < _stopByTargetTime);

                _state            = State.Stop;
                _stopByTargetTime = 0.0;
            }
            break;

            case State.Play:
            {
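                // Pace playback against the wall clock: return null if less than one
                // frame interval has elapsed, otherwise decode (and drop) frames until
                // the accumulated time falls back under a single interval.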
                double frameInterval = 1000.0 / _videoProperties.FrameRate;

                DateTime currentTime = DateTime.Now;
                if (_playElapsed + (currentTime - _playUpdateTime).TotalMilliseconds < frameInterval)
                {
                    return(null);
                }

                do
                {
                    frame = await _videoReader.QueryFrameAsync();

                    _playElapsed -= frameInterval;
                    currentTime   = DateTime.Now;
                }while (frame != null && _playElapsed + (currentTime - _playUpdateTime).TotalMilliseconds >= frameInterval);

                _playElapsed   += (currentTime - _playUpdateTime).TotalMilliseconds;
                _playUpdateTime = currentTime;

                if (frame == null)
                {
                    _state = State.Stop;
                }
            }
            break;
            }

            return(frame);
        }
Example #5
 public override void Shutdown()
 {
     _videoSource.NewFrame -= Device_NewFrame;
     _videoSource.Stop();
     _stopped = true;
     _frame   = null;
     _signal.Set();
 }
Example #6
        internal void Deinterlace(IVideoFrame frame, IRenderFrame destination, DeinterlaceModes mode)
        {
            try
            {
                using (var ll = this.GetDrawLock())
                {
                    //   Lock();
                    GL.BindFramebuffer(FramebufferTarget.Framebuffer, (destination as RenderFrame).framebuffer);

                    var err = GL.CheckFramebufferStatus(FramebufferTarget.Framebuffer);

                    GL.Viewport(0, 0, destination.Width, destination.Height);// new Rectangle(this.window.Location,this.window.ClientSize));

                    GL.ClearColor(1, 1, 0, 1);
                    GL.Clear(ClearBufferMask.ColorBufferBit /*| ClearBufferMask.DepthBufferBit | ClearBufferMask.StencilBufferBit*/); // We're not using stencil buffer so why bother with clearing?

                    GL.Disable(EnableCap.DepthTest);
                    GL.Disable(EnableCap.StencilTest);

                    GL.ActiveTexture(TextureUnit.Texture0);
                    GL.BindTexture(TextureTarget.Texture2D, (frame as IOpenGLFrame).Textures[0]);

                    shader pp;
                    switch (mode)
                    {
                    default:
                        throw new NotImplementedException();

                    case DeinterlaceModes.Blend:
                        pp = deinterlaceblendshader;
                        break;

                    case DeinterlaceModes.Split:
                        pp = deinterlacesplitshader;
                        break;
                    }
                    pp.Bind(vertices2);
                    var locvpheight = GL.GetUniformLocation(pp, "vpHeight");
                    GL.Uniform1(locvpheight, destination.Height);


                    GL.DrawArrays(PrimitiveType.Quads, 0, 4);

                    GL.DisableVertexAttribArray(0);
                    GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
                    GL.ActiveTexture(TextureUnit.Texture0);
                    GL.BindTexture(TextureTarget.Texture2D, 0);
                    GL.UseProgram(0);

                    //  vertices3.Bind();
                    GL.BindFramebuffer(FramebufferTarget.Framebuffer, 0);
                }
            }
            catch (Exception e)
            {
                //      Log.LogException(e);
            }
        }
 private void CheckFrameAndThrow(IVideoFrame frame)
 {
     if (frame.Width != Width || frame.Height != Height || frame.Format != StreamFormat)
     {
         throw new FFMpegStreamFormatException(FFMpegExceptionType.Operation, "Video frame is not the same format as created raw video stream\r\n" +
                                               $"Frame format: {frame.Width}x{frame.Height} pix_fmt: {frame.Format}\r\n" +
                                               $"Stream format: {Width}x{Height} pix_fmt: {StreamFormat}");
     }
 }
Example #8
 /// <summary>
 /// saves values and allocates directx-buffer
 /// </summary>
 /// <param name="renderer"></param>
 /// <param name="video"></param>
 /// <param name="player"></param>
 public frameinfo(IRenderer renderer, VideoStream video, Player player)
 {
     this._player   = player;
     this._renderer = renderer;
     this._video    = video;
     this._avframe  = IntPtr.Zero;
     this.Frame     = video.AllocateFrame(this.allocfunc, this.lockfunc, this.unlockfunc); // wrapper to lock Buffer for interop
     this.Buffer    = this._renderer.GetFrame();
 }
Example #9
        public VideoFrameWrapper(IVideoFrame videoFrame)
        {
            this.videoFrame = videoFrame;

            UniqueFrameId = -1;
            ManualIntegrationRateHint = 0;

            if (!string.IsNullOrEmpty(videoFrame.ImageInfo))
            {
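                // ImageInfo is a semicolon-separated list of "KEY:value" tokens
                // (INT, CTOF, UFID, ...); parse the keys this wrapper understands.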
                string[] tokens = videoFrame.ImageInfo.Split(';');

                foreach (string token in tokens)
                {
                    string[] nvpair = token.Split(':');
                    if (nvpair.Length == 2)
                    {
                        if (nvpair[0] == "INT")
                            IntegrationRate = int.Parse(nvpair[1]);

                        if (nvpair[0] == "CTOF")
                            CutOffRatio = float.Parse(nvpair[1]);

                        if (nvpair[0] == "UFID")
                            UniqueFrameId = long.Parse(nvpair[1]);

                        if (nvpair[0] == "SFID")
                            StartExposureFrameNo = long.Parse(nvpair[1]);

                        if (nvpair[0] == "EFID")
                            EndExposureFrameNo = long.Parse(nvpair[1]);

                        if (nvpair[0] == "IFID")
                            IntegratedFrameNo = long.Parse(nvpair[1]);

                        if (nvpair[0] == "DRPD")
                            DroppedFramesSinceLocked = int.Parse(nvpair[1]);

                        if (nvpair[0] == "ACT")
                            PerformedAction = int.Parse(nvpair[1]);

                        if (nvpair[0] == "ACT%")
                            PerformedActionProgress = float.Parse(nvpair[1]);

                        if (nvpair[0] == "ORER")
                            OcrErrorsSinceReset = int.Parse(nvpair[1]);

                        if (nvpair[0] == "USRI")
                            ManualIntegrationRateHint = int.Parse(nvpair[1]);
                    }
                }
            }

            if (UniqueFrameId == -1)
                UniqueFrameId = videoFrame.FrameNumber;
        }
Example #10
        internal void Deinterlace(IVideoFrame frame, IRenderFrame destination, DeinterlaceModes mode)
        {
            if (this.device != null)
            {
                switch (mode)
                {
                default:
                    throw new NotImplementedException();

                case DeinterlaceModes.Blend:
                case DeinterlaceModes.Split:
                {
                    var state = this.StartRender(destination, rectangle.Zero);
                    try
                    {
                        /*         var dstrec = new System.Drawing.Rectangle(0, 0, destination.Width, destination.Height);
                         *       var effect = this.effect2;
                         *       var m = Matrix.Scaling(dstrec.Width, dstrec.Height, 1) * Matrix.Translation(dstrec.Left, dstrec.Top, 0);
                         *       var worldViewProj = m * this.CreateViewMatrix(destination.Width, destination.Height);
                         *       int n;
                         *       switch (mode)
                         *       {
                         *           case DeinterlaceModes.Blend:
                         *               {
                         *                   effect.SetValue("worldViewProj", worldViewProj);
                         *                   effect.SetTexture("texture0", (frame as IDirectXFrame).Textures[0]);
                         *                   effect.SetValue("vpHeight", frame.Height);
                         *                   n = 1;
                         *               }
                         *               break;
                         *           case DeinterlaceModes.Split:
                         *               {
                         *                   effect.SetValue("worldViewProj", worldViewProj);
                         *                   effect.SetTexture("texture0", (frame as IDirectXFrame).Textures[0]);
                         *                   effect.SetValue("vpHeight", frame.Height);
                         *                   n = 0;
                         *               }
                         *               break;
                         *           default:
                         *               throw new NotImplementedException();
                         *       }
                         *       this.Paint(
                         *           new System.Drawing.Rectangle(0, 0, destination.Width, destination.Height),
                         *           effect, n);*/
                    }
                    finally
                    {
                        this.EndRender(state);
                    }
                }
                break;
                }
            }
        }
Example #11
 public static Mat ToMat(this IVideoFrame frame)
 {
     if (frame is MatVideoFrame ocvFrame)
     {
         return(ocvFrame.NativeFrame);
     }
     else
     {
         unsafe
         {
             fixed(void *spanPtr = frame.GetBuffer())
             {
                 return(new Mat(frame.EncodingProperties.Resolution.Height, frame.EncodingProperties.Resolution.Width, default, new IntPtr(spanPtr)));
             }
         }
     }
 }
Example #12
        private static void AddVideoFromYouTube(Presentation pres, string videoId)
        {
            //add videoFrame
            IVideoFrame videoFrame = pres.Slides[0].Shapes.AddVideoFrame(10, 10, 427, 240, "https://www.youtube.com/embed/" + videoId);

            videoFrame.PlayMode = VideoPlayModePreset.Auto;

            //load thumbnail
            using (WebClient client = new WebClient())
            {
                string thumbnailUri = "http://img.youtube.com/vi/" + videoId + "/hqdefault.jpg";
                videoFrame.PictureFormat.Picture.Image = pres.Images.AddImage(client.DownloadData(thumbnailUri));
            }
        }
        public static void Run()
        {
            //ExStart:EmbeddedVideoFrame
            // The path to the documents directory.
            string dataDir    = RunExamples.GetDataDir_Shapes();
            string videoDir   = RunExamples.GetDataDir_Video();
            string resultPath = Path.Combine(RunExamples.OutPath, "VideoFrame_out.pptx");

            // Create directory if it is not already present.
            bool IsExists = System.IO.Directory.Exists(dataDir);

            if (!IsExists)
            {
                System.IO.Directory.CreateDirectory(dataDir);
            }
            // Instantiate Presentation class that represents the PPTX
            using (Presentation pres = new Presentation())
            {
                // Get the first slide
                ISlide sld = pres.Slides[0];

                // Embed video inside the presentation
                IVideo vid = pres.Videos.AddVideo(new FileStream(videoDir + "Wildlife.mp4", FileMode.Open), LoadingStreamBehavior.ReadStreamAndRelease);

                // Add Video Frame
                IVideoFrame vf = sld.Shapes.AddVideoFrame(50, 150, 300, 350, vid);

                // Set video to Video Frame
                vf.EmbeddedVideo = vid;

                // Set Play Mode and Volume of the Video
                vf.PlayMode = VideoPlayModePreset.Auto;
                vf.Volume   = AudioVolumeMode.Loud;

                // Write the PPTX file to disk
                pres.Save(resultPath, SaveFormat.Pptx);
            }
            //ExEnd:EmbeddedVideoFrame
        }
Example #14
        public async void StartBackgroundJob(object sender, DoWorkEventArgs e)
        {
            while (!_backgroundWorker.CancellationPending)
            {
                IVideoFrame frame = await QueryFrameAsync();

                if (frame != null)
                {
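                    // Keep the raw frame, run a clone through the filter chain, then
                    // publish the filtered result together with a new frame tag.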
                    _currentFrame = frame;
                    IVideoFrame filteredFrame = frame.Clone();

                    if (_videoFilters != null)
                    {
                        foreach (IVideoFilter filter in _videoFilters)
                        {
                            filter.Filter(filteredFrame);
                        }
                    }

                    _currentFilteredFrame = filteredFrame;
                    _currentFrameTag      = _frameTagCounter++;

                    int progress = (int)(256 * frame.Timestamp / _videoProperties.Duration);
                    if (progress != _progress)
                    {
                        _progress = progress;
                        PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(Progress)));
                    }

                    if (frame.Timestamp != _position)
                    {
                        _position = frame.Timestamp;
                        PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(nameof(Position)));
                    }
                }
            }
        }
Example #15
        public static void Run()
        {
            // The path to the documents directory.
            string dataDir = RunExamples.GetDataDir_Shapes();


            // Create directory if it is not already present.
            bool IsExists = System.IO.Directory.Exists(dataDir);

            if (!IsExists)
            {
                System.IO.Directory.CreateDirectory(dataDir);
            }
            //Instantiate Presentation class that represents the PPTX
            using (Presentation pres = new Presentation())
            {
                //Get the first slide
                ISlide sld = pres.Slides[0];

                //Embed video inside the presentation
                IVideo vid = pres.Videos.AddVideo(new FileStream(dataDir + "Wildlife.wmv", FileMode.Open));

                //Add Video Frame
                IVideoFrame vf = sld.Shapes.AddVideoFrame(50, 150, 300, 350, vid);

                //Set video to Video Frame
                vf.EmbeddedVideo = vid;

                //Set Play Mode and Volume of the Video
                vf.PlayMode = VideoPlayModePreset.Auto;
                vf.Volume   = AudioVolumeMode.Loud;

                //Write the PPTX file to disk
                pres.Save(dataDir + "VideoFrame.pptx", SaveFormat.Pptx);
            }
        }
Example #16
 /// <summary>Adds a frame.</summary>
 /// <param name="frame">The frame.</param>
 public void AddFrame(IVideoFrame frame)
 {
     m_Frames.Add(frame);
 }
Example #17
 public bool GetCurrentFrame(out IVideoFrame frame, out int frameTag)
 {
     frame    = _currentFilteredFrame;
     frameTag = _currentFrameTag;
     return(_currentFilteredFrame != null);
 }
Example #18
        void IRenderer.Present(IVideoFrame src, rectangle dstrec, IntPtr window) // painting on a block or preview target with alpha=255
        {
            dstrec = new rectangle(point.Zero, this.viewsize);

            if (islost || device.TestCooperativeLevel() == ResultCode.DeviceLost /*||
                                                                                  * this.lastsize.Width != r.Width || this.lastsize.Height != r.Height*/)
            {
                Reset();
                //      this.lastsize = r.Size;

                islost = false;
            }
            if (src != null)
            {
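                // Copy the source frame into the renderer's texture through a locked rectangle.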
                this.frame.Set(0, src.Width, src.Height, 0);

                var dr = (this.frame as IDirectXFrame).Textures[0].LockRectangle(0, LockFlags.Discard);

                Debug.Assert(this.frame.Width == src.Width);
                Debug.Assert(this.frame.Height == src.Height);


                using (var lck = this.opentk.GetDrawLock())
                {
                    src.CopyTo(dr.DataPointer, dr.Pitch);
                    //       Marshal.Copy(fill, 0, dr.DataPointer, dr.Pitch * src.Height);
                }
                (this.frame as IDirectXFrame).Textures[0].UnlockRectangle(0);
            }
            else
            {
                this.frame.Set(0, this.renderframe.Width, this.renderframe.Height, 0);

                var dr = (this.frame as IDirectXFrame).Textures[0].LockRectangle(0, LockFlags.Discard);

                Debug.Assert(this.frame.Width == this.renderframe.Width);
                Debug.Assert(this.frame.Height == this.renderframe.Height);

                using (var lck = this.opentk.GetDrawLock())
                {
                    this.renderframe.CopyTo(dr.DataPointer, dr.Pitch);
                    //       Marshal.Copy(fill, 0, dr.DataPointer, dr.Pitch * renderframe.Height);
                }
                (this.frame as IDirectXFrame).Textures[0].UnlockRectangle(0);
            }
            //    IDirectXFrame framesrc = (IDirectXFrame)src;

            /*   device.Viewport = new SharpDX.Mathematics.Interop.RawViewport()
             * {
             *     X = 0,
             *     Y = 0,
             *     Width = this.viewsize.width,
             *     Height = viewsize.height,
             *     MinDepth=0,
             *     MaxDepth=1
             * };*/

            device.Clear(ClearFlags.Target, new SharpDX.Mathematics.Interop.RawColorBGRA(0, 0, 255, 255), 1.0f, 0);

            device.BeginScene();

            device.SetStreamSource(0, vertices2, 0, Utilities.SizeOf <vertex>());
            device.VertexDeclaration = vertexDecl2;

            var m = Matrix.Scaling(dstrec.width, -dstrec.height, 1) * Matrix.Translation(dstrec.x, dstrec.height, 0);

            //   Matrix proj = Matrix.Scaling(1, -1, 1);
            //   m= Matrix.Multiply(m, proj);
            var worldViewProj = m * CreateViewMatrix(this.viewsize.width, this.viewsize.height);

            presenteffect.SetValue("worldViewProj", worldViewProj);
            presenteffect.SetValue("alpha", 1.0f);
            presenteffect.SetTexture("texture0", (this.frame as IDirectXFrame).Textures[0]);

            //       effect.Technique = technique;
            presenteffect.Begin();
            presenteffect.BeginPass(0);

            device.DrawPrimitives(PrimitiveType.TriangleList, 0, 2);

            presenteffect.EndPass();
            presenteffect.End();

            device.EndScene();

            presenteffect.SetTexture("texture0", null);
        }
Example #19
        void IRenderer.Present(IVideoFrame renderedframe, rectangle dstrec, IntPtr ctl)
        {
            try
            {
                if (!this.stopevent.WaitOne(0, false))
                {
                    if (renderedframe == null)
                    {
                        //   this.Xwt.SwapBuffers(this.window);
                    }
                    else
                    {
                        if (presentshader == null)
                        {
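                            // First use: build the vertex buffer and present shader, then
                            // point the "tex" sampler at texture unit 0.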
                            this.vertices3     = new vertices <vertex>(_vertices3);
                            this.presentshader = new shader(shadervertex, shaderfragment, vertices3);
                            // presentshader.Bind(this.vertices3);
                            this.vertices3.define("position", "position");
                            this.vertices3.define("texcoord", "texcoord");
                            GL.UseProgram(this.presentshader);
                            var pos = GL.GetUniformLocation(this.presentshader, "tex");
                            GL.Uniform1(pos, 0);
                        }
                        vertices3.Apply(this.presentshader);
                        var frame = (RenderFrame)renderedframe;

                        GL.Viewport(dstrec.x, dstrec.y, dstrec.width, dstrec.height);// new Rectangle(this.window.Location,this.window.ClientSize));

                        GL.ClearColor(1, 1, 0, 1);
                        GL.Clear(ClearBufferMask.ColorBufferBit); // We're not using stencil buffer so why bother with clearing?

                        GL.Disable(EnableCap.DepthTest);
                        GL.Disable(EnableCap.StencilTest);

                        GL.Disable(EnableCap.Blend);
                        GL.BlendFunc(BlendingFactor.One, BlendingFactor.OneMinusSrcAlpha);

                        GL.BindTexture(TextureTarget.Texture2D, frame.Textures[0]);

                        //  GL.DrawElements(BeginMode.Triangles,6,DrawElementsType.UnsignedInt,0)
                        GL.DrawArrays(PrimitiveType.Triangles, 0, 6);

                        GL.DisableVertexAttribArray(0);
                        GL.Disable(EnableCap.Blend);
                        GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
                        GL.BindTexture(TextureTarget.Texture2D, 0);
                        GL.UseProgram(0);
                        GL.BindVertexArray(0);

                        //    this.Xwt.SwapBuffers(this.window);
                        //       GL.Flush();
                    }
                }
            }
            catch (Exception e)
            {
                //         Log.LogException(e);
            }
            finally
            {
            }
        }
Example #20
        private void PaintVideoFrame(IVideoFrame frame, Bitmap bmp)
        {
            bool isEmptyFrame = frame == null;
            if (!isEmptyFrame)
                isEmptyFrame = frame.ImageArray == null;

            if (isEmptyFrame)
            {
                using (Graphics g = Graphics.FromImage(mainForm.picboxVideo.Image))
                {
                    if (bmp == null)
                        g.Clear(Color.Green);
                    else
                        g.DrawImage(bmp, 0, 0);

                    g.Save();
                }

                mainForm.picboxVideo.Invalidate();
                return;
            }

            currentFrameNo = frame.FrameNumber;

            renderedFrameCounter++;

            if (renderedFrameCounter == 20)
            {
                renderedFrameCounter = 0;
                endTicks = DateTime.Now.Ticks;
                if (startTicks != 0)
                {
                    renderFps = 20.0 / new TimeSpan(endTicks - startTicks).TotalSeconds;
                }
                startTicks = DateTime.Now.Ticks;
            }

            using (Graphics g = Graphics.FromImage(mainForm.picboxVideo.Image))
            {
                g.DrawImage(bmp, 0, 0);

                g.Save();
            }

            mainForm.picboxVideo.Invalidate();
            bmp.Dispose();
        }
Example #21
 private void Device_NewFrame(object sender, NewFrameEventArgs eventArgs)
 {
     _frame = new VideoFrame(eventArgs.Frame);
     _signal.Set();
 }
Example #22
        internal DirectX9Renderer(FrameFactory owner, IXwtRender xwt, Canvas widget, System.Windows.FrameworkElement window, System.Windows.Window main, IRenderOwner renderer, FPS fps, size videosize)
        {
            this.owner      = owner;
            this.xwt        = xwt;
            this.widget     = widget;
            this.window     = window as System.Windows.Controls.Panel;
            this.mainwindow = main;
            this.videosize  = videosize;
            this.renderer   = renderer;

            var w = System.Windows.Window.GetWindow(this.mainwindow);
            var h = new WindowInteropHelper(w);

            this.hwnd = h.Handle;

            /*    mainwindow.Measure(new System.Windows.Size(double.PositiveInfinity, double.PositiveInfinity));
             *  mainwindow.Arrange(new Rect(0, 0, mainwindow.Width, mainwindow.Height));
             *
             *  this.window.Arrange(new Rect(0, 0, this.window.ActualWidth, this.window.ActualHeight));*/



            //window..CompositionTarget

            //    OpenTKRenderer.usecnt = 1;            //  this.ctlhandle = this.ctl.Handle;
            this.viewsize = new size(Convert.ToInt32(window.ActualWidth), Convert.ToInt32(window.ActualHeight));

            this.window.SizeChanged += Ctl_SizeChanged;

            xwt.CreateForWidgetContext(this, renderer, widget);

            this.opentk = this.owner.opentk.Open(this.owner.opentkxwt, widget, renderer, fps, videosize);

            this.thread = new WorkerThread();
            this.thread.Do(() =>
            {
                //     System.Drawing.Rectangle r = new System.Drawing.Rectangle(System.Drawing.Point.Empty, this.viewsize);// Win32Helper.GetClientRect(ctlhandle);

                //     this.lastsize = new System.Drawing.Size(r.Width, r.Height);

                this.pp = new PresentParameters(this.videosize.width, this.videosize.height);
                pp.DeviceWindowHandle     = this.hwnd;
                pp.EnableAutoDepthStencil = true;
                pp.SwapEffect             = SwapEffect.Copy;
                pp.PresentationInterval   = PresentInterval.Immediate;

                try
                {
                    /*       this.direct3D = new Direct3DEx();
                     *     this.isex = true;
                     *     this.device = new DeviceEx(this.direct3D as Direct3DEx, 0, DeviceType.Hardware, this.hwnd, CreateFlags.Multithreaded | CreateFlags.HardwareVertexProcessing, pp);*/
                }
                catch
                {
                    if (this.direct3D != null)
                    {
                        throw;
                    }
                }
                if (this.direct3D == null)
                {
                    this.direct3D = new Direct3D();
                    this.device   = new Device(this.direct3D, 0, DeviceType.Hardware, this.hwnd, CreateFlags.Multithreaded | CreateFlags.HardwareVertexProcessing, pp);
                }
                this.depthtexture = new Texture(this.device, 4096, 4096, 1, Usage.DepthStencil, Format.D24S8, Pool.Default);
                this.depthsurface = this.depthtexture.GetSurfaceLevel(0);
                this.olddepth     = this.device.DepthStencilSurface;
                this.device.DepthStencilSurface = this.depthsurface;

                //       this.lastsize = r.Size;

                // Compiles the effect
                this.presenteffect = _LoadEffect("render");// Effect.FromFile(device, "render.fx", ShaderFlags.None);
                this.technique     = presenteffect.GetTechnique(0);
                this.effect2       = _LoadEffect("render2");
                this.technique2    = effect2.GetTechnique(0);
                this.effect3       = _LoadEffect("render3");


                // Get the technique

                // Prepare matrices

                // Creates and sets the Vertex Declaration
                this.vertexDecl2 = new VertexDeclaration(device, vertexElems2);
                //    device.SetStreamSource(0, vertices2, 0, Utilities.SizeOf<vertex>());
                //      device.VertexDeclaration = vertexDecl2;

                this.vertices2 = new VertexBuffer(device, Utilities.SizeOf <vertex>() * 6, Usage.WriteOnly, VertexFormat.None, isex ? Pool.Default : Pool.Managed);
                vertices2.Lock(0, 0, LockFlags.None).WriteRange(this.initdata);
                vertices2.Unlock();

                this.indices = new IndexBuffer(device, sizeof(int) * initdata2.Length, Usage.WriteOnly, isex ? Pool.Default : Pool.Managed, false);
                this.indices.Lock(0, 0, LockFlags.None).WriteRange(this.initdata2);
                this.indices.Unlock();

                this.frame = new VideoFrame(this);
                this.frame.Set(opentk.AlphaFormat);
            });

            this._layer = new layer(this);
            this._layer?.rec.Arrange(new Rect(0, 0, this.window.ActualWidth, this.window.ActualHeight));

            this.window.Children.Add((FrameworkElement)_layer);

            //this.initdone.Set();
        }
Example #23
 public VideoFrameRecord(IVideoFrame frame, ulong index, ulong timestamp)
 {
     Frame     = frame;
     Index     = index;
     Timestamp = timestamp;
 }
Example #24
        public VideoFrameEnvelop(IVideoFrame videoFrame)
        {
            try
            {
                ExposureDuration = videoFrame.ExposureDuration;
            }
            catch(NotSupportedException)
            { }

            try
            {
                ExposureStartTime = videoFrame.ExposureStartTime;
            }
            catch (NotSupportedException)
            { }

            try
            {
                FrameNumber = videoFrame.FrameNumber;
            }
            catch (NotSupportedException)
            { }

            try
            {
                // TODO: This is an extremely slow and naive implementation
                if (videoFrame.ImageArray is int[,])
                {
                    ImageArrayDimentions = 2;

                    int[,] pixels = (int[,])videoFrame.ImageArray;

                    ImageArrayLength2 = pixels.GetLength(0);
                    ImageArrayLength1 = pixels.GetLength(1);

                    ImageArrayPacked = new byte[ImageArrayLength1 * ImageArrayLength2 * sizeof(int)];

                    int idx = 0;
                    for (int y = 0; y < ImageArrayLength2; y++)
                    {
                        for (int x = 0; x < ImageArrayLength1; x++)
                        {
                            int intVal = pixels[y, x];
                            ImageArrayPacked[idx] = (byte)(intVal & 0xFF);
                            ImageArrayPacked[idx + 1] = (byte)((intVal >> 8) & 0xFF);
                            ImageArrayPacked[idx + 2] = (byte)((intVal >> 16) & 0xFF);
                            ImageArrayPacked[idx + 3] = (byte)((intVal >> 24) & 0xFF);
                            idx += 4;
                        }
                    }
                }

                ImageArray = videoFrame.ImageArray;

            }
            catch (NotSupportedException)
            { }

            try
            {
                ImageInfo = videoFrame.ImageInfo;
            }
            catch (NotSupportedException)
            { }
        }
Example #25
 public IsolatedVideoFrame(IVideoFrame videoFrame)
 {
     m_VideoFrame = videoFrame;
 }
Example #26
        protected virtual void Dispose(bool disposing)
        {
            Trace.WriteLine("OccuRec: ASCOMServer::VideoFrame::Dispose()");

            m_VideoFrame = null;

            RemotingServices.Disconnect(this);
        }
Example #27
        private void DisplayVideoFrames(object state)
        {
            while (running)
            {
                if (videoObject != null &&
                    videoObject.IsConnected &&
                    previewOn)
                {
                    try
                    {
                        IVideoFrame frame = videoObject.LastVideoFrame;

                        if (frame != null &&
                            (frame.FrameNumber == -1 || frame.FrameNumber != lastDisplayedVideoFrameNumber))
                        {
                            lastDisplayedVideoFrameNumber = frame.FrameNumber;

                            Bitmap bmp = null;

                            if (Settings.Default.UsePreviewBitmap)
                            {
                                using (var memStr = new MemoryStream(frame.PreviewBitmap))
                                {
                                    bmp = (Bitmap)Image.FromStream(memStr);
                                }
                            }
                            else if (Settings.Default.UseNativeCode)
                            {
                                cameraImage.SetImageArray(
                                    frame.ImageArray,
                                    imageWidth,
                                    imageHeight,
                                    videoObject.SensorType);

                                byte[] bmpBytes = cameraImage.GetDisplayBitmapBytes();
                                using (MemoryStream memStr = new MemoryStream(bmpBytes))
                                {
                                    bmp = (Bitmap)Image.FromStream(memStr);
                                }
                            }
                            else
                            {
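                                // Managed fallback: treat ImageArray as int[,] and build a
                                // 24bpp grayscale Bitmap pixel by pixel.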
                                Array safeArr = (Array)frame.ImageArray;

                                int[,] pixels;
                                if (safeArr is int[, ])
                                {
                                    pixels = (int[, ])safeArr;
                                }
                                else if (safeArr is int[, , ])
                                {
                                    // R,G,B planes
                                    throw new NotSupportedException();
                                }
                                else
                                {
                                    throw new NotSupportedException("Unsupported pixel format in Managed mode.");
                                }

                                bmp = new Bitmap(imageWidth, imageHeight);
                                BitmapData bmData = bmp.LockBits(new Rectangle(0, 0, bmp.Width, bmp.Height), ImageLockMode.ReadWrite, PixelFormat.Format24bppRgb);
                                try
                                {
                                    unsafe
                                    {
                                        int    stride = bmData.Stride;
                                        IntPtr Scan0  = bmData.Scan0;
                                        byte * p      = (byte *)(void *)Scan0;

                                        int nOffset = stride - bmp.Width * 3;

                                        for (int y = 0; y < bmp.Height; ++y)
                                        {
                                            for (int x = 0; x < bmp.Width; ++x)
                                            {
                                                byte red = (byte)pixels[y, x];
                                                p[0] = red;
                                                p[1] = red;
                                                p[2] = red;

                                                p += 3;
                                            }
                                            p += nOffset;
                                        }
                                    }
                                }
                                finally
                                {
                                    bmp.UnlockBits(bmData);
                                }
                            }

                            Invoke(new PaintVideoFrameDelegate(PaintVideoFrame), new object[] { frame, bmp });
                        }
                    }
                    catch (ObjectDisposedException) { }
                    catch (Exception ex)
                    {
                        Trace.WriteLine(ex);

                        Bitmap errorBmp = new Bitmap(pictureBox.Width, pictureBox.Height);
                        using (Graphics g = Graphics.FromImage(errorBmp))
                        {
                            g.Clear(Color.Tomato);
                            g.DrawString(ex.Message, debugTextFont, Brushes.Black, 10, 10);
                            g.Save();
                        }
                        try
                        {
                            Invoke(new PaintVideoFrameDelegate(PaintVideoFrame), new object[] { null, errorBmp });
                        }
                        catch (InvalidOperationException)
                        {
                            // InvalidOperationException could be thrown when closing down the app i.e. when the form has been already disposed
                        }
                    }
                }

                Thread.Sleep(1);
                Application.DoEvents();
            }
        }
Example #28
        private void PaintVideoFrame(IVideoFrame frame, Bitmap bmp)
        {
            bool isEmptyFrame = frame == null;

            if (!isEmptyFrame)
            {
                isEmptyFrame = frame.ImageArray == null;
            }

            if (isEmptyFrame)
            {
                using (Graphics g = Graphics.FromImage(pictureBox.Image))
                {
                    if (bmp == null)
                    {
                        g.Clear(Color.Green);
                    }
                    else
                    {
                        g.DrawImage(bmp, 0, 0);
                    }

                    g.Save();
                }

                pictureBox.Invalidate();
                return;
            }

            currentFrameNo = frame.FrameNumber;
            UpdateState();
            renderedFrameCounter++;

            if (renderedFrameCounter == 20)
            {
                renderedFrameCounter = 0;
                endTicks             = DateTime.Now.Ticks;
                if (startTicks != 0)
                {
                    renderFps = 20.0 / new TimeSpan(endTicks - startTicks).TotalSeconds;
                }
                startTicks = DateTime.Now.Ticks;
            }

            using (Graphics g = Graphics.FromImage(pictureBox.Image))
            {
                g.DrawImage(bmp, 0, 0);

                g.Save();
            }

            pictureBox.Invalidate();
            bmp.Dispose();

            if (framesBeforeUpdatingCameraVideoFormat >= 0)
            {
                framesBeforeUpdatingCameraVideoFormat--;
            }

            if (framesBeforeUpdatingCameraVideoFormat == 0)
            {
                lblVideoFormat.Text = videoObject.CameraVideoFormat;
            }
        }
Example #29
 public AstroImage(IVideoFrame videoFrame, int width, int height, uint maxSignalValue = 255)
     : this((int[,])videoFrame.ImageArray, width, height, maxSignalValue)
 {
 }