Example #1
        private bool InitializeBuffers(SharpDX.Direct3D11.Device device)
        {
            // Create the vertex array.
            var vertices = new PositionVertex[VertexCount];

            // Create the index array.
            int[] indices = new int[IndexCount];

            // Load the vertex array and index array with data.
            for (int i = 0; i < VertexCount; i++)
            {
                vertices[i].position = new Vector3(Model[i].x, Model[i].y, Model[i].z);
                indices[i]           = i;
            }

            // Create the vertex buffer.
            VertexBuffer = SharpDX.Direct3D11.Buffer.Create(device, BindFlags.VertexBuffer, vertices);

            // Create the index buffer.
            IndexBuffer = SharpDX.Direct3D11.Buffer.Create(device, BindFlags.IndexBuffer, indices);

            // Release the arrays now that the buffers have been created and loaded.
            vertices = null;
            indices  = null;
            ReleaseSkyDomeModel();

            return(true);
        }
        public static UnorderedAccessView CreateBufferUAV(SharpDX.Direct3D11.Device device, Buffer buffer, UnorderedAccessViewBufferFlags flags = UnorderedAccessViewBufferFlags.None)
        {
            UnorderedAccessViewDescription uavDesc = new UnorderedAccessViewDescription
            {
                Dimension = UnorderedAccessViewDimension.Buffer,
                Buffer    = new UnorderedAccessViewDescription.BufferResource {
                    FirstElement = 0
                }
            };

            if ((buffer.Description.OptionFlags & ResourceOptionFlags.BufferAllowRawViews) == ResourceOptionFlags.BufferAllowRawViews)
            {
                // A raw buffer requires R32_Typeless
                uavDesc.Format              = Format.R32_Typeless;
                uavDesc.Buffer.Flags        = UnorderedAccessViewBufferFlags.Raw | flags;
                uavDesc.Buffer.ElementCount = buffer.Description.SizeInBytes / 4;
            }
            else if ((buffer.Description.OptionFlags & ResourceOptionFlags.BufferStructured) == ResourceOptionFlags.BufferStructured)
            {
                uavDesc.Format              = Format.Unknown;
                uavDesc.Buffer.Flags        = flags;
                uavDesc.Buffer.ElementCount = buffer.Description.SizeInBytes / buffer.Description.StructureByteStride;
            }
            else
            {
                throw new ArgumentException("Buffer must be raw or structured", "buffer");
            }

            return(new UnorderedAccessView(device, buffer, uavDesc));
        }
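
A hedged usage sketch for CreateBufferUAV (the element count, element type and compute-shader slot are assumptions, not from the original):

        // Create a structured buffer and bind its UAV to a compute shader.
        var elementCount   = 1024;
        var structuredDesc = new BufferDescription
        {
            SizeInBytes         = elementCount * Utilities.SizeOf<Vector4>(),
            Usage               = ResourceUsage.Default,
            BindFlags           = BindFlags.UnorderedAccess | BindFlags.ShaderResource,
            OptionFlags         = ResourceOptionFlags.BufferStructured,
            StructureByteStride = Utilities.SizeOf<Vector4>(),
            CpuAccessFlags      = CpuAccessFlags.None
        };

        var structuredBuffer = new SharpDX.Direct3D11.Buffer(device, structuredDesc);
        var uav = CreateBufferUAV(device, structuredBuffer);
        device.ImmediateContext.ComputeShader.SetUnorderedAccessView(0, uav);
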
Example #3
        void Render(TimeSpan sceneTime)
        {
            try {
                SharpDX.Direct3D11.Device device = this.Device;
                if (device == null)
                {
                    return;
                }

                if (device.IsDisposed)
                {
                    NeedsReloading();
                    return;
                }

                Texture2D renderTarget = this.RenderTarget2;
                if (renderTarget == null)
                {
                    return;
                }

                int targetWidth  = renderTarget.Description.Width;
                int targetHeight = renderTarget.Description.Height;

                device.ImmediateContext.OutputMerger.SetTargets(this.DepthStencilView2, this.RenderTargetView2);
                device.ImmediateContext.Rasterizer.SetViewport(new Viewport(0, 0, targetWidth, targetHeight, 0.0f, 1.0f));

                device.ImmediateContext.ClearRenderTargetView(this.RenderTargetView2, this.ClearColor);
                device.ImmediateContext.ClearDepthStencilView(this.DepthStencilView2, DepthStencilClearFlags.Depth | DepthStencilClearFlags.Stencil, 1.0f, 0);

                if (this.Scene != null)
                {
                    if (!this.SceneAttached)
                    {
                        this.SceneAttached = true;
                        this.RenderScene.Attach(this);
                    }

                    this.Scene.Update(this.RenderTimer.Elapsed);
                    this.Scene.Render();
                }

                device.ImmediateContext.Flush();
            } catch (Exception exc)
            {
                Console.WriteLine("[EXC] " + exc.Message);
                Console.WriteLine("[EXC] " + exc.StackTrace);
            }
        }
Example #4
        public void Init(SharpDX.Direct3D11.Device device)
        {
            vertexBuffer        = Buffer.Create(device, BindFlags.VertexBuffer, vertices, vertices.Length * ImmediateVertexSize, ResourceUsage.Dynamic, CpuAccessFlags.Write);
            vertexBufferBinding = new VertexBufferBinding()
            {
                Buffer = vertexBuffer, Offset = 0, Stride = ImmediateVertexSize
            };

            var cb = new ImmediateConstantBuffer()
            {
                WVPMatrix   = Matrix.Identity,
                iResolution = Vector4.Zero,
                iTime       = 0,
                CameraPos   = Vector3.Zero,
                CameraDir   = Matrix.Identity,
            };

            worldViewProjectionBuffer = Buffer.Create(device, BindFlags.ConstantBuffer, ref cb);
            this.ConstantBuffer       = cb;

            for (int i = 0; i < textures.Length; ++i)
            {
                textures[i].texture = null;
                textures[i].view    = null;
                textures[i].sampler = new SamplerState(device, new SamplerStateDescription()
                {
                    Filter             = Filter.MinMagMipLinear,
                    AddressU           = TextureAddressMode.Clamp,
                    AddressV           = TextureAddressMode.Clamp,
                    AddressW           = TextureAddressMode.Clamp,
                    BorderColor        = Color.Black,
                    ComparisonFunction = Comparison.Never,
                    MaximumAnisotropy  = 16,
                    MipLodBias         = 0,
                    MinimumLod         = 0,
                    MaximumLod         = float.MaxValue,
                });
            }

            ShaderBank = new ShaderBank(device);
            ShaderBank.LoadShader("raytracer", "raytracer.fx",
                                  new InputElement("POSITION", 0, Format.R32G32B32A32_Float, 0, 0));

            Shader  = ShaderBank["raytracer"];
            Context = device.ImmediateContext;
        }
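
Because vertexBuffer is created with ResourceUsage.Dynamic and CpuAccessFlags.Write, per-frame data is typically streamed in with a WriteDiscard map. A sketch, assuming vertices still holds the current CPU-side data (not part of the original):

        DataStream stream;
        Context.MapSubresource(vertexBuffer, MapMode.WriteDiscard, SharpDX.Direct3D11.MapFlags.None, out stream);
        stream.WriteRange(vertices);                 // copy the CPU-side vertices into the GPU buffer
        Context.UnmapSubresource(vertexBuffer, 0);

        // The constant buffer can be refreshed with UpdateSubresource.
        var cbData = this.ConstantBuffer;
        Context.UpdateSubresource(ref cbData, worldViewProjectionBuffer);
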
Example #5
        private static SharpDX.Direct3D11.Texture2D CreateTexture2DFromBitmap(SharpDX.Direct3D11.Device device, SharpDX.DataStream buffer, int width, int height)
        {
            int iStride = width * 4;

            return(new SharpDX.Direct3D11.Texture2D(device, new SharpDX.Direct3D11.Texture2DDescription()
            {
                Width = width,
                Height = height,
                ArraySize = 1,
                BindFlags = SharpDX.Direct3D11.BindFlags.ShaderResource,
                Usage = SharpDX.Direct3D11.ResourceUsage.Immutable,
                CpuAccessFlags = SharpDX.Direct3D11.CpuAccessFlags.None,
                Format = SharpDX.DXGI.Format.R8G8B8A8_UNorm,
                MipLevels = 1,
                OptionFlags = SharpDX.Direct3D11.ResourceOptionFlags.None,
                SampleDescription = new SharpDX.DXGI.SampleDescription(1, 0),
            }, new SharpDX.DataRectangle(buffer.DataPointer, iStride)));
        }
Example #6
        internal ShaderResourceView GetShaderResourceFromFile(SharpDX.Direct3D11.Device device, string imagePath)
        {
            using (var oBitmapSource = LoadBitmap(new SharpDX.WIC.ImagingFactory2(), imagePath))
            {
                // The stride is the number of bytes per line: the image width times 4 bytes per pixel.
                int iStride = oBitmapSource.Size.Width * 4;
                using (var oBuffer = new SharpDX.DataStream(oBitmapSource.Size.Height * iStride, true, true))
                {
                    // Copy the content of the WIC to the buffer
                    oBitmapSource.CopyPixels(iStride, oBuffer);

                    using (var oTexture = CreateTexture2DFromBitmap(device, oBuffer, oBitmapSource.Size.Width, oBitmapSource.Size.Height))
                    {
                        ShaderResourceView oShaderResourceView = new ShaderResourceView(device, oTexture);
                        return(oShaderResourceView);
                    }
                }
            }
        }
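
A usage sketch for GetShaderResourceFromFile (the file path and shader slot are illustrative assumptions):

        ShaderResourceView diffuseView = GetShaderResourceFromFile(device, @"textures\diffuse.png");
        device.ImmediateContext.PixelShader.SetShaderResource(0, diffuseView);
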
        public SC_SharpDX_ScreenCapture(int adapter, int numOutput, SharpDX.Direct3D11.Device device_)
        {
            //_textureByteArray[0] = 0;
            imageptrList      = new IntPtr[num_cols * num_rows];
            _frameCaptureData = new SC_SharpDX_ScreenFrame();

            arrayOfTexture2DFrac = new Texture2D[num_cols * num_rows];

            pastearray    = new int[num_cols * num_rows];
            pastearrayTwo = new int[num_cols * num_rows];

            _lastShaderResourceViewArray = new ShaderResourceView[num_cols * num_rows];
            _ShaderResourceViewArray     = new ShaderResourceView[num_cols * num_rows];


            _numAdapter = adapter;
            _numOutput  = numOutput;


            try
            {
                using (var _factory = new SharpDX.DXGI.Factory1())
                {
                    this._adapter = _factory.GetAdapter1(_numAdapter);
                }
            }
            catch (SharpDXException ex)
            {
                Console.WriteLine(ex.ToString());
                return;
            }

            try
            {
                //this._device = new Device(_adapter);
                //this._device = sccsVD4VE_LightNWithoutVr.SC_Console_DIRECTX._dxDevice.Device;

                this._device = device_;
            }
            catch (SharpDXException ex)
            {
                Console.WriteLine(ex.ToString());
                return;
            }

            try
            {
                //initializeOutput();
                using (var _output = _adapter.GetOutput(_numOutput))
                {
                    // Width/Height of desktop to capture
                    //getDesktopBoundaries();
                    _width  = ((SharpDX.Rectangle)_output.Description.DesktopBounds).Width;
                    _height = ((SharpDX.Rectangle)_output.Description.DesktopBounds).Height;
                    _frameCaptureData.width  = _width;
                    _frameCaptureData.height = _height;
                    this._output1            = _output.QueryInterface <Output1>();
                }
            }
            catch (SharpDXException ex)
            {
                Console.WriteLine(ex.ToString());
                return;
            }

            try
            {
                //duplicateOutput();
                this._outputDuplication = _output1.DuplicateOutput(_device);
            }
            catch (SharpDXException ex)
            {
                Console.WriteLine(ex.ToString());
                return;
            }

            try
            {
                //getTextureDescription();
                this._textureDescription = new Texture2DDescription
                {
                    CpuAccessFlags    = CpuAccessFlags.Read,
                    BindFlags         = BindFlags.None,//BindFlags.None, //| BindFlags.RenderTarget
                    Format            = Format.B8G8R8A8_UNorm,
                    Width             = _width,
                    Height            = _height,
                    OptionFlags       = ResourceOptionFlags.None,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1, Quality = 0 },
                    Usage             = ResourceUsage.Staging
                };

                this._textureDescriptionFinal = new Texture2DDescription
                {
                    CpuAccessFlags    = CpuAccessFlags.None,
                    BindFlags         = BindFlags.ShaderResource | BindFlags.RenderTarget,
                    Format            = Format.B8G8R8A8_UNorm,
                    Width             = _width,
                    Height            = _height,
                    OptionFlags       = ResourceOptionFlags.GenerateMipMaps,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1, Quality = 0 },
                    Usage             = ResourceUsage.Default
                };

                // Allocate this only after _textureDescriptionFinal has been filled in;
                // earlier in the constructor its Width/Height would still be zero.
                arrayOfBytesTwo = new byte[_textureDescriptionFinal.Width * _textureDescriptionFinal.Height];

                wid = _textureDescriptionFinal.Width / num_cols;
                hgt = _textureDescriptionFinal.Height / num_rows;

                /*this._textureDescriptionFinalFrac = new Texture2DDescription
                 * {
                 *  CpuAccessFlags = CpuAccessFlags.None,
                 *  BindFlags = BindFlags.ShaderResource | BindFlags.RenderTarget,
                 *  Format = Format.B8G8R8A8_UNorm,
                 *  Width = wid,
                 *  Height = hgt,
                 *  OptionFlags = ResourceOptionFlags.GenerateMipMaps,
                 *  MipLevels = 1,
                 *  ArraySize = 1,
                 *  SampleDescription = { Count = 1, Quality = 0 },
                 *  Usage = ResourceUsage.Default
                 * };*/

                this._textureDescriptionFinalFrac = new Texture2DDescription
                {
                    CpuAccessFlags    = CpuAccessFlags.Read,
                    BindFlags         = BindFlags.None,//BindFlags.None, //| BindFlags.RenderTarget
                    Format            = Format.B8G8R8A8_UNorm,
                    Width             = wid,
                    Height            = hgt,
                    OptionFlags       = ResourceOptionFlags.None,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1, Quality = 0 },
                    Usage             = ResourceUsage.Staging
                };



                piece     = new Bitmap(wid, hgt);
                gr        = Graphics.FromImage(piece);
                dest_rect = new System.Drawing.Rectangle(0, 0, wid, hgt);

                strider = wid * 4;

                for (int i = 0; i < arrayOfImage.Length; i++)
                {
                    arrayOfImage[i] = new int[wid * hgt * 4];
                }

                for (int i = 0; i < arrayOfBytes.Length; i++)
                {
                    arrayOfBytes[i] = new byte[wid * hgt * 4];
                }


                source_rect = new System.Drawing.Rectangle(0, 0, wid, hgt);


                for (int tex2D = 0; tex2D < arrayOfTexture2DFrac.Length; tex2D++)
                {
                    arrayOfTexture2DFrac[tex2D] = new Texture2D(_device, _textureDescriptionFinalFrac);
                }
            }
            catch (SharpDXException ex)
            {
                Console.WriteLine(ex.ToString());
                return;
            }

            _texture2D      = new Texture2D(_device, _textureDescription);
            _texture2DFinal = new Texture2D(_device, _textureDescriptionFinal);

            resourceViewDescription = new ShaderResourceViewDescription
            {
                Format    = _texture2DFinal.Description.Format,
                Dimension = SharpDX.Direct3D.ShaderResourceViewDimension.Texture2D,
                Texture2D = new ShaderResourceViewDescription.Texture2DResource
                {
                    MipLevels       = -1,
                    MostDetailedMip = 0
                }
            };

            _bitmap = new System.Drawing.Bitmap(_width, _height, PixelFormat.Format32bppArgb);
            var boundsRect = new System.Drawing.Rectangle(0, 0, _width, _height);
            var bmpData    = _bitmap.LockBits(boundsRect, ImageLockMode.ReadOnly, _bitmap.PixelFormat);

            _bytesTotal = Math.Abs(bmpData.Stride) * _bitmap.Height;
            _bitmap.UnlockBits(bmpData);
            _textureByteArray = new byte[_bytesTotal];

        }
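
The constructor only prepares the duplication objects; a capture step is not shown. A minimal sketch of acquiring one frame into the staging texture, using the members created above (the 500 ms timeout is an arbitrary choice, not from the original):

        SharpDX.DXGI.Resource desktopResource;
        OutputDuplicateFrameInformation frameInfo;
        _outputDuplication.AcquireNextFrame(500, out frameInfo, out desktopResource);

        using (var desktopTexture = desktopResource.QueryInterface<Texture2D>())
        {
            // _texture2D is the CPU-readable staging copy created in the constructor.
            _device.ImmediateContext.CopyResource(desktopTexture, _texture2D);
        }

        desktopResource.Dispose();
        _outputDuplication.ReleaseFrame();
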
Example #8
        // Initialize hardware-dependent resources.
        // Device-independent resources, such as ID2D1Geometry, are kept on the CPU.
        // Device-dependent resources, such as ID2D1RenderTarget and ID2D1LinearGradientBrush, directly map to resources on the GPU 
        // (when hardware acceleration is available). 
        // Rendering calls are performed by combining vertex and coverage information from a geometry with texturing information produced by the device-dependent resources.
        private void CreateDeviceResources()
        {
            // http://msdn.microsoft.com/zh-tw/library/windows/apps/dn481540.aspx

            // Unlike the original C++ sample, we don't have smart pointers so we need to
            // dispose Direct3D objects explicitly
            Utilities.Dispose(ref d3dDevice);
            Utilities.Dispose(ref d2dDevice);
            Utilities.Dispose(ref d2dContext);
            Utilities.Dispose(ref d2dFactory);

#if DEBUG
            var debugLevel = SharpDX.Direct2D1.DebugLevel.Information;
#else
            var debugLevel = SharpDX.Direct2D1.DebugLevel.None;
#endif
            d2dFactory = new SharpDX.Direct2D1.Factory1(SharpDX.Direct2D1.FactoryType.SingleThreaded, debugLevel);

            // This flag adds support for surfaces with a different color channel ordering
            // than the API default. It is required for compatibility with Direct2D.
            var creationFlags = DeviceCreationFlags.BgraSupport;

#if DEBUG
            // If the project is in a debug build, enable debugging via SDK Layers.
            creationFlags |= DeviceCreationFlags.Debug;
#endif

            // This array defines the set of DirectX hardware feature levels this app will support.
            // Note the ordering should be preserved.
            // Don't forget to declare your application's minimum required feature level in its
            // description.  All applications are assumed to support 9.1 unless otherwise stated.
            FeatureLevel[] featureLevels =
            {
                FeatureLevel.Level_11_1,
                FeatureLevel.Level_11_0,
                FeatureLevel.Level_10_1,
                FeatureLevel.Level_10_0,
                FeatureLevel.Level_9_3,
                FeatureLevel.Level_9_2,
                FeatureLevel.Level_9_1,
            };

            // Create the Direct3D 11 API device object.
            d3dDevice = new Device(DriverType.Hardware, creationFlags, featureLevels);

            // Get the Direct3D 11.1 API device.
            // DXGI : DirectX Graphics Infrastructure
            // DXGI is a set of APIs for configuring and managing low-level graphics and graphics-card resources.
            // To access the GPU directly and manage its resources, an application needs a way to describe them.
            // The most important piece of information is where the GPU should draw pixels so they can be presented on screen.
            // That location is usually called the "back buffer": a region of GPU memory you draw into and then
            // "flip" or "swap" so it is sent to the screen on the next refresh signal.
            // DXGI gives you access to that location and to the buffer itself (called a "swap chain",
            // because it is a chain of swappable buffers that allows several buffering strategies).
            using (var dxgiDevice = d3dDevice.QueryInterface<SharpDX.DXGI.Device>())
            {
                // Create the Direct2D device object and a corresponding context.
                // The device is a virtual representation of the GPU and its resources.
                // ID3D11Device exposes the graphics methods you call infrequently, usually before any rendering happens,
                // to acquire and configure the set of resources needed to start drawing pixels.
                d2dDevice = new SharpDX.Direct2D1.Device(dxgiDevice);

                // The device context is a cross-device abstraction of the rendering pipeline and its processing.
                // ID3D11DeviceContext exposes the methods you call every frame:
                // loading data into buffers, views and other resources, changing output-merger and rasterizer state,
                // managing shaders, and drawing the results of passing those resources through the states and shaders.
                d2dContext = new SharpDX.Direct2D1.DeviceContext(d2dDevice, DeviceContextOptions.EnableMultithreadedOptimizations);

                // Query for ISurfaceImageSourceNative interface.
                using (var sisNative = ComObject.QueryInterface<ISurfaceImageSourceNative>(this))
                    sisNative.Device = dxgiDevice;
            }
            setPrimitiveBlend();
        }
Example #9
 /// <summary>
 /// Called when the swap chain has been resized.
 /// </summary>
 /// <param name="device"></param>
 /// <param name="swapChain"></param>
 /// <param name="desc">Description of the swap chain after the change</param>
 internal virtual void OnSwapChainResized(SharpDX.Direct3D11.Device device, SwapChain swapChain, SwapChainDescription desc)
 {
 }
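
A possible override, sketched under the assumption that the subclass keeps a renderTargetView field for the swap chain's back buffer (names not taken from the original):

 internal override void OnSwapChainResized(SharpDX.Direct3D11.Device device, SwapChain swapChain, SwapChainDescription desc)
 {
     // Recreate the render target view against the resized back buffer.
     Utilities.Dispose(ref renderTargetView);
     using (var backBuffer = swapChain.GetBackBuffer<Texture2D>(0))
     {
         renderTargetView = new RenderTargetView(device, backBuffer);
     }

     // Match the viewport to the new swap chain size.
     device.ImmediateContext.Rasterizer.SetViewport(new Viewport(0, 0, desc.ModeDescription.Width, desc.ModeDescription.Height, 0.0f, 1.0f));
 }
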
Example #10
        /*private void CaptureFrame(object sender, DoWorkEventArgs e)
         * {
         *  //Bitmap bmp = GetFrame();
         *  //e.Result = bmp;
         * }*/

        private void Init()
        {
            captureWorker                     = new BackgroundWorker();
            captureWorker.DoWork             += new DoWorkEventHandler(CaptureFrame);
            captureWorker.RunWorkerCompleted += new RunWorkerCompletedEventHandler(HandleCapturedFrame);

            EncoderWorker                     = new BackgroundWorker();
            EncoderWorker.DoWork             += new DoWorkEventHandler(EncodeFrameTask);
            EncoderWorker.RunWorkerCompleted += new RunWorkerCompletedEventHandler(HandleEncodedFrame);

            DecoderWorker                     = new BackgroundWorker();
            DecoderWorker.DoWork             += new DoWorkEventHandler(DecodeFrameTask);
            DecoderWorker.RunWorkerCompleted += new RunWorkerCompletedEventHandler(HandleDecodedFrame);

            // # of graphics card adapter
            const int numAdapter = 0;

            // # of output device (i.e. monitor)
            const int numOutput = 0;

            // Create DXGI Factory1
            var factory = new SharpDX.DXGI.Factory1();
            var adapter = factory.GetAdapter1(numAdapter);

            // Create device from Adapter
            device = new Device(adapter);

            // Get DXGI.Output
            var output  = adapter.GetOutput(numOutput);
            var output1 = output.QueryInterface <Output1>();

            // Width/Height of desktop to capture
            transcoderOptions.InputWidth  = ((SharpDX.Rectangle)output.Description.DesktopBounds).Width;
            transcoderOptions.InputHeight = ((SharpDX.Rectangle)output.Description.DesktopBounds).Height;

            outBytes = new byte[GetNumBytesRender()];
            GCHandle pinned = GCHandle.Alloc(outBytes, GCHandleType.Pinned);

            outBytesPtr = pinned.AddrOfPinnedObject();
            //pinned.Free();

            GCHandle pinned2 = GCHandle.Alloc(transcoderOptions, GCHandleType.Pinned);

            transcoderOptionsPtr = pinned2.AddrOfPinnedObject();

            GCHandle pinned3 = GCHandle.Alloc(frameBytes, GCHandleType.Pinned);

            frameBytesPtr = pinned3.AddrOfPinnedObject();

            this.ClientSize = new Size(1600, 900);

            // Create Staging texture CPU-accessible
            var textureDesc = new Texture2DDescription
            {
                CpuAccessFlags    = CpuAccessFlags.Read,
                BindFlags         = BindFlags.None,
                Format            = Format.B8G8R8A8_UNorm,
                Width             = DisplayWidth(),
                Height            = DisplayHeight(),
                OptionFlags       = ResourceOptionFlags.None,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1, Quality = 0 },
                Usage             = ResourceUsage.Staging
            };

            screenTexture = new Texture2D(device, textureDesc);

            // Duplicate the output
            duplicatedOutput = output1.DuplicateOutput(device);

            InitializeDeviceResources();
        }
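
Reading a captured frame back to the CPU is not shown in Init(). A sketch of mapping the staging texture after a frame has been copied into it, assuming outBytes is at least DisplayWidth() * DisplayHeight() * 4 bytes (Marshal comes from System.Runtime.InteropServices, already needed for GCHandle above):

        var mapSource = device.ImmediateContext.MapSubresource(screenTexture, 0, MapMode.Read, SharpDX.Direct3D11.MapFlags.None);
        var sourcePtr = mapSource.DataPointer;
        int rowBytes  = DisplayWidth() * 4;

        for (int y = 0; y < DisplayHeight(); y++)
        {
            // Mapped rows can be padded, so copy row by row using RowPitch.
            Marshal.Copy(sourcePtr, outBytes, y * rowBytes, rowBytes);
            sourcePtr = IntPtr.Add(sourcePtr, mapSource.RowPitch);
        }

        device.ImmediateContext.UnmapSubresource(screenTexture, 0);
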
Example #11
        public RLocalDesktopCapture(int outputDeviceIndex)
        {
            // # of graphics card adapter
            const int numAdapter = 0;

            // # of output device (i.e. monitor)
            int numOutput = outputDeviceIndex;

            // Create DXGI Factory1
            var factory = new SharpDX.DXGI.Factory1();
            var adapter = factory.GetAdapter1(numAdapter);

            // Create device from Adapter
            device = new Device(adapter);

            // Get DXGI.Output
            var output  = adapter.GetOutput(numOutput);
            var output1 = output.QueryInterface <Output1>();

            // Width/Height of desktop to capture
            DisplayWidth  = ((SharpDX.Rectangle)output.Description.DesktopBounds).Width;
            DisplayHeight = ((SharpDX.Rectangle)output.Description.DesktopBounds).Height;

            int stride = 4 * DisplayWidth;
            int size   = stride * DisplayHeight;

            FrameBytes = new byte[size];

            GCHandle pinned = GCHandle.Alloc(outBytes, GCHandleType.Pinned);

            outBytesPtr = pinned.AddrOfPinnedObject();
            //pinned.Free();

            GCHandle pinned2 = GCHandle.Alloc(transcoderOptions, GCHandleType.Pinned);

            transcoderOptionsPtr = pinned2.AddrOfPinnedObject();

            GCHandle pinned3 = GCHandle.Alloc(FrameBytes, GCHandleType.Pinned);

            frameBytesPtr = pinned3.AddrOfPinnedObject();

            // Create Staging texture CPU-accessible
            var textureDesc = new Texture2DDescription
            {
                CpuAccessFlags    = CpuAccessFlags.Read,
                BindFlags         = BindFlags.None,
                Format            = Format.B8G8R8A8_UNorm,
                Width             = DisplayWidth,
                Height            = DisplayHeight,
                OptionFlags       = ResourceOptionFlags.None,
                MipLevels         = 1,
                ArraySize         = 1,
                SampleDescription = { Count = 1, Quality = 0 },
                Usage             = ResourceUsage.Staging
            };

            screenTexture = new Texture2D(device, textureDesc);

            // Duplicate the output
            duplicatedOutput = output1.DuplicateOutput(device);

            StartWorkTimer();
        }
Example #13
        /// <summary>
        /// Creates the device.
        /// </summary>
        /// <param name="control">Control to render into</param>
        /// <param name="ocu_config">Configuration</param>
        /// <returns>Whether the device was created successfully</returns>
        public bool InitializeApplication(Control control, OcuConfig ocu_config)
        {
            this.ocu_config = ocu_config;
            oculus          = new OculusWrap.Wrap();

            // Initialize the Oculus runtime.
            bool success = oculus.Initialize();

            if (!success)
            {
                MessageBox.Show("Failed to initialize the Oculus runtime library.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return(false);
            }

            // Use the head mounted display, if it's available, otherwise use the debug HMD.
            int numberOfHeadMountedDisplays = oculus.Hmd_Detect();

            if (numberOfHeadMountedDisplays > 0)
            {
                hmd = oculus.Hmd_Create(0);
            }
            else
            {
                hmd = oculus.Hmd_CreateDebug(OculusWrap.OVR.HmdType.DK2);
            }

            if (hmd == null)
            {
                MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return(false);
            }

            if (hmd.ProductName == string.Empty)
            {
                MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }

            // Specify which head tracking capabilities to enable.
            hmd.SetEnabledCaps(OculusWrap.OVR.HmdCaps.LowPersistence | OculusWrap.OVR.HmdCaps.DynamicPrediction);

            // Start the sensor which informs of the Rift's pose and motion
            hmd.ConfigureTracking(OculusWrap.OVR.TrackingCaps.ovrTrackingCap_Orientation | OculusWrap.OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection | OculusWrap.OVR.TrackingCaps.ovrTrackingCap_Position, OculusWrap.OVR.TrackingCaps.None);

            // Create DirectX drawing device.
            device = new Device(SharpDX.Direct3D.DriverType.Hardware, DeviceCreationFlags.None);

            ctx = device.ImmediateContext;

            Stopwatch sw = new Stopwatch();

            sw.Start();

            string effect_file = Path.Combine(Application.StartupPath, @"toonshader.fx.bin");

            if (!File.Exists(effect_file))
            {
                Console.WriteLine("File not found: " + effect_file);
                return(false);
            }
            try
            {
                var shader_bytecode = ShaderBytecode.FromFile(effect_file);
                effect = new Effect(device, shader_bytecode);
            }
            catch (SharpDX.CompilationException e)
            {
                Console.WriteLine(e.Message + ": " + effect_file);
                return(false);
            }

            sw.Stop();
            Console.WriteLine("toonshader.fx.bin read time: " + sw.Elapsed);

            string techmap_file = Path.Combine(Application.StartupPath, @"techmap.txt");

            if (!File.Exists(techmap_file))
            {
                Console.WriteLine("File not found: " + techmap_file);
                return(false);
            }
            techmap.Load(techmap_file);

            control.MouseDown += new MouseEventHandler(form_OnMouseDown);

            // Define the properties of the swap chain.
            SwapChainDescription swapChainDescription = DefineSwapChainDescription(control);

            // Create DirectX Graphics Interface factory, used to create the swap chain.
            dxgi_factory = new SharpDX.DXGI.Factory();
            // Create the swap chain.
            swap_chain = new SwapChain(dxgi_factory, device, swapChainDescription);

            // Retrieve the back buffer of the swap chain.
            buf0      = swap_chain.GetBackBuffer <Texture2D>(0);
            buf0_view = new RenderTargetView(device, buf0);

            // Create a depth buffer, using the same width and height as the back buffer.
            Texture2DDescription depthBufferDescription = DefineDepthBufferDescription(control);

            // Create the depth buffer.
            ztex      = new Texture2D(device, depthBufferDescription);
            ztex_view = new DepthStencilView(device, ztex);

            ctx.OutputMerger.SetRenderTargets(ztex_view, buf0_view);

            viewport = new Viewport(0, 0, hmd.Resolution.Width, hmd.Resolution.Height, 0.0f, 1.0f);
            ctx.Rasterizer.SetViewport(viewport);

            // Retrieve the DXGI device, in order to set the maximum frame latency.
            using (SharpDX.DXGI.Device1 dxgiDevice = device.QueryInterface <SharpDX.DXGI.Device1>())
            {
                dxgiDevice.MaximumFrameLatency = 1;
            }

            layers        = new OculusWrap.Layers();
            layer_eye_fov = layers.AddLayerEyeFov();

            CreateEyeTextures();

            CreateMirrorTexture(control);

            World_variable               = effect.GetVariableBySemantic("World").AsMatrix();
            WorldView_variable           = effect.GetVariableBySemantic("WorldView").AsMatrix();
            WorldViewProjection_variable = effect.GetVariableBySemantic("WorldViewProjection").AsMatrix();
            /* for HUD */
            Projection_variable = effect.GetVariableBySemantic("Projection").AsMatrix();

            LocalBoneMats_variable  = effect.GetVariableByName("LocalBoneMats").AsMatrix();
            LightDirForced_variable = effect.GetVariableByName("LightDirForced").AsVector();
            UVSCR_variable          = effect.GetVariableByName("UVSCR").AsVector();

            cb_variable = effect.GetConstantBufferByName("cb");

            ShadeTex_texture_variable = effect.GetVariableByName("ShadeTex_texture").AsShaderResource();
            ColorTex_texture_variable = effect.GetVariableByName("ColorTex_texture").AsShaderResource();

            //figures.Camera = camera;
            figures.TSOFileOpen += delegate(TSOFile tso)
            {
                tso.Open(device, effect);
                techmap.AssignTechniqueIndices(tso);
            };

            // Define an input layout to be passed to the vertex shader.
            var technique = effect.GetTechniqueByIndex(0);

            il = new InputLayout(device, technique.GetPassByIndex(0).Description.Signature, TSOSubMesh.ie);

            // Setup the immediate context to use the shaders and model we defined.
            ctx.InputAssembler.InputLayout = il;

            DefineBlendState();
            DefineDepthStencilState();
            DefineRasterizerState();

            main_camera = new Camera()
            {
                Position = ocu_config.Position,
                Rotation = Quaternion.Identity,
            };

            directInput = new DirectInput();
            keyboard    = new Keyboard(directInput);
            keyboard.Acquire();

            keyboardState = keyboard.GetCurrentState();

            return(true);
        }
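
The effect variables fetched above would be refreshed each frame before drawing. A sketch of that step (main_camera.ViewMatrix is an assumed property name, not taken from the original):

        Matrix world = Matrix.Identity;
        Matrix view  = main_camera.ViewMatrix;
        Matrix proj  = Matrix.PerspectiveFovRH((float)Math.PI / 4.0f, viewport.Width / (float)viewport.Height, 0.1f, 1000.0f);

        // Push the transform matrices into the toon shader effect.
        World_variable.SetMatrix(world);
        WorldView_variable.SetMatrix(world * view);
        WorldViewProjection_variable.SetMatrix(world * view * proj);
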
Example #14
        private void CreateTreeNode2(SharpDX.Direct3D11.Device device, QuadTreeNodeType node, float positionX, float positionZ, float width, out QuadTreeNodeType createdNode)
        {
            // Store the node position and size.
            node.positionX = positionX;
            node.positionZ = positionZ;
            node.width     = width;

            // Initialize the triangle count to zero for the node.
            node.TriangleCount = 0;

            // Initialize the vertex and index buffer to null.
            node.VertexBuffer = null;
            node.IndexBuffer  = null;

            // Initialize the Nodes vertex array to null.
            node.vertexArray = null;

            // Count the number of triangles that are inside this node.
            int numTriangles = CountTriangles(positionX, positionZ, width);

            // Case 1: If there are no triangles in this node then return as it is empty and requires no processing.
            if (numTriangles == 0)
            {
                createdNode = node;
                return;
            }

            // Initialize the children nodes of this node to null.
            node.Nodes = new QuadTreeNodeType[4];

            // Case 2: If there are too many triangles in this node then split it into four equal sized smaller tree nodes.
            if (numTriangles > MAX_TRIANGLES)
            {
                for (int i = 0; i < 4; i++)
                {
                    // Calculate the position offsets for the new child node.
                    float offsetX = (((i % 2) < 1) ? -1.0f : 1.0f) * (width / 4.0f);
                    float offsetZ = (((i % 4) < 2) ? -1.0f : 1.0f) * (width / 4.0f);

                    // See if there are any triangles in the new node.
                    int count = CountTriangles((positionX + offsetX), (positionZ + offsetZ), (width / 2.0f));

                    if (count > 0)
                    {
                        // If there are triangles inside where this new node would be then create the child node.
                        node.Nodes[i] = new QuadTreeNodeType();
                        QuadTreeNodeType aNewNode;

                        // Extend the tree starting from this new child node now.
                        CreateTreeNode2(device, node.Nodes[i], (positionX + offsetX), (positionZ + offsetZ), (width / 2.0f), out aNewNode);

                        node.Nodes[i] = aNewNode;
                        createdNode   = aNewNode;
                    }
                }

                createdNode = node;
                return;
            }

            // Case 3: If this node is not empty and the triangle count for it is less than the max then this node is at the bottom of the tree so create the list of triangles to store in it.
            node.TriangleCount = numTriangles;

            // Calculate the number of vertices.
            int vertexCount = numTriangles * 3;

            // Create the vertex array.
            PositionTextureNormalVertex[] vertices = new PositionTextureNormalVertex[vertexCount];
            // Create the index array
            int[] indices = new int[vertexCount];

            // Create the vertex array.
            node.vertexArray = new XYZType[vertexCount];

            // Initialize the index for this new vertex and index array.
            int index = 0;

            // Go through all the triangles in the vertex list.
            for (int i = 0; i < TriangleCount; i++)
            {
                // If the triangle is inside this node then add it to the vertex array.
                if (IsTriangleContained(i, positionX, positionZ, width))
                {
                    // Calculate the index into the terrain vertex list.
                    int vertexIndex = i * 3;

                    // Get the three vertices of this triangle from the vertex list.
                    vertices[index] = VertexList[vertexIndex];
                    indices[index]  = index;
                    // Also store the vertex position information in the node vertex array.
                    node.vertexArray[index].x = VertexList[vertexIndex].position.X;
                    node.vertexArray[index].y = VertexList[vertexIndex].position.Y;
                    node.vertexArray[index].z = VertexList[vertexIndex].position.Z;

                    // Increment the indexes.
                    index++;
                    vertexIndex++;

                    vertices[index]           = VertexList[vertexIndex];
                    indices[index]            = index;
                    node.vertexArray[index].x = VertexList[vertexIndex].position.X;
                    node.vertexArray[index].y = VertexList[vertexIndex].position.Y;
                    node.vertexArray[index].z = VertexList[vertexIndex].position.Z;

                    // Increment the indexes.
                    index++;
                    vertexIndex++;
                    vertices[index]           = VertexList[vertexIndex];
                    indices[index]            = index;
                    node.vertexArray[index].x = VertexList[vertexIndex].position.X;
                    node.vertexArray[index].y = VertexList[vertexIndex].position.Y;
                    node.vertexArray[index].z = VertexList[vertexIndex].position.Z;

                    index++;
                }
            }

            // No explicit buffer description is needed here; Buffer.Create sets up a static (default-usage) buffer.
            // Create the vertex buffer.
            node.VertexBuffer = SharpDX.Direct3D11.Buffer.Create(device, BindFlags.VertexBuffer, vertices);

            // Create the index buffer.
            node.IndexBuffer = SharpDX.Direct3D11.Buffer.Create(device, BindFlags.IndexBuffer, indices);

            // Release the vertex and index arrays now that the data is stored in the buffers in the node.
            vertices = null;
            indices  = null;

            createdNode = node;

            return;
        }
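
For context, the recursion above is normally started from a single root node that covers the whole terrain. A sketch with hypothetical member names (CalculateMeshDimensions and ParentNode are assumptions, not from the original):

        private void BuildQuadTree(SharpDX.Direct3D11.Device device)
        {
            float centerX, centerZ, meshWidth;

            // Determine the centre point and width of the terrain mesh (assumed helper).
            CalculateMeshDimensions(out centerX, out centerZ, out meshWidth);

            // Create the root node and recursively build the rest of the tree from it.
            ParentNode = new QuadTreeNodeType();
            CreateTreeNode2(device, ParentNode, centerX, centerZ, meshWidth, out ParentNode);
        }
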