Example #1
        /*
        private unsafe Mem GetCounter()
        {
            fixed (ulong* dataptr = &voxelctr)
            {
                counter = manager.Context.CreateBuffer(MemFlags.READ_WRITE, 8, new IntPtr(dataptr));
            }
            return counter;
        }*/
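        // A minimal sketch of the counter buffer, assuming the OpenCLNet-style bindings used
        // in this snippet (Mem, MemFlags, Context.CreateBuffer) and that the enum exposes
        // COPY_HOST_PTR (mirroring CL_MEM_COPY_HOST_PTR). Plain READ_WRITE with a non-null
        // host pointer is not a valid OpenCL combination, and the pinning from `fixed` ends
        // when the block does, so here the initial value is copied in at creation time and
        // the result would be read back explicitly after the kernel runs.
        private unsafe Mem GetCounter()
        {
            fixed (ulong* dataptr = &voxelctr)
            {
                // COPY_HOST_PTR copies the 8 bytes at creation time; the pinned pointer
                // does not need to stay valid after the buffer has been created.
                counter = manager.Context.CreateBuffer(
                    MemFlags.READ_WRITE | MemFlags.COPY_HOST_PTR, 8, new IntPtr(dataptr));
            }
            return counter;
        }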
        private unsafe void DoRayCasting(BitmapData output)
        {
            try
            {
                int deviceIndex = 0;
                outputBuffer = manager.Context.CreateBuffer(MemFlags.USE_HOST_PTR, output.Stride * output.Height, output.Scan0);

                if (first || changeDistance)
                {

                    // UVN camera model
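                    // The camera sits on a circle of radius camDist around the cube centre
                    // (size/2, size/2, size/2), at azimuth camAngle in the y = size/2 plane.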
                    camPos = new Float4()
                    {
                        S0 =
                        vol.GetSize() / 2 - (float)Math.Cos(camAngle * Math.PI / 180) * camDist,
                        S1 = vol.GetSize() / 2,
                        S2 = vol.GetSize() / 2 - (float)Math.Sin(camAngle * Math.PI / 180) * camDist,
                        S3 = 0
                    };

                    first = false;
                    changeDistance = false;
                    camPosOld = camPos;
                }

                else
                {
                    // rotation around the axis of the visualization cube
                    if (angleChange && leftChange)
                    {
                        camPosOld.S0 -= camLookAt.S0;
                        camPosOld.S2 -= camLookAt.S2;

                        camPos.S0 = (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S2;
                        camPos.S1 = vol.GetSize() / 2;
                        camPos.S2 = -(float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S2;
                        camPos.S3 = 0;

                        camPos.S0 += camLookAt.S0;
                        camPos.S2 += camLookAt.S2;

                        camPosOld = camPos;
                        angleChange = false;
                        leftChange = false;
                    }
                    if (angleChange && rightChange)
                    {
                        camPosOld.S0 -= camLookAt.S0;
                        camPosOld.S2 -= camLookAt.S2;

                        camPos.S0 = (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S0 - (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S2;
                        camPos.S1 = vol.GetSize() / 2;
                        camPos.S2 = (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S2;
                        camPos.S3 = 0;

                        camPos.S0 += camLookAt.S0;
                        camPos.S2 += camLookAt.S2;

                        camPosOld = camPos;
                        angleChange = false;
                        rightChange = false;

                    }

                }

                camLookAt = new Float4()
                {
                    S0 = vol.GetSize() / camfactorX,
                    S1 = vol.GetSize() / camfactorX,
                    S2 = vol.GetSize() / camfactorZ,
                    S3 = 0
                };

                //light = camPos;

                // camera orientation, UVN model
                camForward = camLookAt.Sub(camPos).Normalize(); // viewing direction
                var up = new Float4(0.0f, 1.0f, 0.0f, 0.0f);
                var right = MathClass.Cross(up, camForward).Normalize().Times(1.5f);
                up = MathClass.Cross(camForward, right).Normalize().Times(-1.5f);
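                // right and up span the image plane; the 1.5 scale factors (and the sign flip on up)
                // presumably set the field of view and orientation relative to the unit-length camForward.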

                /* process the output BitmapData image on the OpenCL device */
                for (var x = 0; x < output.Width; x += blocksize)
                {
                    for (var y = 0; y < output.Height; y += blocksize)
                    {
                        var rayTracingGlobalWorkSize = new IntPtr[2]; // work_dim = 2
                        rayTracingGlobalWorkSize[0] = (IntPtr)(output.Width - x > blocksize ? blocksize : output.Width - x);
                        rayTracingGlobalWorkSize[1] = (IntPtr)(output.Height - y > blocksize ? blocksize : output.Height - y);

                        var rayTracingGlobalOffset = new IntPtr[2];
                        rayTracingGlobalOffset[0] = (IntPtr)x;
                        rayTracingGlobalOffset[1] = (IntPtr)y;

                        float ka = (float)(Convert.ToDouble(kamb.Text));
                        float kd = (float)(Convert.ToDouble(kdiff.Text));
                        float ks = (float)(Convert.ToDouble(kspec.Text));
                        float exp = (float)(Convert.ToDouble(specexp.Text));

                        float kkc = (float)(Convert.ToDouble(this.kc.Text));
                        float kkl = (float)(Convert.ToDouble(this.kl.Text));
                        float kkq = (float)(Convert.ToDouble(this.kq.Text));

                        /* pass the arguments to the kernel function */
                        kernel.SetArg(0, output.Width);
                        kernel.SetArg(1, output.Height);
                        kernel.SetArg(2, outputBuffer);  // __global in the kernel, since the kernel needs write access to the output image
                        kernel.SetArg(3, output.Stride);
                        kernel.SetArg(4, camPos);
                        kernel.SetArg(5, camForward);
                        kernel.SetArg(6, right);
                        kernel.SetArg(7, up);
                        kernel.SetArg(8, vol.CreateBuffer());
                        kernel.SetArg(9, vol.GetSize());
                        kernel.SetArg(10, light);
                        kernel.SetArg(11, boxMinCon);
                        kernel.SetArg(12, boxMaxCon);
                        kernel.SetArg(13, Convert.ToInt16(colorMi.Text));
                        kernel.SetArg(14, Convert.ToInt16(colorMa.Text));
                        kernel.SetArg(15, _cutArrea.Checked ? (short)1 : (short)0);
                        kernel.SetArg(16, _trilinear.Checked ? (short)1 : (short)0);
                        kernel.SetArg(17, tf.Checked ? (short)1: (short)0 );
                        kernel.SetArg(18, GetColors());
                        kernel.SetArg(19, winWidth_vox);
                        kernel.SetArg(20, winCentre_vox);
                        kernel.SetArg(21, form_this.knots_counter);
                        kernel.SetArg(22, Convert.ToInt16(colorMi2.Text));
                        kernel.SetArg(23, Convert.ToInt16(colorMa2.Text));
                        kernel.SetArg(24, GetOpacity());
                        kernel.SetArg(25, ka);
                        kernel.SetArg(26, kd);
                        kernel.SetArg(27, ks);
                        kernel.SetArg(28, exp);
                        kernel.SetArg(29, kkc);
                        kernel.SetArg(30, kkl);
                        kernel.SetArg(31, kkq);
                        //kernel.SetArg(32, GetCounter());

                        /* Enqueue the command to execute the kernel on the device */
                        /*
                         *  rayTracingGlobalOffset -
                         *  globalWorkOffset: can be used to specify an array of work_dim unsigned values
                         *  that describes the offset used to calculate the global ID of each work-item,
                         *  instead of the global IDs always starting at (0, 0, ... 0).
                         *
                         *  rayTracingGlobalWorkSize -
                         *  globalWorkSize: the total number of global work-items is computed as
                         *  global_work_size[0] * ... * global_work_size[work_dim - 1].
                         */
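                        // For example (hypothetical numbers): with blocksize = 16, the tile at
                        // (x, y) = (32, 48) is enqueued with global offset (32, 48) and a work size
                        // of at most 16 x 16 = 256 work-items, so get_global_id() inside the kernel
                        // already yields absolute pixel coordinates.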
                        manager.CQ[deviceIndex].EnqueueNDRangeKernel(kernel, 2, rayTracingGlobalOffset, rayTracingGlobalWorkSize, null);
                    }
                }

                /* barrier: all enqueued work-items must finish before later commands execute */
                manager.CQ[deviceIndex].EnqueueBarrier();

                /* to access the memory and write it into the output image, we ask OpenCL to *map* the data into host memory */
                IntPtr p = manager.CQ[deviceIndex].EnqueueMapBuffer(outputBuffer, true, MapFlags.READ, IntPtr.Zero, (IntPtr)(output.Stride * output.Height));
                //IntPtr z = manager.CQ[deviceIndex].EnqueueMapBuffer(counter, true, MapFlags.READ_WRITE, IntPtr.Zero, (IntPtr)(sizeof(ulong)));

                /* once we are done with the buffer, this function must be called */
                manager.CQ[deviceIndex].EnqueueUnmapMemObject(outputBuffer, p);
                //manager.CQ[deviceIndex].EnqueueUnmapMemObject(counter, z);
                manager.CQ[deviceIndex].Finish();
                realctr += voxelctr;
                voxelCounter.Text = Convert.ToString(realctr);
            }
            catch (Exception ex)
            {
                MessageBox.Show("Ray casting exception: " + ex.Message, "Exception");
                //Environment.Exit(-1);
            }
            finally
            {
                if (outputBuffer != null)
                {
                    outputBuffer.Dispose();
                }
            }
        }
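The two rotation branches above implement a rotation about the vertical axis through camLookAt, written out component by component. A compact sketch of the same step (a hypothetical helper, assuming only the Float4 type used in this snippet) makes the ±camAngle symmetry between the left and right branches explicit:

        // Rotate pos by `degrees` around the vertical (Y) axis through `pivot`.
        // A positive angle reproduces the leftChange branch, a negative one the rightChange branch.
        private static Float4 RotateAroundY(Float4 pos, Float4 pivot, float degrees)
        {
            double rad = degrees * Math.PI / 180;
            float c = (float)Math.Cos(rad), s = (float)Math.Sin(rad);
            float x = pos.S0 - pivot.S0;    // translate so the pivot becomes the origin
            float z = pos.S2 - pivot.S2;
            return new Float4(
                c * x + s * z + pivot.S0,   // x' =  cos·x + sin·z
                pos.S1,                     // height is unchanged
                -s * x + c * z + pivot.S2,  // z' = -sin·x + cos·z
                0.0f);
        }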
Example #2
        private unsafe void DoRayCasting(BitmapData output)
        {
            try{
                int deviceIndex = 0;
                outputBuffer = manager.Context.CreateBuffer(MemFlags.USE_HOST_PTR, output.Stride * output.Height, output.Scan0);
                if (first || changeDistance)
                {
                    // UVN camera model
                    camPos = new Float4()
                    {
                        S0 =
                        vol.GetSize() / 2 - (float)Math.Cos(camAngle * Math.PI / 180) * camDist,
                        S1 = vol.GetSize() / 2,
                        S2 = vol.GetSize() / 2 - (float)Math.Sin(camAngle * Math.PI / 180) * camDist,
                        S3 = 0
                    };

                    first = false;
                    changeDistance = false;
                    camPosOld = camPos;
                }
                else{
                    // rotation around the axis of the visualization cube
                    if (angleChange && leftChange){
                        camPosOld.S0 -= camLookAt.S0;
                        camPosOld.S2 -= camLookAt.S2;
                        camPos.S0 = (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S2;
                        camPos.S1 = vol.GetSize() / 2;
                        camPos.S2 = -(float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S2;
                        camPos.S3 = 0;
                        camPos.S0 += camLookAt.S0;
                        camPos.S2 += camLookAt.S2;
                        camPosOld = camPos;
                        angleChange = false;
                        leftChange = false;
                    }
                    if (angleChange && rightChange){
                        camPosOld.S0 -= camLookAt.S0;
                        camPosOld.S2 -= camLookAt.S2;
                        camPos.S0 = (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S0 - (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S2;
                        camPos.S1 = vol.GetSize() / 2;
                        camPos.S2 = (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S2;
                        camPos.S3 = 0;
                        camPos.S0 += camLookAt.S0;
                        camPos.S2 += camLookAt.S2;
                        camPosOld = camPos;
                        angleChange = false;
                        rightChange = false;
                    }
                }

                camLookAt = new Float4(){
                    S0 = vol.GetSize() / camfactorX,
                    S1 = vol.GetSize() / camfactorX,
                    S2 = vol.GetSize() / camfactorZ,
                    S3 = 0
                };

                // direction of the camera, UVN model
                camForward = camLookAt.Sub(camPos).Normalize(); // viewing direction
                var up = new Float4(0.0f, 1.0f, 0.0f, 0.0f);
                var right = MathClass.Cross(up, camForward).Normalize().Times(1.5f);
                up = MathClass.Cross(camForward, right).Normalize().Times(-1.5f);
                /* process the output BitmapData image on the OpenCL device */
                for (var x = 0; x < output.Width; x += blocksize){
                    for (var y = 0; y < output.Height; y += blocksize){
                        var rayTracingGlobalWorkSize = new IntPtr[2]; // work_dim = 2
                        rayTracingGlobalWorkSize[0] = (IntPtr)(output.Width - x > blocksize ? blocksize : output.Width - x);
                        rayTracingGlobalWorkSize[1] = (IntPtr)(output.Height - y > blocksize ? blocksize : output.Height - y);
                        var rayTracingGlobalOffset = new IntPtr[2];
                        rayTracingGlobalOffset[0] = (IntPtr)x;
                        rayTracingGlobalOffset[1] = (IntPtr)y;
                        float ka = (float)(Convert.ToDouble(kamb.Text));
                        float kd = (float)(Convert.ToDouble(kdiff.Text));
                        float ks = (float)(Convert.ToDouble(kspec.Text));
                        float exp = (float)(Convert.ToDouble(specexp.Text));
                        float kkc = (float)(Convert.ToDouble(this.kc.Text));
                        float kkl = (float)(Convert.ToDouble(this.kl.Text));
                        float kkq = (float)(Convert.ToDouble(this.kq.Text));
                        /* pass the arguments to the kernel function */
                        kernel.SetArg(0, output.Width);
                        kernel.SetArg(1, output.Height);
                        kernel.SetArg(2, outputBuffer);  // __global in the kernel, since the kernel needs write access to the output image
                        kernel.SetArg(3, output.Stride);
                        kernel.SetArg(4, camPos);
                        kernel.SetArg(5, camForward);
                        kernel.SetArg(6, right);
                        kernel.SetArg(7, up);
                        kernel.SetArg(8, vol.CreateBuffer());
                        kernel.SetArg(9, vol.GetSize());
                        kernel.SetArg(10, light);
                        kernel.SetArg(11, boxMinCon);
                        kernel.SetArg(12, boxMaxCon);
                        kernel.SetArg(13, Convert.ToInt16(colorMi.Text));
                        kernel.SetArg(14, Convert.ToInt16(colorMa.Text));
                        kernel.SetArg(15, _cutArrea.Checked ? (short)1 : (short)0);
                        kernel.SetArg(16, _trilinear.Checked ? (short)1 : (short)0);
                        kernel.SetArg(17, tf.Checked ? (short)1: (short)0 );
                        kernel.SetArg(18, GetColors());
                        kernel.SetArg(19, winWidth_vox);
                        kernel.SetArg(20, winCentre_vox);
                        kernel.SetArg(21, form_this.knots_counter);
                        kernel.SetArg(22, Convert.ToInt16(colorMi2.Text));
                        kernel.SetArg(23, Convert.ToInt16(colorMa2.Text));
                        kernel.SetArg(24, GetOpacity());
                        kernel.SetArg(25, ka);
                        kernel.SetArg(26, kd);
                        kernel.SetArg(27, ks);
                        kernel.SetArg(28, exp);
                        kernel.SetArg(29, kkc);
                        kernel.SetArg(30, kkl);
                        kernel.SetArg(31, kkq);
                        kernel.SetArg(32, GetCounter());
                        /* Enqueue the command to execute the kernel on the device */
                        /*  rayTracingGlobalOffset -
                         *  globalWorkOffset: can be used to specify an array of work_dim unsigned values
                         *  that describes the offset used to calculate the global ID of each work-item,
                         *  instead of the global IDs always starting at (0, 0, ... 0).
                         *  rayTracingGlobalWorkSize -
                         *  globalWorkSize: the total number of global work-items is computed as
                         *  global_work_size[0] * ... * global_work_size[work_dim - 1]. */
                        manager.CQ[deviceIndex].EnqueueNDRangeKernel(kernel, 2, rayTracingGlobalOffset, rayTracingGlobalWorkSize, null);
                    }
                }

                /* barrier: all enqueued work-items must finish before later commands execute */
                manager.CQ[deviceIndex].EnqueueBarrier();
                /* to access the memory and write it into the output image, we ask OpenCL to *map* the data into host memory */
                IntPtr p = manager.CQ[deviceIndex].EnqueueMapBuffer(outputBuffer, true, MapFlags.READ, IntPtr.Zero, (IntPtr)(output.Stride * output.Height));
                IntPtr z = manager.CQ[deviceIndex].EnqueueMapBuffer(counter, true, MapFlags.READ_WRITE, IntPtr.Zero, (IntPtr)(sizeof(ulong)));
                /* once we are done with the buffer, this function must be called */
                manager.CQ[deviceIndex].EnqueueUnmapMemObject(outputBuffer, p);
                manager.CQ[deviceIndex].EnqueueUnmapMemObject(counter, z);
                manager.CQ[deviceIndex].Finish();
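                // Assumption about how `counter` was created: reading voxelctr directly (rather than
                // through the mapped pointer z) only reflects the kernel's count if the counter buffer
                // wraps &voxelctr with USE_HOST_PTR; otherwise the value must be copied back explicitly,
                // e.g. from the mapped region or with an EnqueueReadBuffer call.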
                realctr += voxelctr;
                voxelCounter.Text = Convert.ToString(realctr);
            }
            catch (Exception ex){
                MessageBox.Show("Ray casting exception: " + ex.Message, "Exception");
                Environment.Exit(-1);
            }
            finally{
                if (outputBuffer != null){
                    outputBuffer.Dispose();
                }
            }
        }
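For context, DoRayCasting expects a locked BitmapData, so a caller would typically look something like this hypothetical wrapper (RenderFrame, the pictureBox control, and the 32bpp pixel format are assumptions, not taken from the snippet; it assumes using System.Drawing and System.Drawing.Imaging):

        // Hypothetical caller: lock the Bitmap, render into it on the OpenCL device, then unlock it.
        private void RenderFrame(Bitmap target)
        {
            var rect = new Rectangle(0, 0, target.Width, target.Height);
            // 32bpp ARGB is an assumption; the format must match what the kernel writes per pixel.
            BitmapData data = target.LockBits(rect, ImageLockMode.ReadWrite, PixelFormat.Format32bppArgb);
            try
            {
                DoRayCasting(data);        // fills data.Scan0 through the mapped OpenCL buffer
            }
            finally
            {
                target.UnlockBits(data);   // must be unlocked even if ray casting throws
            }
            pictureBox.Image = target;     // placeholder for whatever control displays the result
        }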