/// <summary>
/// Loads a 512x512xN DICOM head data set from <paramref name="buffer"/> into the
/// voxel volume, keeping only samples whose shifted value lies strictly inside
/// the intensity window (winMin, winMax). Also initialises the clipping box and
/// camera factors used by the ray caster.
/// </summary>
/// <param name="buffer">Raw 16-bit samples, one 512*512 slice per image.</param>
/// <param name="num_of_images">Number of slices contained in <paramref name="buffer"/>.</param>
/// <param name="winCentre">DICOM window centre.</param>
/// <param name="winWidth">DICOM window width.</param>
/// <param name="intercept">Not used by this method — TODO confirm intended use.</param>
/// <param name="signed">Not used by this method — TODO confirm intended use.</param>
public void LoadHeadDICOMTestDataSet(List<ushort> buffer, ushort num_of_images, double winCentre, double winWidth, double intercept, bool signed)
{
    ushort i = 0;
    short k;
    winCentre_vox = (int)(winCentre);
    winWidth_vox = (int)(winWidth);
    // Window limits, then shifted from unsigned storage (0..65535) into signed range.
    int winMax = Convert.ToInt32(winCentre_vox + 0.5 * winWidth);
    int winMin = winMax - (int)(winWidth);
    winMax -= 32768;
    winMin -= 32768;
    vol = new VoxelVolume(517, manager);
    boxMinCon = new Float4(0, 0, 0, 0);
    boxMaxCon = new Float4(512, 512, 512, 0);
    /* To speed up work with the voxel buffer the x axis is innermost.
     * See VolumetricMethodsInVisualEffects2010.
     * NOTE(review): the index (x * 512 + y) makes y the fastest-varying source
     * axis for consecutive x — confirm this matches how the buffer was filled. */
    for (var f = 1; f <= num_of_images; f++)
    {
        for (var y = 0; y < 512; y++)
        {
            for (var x = 0; x < 512; x++)
            {
                i = buffer[(f - 1) * 512 * 512 + (x * 512 + y)];
                k = (short)(i - 32768); // back to a signed sample value
                if (k > winMin && k < winMax) // account for the winCentre transformation
                {
                    var xx = (int)((x / 512.0) * (513.0 - 5));
                    var yy = 512 - (int)((y / 512.0) * (513.0 - 5));
                    var zz = (int)((f / 348.0) * (349.0));
                    // Each slice is written to three adjacent positions along the
                    // second volume axis, stretching the stack to the cube height.
                    vol.SetValue(xx, (330 - zz * 2), yy, k);
                    vol.SetValue(xx, (330 - zz * 2 + 1), yy, k);
                    vol.SetValue(xx, (330 - zz * 2 + 2), yy, k);
                }
            }
        }
    }
    camfactorX = 2;
    //camfactorY = 2; // NOTE(review): disabled here but assigned in the sibling version — confirm which is intended
    camfactorZ = 2;
}
/* Kept disabled: would expose the host-side voxel counter as an OpenCL buffer.
private unsafe Mem GetCounter() { fixed (ulong* dataptr = &voxelctr) { counter = manager.Context.CreateBuffer(MemFlags.READ_WRITE, 8, new IntPtr(dataptr)); } return counter; }*/

/// <summary>
/// Renders one frame by ray casting the voxel volume on the OpenCL device,
/// writing directly into the locked bitmap <paramref name="output"/>.
/// Camera state (camPos, camAngle, camDist, rotation flags) lives in fields
/// of the enclosing class; lighting coefficients are read from UI text boxes.
/// </summary>
/// <param name="output">Locked bitmap data the kernel writes into (USE_HOST_PTR).</param>
private unsafe void DoRayCasting(BitmapData output)
{
    try
    {
        int deviceIndex = 0;
        // USE_HOST_PTR: the kernel output buffer aliases the bitmap's own memory.
        outputBuffer = manager.Context.CreateBuffer(MemFlags.USE_HOST_PTR, output.Stride * output.Height, output.Scan0);
        if (first || changeDistance)
        {
            // UVN camera model: place the camera on a circle of radius camDist
            // around the centre of the volume, at the current camAngle.
            camPos = new Float4()
            {
                S0 = vol.GetSize() / 2 - (float)Math.Cos(camAngle * Math.PI / 180) * camDist,
                S1 = vol.GetSize() / 2,
                S2 = vol.GetSize() / 2 - (float)Math.Sin(camAngle * Math.PI / 180) * camDist,
                S3 = 0
            };
            first = false;
            changeDistance = false;
            camPosOld = camPos;
        }
        else
        {
            // Rotation around the vertical axis of the visualization cube:
            // translate to the look-at point, rotate by camAngle, translate back.
            // NOTE: camLookAt here is the value from the previous frame — it is
            // reassigned only after these branches.
            if (angleChange && leftChange)
            {
                camPosOld.S0 -= camLookAt.S0;
                camPosOld.S2 -= camLookAt.S2;
                camPos.S0 = (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S2;
                camPos.S1 = vol.GetSize() / 2;
                camPos.S2 = -(float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S2;
                camPos.S3 = 0;
                camPos.S0 += camLookAt.S0;
                camPos.S2 += camLookAt.S2;
                camPosOld = camPos;
                angleChange = false;
                leftChange = false;
            }
            if (angleChange && rightChange)
            {
                camPosOld.S0 -= camLookAt.S0;
                camPosOld.S2 -= camLookAt.S2;
                camPos.S0 = (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S0 - (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S2;
                camPos.S1 = vol.GetSize() / 2;
                camPos.S2 = (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S2;
                camPos.S3 = 0;
                camPos.S0 += camLookAt.S0;
                camPos.S2 += camLookAt.S2;
                camPosOld = camPos;
                angleChange = false;
                leftChange = false; // NOTE(review): looks like a copy-paste slip — should this reset rightChange? confirm
            }
        }
        camLookAt = new Float4()
        {
            S0 = vol.GetSize() / camfactorX,
            S1 = vol.GetSize() / camfactorX, // NOTE(review): camfactorX used for the Y component — camfactorY may have been intended; confirm
            S2 = vol.GetSize() / camfactorZ,
            S3 = 0
        };
        //light = camPos;
        // Camera basis (UVN model): forward, right and up vectors.
        camForward = camLookAt.Sub(camPos).Normalize(); // viewing direction
        var up = new Float4(0.0f, 1.0f, 0.0f, 0.0f);
        var right = MathClass.Cross(up, camForward).Normalize().Times(1.5f);
        up = MathClass.Cross(camForward, right).Normalize().Times(-1.5f);
        /* Process the output BitmapData on the OpenCL device, tile by tile. */
        for (var x = 0; x < output.Width; x += blocksize)
        {
            for (var y = 0; y < output.Height; y += blocksize)
            {
                var rayTracingGlobalWorkSize = new IntPtr[2]; // work_dim = 2
                // Clamp the tile size at the right/bottom edges of the image.
                rayTracingGlobalWorkSize[0] = (IntPtr)(output.Width - x > blocksize ? blocksize : output.Width - x);
                rayTracingGlobalWorkSize[1] = (IntPtr)(output.Height - y > blocksize ? blocksize : output.Height - y);
                var rayTracingGlobalOffset = new IntPtr[2];
                rayTracingGlobalOffset[0] = (IntPtr)x;
                rayTracingGlobalOffset[1] = (IntPtr)y;
                // Lighting coefficients from the UI: ambient/diffuse/specular,
                // specular exponent, and constant/linear/quadratic attenuation.
                float ka = (float)(Convert.ToDouble(kamb.Text));
                float kd = (float)(Convert.ToDouble(kdiff.Text));
                float ks = (float)(Convert.ToDouble(kspec.Text));
                float exp = (float)(Convert.ToDouble(specexp.Text));
                float kkc = (float)(Convert.ToDouble(this.kc.Text));
                float kkl = (float)(Convert.ToDouble(this.kl.Text));
                float kkq = (float)(Convert.ToDouble(this.kq.Text));
                /* Pass the arguments to the kernel function. */
                kernel.SetArg(0, output.Width);
                kernel.SetArg(1, output.Height);
                kernel.SetArg(2, outputBuffer); // __global in the kernel, since it needs access to output
                kernel.SetArg(3, output.Stride);
                kernel.SetArg(4, camPos);
                kernel.SetArg(5, camForward);
                kernel.SetArg(6, right);
                kernel.SetArg(7, up);
                kernel.SetArg(8, vol.CreateBuffer());
                kernel.SetArg(9, vol.GetSize());
                kernel.SetArg(10, light);
                kernel.SetArg(11, boxMinCon);
                kernel.SetArg(12, boxMaxCon);
                kernel.SetArg(13, Convert.ToInt16(colorMi.Text));
                kernel.SetArg(14, Convert.ToInt16(colorMa.Text));
                kernel.SetArg(15, _cutArrea.Checked ? (short)1 : (short)0);
                kernel.SetArg(16, _trilinear.Checked ? (short)1 : (short)0);
                kernel.SetArg(17, tf.Checked ? (short)1 : (short)0);
                kernel.SetArg(18, GetColors());
                kernel.SetArg(19, winWidth_vox);
                kernel.SetArg(20, winCentre_vox);
                kernel.SetArg(21, form_this.knots_counter);
                kernel.SetArg(22, Convert.ToInt16(colorMi2.Text));
                kernel.SetArg(23, Convert.ToInt16(colorMa2.Text));
                kernel.SetArg(24, GetOpacity());
                kernel.SetArg(25, ka);
                kernel.SetArg(26, kd);
                kernel.SetArg(27, ks);
                kernel.SetArg(28, exp);
                kernel.SetArg(29, kkc);
                kernel.SetArg(30, kkl);
                kernel.SetArg(31, kkq);
                //kernel.SetArg(32, GetCounter());
                /* Enqueue the kernel for execution on the device.
                 * rayTracingGlobalOffset (global_work_offset): an array of work_dim
                 * values describing the offset used to compute each work-item's
                 * global ID, instead of IDs always starting at (0, 0, ..., 0).
                 * rayTracingGlobalWorkSize (global_work_size): total number of global
                 * work-items, global_work_size[0] * ... * global_work_size[work_dim - 1]. */
                manager.CQ[deviceIndex].EnqueueNDRangeKernel(kernel, 2, rayTracingGlobalOffset, rayTracingGlobalWorkSize, null);
            }
        }
        /* Wait until all work-items have executed. */
        manager.CQ[deviceIndex].EnqueueBarrier();
        /* To access the result on the host we ask OpenCL to map the buffer
         * into host memory. */
        IntPtr p = manager.CQ[deviceIndex].EnqueueMapBuffer(outputBuffer, true, MapFlags.READ, IntPtr.Zero, (IntPtr)(output.Stride * output.Height));
        //IntPtr z = manager.CQ[deviceIndex].EnqueueMapBuffer(counter, true, MapFlags.READ_WRITE, IntPtr.Zero, (IntPtr)(sizeof(ulong)));
        /* Once we have finished with the buffer it must be unmapped. */
        manager.CQ[deviceIndex].EnqueueUnmapMemObject(outputBuffer, p);
        //manager.CQ[deviceIndex].EnqueueUnmapMemObject(counter, z);
        manager.CQ[deviceIndex].Finish();
        realctr += voxelctr;
        voxelCounter.Text = Convert.ToString(realctr);
    }
    catch (Exception ex)
    {
        MessageBox.Show("Ray casting exception:" + ex.Message, "Exception");
        //Environment.Exit(-1);
    }
    finally
    {
        if (outputBuffer != null)
        {
            outputBuffer.Dispose();
        }
    }
}
/// <summary>Component-wise difference <c>a - b</c> over all four lanes, S3 included.</summary>
public static Float4 Sub(this Float4 a, Float4 b)
{
    float dx = a.S0 - b.S0;
    float dy = a.S1 - b.S1;
    float dz = a.S2 - b.S2;
    float dw = a.S3 - b.S3;
    return new Float4(dx, dy, dz, dw);
}
/// <summary>
/// Sets kernel argument <paramref name="argIndex"/> to the given Float4 value,
/// passing its address and size to clSetKernelArg.
/// </summary>
/// <exception cref="OpenCLException">Thrown when the OpenCL call does not return SUCCESS.</exception>
public void SetArg(int argIndex, Float4 c)
{
    ErrorCode status = OpenCL.SetKernelArg(KernelID, (uint)argIndex, (IntPtr)sizeof(Float4), &c);
    if (status != ErrorCode.SUCCESS)
    {
        throw new OpenCLException("SetArg failed with error code " + status, status);
    }
}
/// <summary>
/// Dot product of the xyz components only. S3 is not included — the surrounding
/// code stores 3D directions in Float4 with S3 = 0, so this matches that usage.
/// </summary>
public static float Dot(Float4 a, Float4 b)
{
    float sum = a.S0 * b.S0;
    sum += a.S1 * b.S1;
    sum += a.S2 * b.S2;
    return sum;
}
/// <summary>3D cross product of the xyz components; the result's S3 is 0.</summary>
public static Float4 Cross(Float4 a, Float4 b)
{
    float cx = a.S1 * b.S2 - a.S2 * b.S1;
    float cy = a.S2 * b.S0 - a.S0 * b.S2;
    float cz = a.S0 * b.S1 - a.S1 * b.S0;
    return new Float4(cx, cy, cz, 0);
}
/// <summary>Component-wise sum <c>a + b</c> over all four lanes, S3 included.</summary>
public static Float4 Add(this Float4 a, Float4 b)
{
    float sx = a.S0 + b.S0;
    float sy = a.S1 + b.S1;
    float sz = a.S2 + b.S2;
    float sw = a.S3 + b.S3;
    return new Float4(sx, sy, sz, sw);
}
/// <summary>
/// Renders one frame by ray casting the voxel volume on the OpenCL device,
/// writing directly into the locked bitmap <paramref name="output"/>.
/// Camera state (camPos, camAngle, camDist, rotation flags) lives in fields
/// of the enclosing class; lighting coefficients are read from UI text boxes.
/// </summary>
/// <param name="output">Locked bitmap data the kernel writes into (USE_HOST_PTR).</param>
private unsafe void DoRayCasting(BitmapData output)
{
    try
    {
        int deviceIndex = 0;
        // USE_HOST_PTR: the kernel output buffer aliases the bitmap's own memory.
        outputBuffer = manager.Context.CreateBuffer(MemFlags.USE_HOST_PTR, output.Stride * output.Height, output.Scan0);
        if (first || changeDistance)
        {
            // UVN camera model: place the camera on a circle of radius camDist
            // around the centre of the volume, at the current camAngle.
            camPos = new Float4()
            {
                S0 = vol.GetSize() / 2 - (float)Math.Cos(camAngle * Math.PI / 180) * camDist,
                S1 = vol.GetSize() / 2,
                S2 = vol.GetSize() / 2 - (float)Math.Sin(camAngle * Math.PI / 180) * camDist,
                S3 = 0
            };
            first = false;
            changeDistance = false;
            camPosOld = camPos;
        }
        else
        {
            // Rotation around the vertical axis of the visualization cube:
            // translate to the look-at point, rotate by camAngle, translate back.
            // NOTE: camLookAt here is the value from the previous frame — it is
            // reassigned only after these branches.
            if (angleChange && leftChange)
            {
                camPosOld.S0 -= camLookAt.S0;
                camPosOld.S2 -= camLookAt.S2;
                camPos.S0 = (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S2;
                camPos.S1 = vol.GetSize() / 2;
                camPos.S2 = -(float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S2;
                camPos.S3 = 0;
                camPos.S0 += camLookAt.S0;
                camPos.S2 += camLookAt.S2;
                camPosOld = camPos;
                angleChange = false;
                leftChange = false;
            }
            if (angleChange && rightChange)
            {
                camPosOld.S0 -= camLookAt.S0;
                camPosOld.S2 -= camLookAt.S2;
                camPos.S0 = (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S0 - (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S2;
                camPos.S1 = vol.GetSize() / 2;
                camPos.S2 = (float)Math.Sin(camAngle * Math.PI / 180) * camPosOld.S0 + (float)Math.Cos(camAngle * Math.PI / 180) * camPosOld.S2;
                camPos.S3 = 0;
                camPos.S0 += camLookAt.S0;
                camPos.S2 += camLookAt.S2;
                camPosOld = camPos;
                angleChange = false;
                rightChange = false; // FIX: was "leftChange = false" (copy-paste), leaving rightChange permanently set
            }
        }
        camLookAt = new Float4()
        {
            S0 = vol.GetSize() / camfactorX,
            S1 = vol.GetSize() / camfactorX, // NOTE(review): camfactorX used for the Y component — camfactorY may have been intended; confirm
            S2 = vol.GetSize() / camfactorZ,
            S3 = 0
        };
        // Camera basis (UVN model): forward, right and up vectors.
        camForward = camLookAt.Sub(camPos).Normalize(); // viewing direction
        var up = new Float4(0.0f, 1.0f, 0.0f, 0.0f);
        var right = MathClass.Cross(up, camForward).Normalize().Times(1.5f);
        up = MathClass.Cross(camForward, right).Normalize().Times(-1.5f);
        /* Process the output BitmapData on the OpenCL device, tile by tile. */
        for (var x = 0; x < output.Width; x += blocksize)
        {
            for (var y = 0; y < output.Height; y += blocksize)
            {
                var rayTracingGlobalWorkSize = new IntPtr[2]; // work_dim = 2
                // Clamp the tile size at the right/bottom edges of the image.
                rayTracingGlobalWorkSize[0] = (IntPtr)(output.Width - x > blocksize ? blocksize : output.Width - x);
                rayTracingGlobalWorkSize[1] = (IntPtr)(output.Height - y > blocksize ? blocksize : output.Height - y);
                var rayTracingGlobalOffset = new IntPtr[2];
                rayTracingGlobalOffset[0] = (IntPtr)x;
                rayTracingGlobalOffset[1] = (IntPtr)y;
                // Lighting coefficients from the UI: ambient/diffuse/specular,
                // specular exponent, and constant/linear/quadratic attenuation.
                float ka = (float)(Convert.ToDouble(kamb.Text));
                float kd = (float)(Convert.ToDouble(kdiff.Text));
                float ks = (float)(Convert.ToDouble(kspec.Text));
                float exp = (float)(Convert.ToDouble(specexp.Text));
                float kkc = (float)(Convert.ToDouble(this.kc.Text));
                float kkl = (float)(Convert.ToDouble(this.kl.Text));
                float kkq = (float)(Convert.ToDouble(this.kq.Text));
                /* Pass the arguments to the kernel function. */
                kernel.SetArg(0, output.Width);
                kernel.SetArg(1, output.Height);
                kernel.SetArg(2, outputBuffer); // __global in the kernel, since it needs access to output
                kernel.SetArg(3, output.Stride);
                kernel.SetArg(4, camPos);
                kernel.SetArg(5, camForward);
                kernel.SetArg(6, right);
                kernel.SetArg(7, up);
                kernel.SetArg(8, vol.CreateBuffer());
                kernel.SetArg(9, vol.GetSize());
                kernel.SetArg(10, light);
                kernel.SetArg(11, boxMinCon);
                kernel.SetArg(12, boxMaxCon);
                kernel.SetArg(13, Convert.ToInt16(colorMi.Text));
                kernel.SetArg(14, Convert.ToInt16(colorMa.Text));
                kernel.SetArg(15, _cutArrea.Checked ? (short)1 : (short)0);
                kernel.SetArg(16, _trilinear.Checked ? (short)1 : (short)0);
                kernel.SetArg(17, tf.Checked ? (short)1 : (short)0);
                kernel.SetArg(18, GetColors());
                kernel.SetArg(19, winWidth_vox);
                kernel.SetArg(20, winCentre_vox);
                kernel.SetArg(21, form_this.knots_counter);
                kernel.SetArg(22, Convert.ToInt16(colorMi2.Text));
                kernel.SetArg(23, Convert.ToInt16(colorMa2.Text));
                kernel.SetArg(24, GetOpacity());
                kernel.SetArg(25, ka);
                kernel.SetArg(26, kd);
                kernel.SetArg(27, ks);
                kernel.SetArg(28, exp);
                kernel.SetArg(29, kkc);
                kernel.SetArg(30, kkl);
                kernel.SetArg(31, kkq);
                kernel.SetArg(32, GetCounter());
                /* Enqueue the kernel for execution on the device.
                 * rayTracingGlobalOffset (global_work_offset): an array of work_dim
                 * values describing the offset used to compute each work-item's
                 * global ID, instead of IDs always starting at (0, 0, ..., 0).
                 * rayTracingGlobalWorkSize (global_work_size): total number of global
                 * work-items, global_work_size[0] * ... * global_work_size[work_dim - 1]. */
                manager.CQ[deviceIndex].EnqueueNDRangeKernel(kernel, 2, rayTracingGlobalOffset, rayTracingGlobalWorkSize, null);
            }
        }
        /* Wait until all work-items have executed. */
        manager.CQ[deviceIndex].EnqueueBarrier();
        /* To access the results on the host we ask OpenCL to map the buffers
         * into host memory. */
        IntPtr p = manager.CQ[deviceIndex].EnqueueMapBuffer(outputBuffer, true, MapFlags.READ, IntPtr.Zero, (IntPtr)(output.Stride * output.Height));
        IntPtr z = manager.CQ[deviceIndex].EnqueueMapBuffer(counter, true, MapFlags.READ_WRITE, IntPtr.Zero, (IntPtr)(sizeof(ulong)));
        /* Once we have finished with a buffer it must be unmapped. */
        manager.CQ[deviceIndex].EnqueueUnmapMemObject(outputBuffer, p);
        manager.CQ[deviceIndex].EnqueueUnmapMemObject(counter, z);
        manager.CQ[deviceIndex].Finish();
        realctr += voxelctr;
        voxelCounter.Text = Convert.ToString(realctr);
    }
    catch (Exception ex)
    {
        // FIX: removed Environment.Exit(-1) — a transient failure (e.g. a bad
        // number typed into a lighting text box, parsed via Convert.ToDouble)
        // must not terminate the whole application; the sibling implementation
        // of this method already disables it. Environment.Exit would also skip
        // the finally block, leaking outputBuffer.
        MessageBox.Show("Ray casting exception:" + ex.Message, "Exception");
    }
    finally
    {
        if (outputBuffer != null)
        {
            outputBuffer.Dispose();
        }
    }
}
/// <summary>
/// Loads a 512x512xN DICOM head data set from <paramref name="buffer"/> into the
/// voxel volume, keeping only samples whose shifted value lies strictly inside
/// the intensity window (winMin, winMax). Also initialises the clipping box and
/// camera factors used by the ray caster.
/// </summary>
/// <param name="buffer">Raw 16-bit samples, one 512*512 slice per image.</param>
/// <param name="num_of_images">Number of slices contained in <paramref name="buffer"/>.</param>
/// <param name="winCentre">DICOM window centre.</param>
/// <param name="winWidth">DICOM window width.</param>
/// <param name="intercept">Not used by this method — kept for interface compatibility.</param>
/// <param name="signed">Not used by this method — kept for interface compatibility.</param>
public void LoadHeadDICOMTestDataSet(List<ushort> buffer, ushort num_of_images, double winCentre, double winWidth, double intercept, bool signed)
{
    winCentre_vox = (int)(winCentre);
    winWidth_vox = (int)(winWidth);
    // Window limits, then shifted from unsigned storage (0..65535) into signed range.
    int winMax = Convert.ToInt32(winCentre_vox + 0.5 * winWidth);
    int winMin = winMax - (int)(winWidth);
    winMax -= 32768;
    winMin -= 32768;
    vol = new VoxelVolume(517, manager);
    boxMinCon = new Float4(0, 0, 0, 0);
    boxMaxCon = new Float4(512, 512, 512, 0);
    /* To speed up work with the voxel buffer the x axis is innermost.
     * See VolumetricMethodsInVisualEffects2010.
     * NOTE(review): the index (x * 512 + y) makes y the fastest-varying source
     * axis for consecutive x — confirm this matches how the buffer was filled. */
    for (var f = 1; f <= num_of_images; f++)
    {
        for (var y = 0; y < 512; y++)
        {
            for (var x = 0; x < 512; x++)
            {
                ushort raw = buffer[(f - 1) * 512 * 512 + (x * 512 + y)];
                short k = (short)(raw - 32768); // back to a signed sample value
                if (k > winMin && k < winMax) // account for the winCentre transformation
                {
                    var xx = (int)((x / 512.0) * (513.0 - 5));
                    var yy = 512 - (int)((y / 512.0) * (513.0 - 5));
                    var zz = (int)((f / 348.0) * (349.0));
                    // Each slice is written to three adjacent positions along the
                    // second volume axis, stretching the stack to the cube height.
                    vol.SetValue(xx, (330 - zz * 2), yy, k);
                    vol.SetValue(xx, (330 - zz * 2 + 1), yy, k);
                    vol.SetValue(xx, (330 - zz * 2 + 2), yy, k);
                }
            }
        }
        // FIX: removed GC.Collect() here (once per slice) and after the loop.
        // Forcing full blocking collections inside a tight loop severely slows
        // loading and reclaims nothing the GC would not collect on its own;
        // the live buffer and volume cannot be freed anyway.
    }
    camfactorX = 2;
    camfactorY = 2;
    camfactorZ = 2;
}