public override Vec3 GetColorValue(float u, float v, Vec3 p) {
    // Checker (chessboard) procedural texture: the product of three sines
    // alternates sign across a 3D grid of cells, selecting between the odd
    // and even sub-textures. A larger n makes the cells smaller (the cell
    // size along each axis is pi / n).
    //
    // Fix: use the hit point p directly. The previous code normalized p
    // first, which projects every hit point onto the unit sphere, so the
    // pattern depended only on the direction from the origin instead of the
    // world-space position — the distortion the original author's comment
    // already suspected ("this algorithm has a problem, maybe due to the p
    // passed in").
    double sines = Math.Sin(n * p.x) * Math.Sin(n * p.y) * Math.Sin(n * p.z);
    if (sines <= 0) {
        return odd.GetColorValue(u, v, p);
    } else {
        return even.GetColorValue(u, v, p);
    }
}
public static void GetSphereUV(Vec3 p, ref float u, ref float v) {
    // Project a point on a sphere onto (u, v) texture coordinates using
    // spherical angles. With the unit-sphere parameterization
    //   x = cos(phi) * cos(theta)
    //   y = sin(phi) * cos(theta)
    //   z = sin(theta)
    // the texture coordinates are
    //   u = phi / (2*pi),  phi in [0, 2*pi]
    //   v = theta / pi,    theta in [0, pi]
    p = p.normalize();

    // Math.Atan2 returns [-pi, pi]; shift by pi to land in [0, 2*pi].
    double azimuth = Math.Atan2(p.y, p.x) + Math.PI;
    // Math.Asin returns [-pi/2, pi/2]; shift by pi/2 to land in [0, pi].
    double inclination = Math.Asin(p.z) + Math.PI / 2;

    u = (float)(azimuth / (2 * Math.PI));
    v = (float)(inclination / Math.PI);
}
Vec3 u, v, w; // camera-space basis vectors (possibly not worth storing long-term)

// TODO: eventually split some of these features out into components — this
// pile of parameters is ugly, but for now the goal is just to get the
// functionality running.
public Camera(Vec3 lookfrom, Vec3 lookat, Vec3 viewup, int _width, int _height, float _distance, float _render_depth, float _aperture, bool _enable_motion_blur) {
    viewpoint = lookfrom;
    width = _width;
    height = _height;
    distance = _distance;
    render_depth = _render_depth;
    aperture = _aperture;
    enable_motion_blur = _enable_motion_blur;

    // Build a right-handed camera coordinate frame.
    Vec3 viewdir = lookat - lookfrom;
    w = -viewdir.normalize();               // camera z-axis: points backwards, out of the screen
    u = Vec3.cross(viewup, w).normalize();  // camera x-axis: points to the camera's right
    v = Vec3.cross(w, u).normalize();       // camera y-axis: points to the camera's up

    // Lay out the 2D pixel grid along the camera's u/v axes: the screen is
    // `width` wide and `height` tall, centered on the view axis at the focal
    // distance in front of the eye.
    pixel_origin = lookfrom - width / 2 * u - height / 2 * v - distance * w;
    pixel_horizontal = width * u;
    pixel_vertical = height * v;
}