Example 1
0
        /// <summary>
        /// Renders the scene into a pixel buffer using 4x supersampled
        /// (2x2 grid) anti-aliasing and direct lighting from a single
        /// point light source.
        /// </summary>
        /// <param name="pixbuf">Destination pixel buffer; its dimensions drive the camera aspect ratio.</param>
        public void Render(PixelBuffer pixbuf)
        {
            float dx = 1.0f / pixbuf.Width, dy = 1.0f / pixbuf.Height;

            camera.AspectRatio = (float)pixbuf.Width / pixbuf.Height;

            // Free parallelism, why not! Note a Parallel.For loop
            // over each row is slightly faster but less readable.
            Parallel.ForEach(pixbuf, (pixel) =>
            {
                var color = Vector.Zero;
                float u   = pixel.X * dx;
                float v   = pixel.Y * dy;

                // 2x2 grid of AA sample offsets, mapped into [-1, 1] camera space
                var rays = new[]
                {
                    camera.Trace(2 * (u - 0.25f * dx) - 1, 2 * (v - 0.25f * dy) - 1),
                    camera.Trace(2 * (u + 0.25f * dx) - 1, 2 * (v - 0.25f * dy) - 1),
                    camera.Trace(2 * (u - 0.25f * dx) - 1, 2 * (v + 0.25f * dy) - 1),
                    camera.Trace(2 * (u + 0.25f * dx) - 1, 2 * (v + 0.25f * dy) - 1),
                };

                // Trace a packet of 4 coherent AA rays
                var packet = scene.Intersects4(rays);

                // Convert the packet to a set of usable ray-geometry intersections
                Intersection<Model>[] hits = packet.ToIntersection<Model>(scene);

                for (int t = 0; t < 4; ++t)
                {
                    if (!hits[t].HasHit)
                    {
                        continue; // guard clause: this sample missed all geometry
                    }

                    // Constant ambient term so fully shadowed hits are not pitch black
                    color += new Vector(0.1f, 0.1f, 0.1f);

                    var ray   = rays[t];
                    var model = hits[t].Instance;

                    // Parse the surface normal returned and then process it manually
                    var rawNormal = new Vector(hits[t].NX, hits[t].NY, hits[t].NZ);
                    var normal    = model.CorrectNormal(rawNormal); // Important!

                    // Calculate the new ray towards the light source; the origin is
                    // pushed out along the normal by epsilon to avoid self-intersection
                    var hitPoint      = ray.PointAt(hits[t].Distance);
                    var toLight       = lightPosition - hitPoint; // from A to B = B - A
                    var lightDistance = toLight.Length();         // hoisted: reused below
                    var lightDir      = toLight.Normalize();      // hoisted: was normalized twice
                    var lightRay      = new Ray(hitPoint + normal * Constants.Epsilon, toLight);

                    // Is the light source occluded? If so, no point calculating any lighting
                    if (!scene.Occludes(lightRay, 0, lightDistance))
                    {
                        // Compute the Lambertian cosine term (rendering equation)
                        float cosLight = Vector.Dot(normal, lightDir);

                        // Total light attenuation: inverse square law (dot(toLight, toLight)
                        // is the squared distance) combined with the cosine term
                        var attenuation = lightIntensity * cosLight / Vector.Dot(toLight, toLight);

                        color += model.Material(hits[t].Mesh).BRDF(lightDir, ray.Direction, normal) * attenuation;
                    }
                }

                // Average the 4 per-pixel samples
                pixbuf.SetColor(pixel, color / 4);
            });
        }
Example 2
0
        /// <summary>
        /// Renders the scene into a pixel buffer, with selectable ray-packet
        /// traversal: a single sample per pixel, a 2x2 grid packet of 4 AA
        /// samples, or a rotated-grid packet of 8 AA samples.
        /// </summary>
        /// <param name="pixbuf">Destination pixel buffer; its dimensions drive the camera aspect ratio.</param>
        /// <param name="mode">Which packet width to traverse the scene with (defaults to single-ray).</param>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="mode"/> is not a supported traversal mode.</exception>
        public void Render(PixelBuffer pixbuf, TraversalFlags mode = TraversalFlags.Single)
        {
            float dx = 1.0f / pixbuf.Width, dy = 1.0f / pixbuf.Height;

            camera.AspectRatio = (float)pixbuf.Width / pixbuf.Height;

            // Free parallelism, why not! Note a Parallel.For loop
            // over each row is slightly faster but less readable.
            Parallel.ForEach(pixbuf, (pixel) =>
            {
                var color = Vector.Zero;
                float u   = pixel.X * dx;
                float v   = pixel.Y * dy;

                Ray[] rays = null;
                Intersection<Model>[] hits = null;
                if (mode == TraversalFlags.Single)
                {
                    // One sample per pixel, no anti-aliasing
                    rays       = new[] { camera.Trace(2 * (u - 0.25f * dx) - 1, 2 * (v - 0.25f * dy) - 1) };
                    var packet = scene.Intersects(rays[0]);
                    hits       = new Intersection<Model>[] { packet.ToIntersection<Model>(scene) };
                }
                else if (mode == TraversalFlags.Packet4)
                {
                    // 2x2 grid of AA sample offsets, mapped into [-1, 1] camera space
                    rays = new[]
                    {
                        camera.Trace(2 * (u - 0.25f * dx) - 1, 2 * (v - 0.25f * dy) - 1),
                        camera.Trace(2 * (u + 0.25f * dx) - 1, 2 * (v - 0.25f * dy) - 1),
                        camera.Trace(2 * (u - 0.25f * dx) - 1, 2 * (v + 0.25f * dy) - 1),
                        camera.Trace(2 * (u + 0.25f * dx) - 1, 2 * (v + 0.25f * dy) - 1)
                    };
                    // Trace a packet of coherent AA rays
                    var packet = scene.Intersects4(rays);
                    // Convert the packet to a set of usable ray-geometry intersections
                    hits = packet.ToIntersection<Model>(scene);
                }
                else if (mode == TraversalFlags.Packet8)
                {
                    // Sampling pattern Rotated grid
                    // https://en.wikipedia.org/wiki/Supersampling#Supersampling_patterns
                    // ------------
                    // | X   X    |
                    // |   X    X |
                    // | X    X   |
                    // |    X   X |
                    // ------------
                    // https://www.desmos.com/calculator/l2ynkbsahy
                    rays = new[]
                    {
                        camera.Trace(2 * (u - 0.333f * dx) - 1, 2 * (v - 0.166f * dy) - 1),
                        camera.Trace(2 * (u - 0.166f * dx) - 1, 2 * (v - 0.333f * dy) - 1),
                        camera.Trace(2 * (u - 0.300f * dx) - 1, 2 * (v + 0.300f * dy) - 1),
                        camera.Trace(2 * (u - 0.100f * dx) - 1, 2 * (v + 0.100f * dy) - 1),
                        camera.Trace(2 * (u + 0.100f * dx) - 1, 2 * (v - 0.100f * dy) - 1),
                        camera.Trace(2 * (u + 0.300f * dx) - 1, 2 * (v - 0.300f * dy) - 1),
                        camera.Trace(2 * (u + 0.166f * dx) - 1, 2 * (v + 0.333f * dy) - 1),
                        camera.Trace(2 * (u + 0.333f * dx) - 1, 2 * (v + 0.166f * dy) - 1)
                    };
                    // Trace a packet of coherent AA rays
                    var packet = scene.Intersects8(rays);
                    // Convert the packet to a set of usable ray-geometry intersections
                    hits = packet.ToIntersection<Model>(scene);
                }
                else
                {
                    // A bad argument deserves the specific exception type, not bare Exception
                    throw new ArgumentOutOfRangeException(nameof(mode), mode, "Invalid mode");
                }

                for (int t = 0; t < hits.Length; ++t)
                {
                    if (!hits[t].HasHit)
                    {
                        continue; // guard clause: this sample missed all geometry
                    }

                    // Constant ambient term so fully shadowed hits are not pitch black
                    color += new Vector(0.1f, 0.1f, 0.1f);

                    var ray   = rays[t];
                    var model = hits[t].Instance;

                    // Parse the surface normal returned and then process it manually
                    var rawNormal = new Vector(hits[t].NX, hits[t].NY, hits[t].NZ);
                    var normal    = model.CorrectNormal(rawNormal); // Important!

                    // Calculate the new ray towards the light source; the origin is
                    // pushed out along the normal by epsilon to avoid self-intersection
                    var hitPoint      = ray.PointAt(hits[t].Distance);
                    var toLight       = lightPosition - hitPoint; // from A to B = B - A
                    var lightDistance = toLight.Length();         // hoisted: reused below
                    var lightDir      = toLight.Normalize();      // hoisted: was normalized twice
                    var lightRay      = new Ray(hitPoint + normal * Constants.Epsilon, toLight);

                    // Is the light source occluded? If so, no point calculating any lighting
                    if (!scene.Occludes(lightRay, 0, lightDistance))
                    {
                        // Compute the Lambertian cosine term (rendering equation)
                        float cosLight = Vector.Dot(normal, lightDir);

                        // Total light attenuation: inverse square law (dot(toLight, toLight)
                        // is the squared distance) combined with the cosine term
                        var attenuation = lightIntensity * cosLight / Vector.Dot(toLight, toLight);

                        color += model.Material(hits[t].Mesh).BRDF(lightDir, ray.Direction, normal) * attenuation;
                    }
                }

                // Average the per-pixel samples
                pixbuf.SetColor(pixel, color / rays.Length);
            });
        }