// Varying handed to the fragment shader; interpolated across the triangle.
vec4 v_Color;

// Vertex shader entry point.
void main()
{
    // Forward the per-vertex color for interpolation.
    v_Color = a_Color;

    // gl_Position is the built-in clip-space output: multiply the vertex by
    // the combined model-view-projection matrix.
    gl_Position = u_MVPMatrix * a_Position;
}
// Vertex shader entry point: emits eye-space position, clip-space position,
// pass-through texture coordinates, and the transformed normal.
void main()
{
    // Vertex position in eye space.
    vPosition = uMVMatrix * vec4(aVertexPosition, 1.0f);

    // Final clip-space position.
    gl_Position = uPMatrix * vPosition;

    // Forward texture coordinates unchanged.
    vTextureCoord = aTextureCoord;

    // Rotate the normal via the normal matrix.
    vTransformedNormal = uNMatrix * aVertexNormal;
}
// Samples a cubemap as an equirectangular (latitude/longitude) panorama.
void mainImage(out vec4 fragColor, vec2 fragCoord)
{
    // Normalized screen coordinate in [0, 1].
    vec2 st = fragCoord.xy / iResolution.xy;

    // Remap to (theta, phi): theta spans [-pi, pi], phi spans [-pi/2, pi/2].
    vec2 angles = ((st * 2.0f) - vec2(1.0f))
        * vec2(3.1415926535897932384626433832795f, 1.5707963267948966192313216916398f);

    // Spherical-to-Cartesian conversion for the lookup direction.
    vec3 dir = vec3(
        cos(angles.y) * cos(angles.x),
        sin(angles.y),
        cos(angles.y) * sin(angles.x));

    fragColor = textureCube(iChannel0, dir);
}
// Vertex shader: fixed orange diffuse term plus a tight white specular lobe.
void main()
{
    gl_Position = prMatrix * mvMatrix * vec4(aPos, 1.0f);

    // Normal rotated into view space (w = 0 so translation is ignored).
    vec3 n = (mvMatrix * vec4(aNorm, 0.0f)).xyz;

    // Lambertian diffuse intensity, clamped at zero.
    float k = max(0.0f, dot(n, dirDif));
    color = vec4(.9f * k, .5f * k, 0.0f, 1.0f);

    // Sharp Blinn-style specular highlight via the half vector.
    k = pow(max(0f, dot(n, dirHalf)), 120.0f);
    color += vec4(k, k, k, 0f);
}
// Vertex shader: two-sided diffuse tinted by u_color plus a soft specular term.
void main()
{
    gl_Position = prMatrix * mvMatrix * vec4(aPos, 1.0f);

    // View-space normal (w = 0: direction, not position).
    vec3 n = (mvMatrix * vec4(aNorm, .0f)).xyz;

    // abs() lights both faces of the surface equally.
    float k = abs(dot(n, dirDif));
    color = vec4(k * u_color.rgb, u_color.a);

    // Half-strength specular lobe.
    k = .5f * pow(max(0.0f, dot(n, dirHalf)), 40.0f);
    color += vec4(k, k, k, 0.0f);
}
// Varying handed to the fragment shader.
[varying]
vec4 v_Color;

// Vertex shader entry point.
void main()
{
    // Forward the per-vertex color.
    v_Color = a_Color;

    // gl_Position is the built-in clip-space output: multiply the vertex by
    // the model-view-projection matrix to get normalized screen coordinates.
    gl_Position = u_MVPMatrix * a_Position;
}
// Eye-space normal handed to the fragment shader.
vec3 v_Normal;

// Vertex shader entry point: emits eye-space position, color, eye-space
// normal, and the final clip-space position.
void main()
{
    // Vertex position transformed into eye space.
    v_Position = vec3(u_MVMatrix * a_Position);

    // Forward the per-vertex color.
    v_Color = a_Color;

    // Normal orientation rotated into eye space (w = 0 ignores translation).
    v_Normal = vec3(u_MVMatrix * vec4(a_Normal, 0.0f));

    // Final clip-space position in normalized screen coordinates.
    gl_Position = u_MVPMatrix * a_Position;
}
// Vertex shader: renders points with ambient plus one directional light.
void main()
{
    // Hard-coded lighting parameters (ambient gray, white-ish key light from +y).
    vec3 uAmbientColor = vec3(0.2f, 0.2f, 0.2f);
    vec3 uDirectionalColor = vec3(0.8f, 0.8f, 0.8f);
    vec3 uLightingDirection = vec3(0f, 1f, 0f);

    gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0f);
    gl_PointSize = 2.0f;

    //vLightWeighting = vec3(1.0, 1.0, 1.0);

    // FIX: a normal is a direction, so its homogeneous w must be 0. The
    // original used w = 1.0, which lets the translation column of uNMatrix
    // leak into the transformed normal and skew the lighting.
    vec4 transformedNormal = uNMatrix * vec4(aVertexNormal.xyz, 0.0f);

    // Lambertian weight, clamped so back-facing normals get ambient only.
    float directionalLightWeighting = max(dot(transformedNormal.xyz, uLightingDirection), 0.0f);
    vLightWeighting = uAmbientColor + uDirectionalColor * directionalLightWeighting;
    vColor = aVertexColor;
}
// Vertex shader: time-driven sinusoidal x/y displacement plus a scrolling
// texture v coordinate to suggest movement.
void main()
{
    vec3 displaced = aVertexPosition;

    // Offset x and y with waves keyed to time and the z position.
    displaced.x += cos(fTime + (aVertexPosition.z / 4.0f));
    displaced.y += sin(fTime + (aVertexPosition.z / 4.0f));

    // Project the displaced vertex.
    gl_Position = uPMatrix * uMVMatrix * vec4(displaced, 1.0f);

    // Forward the vertex color.
    vColor = aVertexColor;

    // Scroll the texture's y (v) coordinate with time.
    vec2 tc = aTextureCoord;
    tc.y = tc.y + (fTime);
    vTextureCoord = tc;
}
// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151016/azimuthal
// Azimuthal projection: converts the fragment position into scaled spherical
// coordinates (azimuth, distance) and samples iChannel0 there.
void mainImage(out vec4 fragColor, vec2 fragCoord)
{
    const float pi2 = 6.283185307179586476925286766559f;

    // Opaque black is used outside the projected sphere surface.
    vec4 c = vec4(0.0f, 0.0f, 0.0f, 1.0f);

    // Texture coordinate = scaled spherical coordinates.
    vec2 uv = default(vec2);

    float a, d; // azimuth, distance
    d = length(fragCoord);

    if (d < 1.0) // inside projected sphere surface
    {
        // Azimuth, wrapped into [0, 2*pi).
        a = atan(-fragCoord.x, fragCoord.y);
        if (a < 0.0) a += pi2;
        if (a > pi2) a -= pi2;

        uv.x = a / pi2;
        uv.y = d;
        c = texture2D(iChannel0, uv);
    }

    fragColor = c;
}
// Varying handed to the fragment shader; interpolated across the triangle.
vec4 v_Color;

// Vertex shader: per-vertex diffuse point lighting with distance attenuation.
void main()
{
    // Vertex position transformed into eye space.
    vec3 eyeVertex = vec3(u_MVMatrix * a_Position);

    // Normal orientation rotated into eye space (w = 0 ignores translation).
    vec3 eyeNormal = vec3(u_MVMatrix * vec4(a_Normal, 0.0f));

    // Light-to-vertex distance, used below for attenuation.
    float distance = length(u_LightPos - eyeVertex);

    // Unit vector pointing from the vertex toward the light.
    vec3 lightVector = normalize(u_LightPos - eyeVertex);

    // Lambertian term with a 0.1 floor: maximum illumination when the normal
    // and light vector point the same way.
    float diffuse = max(dot(eyeNormal, lightVector), 0.1f);

    // Quadratic distance attenuation.
    diffuse = diffuse * (1.0f / (1.0f + (0.25f * distance * distance)));

    // Scale the color by the illumination; interpolated across the triangle.
    v_Color = a_Color * diffuse;

    // gl_Position is the built-in clip-space output: multiply the vertex by
    // the model-view-projection matrix to get normalized screen coordinates.
    gl_Position = u_MVPMatrix * a_Position;
}
// Signed distance from point p to the plane with unit normal n.xyz and
// offset n.w. n must be normalized.
float sdPlane(vec3 p, vec4 n)
{
    return dot(p, n.xyz) + n.w;
}
// Fragment entry point: raymarched scene with an animated camera, lighting,
// height-weighted fog, and gamma/contrast post-processing.
void mainImage(out vec4 fragColor, vec2 fragCoord)
{
    // Screen position normalized so y spans [-1, 1].
    vec2 p = (-iResolution.xy + 2.0f * fragCoord.xy) / iResolution.y;
    vec2 m = iMouse.xy / iResolution.xy;

    //-----------------------------------------------------
    // camera
    //-----------------------------------------------------

    // Camera movement: animated position and target.
    vec3 ro, ta;
    doCamera(out ro, out ta, iGlobalTime, m.x);

    // Camera matrix; 0.0 is the camera roll.
    mat3 camMat = calcLookAtMatrix(ro, ta, 0.0f);

    // View ray; 2.0 is the lens length.
    vec3 rd = normalize(camMat * vec3(p.xy, 2.0f));

    //-----------------------------------------------------
    // render
    //-----------------------------------------------------

    vec3 col = doBackground();

    // Raymarch; a negative t means "no hit".
    float t = calcIntersection(ro, rd);
    if (t > -0.5)
    {
        // Hit geometry: position and surface normal.
        vec3 pos = ro + t * rd;
        vec3 nor = calcNormal(pos);

        // Shade the material, then blend toward the background by fog.
        vec3 mal = doMaterial(pos, nor);
        col = mix(doLighting(pos, nor, rd, t, mal), col,
            fog(t, max(0.0f, 0.2f - pos.y * 0.3f)));
    }

    //-----------------------------------------------------
    // postprocessing
    //-----------------------------------------------------

    // Gamma, per-channel contrast shaping, and a mild corner boost.
    col = pow(clamp(col, 0.0f, 1.0f), vec3(0.4545f));
    col.g = smoothstep(0.0f, 1.05f, col.g);
    col.r = smoothstep(0.1f, 1.1f, col.r);
    col *= 1.0f + dot(p, p * 0.08f);

    fragColor = vec4(col, 1.0f);
}
// Projective texture lookup with an explicit level of detail — stub: this
// backend does not implement it, so any call throws NotImplementedException.
protected vec4 texture2DProjLod(sampler2D sampler, vec4 coord, float lod) { throw new NotImplementedException(); }
// Component-wise first-order Taylor approximation of inversesqrt(r),
// commonly used by simplex-noise implementations.
vec4 taylorInvSqrt(vec4 r)
{
    return 1.79284291400159f - 0.85373472095314f * r;
}
// Component-wise permutation polynomial: (34*x + 1)*x mod 289.
vec4 permute(vec4 x)
{
    return mod(((x * 34.0f) + 1.0f) * x, 289.0f);
}
// Component-wise x mod 289, expressed with floor instead of the mod builtin.
vec4 mod289(vec4 x)
{
    return x - floor(x * (1.0f / 289.0f)) * 289.0f;
}
// Component-wise permutation polynomial (34*x + 1)*x, reduced mod 289 via
// the mod289 helper.
vec4 permute(vec4 x)
{
    return mod289(((x * 34.0f) + 1.0f) * x);
}
// Signed distance from point p to the plane (n.xyz, n.w).
// n must be normalized.
float sdPlane(vec3 p, vec4 n) => dot(p, n.xyz) + n.w;
// GLSL-style vec3(vec4) constructor — presumably truncates v to its xyz
// components (TODO confirm against the runtime) — stub: this backend does
// not implement it, so any call throws NotImplementedException.
static protected vec3 vec3(vec4 v) { throw new NotImplementedException(); }
//void mainImage(out vec4 fragColor, in vec2 fragCoord)
// Fragment entry point: places two spheres, builds an orbiting pinhole
// camera, and raytraces the scene through trace().
void mainImage(out vec4 fragColor, vec2 fragCoord)
{
    vec3 resolution = iResolution;

    // Screen coordinate remapped to [-1, 1], aspect-corrected on x.
    vec2 uv = gl_FragCoord.xy / resolution.xy;
    uv = 2.0f * uv - 1.0f;
    uv.x *= resolution.x / resolution.y;

    // Mouse remapped into the same [-1, 1] aspect-corrected space.
    vec2 m = iMouse.xy / resolution.xy;
    m = 2.0f * m - 1.0f;
    m.x *= resolution.x / resolution.y;

    // Scene: two spheres with materials mee and me.
    es[0] = new Esfera { center = vec3(3.0f, -1.0f, 0.0f), r = 2.0f, id = 0, m = mee };
    es[1] = new Esfera { center = vec3(-3.0f, -1.0f, -5.0f), r = 2.0f, id = 0, m = me };

    // Camera orbiting on x over time, looking at the origin.
    vec3 at = vec3(0.0f);
    vec3 eye = vec3(6.0f * 2.0f * sin(0.5f * iGlobalTime), 5, 10);
    vec3 look = normalize(at - eye);
    vec3 up = vec3(0.0f, 1.0f, 0.0f);

    // Camera basis vectors and per-axis ray spread (30 degrees each side).
    vec3 ww = cross(look, up);
    vec3 vv = cross(ww, look);
    vec3 dx = tan(radians(30.0f)) * ww;
    vec3 dy = tan(radians(30.0f)) * vv;

    // Mouse scales the eye position in x/y.
    eye.xy *= abs(m.xy);

    Ray R = new Ray { origin = eye, direction = normalize(look + dx * uv.x + dy * uv.y) };

    vec3 col = trace(R);
    fragColor = vec4(col, 1.0f);
}
// Minimal vertex shader: project the vertex and forward its color.
void main()
{
    gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0f);
    vColor = aVertexColor;
}
// Fragment entry point: raymarched scene with an animated camera,
// background, lighting, and gamma post-processing.
void mainImage(out vec4 fragColor, [In] vec2 fragCoord)
{
    //planeDistance = sin(iGlobalTime);

    // Scene spacing constant consumed elsewhere in the shader.
    offset = 2.0f * sqrt(2.0f) / sqrt(24.0f);

    // Screen position normalized so y spans [-1, 1].
    vec2 p = (-iResolution.xy + 2.0f * fragCoord.xy) / iResolution.y;
    vec2 m = iMouse.xy / iResolution.xy;

    //-----------------------------------------------------
    // camera
    //-----------------------------------------------------

    // Camera movement: animated position and target.
    vec3 ro, ta;
    doCamera(out ro, out ta, iGlobalTime, m);

    // Camera matrix; 0.0 is the camera roll.
    mat3 camMat = calcLookAtMatrix(ro, ta, 0.0f);

    // View ray; 2.0 is the lens length.
    vec3 rd = normalize(camMat * vec3(p.xy, 2.0f));

    //-----------------------------------------------------
    // render
    //-----------------------------------------------------

    vec3 col = doBackground();

    // Raymarch; a negative t means "no hit".
    float t = calcIntersection(ro, rd);
    if (t > -0.5)
    {
        // Hit geometry, then materials and lighting.
        vec3 pos = ro + t * rd;
        vec3 nor = calcNormal(pos);
        vec3 mal = doMaterial(pos, nor);
        col = doLighting(pos, nor, rd, t, mal);
    }

    //-----------------------------------------------------
    // postprocessing
    //-----------------------------------------------------

    // Gamma correction.
    col = pow(clamp(col, 0.0f, 1.0f), vec3(0.5f));

    fragColor = vec4(col, 1.0f);
}