void main()
{
    // Forward the mesh UV to the fragment stage.
    vUv = uv;

    // Standard model-view-projection transform of the vertex.
    gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0f);
}
void main()
{
    // Eye-space position, kept in a varying and reused for projection.
    vPosition = uMVMatrix * vec4(aVertexPosition, 1.0f);
    gl_Position = uPMatrix * vPosition;

    // Pass the texture coordinate through; rotate the normal into eye space.
    vTextureCoord = aTextureCoord;
    vTransformedNormal = uNMatrix * aVertexNormal;
}
void mainImage(out vec4 fragColor, vec2 fragCoord)
{
    // Normalized screen coordinate in [0,1].
    vec2 uv = fragCoord.xy / iResolution.xy;

    // Remap to longitude [-pi, pi] and latitude [-pi/2, pi/2].
    vec2 angles = ((uv * 2.0f) - vec2(1.0f)) * vec2(3.1415926535897932384626433832795f, 1.5707963267948966192313216916398f);

    // Direction on the unit sphere for the equirectangular lookup.
    vec3 dir = vec3(cos(angles.y) * cos(angles.x), sin(angles.y), cos(angles.y) * sin(angles.x));

    fragColor = textureCube(iChannel0, dir);
}
/// <summary>
/// This is a javascript application.
/// </summary>
/// <param name="page">HTML document rendered by the web server which can now be enhanced.</param>
public Application(IApp page)
{
    // Build a sample vector and surface it in the document title.
    var sample = new vec2 { x = 3, y = 4 };
    sample.ToString().ToDocumentTitle();

    // Send data from JavaScript to the server tier; the reply is echoed
    // back into the document title.
    service.WebMethod2(
        @"A string from JavaScript.",
        value => value.ToDocumentTitle()
    );
}
vec2 v_TexCoordinate; // This will be passed into the fragment shader.

// The entry point for our vertex shader.
void main()
{
    // Vertex position transformed into eye space.
    v_Position = vec3(u_MVMatrix * a_Position);

    // Texture coordinate is passed through untouched.
    v_TexCoordinate = a_TexCoordinate;

    // Normals rotate but do not translate, hence w = 0.
    v_Normal = vec3(u_MVMatrix * vec4(a_Normal, 0.0f));

    // Final position in normalized screen coordinates.
    gl_Position = u_MVPMatrix * a_Position;
}
// Map a uv pair onto the surface of a sphere of radius r
// (uv.x sweeps the full azimuth, uv.y the polar angle).
vec3 computeSpherePosition(vec2 uv, float r)
{
    var point = default(vec3);

    float azimuth = uv.x * PI * 2.0f;
    float polar = uv.y * PI;

    // Standard spherical-to-Cartesian conversion.
    point.x = r * sin(polar) * cos(azimuth);
    point.y = r * sin(polar) * sin(azimuth);
    point.z = r * cos(polar);

    return point;
}
void main()
{
    gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0f);
    vTextureCoord = aTextureCoord;

    if (uUseLighting)
    {
        // One directional light plus an ambient term.
        vec3 transformedNormal = uNMatrix * aVertexNormal;
        float directionalLightWeighting = max(dot(transformedNormal, uLightingDirection), 0.0f);
        vLightWeighting = uAmbientColor + uDirectionalColor * directionalLightWeighting;
    }
    else
    {
        // Lighting disabled: render at full brightness.
        vLightWeighting = vec3(1.0f, 1.0f, 1.0f);
    }
}
void main()
{
    vec3 displaced = aVertexPosition;

    // Wiggle x/y over time, phase-shifted by the vertex's z position.
    displaced.x += cos(fTime + (aVertexPosition.z / 4.0f));
    displaced.y += sin(fTime + (aVertexPosition.z / 4.0f));

    // Transform the displaced vertex into clip space.
    gl_Position = uPMatrix * uMVMatrix * vec4(displaced, 1.0f);

    // Vertex color passes through unchanged.
    vColor = aVertexColor;

    // Scroll the texture's y (v) coordinate over time to fake movement.
    vec2 scrolled = aTextureCoord;
    scrolled.y = scrolled.y + (fTime);
    vTextureCoord = scrolled;
}
void main()
{
    vec4 eyePosition = uMVMatrix * vec4(aVertexPosition, 1.0f);
    gl_Position = uPMatrix * eyePosition;
    vTextureCoord = aTextureCoord;

    if (uUseLighting)
    {
        // Point light: the direction depends on the vertex's eye-space position.
        vec3 lightDirection = normalize(uPointLightingLocation - eyePosition.xyz);
        vec3 transformedNormal = uNMatrix * aVertexNormal;
        float directionalLightWeighting = max(dot(transformedNormal, lightDirection), 0.0f);
        vLightWeighting = uAmbientColor + uPointLightingColor * directionalLightWeighting;
    }
    else
    {
        // Lighting disabled: full brightness.
        vLightWeighting = vec3(1.0f, 1.0f, 1.0f);
    }
}
// https://sites.google.com/a/jsc-solutions.net/work/knowledge-base/15-dualvr/20151016/azimuthal
void mainImage(out vec4 fragColor, vec2 fragCoord)
{
    const float pi2 = 6.283185307179586476925286766559f;

    // Opaque black for pixels outside the projected sphere.
    vec4 color = vec4(0.0f, 0.0f, 0.0f, 1.0f);

    vec2 uv = default(vec2); // texture coord = scaled spherical coordinates

    float azimuth;
    float dist;

    dist = length(fragCoord);

    if (dist < 1.0) // inside projected sphere surface
    {
        azimuth = atan(-fragCoord.x, fragCoord.y);

        // Wrap the azimuth into [0, 2*pi).
        if (azimuth < 0.0) azimuth += pi2;
        if (azimuth > pi2) azimuth -= pi2;

        uv.x = azimuth / pi2;
        uv.y = dist;
        color = texture2D(iChannel0, uv);
    }

    fragColor = color;
}
void main()
{
    // Screen position remapped to [-1, 1].
    vec2 position = -1.0f + 2.0f * gl_FragCoord.xy / resolution.xy;

    // Polar coordinates of the pixel.
    float angle = atan(position.y, position.x);
    float radius = sqrt(dot(position, position));

    // Inverse-radius mapping produces the tunnel look; scroll over time.
    var uv = new vec2();
    uv.x = cos(angle) / radius;
    uv.y = sin(angle) / radius;
    uv /= 10.0f;
    uv += time * 0.05f;

    vec3 color = texture2D(texture, uv).rgb;

    // Scale brightness by the radius so the center darkens.
    gl_FragColor = vec4(color * radius * 1.5f, 1.0f);
}
/// <summary>
/// GLSL-style vec4 constructor taking two vec2 halves.
/// The managed body always throws — presumably this is a placeholder that a
/// shader cross-compiler maps to the GLSL built-in (TODO confirm; never call from .NET).
/// </summary>
static protected vec4 vec4(vec2 x, vec2 y) { throw new NotImplementedException(); }
/// <summary>
/// GLSL-style 2D texture sampler. The managed body always throws —
/// presumably a placeholder that a shader cross-compiler maps to the
/// GLSL texture2D built-in (TODO confirm; never call from .NET).
/// </summary>
protected vec4 texture2D(sampler2D sampler, vec2 coord) { throw new NotImplementedException(); }
void main()
{
    // Clip-space position via the usual projection * model-view product.
    gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0f);

    // Texture coordinate passes straight through.
    vTextureCoord = aTextureCoord;
}
/// <summary>
/// GLSL-style vec4 constructor from a vec2 plus two scalars (xy, z, w).
/// The managed body always throws — presumably a placeholder mapped to the
/// GLSL built-in by a shader cross-compiler (TODO confirm; never call from .NET).
/// </summary>
static protected vec4 vec4(vec2 x, float y, float z) { throw new NotImplementedException(); }
// Hash/noise helper: folds a 2D point to a scalar via a fixed dot product,
// then defers to the float overload of r.
// NOTE(review): this is not self-recursion — dot(...) yields a float, so it
// selects an r(float) overload declared elsewhere in this file; confirm that
// overload exists and terminates.
float r(vec2 n) { return r(dot(n, vec2(2.46f, -1.21f))); }
//void mainImage(out vec4 fragColor, in vec2 fragCoord)
void mainImage(out vec4 fragColor, vec2 fragCoord)
{
    vec3 resolution = iResolution;

    // Pixel in [-1,1], corrected for aspect ratio.
    vec2 uv = gl_FragCoord.xy / resolution.xy;
    uv = 2.0f * uv - 1.0f;
    uv.x *= resolution.x / resolution.y;

    // Mouse remapped into the same aspect-corrected range.
    vec2 m = iMouse.xy / resolution.xy;
    m = 2.0f * m - 1.0f;
    m.x *= resolution.x / resolution.y;

    // Two spheres in the scene with their materials.
    es[0] = new Esfera { center = vec3(3.0f, -1.0f, 0.0f), r = 2.0f, id = 0, m = mee };
    es[1] = new Esfera { center = vec3(-3.0f, -1.0f, -5.0f), r = 2.0f, id = 0, m = me };

    // Camera orbits the origin over time.
    vec3 at = vec3(0.0f);
    vec3 eye = vec3(6.0f * 2.0f * sin(0.5f * iGlobalTime), 5, 10);
    vec3 look = normalize(at - eye);
    vec3 up = vec3(0.0f, 1.0f, 0.0f);

    // Orthogonal view basis, then a 30-degree half-angle frustum.
    vec3 ww = cross(look, up);
    vec3 vv = cross(ww, look);
    vec3 dx = tan(radians(30.0f)) * ww;
    vec3 dy = tan(radians(30.0f)) * vv;

    // Mouse modulates the eye position.
    eye.xy *= abs(m.xy);

    Ray R = new Ray
    {
        origin = eye,
        direction = normalize(look + dx * uv.x + dy * uv.y)
    };

    vec3 col = trace(R);
    fragColor = vec4(col, 1.0f);
}
/// <summary>
/// GLSL-style vec3 constructor from a scalar plus a vec2.
/// The managed body always throws — presumably a placeholder mapped to the
/// GLSL built-in by a shader cross-compiler (TODO confirm; never call from .NET).
/// NOTE(review): the first parameter is named "z" although by position it
/// should supply the x component — verify against the translator's convention.
/// </summary>
static protected vec3 vec3(float z, vec2 v) { throw new NotImplementedException(); }
void main()
{
    // Position is 2D; lift to clip space with z = 0, w = 1.
    gl_Position = vec4(position, 0.0f, 1.0f);

    // Texture coordinate: x passes through, y is scaled by h.
    tc = vec2(position.x, position.y * h);
}
void main()
{
    vUv = uv;

    // World-space position and normal, used to build the reflection vector
    // from the camera toward the surface.
    vec4 mPosition = modelMatrix * vec4(position, 1.0f);
    vec3 nWorld = normalize(mat3(modelMatrix[0].xyz, modelMatrix[1].xyz, modelMatrix[2].xyz) * normal);
    vReflect = normalize(reflect(normalize(mPosition.xyz - cameraPosition), nWorld));

    pos = position;

    //float noise = .3 * pnoise( 8.0 * vec3( normal ) );
    float noise = 10.0f * -.10f * turbulence(.5f * normal + time);
    //float noise = - stripes( normal.x + 2.0 * turbulence( normal ), 1.6 );

    // Displacement along the normal: weighted turbulence plus a
    // low-frequency periodic-noise term animated over time.
    float displacement = -weight * noise;
    displacement += 5.0f * pnoise(0.05f * position + vec3(2.0f * time), vec3(100.0f));

    // Expose the turbulence value to the fragment shader (varying "ao").
    ao = noise;

    vec3 newPosition = position + normal * vec3(displacement);

    // FIX: the original wrote gl_Position from the undisplaced position first
    // and then overwrote it here — that first store was dead and was removed.
    gl_Position = projectionMatrix * modelViewMatrix * vec4(newPosition, 1.0f);
}
void mainImage(out vec4 fragColor, vec2 fragCoord)
{
    // Pixel mapped to [-1,1] vertically, aspect-preserving horizontally.
    vec2 p = (-iResolution.xy + 2.0f * fragCoord.xy) / iResolution.y;
    vec2 mouse = iMouse.xy / iResolution.xy;

    //-----------------------------------------------------
    // camera
    //-----------------------------------------------------

    // Animated camera position/target, then a look-at basis (0.0 = roll).
    vec3 rayOrigin, target;
    doCamera(out rayOrigin, out target, iGlobalTime, mouse.x);
    mat3 camMat = calcLookAtMatrix(rayOrigin, target, 0.0f);

    // View ray through this pixel; 2.0 is the lens length.
    vec3 rayDir = normalize(camMat * vec3(p.xy, 2.0f));

    //-----------------------------------------------------
    // render
    //-----------------------------------------------------

    vec3 color = doBackground();

    // Raymarch the scene; a negative t signals "no hit".
    float t = calcIntersection(rayOrigin, rayDir);
    if (t > -0.5)
    {
        // Hit geometry: recover position and normal.
        vec3 hitPos = rayOrigin + t * rayDir;
        vec3 hitNor = calcNormal(hitPos);

        // Shade the surface and blend toward the background with fog.
        vec3 material = doMaterial(hitPos, hitNor);
        color = mix(doLighting(hitPos, hitNor, rayDir, t, material), color, fog(t, max(0.0f, 0.2f - hitPos.y * 0.3f)));
    }

    //-----------------------------------------------------
    // postprocessing
    //-----------------------------------------------------

    // Gamma correction, per-channel tone shaping, and a radial brightness lift.
    color = pow(clamp(color, 0.0f, 1.0f), vec3(0.4545f));
    color.g = smoothstep(0.0f, 1.05f, color.g);
    color.r = smoothstep(0.1f, 1.1f, color.r);
    color *= 1.0f + dot(p, p * 0.08f);

    fragColor = vec4(color, 1.0f);
}
void main()
{
    // Position is already in clip space; just add w = 1.
    gl_Position = vec4(aPos, 1.0f);

    // Pass the texture coordinate through.
    vTexCoord = aTexCoord;
}