Shader Problem

Started by AGP, January 22, 2014, 04:58:53 PM

Previous topic - Next topic

AGP

Sorry about the shader question. I made a slight change to this in order to update it with the camera's position. Ray, of course, is always the center of the ocean plane. All I get is a black plane in jpct. The first method is how I update the shader (once per gameloop iteration, naturally).


     public void update(SimpleVector cameraPosition) {
// Called once per game-loop iteration: advance the shader's animation clock
// by a fixed 0.1 step and push the current camera position into the shader.
// NOTE(review): timeF grows without bound — presumably fine for sin/noise
// animation, but confirm precision is acceptable for long sessions.
shader.setUniform("iGlobalTime", (timeF+=.1f));
shader.setUniform("pos", cameraPosition);
     }



// License Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.

uniform vec3 iResolution;
uniform vec3 iChannelResolution[4];
uniform vec3 pos;
uniform vec3 ray;
uniform float iGlobalTime;
uniform float iChannelTime[4];
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
uniform sampler2D iChannel2;
uniform sampler2D iChannel3;

const float tau = 6.28318530717958647692;

// Gamma correction
#define GAMMA (2.2)

vec3 ToLinear( in vec3 col ) {
	// Simulate a monitor: decode stored colour values into linear light.
	const vec3 exponent = vec3(GAMMA);
	return pow( col, exponent );
}

vec3 ToGamma( in vec3 col ) {
	// Inverse of ToLinear: encode linear light back into display colour
	// values so the correct intensities come out of the monitor.
	float invGamma = 1.0/GAMMA;
	return pow( col, vec3(invGamma) );
}

vec3 localRay;

// Set up a camera looking at the scene.
// origin - camera is positioned relative to, and looking at, this point
// distance - how far camera is from origin
// rotation - about x & y axes, by left-hand screw rule, relative to camera looking along +z
// zoom - the relative length of the lens
void CamPolar( out vec3 pos, out vec3 ray, in vec3 origin, in vec2 rotation, in float distance, in float zoom ) {
// get rotation coefficients
vec2 c = vec2(cos(rotation.x),cos(rotation.y));
vec4 s;
s.xy = vec2(sin(rotation.x),sin(rotation.y)); // worth testing if this is faster as sin or sqrt(1.0-cos);
s.zw = -s.xy; // negated sines, used below so each swizzle picks the right sign

// ray in view space
// Build the per-pixel view ray from the fragment's screen coordinate:
// centred on the screen, with z scaled by resolution*zoom (lens length).
ray.xy = gl_FragCoord.xy - iResolution.xy*.5;
ray.z = iResolution.y*zoom;
ray = normalize(ray);
localRay = ray; // keep the untransformed (view-space) ray for the vignette in main

// rotate ray
// Apply the x rotation (pitch) then the y rotation (yaw) via 2D rotations
// on the yz and xz planes.
ray.yz = ray.yz*c.xx + ray.zy*s.zx;
ray.xz = ray.xz*c.yy + ray.zx*s.yw;

// position camera
// Place the camera 'distance' away from origin, opposite the view direction.
pos = origin - distance*vec3(c.x*s.y,s.z,c.x*c.y);
}

// Noise functions, distinguished by variable types

vec2 Noise( in vec3 x ) {
// 3D value noise, two channels at once.  The 3D lattice is packed into the
// 2D noise texture by offsetting the xy coordinate by (37,17) per z slice;
// the texture's x/z and y/w channels hold the values for adjacent slices.
    vec3 p = floor(x);
    vec3 f = fract(x);
// cubic smoothstep fade curve
f = f*f*(3.0-2.0*f);
// vec3 f2 = f*f; f = f*f2*(10.0-15.0*f+6.0*f2);

vec2 uv = (p.xy+vec2(37.0,17.0)*p.z) + f.xy;

// hardware interpolation lacks precision
// vec4 rg = texture2D( iChannel0, (uv+0.5)/256.0, -100.0 );
// ...so do the bilinear blend of the four neighbouring texels by hand.
// The large negative bias keeps the lookup away from mipmapped levels.
vec4 rg = mix( mix(
texture2D( iChannel0, (floor(uv)+0.5)/256.0, -100.0 ),
texture2D( iChannel0, (floor(uv)+vec2(1,0)+0.5)/256.0, -100.0 ),
fract(uv.x) ),
  mix(
texture2D( iChannel0, (floor(uv)+vec2(0,1)+0.5)/256.0, -100.0 ),
texture2D( iChannel0, (floor(uv)+1.5)/256.0, -100.0 ),
fract(uv.x) ),
fract(uv.y) );
 

// interpolate between the two packed z slices
return mix( rg.yw, rg.xz, f.z );
}

vec4 Noise( in vec2 x ) {
	// 2D value noise: warp the fractional part with a cubic smoothstep fade
	// and let the hardware bilinear filter of the 256x256 noise texture do
	// the interpolation between lattice values.
	vec2 cell = floor(x.xy);
	vec2 t = fract(x.xy);
	t = t*t*(3.0-2.0*t);
	// vec3 f2 = f*f; f = f*f2*(10.0-15.0*f+6.0*f2);
	vec2 lookup = cell.xy + t.xy;
	return texture2D( iChannel0, (lookup+0.5)/256.0, -100.0 );
}

vec4 Noise( in ivec2 x ) {
	// Unfiltered fetch of a single texel from the 256x256 noise texture,
	// sampling exactly at the texel centre.
	vec2 texelCentre = vec2(x) + 0.5;
	return texture2D( iChannel0, texelCentre/256.0, -100.0 );
}

vec2 Noise( in ivec3 x ) {
	// Integer-lattice 3D noise fetch: z slices are packed into the 2D noise
	// texture via a (37,17) xy offset per slice; return the x/z channels.
	vec2 sliceOffset = vec2(37.0,17.0)*float(x.z);
	vec2 uv = vec2(x.xy) + sliceOffset;
	return texture2D( iChannel0, (uv+0.5)/256.0, -100.0 ).xz;
}

float Waves( vec3 pos ) {
// Coarse ocean height field: 6 octaves of folded value noise, animated
// over time.  Returns the wave height at pos (positive above mean level).
pos *= .2*vec3(1,1,1);

const int octaves = 6;
float f = 0.0;

// need to do the octaves from large to small, otherwise things don't line up
// (because I rotate by 45 degrees on each octave)
pos += iGlobalTime*vec3(0,.1,.1); // scroll the field over time
for ( int i=0; i < octaves; i++ ) {
// rotate the domain 45 degrees, then add a folded (abs) noise octave
pos = (pos.yzx + pos.zyx*vec3(1,-1,1))/sqrt(2.0);
f  = f*2.0+abs(Noise(pos).x-.5)*2.0;
pos *= 2.0;
}
// normalise the octave sum back to ~[0,1]
f /= exp2(float(octaves));

return (.5-f)*1.0;
}

float WavesDetail( vec3 pos ) {
// Same wave field as Waves() but with 8 octaves instead of 6 — the extra
// detail is used for normal computation (see OceanNormal).
pos *= .2*vec3(1,1,1);

const int octaves = 8;
float f = 0.0;

// need to do the octaves from large to small, otherwise things don't line up
// (because I rotate by 45 degrees on each octave)
pos += iGlobalTime*vec3(0,.1,.1); // scroll the field over time
for ( int i=0; i < octaves; i++ ) {
// rotate the domain 45 degrees, then add a folded (abs) noise octave
pos = (pos.yzx + pos.zyx*vec3(1,-1,1))/sqrt(2.0);
f  = f*2.0+abs(Noise(pos).x-.5)*2.0;
pos *= 2.0;
}
// normalise the octave sum back to ~[0,1]
f /= exp2(float(octaves));

return (.5-f)*1.0;
}

float WavesSmooth( vec3 pos ) {
// Low-octave, softened variant of Waves(): only 2 octaves, and the sharp
// abs() fold is replaced with sqrt(x^2+eps) so the crest creases are
// rounded off.  NOTE(review): not called from any code visible here.
pos *= .2*vec3(1,1,1);

const int octaves = 2;
float f = 0.0;

// need to do the octaves from large to small, otherwise things don't line up
// (because I rotate by 45 degrees on each octave)
pos += iGlobalTime*vec3(0,.1,.1); // scroll the field over time
for ( int i=0; i < octaves; i++ ) {
// rotate the domain 45 degrees per octave
pos = (pos.yzx + pos.zyx*vec3(1,-1,1))/sqrt(2.0);
//f  = f*2.0+abs(Noise(pos).x-.5)*2.0;
// smooth approximation of abs(): sqrt(x^2 + .01)
f  = f*2.0+sqrt(pow(Noise(pos).x-.5,2.0)+.01)*2.0;
pos *= 2.0;
}
// normalise the octave sum back to ~[0,1]
f /= exp2(float(octaves));

return (.5-f)*1.0;
}

float WaveCrests( vec3 ipos ) {
// Foam mask in [0,1] for the wave crests at ipos: a coarse octave band
// followed by a fine band, dithered per-pixel and sharpened with
// smoothstep+pow.  Higher return value = more foam.
vec3 pos = ipos;
pos *= .2*vec3(1,1,1);

const int octaves1 = 6;
const int octaves2 = 16;
float f = 0.0;

// need to do the octaves from large to small, otherwise things don't line up
// (because I rotate by 45 degrees on each octave)
pos += iGlobalTime*vec3(0,.1,.1); // scroll the field over time
vec3 pos2 = pos;
for ( int i=0; i < octaves1; i++ ) {
// coarse band: rotate 45 degrees per octave, accumulate folded noise
pos = (pos.yzx + pos.zyx*vec3(1,-1,1))/sqrt(2.0);
f  = f*1.5+abs(Noise(pos).x-.5)*2.0;
pos *= 2.0;
}
// fine band: restart from the saved position at the coarse band's end
// scale, drifting only along y over time
pos = pos2 * exp2(float(octaves1));
pos.y = -.05*iGlobalTime;
for ( int i=octaves1; i < octaves2; i++ ) {
pos = (pos.yzx + pos.zyx*vec3(1,-1,1))/sqrt(2.0);
f  = f*1.5+pow(abs(Noise(pos).x-.5)*2.0,1.0);
pos *= 2.0;
}
f /= 1500.0;

// per-pixel dither: break up banding along the foam edge
f -= Noise(ivec2(gl_FragCoord.xy)).x*.01;

return pow(smoothstep(.4,-.1,f),6.0);
}


vec3 Sky( vec3 ray ) {
	// Flat overcast sky: a constant grey-blue regardless of direction.
	// The ray parameter is kept for interface compatibility but unused.
	const vec3 skyColour = vec3(.4,.45,.5);
	return skyColour;
}


float OceanDistanceField( vec3 pos ) {
	// Height of pos above the coarse animated wave surface — positive
	// above water, negative below.  Used as the raymarch distance estimate.
	float surfaceHeight = Waves(pos);
	return pos.y - surfaceHeight;
}

float OceanDistanceFieldDetail( vec3 pos ) {
	// Same as OceanDistanceField but against the higher-octave wave field;
	// used for normal estimation where extra detail matters.
	float surfaceHeight = WavesDetail(pos);
	return pos.y - surfaceHeight;
}

vec3 OceanNormal( vec3 pos ) {
	// Estimate the surface normal as the normalized gradient of the detailed
	// distance field, via central differences.  The step size grows with
	// distance from the origin so far-away samples stay stable.
	vec2 e = vec2(.01*length(pos),0);

	vec3 gradient;
	gradient.x = OceanDistanceFieldDetail( pos+e.xyy )-OceanDistanceFieldDetail( pos-e.xyy );
	gradient.y = OceanDistanceFieldDetail( pos+e.yxy )-OceanDistanceFieldDetail( pos-e.yxy );
	gradient.z = OceanDistanceFieldDetail( pos+e.yyx )-OceanDistanceFieldDetail( pos-e.yyx );

	return normalize(gradient);
}

float TraceOcean( vec3 pos, vec3 ray ) {
// March along 'ray' from 'pos' using the distance field as step size.
// Returns the hit distance t, or 0.0 if the ray misses the ocean.
float h = 1.0;
float t = 0.0;
for ( int i=0; i < 100; i++ ) {
// stop when close enough to the surface or past the far limit
if ( h < .01 || t > 100.0 )
break;
h = OceanDistanceField( pos+t*ray );
t += h;
}

// if the march never converged onto the surface, treat it as a miss
if ( h > .1 )
return 0.0;

return t;
}

vec3 ShadeOcean( vec3 pos, vec3 ray ) {
// Shade a point on the ocean surface: Fresnel-weighted blend of the sky
// reflection over a deep under-sea colour, with white foam on the crests.
// 'pos' is the surface hit point, 'ray' the (normalized) view direction.
vec3 norm = OceanNormal(pos);
float ndotr = dot(ray,norm);

// Schlick-style Fresnel approximation: grazing angles reflect more.
float fresnel = pow(1.0-abs(ndotr),5.0);

vec3 reflectedRay = ray-2.0*norm*ndotr;

// NOTE(review): the original also computed a refracted ray and a
// 'crackFudge' constant, but neither contributed to the output colour;
// both were dead code and have been removed.

// reflection
vec3 reflection = Sky(reflectedRay);

vec3 col = vec3(0,.04,.04); // under-sea colour

col = mix( col, reflection, fresnel );

// foam
col = mix( col, vec3(1), WaveCrests(pos) );

return col;
}

void main(void) {
// 'pos' (camera position) and 'ray' (view direction) are supplied as
// uniforms instead of being derived by CamPolar (commented out below).
// vec2 camRot = vec2(.5,.5)+vec2(-.35,4.5)*(iMouse.yx/iResolution.yx);
// vec3 pos, ray;
// CamPolar( pos, ray, vec3(0), camRot, 3.0, 1.0 );

// BUG FIX: localRay is only ever written inside CamPolar, which is no
// longer called — so the vignette below multiplied the result by
// smoothstep(.35, 1.0, 0.0) == 0 and the whole plane rendered black.
// Reconstruct the per-pixel view-space ray here exactly as CamPolar
// does (with zoom = 1.0).
localRay.xy = gl_FragCoord.xy - iResolution.xy*.5;
localRay.z = iResolution.y;
localRay = normalize(localRay);

// NOTE(review): for a correct 3d scene 'ray' should also vary per pixel
// (e.g. localRay transformed by a camera matrix uniform); a constant
// 'ray' traces the same point for every fragment — confirm in host app.
float to = TraceOcean( pos, ray );

vec3 result;
if ( to > 0.0 )
result = ShadeOcean( pos+ray*to, ray );
else result = Sky( ray );

// vignette effect
result *= 1.1*smoothstep( .35, 1.0, localRay.z );

// gamma-encode for display
gl_FragColor = vec4(ToGamma(result),1.0);
}

EgonOlsen

Most likely the values that you are giving it for cam and ray aren't the ones that they are supposed to be. I'm not sure what ray is supposed to be, but from the name of it, i would assume that it's the view vector, i.e. the (normalized) vector from the camera to the plane's center, not the center itself.

AGP

I see what you mean, but help me out, please. What would you pass it?

EgonOlsen



shader.setUniform("ray", plane.getTransformedCenter().calcSub(cameraPosition).normalize());


..or something. But that's just a guess based on the name and the expectation that there has to be some view vector in the equation.

AGP

That had been my guess, too. Alas, it's still black. Very frustrating.

AGP

#5
Do you happen to have another suggestion?

And, by the way, could you make a setUniform method for type samplerCube (which from what I can see would be your SkyBox class)?

EgonOlsen

...not sure. The shader is pretty complex. I have no idea what cam and ray are supposed to be exactly. But if it works without setting them from the outside, the issue has to be that they are not the values they are supposed to be when setting them. I would somehow start from there. If nothing else helps, i would convert the shader method that calculates them to Java and see what the actual outcome is to get a feel for it.
About cube mapping: There's no point in adding such a uniform to a shader if the backend itself doesn't support cube mapping and i have no plans nor time to add support for it anytime soon, i'm afraid.

AGP

#7
I found this other shader that seems to need it. I think it uses it to blend the colors of the ocean with the sky.


//http://members.gamedev.net/rootevilgames/mwhite/OceanWater.htm

uniform samplerCube skyDome;
uniform sampler2D water;

uniform vec4 waterColour;

varying mat3 tanSpace;

void main(void) {
   // Water fragment shader: blend a cube-mapped sky reflection over a base
   // water colour, output with a fixed translucency alpha.
   // Average the interpolated reflection vector (gl_TexCoord[1]) with the
   // water normal-map sample, then unpack from [0,1] into [-1,1].
   vec3 ref = ((gl_TexCoord[1] + texture2D(water, gl_TexCoord[0].xy)) * 0.5).xyz;
   ref *= 2.0;
   ref -= vec3(1.0, 1.0, 1.0);
   
   // transform the reflection vector by the tangent-space basis built in
   // the vertex shader before the cube-map lookup
   ref = tanSpace * ref;
 
   // blend the sky-dome sample toward/away from the base colour; the blend
   // weight is the vector's z alignment scaled by waterColour.w
   vec4 sample = waterColour
                   + ( (textureCube(skyDome, ref) - waterColour)
                   * (dot(ref,vec3(0.0,0.0,1.0)) * waterColour.w));
   
   
   
   // fixed alpha for water translucency
   gl_FragColor = vec4( sample.xyz, 0.825);
}


uniform float fTime0_X;
uniform float waveHeight;
uniform vec2 windDir;
uniform float roughness;

uniform vec4 vViewPosition;

varying mat3 tanSpace;

void main(void) {
   // Water vertex shader: displace a flat grid with two travelling sine
   // waves (driven by wind direction and time), compute a packed reflection
   // vector and a tangent-space basis for the fragment shader.
   float height = sin( 1.0 * (gl_Vertex.x + (windDir.x * fTime0_X)));
   height += 1.0;
   // shift to [0,2] and sharpen the crest shape with the roughness exponent
   height = pow( max(0.0, height), roughness);
   
   // second, much lower-frequency wave along y
   float height2 = sin( 0.01 * (gl_Vertex.y + (windDir.y * fTime0_X)));
   height2 += 1.0;
   height2 = pow( max(0.0, height2), roughness);

   
   vec4 pos = gl_Vertex;
   // average the two waves and scale by the configured wave height
   pos.z = waveHeight * ((height + height2) / 2.0);
   
   // note the xzyw swizzle: the grid is authored in the xy plane
   gl_Position = gl_ModelViewProjectionMatrix * pos.xzyw;
   
   
   
   // reflection vector about the up axis, packed from [-1,1] into [0,1]
   // so it survives interpolation; unpacked again in the fragment shader
   vec4 ref = normalize(reflect( (vViewPosition - pos.xzyw), vec4(0.0, 0.0, 1.0, 1.0)));
   ref += vec4(1.0,1.0,1.0,1.0);
   ref *= 0.5;
   
   gl_TexCoord[1] = ref;
   
   // scrolling, tiled normal-map coordinates
   gl_TexCoord[0].xy = 4.0 * (gl_MultiTexCoord0.yx + vec2(0.0, fTime0_X * 0.01));
   
   //
   //   Find Surface Normal
   // analytic derivatives (cosines) of the two height waves give the
   // binormal and tangent directions
   vec3 binormal = normalize(vec3( cos(1.0 * (gl_Vertex.x + (windDir.x * fTime0_X))),
                         1.0,
                         0.0));
                         
   vec3 tangent = normalize(
                              vec3( 0.0,
                                    1.0,
                                     0.01 * cos(0.01 * (gl_Vertex.y + (windDir.y * fTime0_X))))
                         );
   vec3 normal = cross(binormal, tangent);
   // pack the normal from [-1,1] into [0,1]
   normal += vec3(1.0,1.0,1.0);
   normal *= 0.5;
   
   tanSpace = mat3( vec3(1.0, 0.0, 0.0)
                     , normal,
                     vec3(0.0, 0.0, 1.0));
}

EgonOlsen

Yes, most likely. But to inject this uniform isn't the problem. That's a simple int just like with sampler2D. However, i would have to change a lot in the engine to actually support cube mapping and as said, i have no plans to do so ATM.

AGP

I posted a question on Shader Toy. Let's hope that the author answers it.

AGP

And here is his response:

Quote
Ray needs to be different for each pixel. If you look at the code generating ray you can see it's based on gl_FragCoord.

You'd want something like ray = localRay*cameraMatrix (with localRay computed the way it is now), then pass cameraMatrix as a uniform variable.

So from what I understand we have to further multiply our current ray with the camera matrix. The part I don't get is why I would have to pass the cameraMatrix as a uniform variable and how I would do that. Also, is the multiplication ray.matMul(camera.getProjectionMatrix())?

EgonOlsen

You can pass a matrix to a uniform...that's not the problem. I'm not sure which matrix he's talking about though. It might be the projection matrix. Or maybe it's the model view matrix (in which case you already have it via gl_ModelViewMatrix in the shader). It seems to me that you then have to calculate the ray in the shader as before like in CamPolar


ray.xy = gl_FragCoord.xy - iResolution.xy*.5;
ray.z = iResolution.y*zoom;
ray = normalize(ray);
localRay = ray;


(that would be localRay then) and transform it by using that matrix (that seems to replace the rest that CamPolar does). The ray that you are passing into the uniform seems not to be needed. In addition, you seem to need "pos" which is the camera's position.

This is all wild guessing...this shader doesn't seem to be made to be used in an actual 3d scene. I would scrap it...

AGP

Every shader in ShaderToy is a scene of their own. It would be much more useful if this weren't the case. But why would you scrap it if it can come to work and it looks great?

EgonOlsen

Quote from: AGP on January 24, 2014, 06:03:30 PM
Every shader in ShaderToy is a scene of their own. It would be much more useful if this weren't the case. But why would you scrap it if it can come to work and it looks great?
I never understood how stuff like ShaderToy works without a proper 3d scene behind it...now i know...it doesn't... :P. If that's the way it is, there has to be some guide on how to convert the results to something less detached from the real world.
Why i would scrap it? Because it's pretty complicated and i personally think that it's impossible to adopt a shader of which you don't know how to set it up correctly. Anyway, this is what i would do if i had to:


  • try to figure out which matrix he means when talking about "cameraMatrix"
  • provide that matrix and pass it into the shader
  • pass the camera position to pos
  • calculate localRay like above
  • multiply localRay with that matrix to calculate ray
  • hope for the best, but expect the worst...

EgonOlsen

BTW: When passing matrices and vectors to a shader, keep in mind that the coordinate systems differ. For a vector, you have to negate y and z. For a matrix, you can use Matrix.transformToGL().