Search Unity

Shader question - how to recalculate normals after vertices displacement?

Discussion in 'Shaders' started by unity_ycAc5ynYwYxpFQ, Jun 1, 2020.

  1. unity_ycAc5ynYwYxpFQ

    unity_ycAc5ynYwYxpFQ

    Joined:
    Sep 17, 2017
    Posts:
    8
    Hi, I am writing a shader to simulate waves on the ocean. I displaced the vertices without encountering any problems. But then, when I added diffuse light in the frag function, the color stayed the same across the whole mesh. This is because the model is a grid and the normals all point in the same direction. However, I don't know how to change the normals' direction once the vertices are displaced.


    Here is my code:

    Could you guys help me out?

    Code (CSharp):
    1. Shader "Unlit/Ocean"
    2. {
    3.     Properties
    4.     {
    5.         _MainTex ("Texture", 2D) = "white" {}
    6.         _Color ("Color", color) = (0,0,0,1)
    7.     }
    8.     SubShader
    9.     {
    10.         Tags { "RenderType"="Opaque" }
    11.         LOD 100
    12.  
    13.         Pass
    14.         {
    15.             CGPROGRAM
    16.             #pragma vertex vert
    17.             #pragma fragment frag
    18.  
    19.             #include "UnityCG.cginc"
    20.  
    21.             float4 _Color;
    22.  
    23.             struct VertexInput
    24.             {
    25.                 float4 vertex : POSITION;
    26.                 float3 normal : NORMAL;
    27.             };
    28.  
    29.             struct VertexOutput
    30.             {
    31.                 float4 vertex : SV_POSITION;
    32.                 float4 worldPos : TEXCOORD0;
    33.                 float3 normal : TEXCOORD1;
    34.             };
    35.  
    36.             VertexOutput vert (VertexInput input)
    37.             {
    38.                 VertexOutput output;
    39.                
    40.  
    41.                 // Displacing vertices
    42.                 input.vertex.z = sin(_Time * 100 + input.vertex.x*2)/5;
    43.  
    44.                 output.normal = normalize(input.normal);
    45.                 output.vertex = UnityObjectToClipPos(input.vertex);
    46.  
    47.                 // normals
    48.  
    49.  
    50.  
    51.                 return output;
    52.             }
    53.  
    54.             fixed4 frag (VertexOutput output) : SV_Target
    55.             {
    56.                 float3 lightDir = float3(1,1,1);
    57.  
    58.                 float diffuseLightFallOff = dot(lightDir, normalize(output.normal));
    59.  
    60.  
    61.                 float3 finalColor = diffuseLightFallOff * _Color;
    62.  
    63.  
    64.                 return float4(finalColor,1);
    65.             }
    66.             ENDCG
    67.         }
    68.     }
    69. }
     
  2. FlyingOreos

    FlyingOreos

    Joined:
    Mar 24, 2014
    Posts:
    12
    Two ways that I know of:

    1)
    Use partial derivatives in the fragment shader to calculate the normal.
    The resulting normal is in the same space as the position you provide:

    Code (CSharp):
    1.    float3 ddxPos = ddx(position);
    2.    float3 ddyPos = ddy(position)  * _ProjectionParams.x;
    3.    float3 normal = normalize( cross(ddxPos, ddyPos));
    What's happening here is that the GPU peeks into neighboring pixels to see the rates of change in position in the x and y direction of the screen. These changes can then be treated as two vectors, from which we can create a normal, using the cross product between them.

    2)
    Recalculate in the vertex shader. This tutorial covers it nicely.
    https://www.ronja-tutorials.com/2018/06/16/Wobble-Displacement.html
     
    CyrilGhys likes this.
  3. Przemyslaw_Zaworski

    Przemyslaw_Zaworski

    Joined:
    Jun 9, 2017
    Posts:
    328
    Example (blending an input shape, for example a sphere, into a cube) with normal reconstruction after vertex displacement in real time:

    Code (CSharp):
    1. // Morph input shape (for example Sphere) to cube, with normal reconstruction and diffuse lighting
    2.  
    3. Shader "Cubemorph"
    4. {
    5.     SubShader
    6.     {
    7.         Pass
    8.         {
    9.             CGPROGRAM
    10.             #pragma vertex VSMain
    11.             #pragma fragment PSMain
    12.            
    13.             float4 _LightColor0;
    14.            
    15.             float3 morph (float3 base)
    16.             {
    17.                 float3 offset = base;
    18.                 offset *= 2.0 / length(offset.xyz);
    19.                 offset = 0.5*clamp( offset.xyz, -1.0, 1.0 );
    20.                 return lerp(base,offset,sin(_Time.g)*0.5+0.5);
    21.             }
    22.            
    23.             void VSMain (inout float4 vertex:POSITION, inout float2 uv:TEXCOORD0, inout float3 normal:NORMAL, float4 tangent:TANGENT)
    24.             {
    25.                 float3 position = morph( vertex );
    26.                 float3 bitangent = cross( normal, tangent.xyz );
    27.                 float3 nt = ( morph( vertex + tangent.xyz * 0.01 ) - position );
    28.                 float3 nb = ( morph( vertex + bitangent * 0.01 ) - position );
    29.                 normal = cross( nt, nb );
    30.                 vertex = UnityObjectToClipPos(float4(position,vertex.w));
    31.             }
    32.            
    33.             float4 PSMain (float4 vertex:POSITION, float2 uv:TEXCOORD0, float3 normal:NORMAL) : SV_TARGET
    34.             {
    35.                 float3 LightDirection = normalize( _WorldSpaceLightPos0 );
    36.                 float3 NormalDirection = normalize(mul((float3x3)unity_ObjectToWorld,normal));
    37.                 float3 diffuse = max(dot(LightDirection, NormalDirection),0.0) * _LightColor0;
    38.                 return float4(diffuse, 1);
    39.             }
    40.             ENDCG
    41.         }
    42.     }
    43. }
     
  4. atcarter714

    atcarter714

    Joined:
    Jul 25, 2021
    Posts:
    65
    This is a bit old, but I wanted to share the pseudo-solution most professionals use for this. The trick is to generate "fake" neighbor vertex data to compute a well-approximated normal. What do I mean by "fake"? I mean you don't need the actual vertex information of the neighbors; instead, you predict where they will be.

    When you displace vertices, you're generally using something like a sine wave or a greyscale height/displacement map. So you simply sample around the heightmap pixel the vertex is on to figure out where the neighbors will be, or you look ahead or behind in the sine wave to guess where the neighbors will end up. Then you compute your normal based on that.

    You can also simply use a normal map if your rendering budget can afford it.
     
    Ascended_Fern and ehsan_wwe1 like this.