Search Unity

Question Hybrid Renderer V2 - Scaling issues when using custom raymarching shader to render entities

Discussion in 'Graphics for ECS' started by nanobot_games, Jun 11, 2021.

  1. nanobot_games

    nanobot_games

    Joined:
    Jul 22, 2019
    Posts:
    10
    I was hoping someone might be able to help as it's been frustrating me to no end!

    I am drawing shapes using raymarching and it works fine normally and scales with the object size, but as soon as I add a convert to entity component on the game object, it renders it on a tiny scale within the mesh boundary - and does not change in size when I modify the scale of the gameobject that is converted.

    I am adding the material with my shader to a sphere with Scale of (90,9,90) as an example.


    The shader is below:
    Code (CSharp):
    1. /* -*- mode:Shader coding:utf-8-with-signature -*-
    2. */
    3. Shader "Custom/capture_zone_shader"
    4. {
    5.     Properties
    6.     {
    7.         _BaseMap("Base Map", 2D) = "white" {}
    8.         _Threshold("Cutout threshold", Range(0,1)) = 0.1
    9.     }
    10.     SubShader
    11.     {
    12.         Tags
    13.         {
    14.             "RenderType" = "Opaque" "IgnoreProjector" = "True" "RenderPipeline" = "UniversalPipeline"
    15.             "DisableBatching" = "False"
    16.  
    17.             "ShaderModel" = "4.5"
    18.         }
    19.         HLSLINCLUDE
    20.         #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl"
    21.  
    22.         TEXTURE2D(_BaseMap);
    23.         SAMPLER(sampler_BaseMap);
    24.  
    25.         CBUFFER_START(UnityPerMaterial)
    26.         sampler2D _MainTex;
    27.         float _Threshold;
    28.         float4 _BaseMap_ST;
    29.         CBUFFER_END
    30.         ENDHLSL
    31.  
    32.         Pass
    33.         {
    34.             Name "Unlit"
    35.             ZWrite[_ZWrite]
    36.             Cull Off
    37.             ZTest Always
    38.             Fog
    39.             {
    40.                 Mode Off
    41.             }
    42.             Blend SrcAlpha OneMinusSrcAlpha // alpha blending
    43.  
    44.             HLSLPROGRAM
    45.             #pragma prefer_hlslcc gles
    46.             #pragma exclude_renderers d3d11_9x
    47.  
    48.             #pragma target 4.5
    49.  
    50.             #pragma vertex vert
    51.             #pragma fragment frag
    52.  
    53.             #pragma multi_compile_fog
    54.             #pragma multi_compile_instancing
    55.             #pragma multi_compile _ DOTS_INSTANCING_ON
    56.  
    57.             sampler2D _GrabTexture;
    58.  
    59.             #define SURFACE_DISTANCE 1e-3
    60.             #define UNITY_PROJ_COORD(a) a
    61.  
    62.             float4 ObjectToClipPos(float3 pos)
    63.             {
    64.                 return mul(UNITY_MATRIX_VP, mul(UNITY_MATRIX_M, float4(pos, 1)));
    65.                 //float4 clipPos = mul(UNITY_MATRIX_VP, mul(unity_ObjectToWorld, float4(pos, 1.0)));
    66.                 //return clipPos;
    67.             }
    68.  
    69.             float4 ComputeNonStereoScreenPosition(float4 pos)
    70.             {
    71.                 float4 o = pos * 0.5f;
    72.                 o.xy = float2(o.x, o.y * _ProjectionParams.x) + o.w;
    73.                 o.zw = pos.zw;
    74.                 return o;
    75.             }
    76.  
    77.             inline float3 ToLocal(float3 pos)
    78.             {
    79.                 return mul(unity_WorldToObject, float4(pos, 1.0)).xyz;
    80.  
    81.                 //return mul(unity_WorldToObject, float4(pos.xyz, 1));
    82.             }
    83.  
    84.             inline float3 ToWorld(float3 pos)
    85.             {
    86.                 return mul(unity_ObjectToWorld, float4(pos, 1.0)).xyz;
    87.             }
    88.  
    89.             inline float3 GetCameraPosition() { return UNITY_MATRIX_I_V._m03_m13_m23; }
    90.             inline float3 GetCameraForward() { return -UNITY_MATRIX_V[2].xyz; }
    91.  
    92.  
    93.             inline float EncodeDepthCS(float4 pos)
    94.             {
    95.                 float z = pos.z / pos.w;
    96.                 #if defined(SHADER_API_GLCORE) || \
    97.     defined(SHADER_API_OPENGL) || \
    98.     defined(SHADER_API_GLES) || \
    99.     defined(SHADER_API_GLES3)
    100.     return z * 0.5 + 0.5;
    101.                 #else
    102.                 return z;
    103.                 #endif
    104.             }
    105.  
    106.             inline float EncodeDepthWS(float3 positionWS)
    107.             {
    108.                 float4 positionCS = TransformWorldToHClip(positionWS);
    109.                 return EncodeDepthCS(positionCS);
    110.             }
    111.  
    112.             inline float GetCameraFarClip() { return _ProjectionParams.z; }
    113.             inline float GetCameraNearClip() { return _ProjectionParams.y; }
    114.             inline float GetCameraFocalLength() { return abs(UNITY_MATRIX_P[1][1]); }
    115.  
    116.             inline float GetDistanceFromCameraToNearClipPlane(float4 projPos)
    117.             {
    118.                 projPos.xy /= projPos.w;
    119.                 projPos.xy = (projPos.xy - 0.5) * 2.0;
    120.                 projPos.x *= _ScreenParams.x / _ScreenParams.y;
    121.                 float3 norm = normalize(float3(projPos.xy, GetCameraFocalLength()));
    122.                 return GetCameraNearClip() / norm.z;
    123.             }
    124.  
    125.  
    126.             float GetDensity(float3 p) //density calculation, actual code will go in here
    127.             {
    128.                 return 0.95;
    129.             }
    130.  
    131.             float sdCone(in float2 p, in float2 q)
    132.             {
    133.                 // c is the sin/cos of the angle, h is height
    134.                 // Alternatively pass q instead of (c,h),
    135.                 // which is the point at the base in 2D
    136.  
    137.                 //vec2 w = vec2( length(p.x), p.y );
    138.                 float2 w = float2(length(p.xy), p.y);
    139.                 float2 a = w - q * clamp(dot(w, q) / dot(q, q), 0.0, 1.0);
    140.                 float2 b = w - q * float2(clamp(w.x / q.x, 0.0, 1.0), 1.0);
    141.                 float k = sign(q.y);
    142.                 float d = min(dot(a, a), dot(b, b));
    143.                 float s = max(k * (w.x * q.y - w.y * q.x), k * (w.y - q.y));
    144.                 return sqrt(d) * sign(s);
    145.             }
    146.  
    147.             float smin0(float a, float b, float k)
    148.             {
    149.                 float x = exp(-k * a);
    150.                 float y = exp(-k * b);
    151.                 return (a * x + b * y) / (x + y);
    152.             }
    153.  
    154.             float4 BoundaryDistanceFunction(float3 p)
    155.             {
    156.                 float an = 6.283185 / 12.0;
    157.                 float sector = round(atan2(p.z, p.x) / an);
    158.                 float angrot = sector * an;
    159.                 float3 q = p;
    160.  
    161.                 float resultx = q.x * cos(angrot) + q.z * sin(angrot);
    162.  
    163.                 float resultz = q.x * -sin(angrot) + q.z * cos(angrot);
    164.  
    165.                 q.xz = float2(resultx, resultz);
    166.  
    167.                 //petals
    168.                 float petalPosition = 0.46;
    169.                 float petalWidth = 0.007;
    170.                 float petalLength = 0.0211;
    171.                 //0.071
    172.                 float d = sdCone(q.xz - float2(petalPosition, 0.0), float2(petalLength, petalWidth)) - 0.003;
    173.                 // ring
    174.  
    175.                 //make this smaller and they move further out
    176.                 float widthOfTorus = 0.009;
    177.                 float torusDiameter = 0.5;
    178.  
    179.                 float d2 = abs(length(p.xz) - torusDiameter) - widthOfTorus;
    180.  
    181.                 d = min(d, d2);
    182.                 d = max(d, abs(p.y) - 0.03);
    183.                 return d;
    184.  
    185.                 //return float4(d, p);
    186.             }
    187.  
    188.             inline float DistanceFunction(float3 pos)
    189.             {
    190.                 pos = ToLocal(pos);
    191.                 float d = BoundaryDistanceFunction(pos);
    192.                 return d;
    193.             }
    194.  
    195.             inline float _DistanceFunction(float3 pos)
    196.             {
    197.                 return DistanceFunction(pos);
    198.             }
    199.  
    200.             float smax(float a, float b, float k)
    201.             {
    202.                 return smin0(a, b, -k);
    203.             }
    204.  
    205.             inline bool ShouldRaymarchExit(float lastDistanceToSurface, float minDistance, float maxDistance,
    206.                                            float totalLength, float3 endPosition)
    207.             {
    208.                 if (lastDistanceToSurface < minDistance || totalLength > maxDistance) return true;
    209.  
    210.                 return false;
    211.             }
    212.  
    213.             //raymarch until ray intersects with SDF sphere, record that point in p and then collect density
    214.             float VolumetricRaymarch(float3 ro, float3 rd, float maxdepth)
    215.             {
    216.                 float3 endPosition = ro;
    217.  
    218.                 float lastDistanceToSurface = 0.0;
    219.  
    220.                 float totalLength = length(endPosition - GetCameraPosition()); //total distance travelled along the ray
    221.  
    222.                 float minDistance = SURFACE_DISTANCE;
    223.                 float multiplier = 1.0;
    224.                 float accumulatedOcclusion = 0.0;
    225.                 float3 localRayDir = normalize(mul(unity_WorldToObject, rd));
    226.                 multiplier *= length(mul(unity_ObjectToWorld, localRayDir));
    227.  
    228.                 //first raymarcher seeks point on a SDF surface
    229.             //this is required to ensure consistent sampling in second raymarch, independent of camera world position
    230.                 UNITY_LOOP
    231.                 for (int i = 0; i < 60; i++) //finite count of loops. tweak the number of steps to minimum
    232.                 {
    233.                     lastDistanceToSurface = DistanceFunction(endPosition) * multiplier;
    234.                     //get distance to surface of primitive found along the ray
    235.  
    236.                     totalLength += lastDistanceToSurface; //advance along the ray
    237.  
    238.                     endPosition += rd * lastDistanceToSurface; //Initially it is set to the origin + (0.0)
    239.  
    240.                     if (ShouldRaymarchExit(lastDistanceToSurface, minDistance, maxdepth, totalLength, endPosition))
    241.                         break;
    242.                 }
    243.  
    244.                 if (!(lastDistanceToSurface < minDistance)) return 0.0;
    245.  
    246.                 float lengthToPolySurface = length(ro - GetCameraPosition());
    247.                 if (totalLength - lengthToPolySurface < SURFACE_DISTANCE)
    248.                 {
    249.                     accumulatedOcclusion = EncodeDepthWS(ro) * 200.0; //200.0 For testing to ensure it is occluded
    250.                 }
    251.                 else
    252.                 {
    253.                     accumulatedOcclusion = EncodeDepthWS(endPosition) * 200.0;
    254.                     //200.0 For testing to ensure it is occluded
    255.                 }
    256.                 return accumulatedOcclusion; //return collected occlusion along the ray
    257.             }
    258.  
    259.  
    260.             struct Attributes
    261.             {
    262.                 float4 positionOS : POSITION;
    263.  
    264.                 float2 uv : TEXCOORD0;
    265.                 UNITY_VERTEX_INPUT_INSTANCE_ID
    266.             };
    267.  
    268.             struct Varyings
    269.             {
    270.                 float4 pos : SV_POSITION;
    271.  
    272.                 float2 uv : TEXCOORD0;
    273.                 float3 positionWS : TEXCOORD1;
    274.                 float4 positionSS : TEXCOORD2;
    275.  
    276.  
    277.                 UNITY_VERTEX_INPUT_INSTANCE_ID
    278.  
    279.                 UNITY_VERTEX_OUTPUT_STEREO
    280.             };
    281.  
    282.             struct fragOutput
    283.             {
    284.                 float4 color : SV_Target;
    285.             };
    286.  
    287.             Varyings vert(Attributes IN)
    288.             {
    289.                 Varyings o;
    290.                 UNITY_SETUP_INSTANCE_ID(IN);
    291.                 UNITY_TRANSFER_INSTANCE_ID(IN, o);
    292.                 UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
    293.  
    294.                 VertexPositionInputs vertexInput = GetVertexPositionInputs(IN.positionOS.xyz);
    295.  
    296.                 o.pos = ObjectToClipPos(IN.positionOS.xyz);
    297.  
    298.                 o.positionWS = vertexInput.positionWS;
    299.  
    300.                 o.positionSS = ComputeNonStereoScreenPosition(o.pos);
    301.                 o.positionSS.z = -TransformWorldToView(o.positionWS).z;
    302.  
    303.                 o.uv = TRANSFORM_TEX(IN.uv, _BaseMap);
    304.  
    305.                 return o;
    306.             }
    307.  
    308.             fragOutput frag(Varyings i)
    309.             {
    310.                 UNITY_SETUP_INSTANCE_ID(i);
    311.                 // necessary only if any instanced properties are going to be accessed in the fragment Shader.
    312.  
    313.                 UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(i);
    314.  
    315.                 float3 cameraPosition = UNITY_MATRIX_I_V._m03_m13_m23;
    316.  
    317.                 float3 rayDir = normalize(i.positionWS - cameraPosition);
    318.  
    319.                 float3 cameraNearPlanePos = cameraPosition + GetDistanceFromCameraToNearClipPlane(i.positionSS) *
    320.                     rayDir;
    321.  
    322.                 float3 ro = cameraNearPlanePos;
    323.  
    324.                 float3 maxDepth = GetCameraFarClip();
    325.  
    326.                 //raymarch inside primitive and return occlusion factor
    327.                 float occlusion = VolumetricRaymarch(ro, rayDir, maxDepth);
    328.  
    329.                 fragOutput o;
    330.                 o.color = float4(i.uv, 0, occlusion);
    331.                 return o;
    332.             }
    333.             ENDHLSL
    334.         }
    335.     }
    336. }
     
  2. JussiKnuuttila

    JussiKnuuttila

    Unity Technologies

    Joined:
    Jun 7, 2019
    Posts:
    351
    Hybrid Renderer does not intentionally do anything special with the scale. Can you check with the entity debugger whether the transform matrix of the converted entity matches with what you would expect? Something I would also recommend is adding
    #pragma enable_d3d11_debug_symbols
    to your shader temporarily, and debug it using RenderDoc to find out exactly what is happening differently in the shader (e.g. does it get the wrong transform matrix for some reason?).
     
    nanobot_games likes this.
  3. nanobot_games

    nanobot_games

    Joined:
    Jul 22, 2019
    Posts:
    10
    In the entity debugger I am seeing the correct LocalToWorld matrix.
    However, when I debug with RenderDoc I am finding that the unity_ObjectToWorld matrix (defined in the UnityPerDraw cbuffer) is the identity matrix for the entity converted version, whereas it has the correct scaled values when I do not convert to entity. Should this be the case? And do I need to encode the scale in a separate material property block?
     
  4. JussiKnuuttila

    JussiKnuuttila

    Unity Technologies

    Joined:
    Jun 7, 2019
    Posts:
    351
    The UnityPerDraw cbuffer is unused by entities. It is used for built-in properties when rendering GameObjects, but Hybrid Renderer uses DOTS instancing for built-in properties, and if I remember correctly it does not use this cbuffer for anything. The UnityPerMaterial cbuffer is used normally for material properties which are not DOTS instanced.
     
  5. nanobot_games

    nanobot_games

    Joined:
    Jul 22, 2019
    Posts:
    10
    Thanks so much for your help! So to get the instanced transform matrix - would I have to create a job copying the LocalToWorld matrix to a DOTS IComponentData struct with a MaterialProperty attribute so that it is accessible in the shader?
     
  6. JussiKnuuttila

    JussiKnuuttila

    Unity Technologies

    Joined:
    Jun 7, 2019
    Posts:
    351
    Yes, if your matrix is in some custom data structure, then this is the easiest way. The DOTS LocalToWorld component is used as the GPU transform matrix by Hybrid Renderer.
     
  7. linfuqing

    linfuqing

    Joined:
    May 11, 2015
    Posts:
    166