
Tiling textures within an atlas by wrapping UVs within frag shader, getting artifacts

Discussion in 'Shaders' started by Iron-Warrior, Jun 12, 2018.

  1. Iron-Warrior

    Iron-Warrior

    Joined:
    Nov 3, 2009
    Posts:
    838
    Well I did my best to make the title descriptive.

    I am attempting to write a shader that allows textures to tile within a texture atlas. It works by storing the UV coords in one texture coordinate channel and, in another channel, embedding the x, y, width, and height of the texture's rectangle within the atlas. The fragment shader reads this data and wraps the texture coordinates within that rectangle. Below is my shader. Note that it is also two-sided, which is why there's some extra data in there.

    Code (csharp):
    Shader "Lit/Two Sided Two UVs Texture Atlas"
    {
        Properties
        {
            _FrontColor("Front Color", Color) = (1,1,1,1)
            _BackColor("Back Color", Color) = (1,1,1,1)
            _MainTex("Texture", 2D) = "white" {}
            _Padding("Padding", Range(0, 0.1)) = 0.0005
        }
        SubShader
        {
            Tags
            {
                "LightMode" = "ForwardBase"
                "Queue" = "Geometry"
            }

            Cull Off

            Pass
            {
                CGPROGRAM
                #pragma vertex vert
                #pragma fragment frag
                #pragma target 3.0

                #include "UnityCG.cginc"
                #include "Lighting.cginc"

                struct appdata
                {
                    float4 vertex : POSITION;
                    float3 normal : NORMAL;
                    // TEXCOORD0 is the front UVs packed into xy
                    // and the back UVs packed into zw.
                    float4 uv : TEXCOORD0;
                    // Rectangles are packed into these in the form
                    // x position, y position, width, height.
                    float4 frontRect : TEXCOORD1;
                    float4 backRect : TEXCOORD2;
                };

                struct v2f
                {
                    float4 vertex : SV_POSITION;
                    float3 normal : NORMAL;
                    float4 uv : TEXCOORD0;
                    float4 frontRect : TEXCOORD1;
                    float4 backRect : TEXCOORD2;
                };

                v2f vert (appdata v)
                {
                    v2f o;
                    o.vertex = UnityObjectToClipPos(v.vertex);
                    o.normal = v.normal;
                    o.uv = v.uv;
                    o.frontRect = v.frontRect;
                    o.backRect = v.backRect;
                    return o;
                }

                float4 _FrontColor;
                float4 _BackColor;

                float _Padding;

                sampler2D _MainTex;

                float4 frag (v2f i, fixed facing : VFACE) : SV_Target
                {
                    // Flip the normal for back faces.
                    i.normal *= facing;

                    float3 worldNormal = normalize(mul((float3x3)UNITY_MATRIX_M, i.normal));

                    float NdotL = dot(worldNormal, _WorldSpaceLightPos0.xyz);
                    float intensity = saturate(NdotL);

                    float4 color = facing > 0 ? _FrontColor : _BackColor;

                    // Select the correct UV coordinate and bounding box.
                    float2 uv = facing > 0 ? i.uv.xy : i.uv.zw;
                    float4 rect = facing > 0 ? i.frontRect : i.backRect;

                    // Loop this UV coordinate within its rectangle's bounds.
                    uv.x = (uv.x % (rect.z - _Padding * 2)) + rect.x + _Padding;
                    uv.y = (uv.y % (rect.w - _Padding * 2)) + rect.y + _Padding;

                    float4 sample = tex2D(_MainTex, uv);
                    float4 diffuse = intensity * _LightColor0;

                    // Ambient contribution from spherical harmonics.
                    float3 ambient = ShadeSH9(float4(worldNormal, 1));

                    return (diffuse + float4(ambient, 1)) * (color * sample);
                }
                ENDCG
            }
        }
    }
    Here is the script I am using to generate the quad this renders on:

    Code (csharp):
    using UnityEngine;
    using System.Collections.Generic;

    [ExecuteInEditMode]
    public class ExperimentTextureAtlas : MonoBehaviour
    {
        [SerializeField]
        Vector2 frontScale = Vector2.one;

        [SerializeField]
        Vector2 backScale = Vector2.one;

        private void Update()
        {
            Mesh m = new Mesh();
            m.vertices = new Vector3[] { Vector3.zero, Vector3.up, Vector3.up + Vector3.right, Vector3.right };

            // Data is in the form Vector4(frontUV.x, frontUV.y, backUV.x, backUV.y)
            m.SetUVs(0, new List<Vector4>()
            {
                new Vector4(0 * frontScale.x, 0 * frontScale.y, 0 * backScale.x, 0 * backScale.y),
                new Vector4(0 * frontScale.x, 1 * frontScale.y, 0 * backScale.x, 1 * backScale.y),
                new Vector4(1 * frontScale.x, 1 * frontScale.y, 1 * backScale.x, 1 * backScale.y),
                new Vector4(1 * frontScale.x, 0 * frontScale.y, 1 * backScale.x, 0 * backScale.y),
            });

            // Data is in the form Rect(x, y, width, height)
            m.SetUVs(1, new List<Vector4>()
            {
                new Vector4(0, 0, 0.5f, 1),
                new Vector4(0, 0, 0.5f, 1),
                new Vector4(0, 0, 0.5f, 1),
                new Vector4(0, 0, 0.5f, 1)
            });

            m.SetUVs(2, new List<Vector4>()
            {
                new Vector4(0.5f, 0, 0.375f, 0.5f),
                new Vector4(0.5f, 0, 0.375f, 0.5f),
                new Vector4(0.5f, 0, 0.375f, 0.5f),
                new Vector4(0.5f, 0, 0.375f, 0.5f),
            });

            m.triangles = new int[] { 0, 1, 2, 2, 3, 0 };
            m.RecalculateNormals();

            GetComponent<MeshFilter>().mesh = m;
        }
    }
    Note that it should be attached to an object with a mesh filter, and a mesh renderer with a material that implements the above shader. I've attached below the texture I am testing with.

    Everything works, but unfortunately I'm getting some artifacts at the seams. I'm aware that I should expect some issues when tiling textures within an atlas due to mipmapping and filtering, but I've added some padding (hence why the texture cuts off in the image below) and I still get these artifacts.

    [Attached image: upload_2018-6-12_10-6-38.png — screenshot showing the seam artifacts on the test quad]

    I really don't know whether the issue is in my math or in how the modulo operator behaves in HLSL, but I'm hoping someone has encountered this before and has some ideas.

    Thanks,
    Erik
     


  2. bgolus

    bgolus

    Joined:
    Dec 7, 2012
    Posts:
    12,342
    The problem is in how GPUs calculate which mip map to use.

    The short version: GPUs pick a texture's mip level based on how much the UVs change between one on-screen pixel and the pixels next to it. This is done per 2x2-pixel quad across the screen, per primitive coverage (i.e. for every pixel quad each tri covers). This change of a value between pixels is known as the pixel derivative. When you use modulo (%) you get a sudden break in the UVs where they jump a large amount between two pixels, so the GPU thinks it needs a much lower-resolution mip level for that group of pixels. The easy solution is to disable mip maps, but that's an ugly fix, as the textures will alias under minification (when the texture is displayed on screen at a lower resolution than the texture itself).
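
    To make that concrete, here is a hedged numeric sketch of the seam (the 0.5 tile width matches the front rect in the script above; the per-pixel UV step is just an illustrative value):

    // Two horizontally adjacent pixels straddling the wrap point, assuming
    // rect.z = 0.5, _Padding = 0, and a typical per-pixel UV step of ~0.001:
    float uvLeft  = 0.4995 % 0.5;   // = 0.4995
    float uvRight = 0.5005 % 0.5;   // = 0.0005
    // ddx sees a change of about -0.4990 instead of +0.001, so the GPU assumes
    // heavy minification for that 2x2 quad and samples a much smaller mip there.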

    The two real solutions are calculating the mip level yourself and using tex2Dlod, or using tex2Dgrad and supplying your own derivatives. The second option sounds scary, but it's quite easy:

    // starting uv
    float2 uv = facing > 0 ? i.uv.xy : i.uv.zw;

    // uv derivatives
    float2 uv_ddx = ddx(uv);
    float2 uv_ddy = ddy(uv);

    // apply modulo wrap to uv
    uv.x = (uv.x % (rect.z - _Padding * 2)) + rect.x + _Padding;
    uv.y = (uv.y % (rect.w - _Padding * 2)) + rect.y + _Padding;

    // sample texture supplying pre-wrapped uv derivatives
    float4 sample = tex2Dgrad(_MainTex, uv, uv_ddx, uv_ddy);


    Using tex2Dlod requires a bit more code, but the basic version is that you feed those same derivatives into a function that calculates the LOD using the same math the GPU would. This post has the relevant function, taken from the OpenGL documentation and written in GLSL, but the math is the same.
    https://www.opengl.org/discussion_b...ng-dFdx-dFdy?p=1236952&viewfull=1#post1236952
    Note that the input texture_coordinate in that code snippet is the uv * texture resolution.
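
    As a rough HLSL sketch of that approach (CalcMipLevel is an illustrative name, not from the linked post, and it assumes _MainTex_TexelSize is declared in the pass; its zw components are Unity's built-in texture width/height):

    // Same math the hardware uses: LOD comes from the largest screen-space
    // change of the texel coordinate (uv * texture resolution).
    float CalcMipLevel(float2 texelCoord)
    {
        float2 dx = ddx(texelCoord);
        float2 dy = ddy(texelCoord);
        float deltaMaxSqr = max(dot(dx, dx), dot(dy, dy));
        return max(0.0, 0.5 * log2(deltaMaxSqr));
    }

    // In the fragment shader: compute the LOD from the pre-wrap uv, apply the
    // modulo wrap from the shader above, then sample with an explicit LOD.
    float lod = CalcMipLevel(uv * _MainTex_TexelSize.zw);
    // ...modulo wrap of uv into the rect goes here...
    float4 sample = tex2Dlod(_MainTex, float4(uv, 0, lod));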

    One nice advantage of tex2Dlod is that you can easily clamp the mip level to prevent it from going too small, which is useful when using an atlas, but you lose anisotropic filtering. You can actually do the same clamping with tex2Dgrad by limiting the length of the derivatives (a sketch follows below). Generally speaking, unless you're seeing significant artifacts on objects in the distance or at near edge-on angles, you can ignore this.
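
    For reference, a minimal sketch of that derivative clamp (the 4-texels-per-pixel cap is an arbitrary example value, not something from the post above, and it again assumes _MainTex_TexelSize is available):

    // Cap the length of each derivative so the selected mip never drops below
    // roughly 4 texels per pixel.
    float maxDerivLen = 4.0 * _MainTex_TexelSize.x;  // expressed in UV units
    float lenX = length(uv_ddx);
    float lenY = length(uv_ddy);
    if (lenX > maxDerivLen) uv_ddx *= maxDerivLen / lenX;
    if (lenY > maxDerivLen) uv_ddy *= maxDerivLen / lenY;
    float4 sample = tex2Dgrad(_MainTex, uv, uv_ddx, uv_ddy);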
     
    Iron-Warrior likes this.
  3. Iron-Warrior

    Iron-Warrior

    Joined:
    Nov 3, 2009
    Posts:
    838
    Excellent, thanks very much bgolus! That also explains exactly how ddx and ddy are made available to the frag shader (I've used them before for flat shading, but not for anything else).