Hello. I have a problem with an outline shader on my dynamically generated mesh. I tried probably all of the available outline shaders, but the results were not rewarding. For example, I tried: https://github.com/westmark/unity-mesh-outline https://roystan.net/articles/outline-shader.html https://willweissman.wordpress.com/tutorials/shaders/unity-shaderlab-object-outlines/ https://www.febucci.com/2019/06/sprite-outline-shader/ etc. I attached an image to show 'my little progress'. The problem is that when two or more shapes overlap (these shapes touch each other by maybe one pixel — screenshot number two), the outline between those shapes is not rendered. I use the Vector Graphics package: https://forum.unity.com/threads/vector-graphics-preview-package.529845/ but this asset doesn't provide any normal information with the SVG-generated meshes, so maybe that is the reason for my problem. When I use this simple shader, my outline behaves weirdly. Code (CSharp):

// Upgrade NOTE: replaced 'mul(UNITY_MATRIX_MVP,*)' with 'UnityObjectToClipPos(*)'
// Two-pass outline shader: pass 1 draws an enlarged copy of the mesh tinted
// with _OutlineColor; pass 2 draws the object itself on top with _MainTex/_Color.
Shader "Custom/Outline"
{
    Properties
    {
        _MainTex("Main Texture (RGB)", 2D) = "white" {}        // texture for the object pass
        _Color("Color", Color) = (1,1,1,1)                     // tint for the object pass
        _OutlineTex("Outline Texture", 2D) = "white" {}        // texture for the outline pass
        _OutlineColor("Outline Color", Color) = (1,1,1,1)      // tint for the outline pass
        _OutlineWidth("Outline Width", Range(1.0,10.0)) = 1.1  // uniform scale factor for the outline copy
    }

    Subshader
    {
        Tags
        {
            "Queue" = "Transparent"
            // "DisableBatching" = "True"
        }

        // Pass 1: render the enlarged silhouette that shows around the object.
        Pass
        {
            Name "OUTLINE"
            ZWrite Off

            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"

            // Per-vertex input: object-space position and primary UV.
            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            // Vertex-to-fragment payload. SV_POSITION is required for
            // PlayStation and some other platforms.
            struct v2f
            {
                float4 pos : SV_POSITION;
                float2 uv : TEXCOORD0;
            };

            // Must match the ShaderLab property names above.
            float4 _OutlineColor;
            sampler2D _OutlineTex;
            float _OutlineWidth;

            v2f vert(appdata IN)
            {
                // Inflate the mesh by scaling every vertex position.
                // NOTE(review): this scales the whole mesh around its local
                // pivot rather than extruding along surface normals, so the
                // outline thickness is uneven unless the pivot sits at the
                // shape's center — likely part of the observed weirdness.
                IN.vertex.xyz *= _OutlineWidth;

                v2f OUT;
                OUT.pos = UnityObjectToClipPos(IN.vertex); // object space -> clip space
                OUT.uv = IN.uv;
                return OUT;
            }

            fixed4 frag(v2f IN) : SV_Target
            {
                // Sample the outline texture and tint it.
                float4 texColor = tex2D(_OutlineTex, IN.uv);
                return texColor * _OutlineColor;
            }
            ENDCG
        }

        // Pass 2: render the object itself on top of the outline copy.
        Pass
        {
            Name "OBJECT"

            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"

            // Per-vertex input: object-space position and primary UV.
            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            // Vertex-to-fragment payload.
            struct v2f
            {
                float4 pos : SV_POSITION;
                float2 uv : TEXCOORD0;
            };

            // Must match the ShaderLab property names above.
            float4 _Color;
            sampler2D _MainTex;

            v2f vert(appdata IN)
            {
                v2f OUT;
                OUT.pos = UnityObjectToClipPos(IN.vertex); // object space -> clip space
                OUT.uv = IN.uv;
                return OUT;
            }

            fixed4 frag(v2f IN) : SV_Target
            {
                // Sample the main texture and tint it.
                float4 texColor = tex2D(_MainTex, IN.uv);
                return texColor * _Color;
            }
            ENDCG
        }
    }
}

I'm a little confused and don't know what to do. Is it caused by the normals or something else? What should I change in my shader to make it work?
The problem occurred in a procedurally generated mesh. You can use this to check:

Code (CSharp):

using UnityEngine;

/// <summary>
/// Builds a cross-shaped 2D polygon mesh at startup so the outline problem
/// can be reproduced on a procedurally generated mesh.
/// </summary>
public class PolygonTester : MonoBehaviour
{
    void Start()
    {
        // Outline of the test polygon (a cross/"H" shape).
        Vector2[] vertices2D = new Vector2[]
        {
            new Vector2(0, 0),
            new Vector2(0, 50),
            new Vector2(50, 50),
            new Vector2(50, 100),
            new Vector2(0, 100),
            new Vector2(0, 150),
            new Vector2(150, 150),
            new Vector2(150, 100),
            new Vector2(100, 100),
            new Vector2(100, 50),
            new Vector2(150, 50),
            new Vector2(150, 0),
        };

        // Use the triangulator to get indices for creating triangles.
        Triangulator2 tr = new Triangulator2(vertices2D);
        int[] indices = tr.Triangulate();

        // Lift the 2D outline into 3D vertices (z = 0).
        Vector3[] vertices = new Vector3[vertices2D.Length];
        for (int i = 0; i < vertices.Length; i++)
        {
            vertices[i] = new Vector3(vertices2D[i].x, vertices2D[i].y, 0);
        }

        // Create the mesh.
        Mesh msh = new Mesh();
        msh.vertices = vertices;
        msh.triangles = indices;
        // Fix: the original copied msh.normals into an unused local BEFORE
        // calling RecalculateNormals(), which accomplished nothing. Just
        // recalculate; Unity fills in the normals from the triangles.
        msh.RecalculateNormals();
        msh.RecalculateBounds();

        // Set up the game object with the mesh.
        gameObject.AddComponent(typeof(MeshRenderer));
        MeshFilter filter = gameObject.AddComponent(typeof(MeshFilter)) as MeshFilter;
        filter.mesh = msh;
    }
}

and this:

Code (CSharp):

using UnityEngine;
using System.Collections.Generic;

/// <summary>
/// Ear-clipping triangulator for a simple (non-self-intersecting) 2D polygon
/// given as an ordered list of boundary points.
/// </summary>
public class Triangulator2
{
    // Polygon boundary points, in input order.
    private List<Vector2> m_points = new List<Vector2>();

    public Triangulator2(Vector2[] points)
    {
        m_points = new List<Vector2>(points);
    }

    /// <summary>
    /// Returns triangle indices into the constructor's point array
    /// (3 indices per triangle). Returns an empty array for fewer
    /// than 3 points or a degenerate polygon.
    /// </summary>
    public int[] Triangulate()
    {
        List<int> indices = new List<int>();

        int n = m_points.Count;
        if (n < 3) return indices.ToArray();

        // V maps working slots to original point indices, ordered so the
        // polygon is traversed counter-clockwise (signed area > 0).
        int[] V = new int[n];
        if (Area() > 0)
        {
            for (int v = 0; v < n; v++) V[v] = v;
        }
        else
        {
            for (int v = 0; v < n; v++) V[v] = (n - 1) - v;
        }

        int nv = n;
        // Safety counter: at most 2*nv attempts before giving up on a
        // polygon we cannot clip (e.g. self-intersecting input).
        int count = 2 * nv;

        for (int v = nv - 1; nv > 2;)
        {
            if ((count--) <= 0) return indices.ToArray();

            // Three consecutive vertices u -> v -> w (wrapping around).
            int u = v; if (nv <= u) u = 0;
            v = u + 1; if (nv <= v) v = 0;
            int w = v + 1; if (nv <= w) w = 0;

            // If (u, v, w) forms an "ear", clip it off.
            if (Snip(u, v, w, nv, V))
            {
                int a, b, c, s, t;
                a = V[u];
                b = V[v];
                c = V[w];
                indices.Add(a);
                indices.Add(b);
                indices.Add(c);

                // Remove vertex v from the remaining polygon.
                for (s = v, t = v + 1; t < nv; s++, t++) V[s] = V[t];
                nv--;
                count = 2 * nv; // reset the safety counter
            }
        }

        indices.Reverse();
        return indices.ToArray();
    }

    // Signed area of the polygon (positive = counter-clockwise winding),
    // via the shoelace formula.
    private float Area()
    {
        int n = m_points.Count;
        float A = 0.0f;
        for (int p = n - 1, q = 0; q < n; p = q++)
        {
            Vector2 pval = m_points[p];
            Vector2 qval = m_points[q];
            A += pval.x * qval.y - qval.x * pval.y;
        }
        return (A * 0.5f);
    }

    // True when triangle (V[u], V[v], V[w]) is a clippable "ear": it is
    // convex (CCW) and contains no other remaining polygon vertex.
    private bool Snip(int u, int v, int w, int n, int[] V)
    {
        int p;
        Vector2 A = m_points[V[u]];
        Vector2 B = m_points[V[v]];
        Vector2 C = m_points[V[w]];

        // Reject reflex (clockwise or degenerate) corners.
        if (Mathf.Epsilon > (((B.x - A.x) * (C.y - A.y)) - ((B.y - A.y) * (C.x - A.x))))
            return false;

        for (p = 0; p < n; p++)
        {
            if ((p == u) || (p == v) || (p == w)) continue;
            Vector2 P = m_points[V[p]];
            if (InsideTriangle(A, B, C, P)) return false;
        }
        return true;
    }

    // True when point P lies inside (or on the boundary of) CCW triangle ABC,
    // tested with three edge cross products.
    private bool InsideTriangle(Vector2 A, Vector2 B, Vector2 C, Vector2 P)
    {
        float ax, ay, bx, by, cx, cy, apx, apy, bpx, bpy, cpx, cpy;
        float cCROSSap, bCROSScp, aCROSSbp;

        ax = C.x - B.x; ay = C.y - B.y;
        bx = A.x - C.x; by = A.y - C.y;
        cx = B.x - A.x; cy = B.y - A.y;
        apx = P.x - A.x; apy = P.y - A.y;
        bpx = P.x - B.x; bpy = P.y - B.y;
        cpx = P.x - C.x; cpy = P.y - C.y;

        aCROSSbp = ax * bpy - ay * bpx;
        cCROSSap = cx * apy - cy * apx;
        bCROSScp = bx * cpy - by * cpx;

        return ((aCROSSbp >= 0.0f) && (bCROSScp >= 0.0f) && (cCROSSap >= 0.0f));
    }
}
Well, I visualized my normals and they look fine. To visualize normals I use this script:

Code (CSharp):

using System.Collections;
using System.Collections.Generic;
using UnityEngine;

// Editor gizmo that draws every vertex normal of the attached mesh as a
// short green line, lifted slightly off the surface so it stays visible.
public class TangentSpaceVisualizer : MonoBehaviour
{
    public float offset = 0.01f; // how far above the vertex each line starts
    public float scale = 0.1f;   // drawn length of each normal line

    void OnDrawGizmos()
    {
        MeshFilter meshFilter = GetComponent<MeshFilter>();
        if (!meshFilter) return;

        Mesh sharedMesh = meshFilter.sharedMesh;
        if (!sharedMesh) return;

        ShowTangentSpace(sharedMesh);
    }

    // Transforms each vertex/normal pair into world space and draws it.
    void ShowTangentSpace(Mesh mesh)
    {
        Vector3[] meshVertices = mesh.vertices;
        Vector3[] meshNormals = mesh.normals;

        for (int index = 0; index < meshVertices.Length; index++)
        {
            ShowTangentSpace(
                transform.TransformPoint(meshVertices[index]),
                transform.TransformDirection(meshNormals[index])
            );
        }
    }

    // Draws a single normal as a green line starting just above the vertex.
    void ShowTangentSpace(Vector3 vertex, Vector3 normal)
    {
        vertex += normal * offset;
        Gizmos.color = Color.green;
        Gizmos.DrawLine(vertex, vertex + normal * scale);
    }
}
Finally, I did it. I used this: https://docs.unity3d.com/540/Documentation/Manual/script-EdgeDetectEffectNormals.html — it works perfectly!