Hello, I'm new to writing shaders. I'm having an issue getting alpha to work. Seems alpha is being ignored, though it doesn't cause a compile error. Am I missing a tag or something? Code (CSharp): Shader "Custom/mat test" { Properties { _MainTex ("Texture", 2D) = "white" {} _BlendTex("Alpha Texture", 2D) = "white" {} _blendValue("Blend Value", Range(0,1)) = 0.5 } SubShader { Tags {"RenderType"="Transparent" "Queue"="Transparent"} ColorMask RGBA LOD 100 Pass { CGPROGRAM #pragma vertex vert #pragma fragment frag #pragma multi_compile_fog #include "UnityCG.cginc" struct appdata { float4 vertex : POSITION; float2 uv : TEXCOORD0; }; struct v2f { float2 uv : TEXCOORD0; UNITY_FOG_COORDS(1) float4 vertex : SV_POSITION; }; sampler2D _MainTex; float4 _MainTex_ST; sampler2D _BlendTex; float4 _BlendTex_ST; float _blendValue; v2f vert (appdata v) { v2f o; o.vertex = UnityObjectToClipPos(v.vertex); o.uv = TRANSFORM_TEX(v.uv, _MainTex); UNITY_TRANSFER_FOG(o,o.vertex); return o; } fixed4 frag (v2f i) : SV_Target { fixed4 col = lerp(tex2D(_MainTex, i.uv), tex2D(_BlendTex, i.uv), _blendValue); fixed4 alpha = tex2D(_BlendTex, i.uv); float a = max(alpha.r, max(alpha.g, alpha.b)); col.r = 1; // This works (just a test) col.a = 0.5; // THIS DON'T DO ANYTHING... // apply fog //UNITY_APPLY_FOG(i.fogCoord, col); return col; } ENDCG } } }
Thanks, but I've already read that. I don't understand/know what I'm missing. What I'm supposed to add to the code... Here's an example without any blending of two textures: (I added Blend SrcAlpha OneMinusSrcAlpha) Code (CSharp): Shader "Custom/AlphaTest" { Properties { _MainTex ("Texture", 2D) = "white" {} _alphaValue("Alpha Value", Range(0,1)) = 0.5 } SubShader { Tags {"RenderType"="Transparent" "Queue"="Transparent"} ColorMask RGBA LOD 100 Pass { Blend SrcAlpha OneMinusSrcAlpha CGPROGRAM #pragma vertex vert #pragma fragment frag #include "UnityCG.cginc" struct appdata { float4 vertex : POSITION; float2 uv : TEXCOORD0; }; struct v2f { float2 uv : TEXCOORD0; float4 vertex : SV_POSITION; }; sampler2D _MainTex; float4 _MainTex_ST; float _alphaValue; v2f vert (appdata v) { v2f o; o.vertex = UnityObjectToClipPos(v.vertex); o.uv = TRANSFORM_TEX(v.uv, _MainTex); return o; } fixed4 frag (v2f i) : SV_Target { fixed4 col = tex2D(_MainTex, i.uv); col.a = _alphaValue; // Here's wher I'm stuck... return col; } ENDCG } } } This "works" but I end up with weird visual glitches. Whatever is in the editor scene window ends up being displayed in the game window as well... like TWO cameras (editor scene view and the camera in the scene's hierarchy) rendering to the game window. Further it'll have weird black areas (in this case, on the right) - depending on what the alpha value was set. Their blacked out areas' locations "flicker" in and out as the alpha value changes: My end goal for this was to make a shader that takes a main texture and a 2nd texture in which a *color* channel (not its alpha channel) of the 2nd texture would be used to set alpha values for the main texture. I was going to be using renderTextures (using video files) for the texture inputs.
There’s nothing in that shader that would cause that. Apart from missing ZWrite Off and a possibility your material’s Render Queue is overridden to not be using the shader’s Transparent (3000) queue, but that’s not going to cause it to render in another camera.