
CustomRenderTexture ignores "ComputeScreenPos"

Discussion in 'Shaders' started by Voodoomedia, May 2, 2020.

  1. Voodoomedia

    Hi everyone! This is my first post on the forum

    I wrote a pretty simple shader that draws an ellipse at given screen-space UV coordinates of the mouse position.
    It works well in a material applied to a gameObject: the coordinates are updated via script to follow the input screen position, and the ellipse is displayed correctly.

    Now the problem:
    I've created a CustomRenderTexture and applied a material with that shader, and I've noticed that the ellipse is now distorted, since the screen position computed by ComputeScreenPos(o.vertex) is now "converted" into the texture's UV space.
    So, for example, the bottom-left corner of the screen, (0,0) in screen coordinates, is converted to (0,0) in local UV space [the ellipse appears in the bottom left of the texture]. Another example: the center of the screen, say (540,960), becomes (0.5,0.5) [the ellipse is now in the middle of the texture, not of the screen!].

    I would like to know if a CustomRenderTexture can read screen space, and how.

    Let me know if you need more details
    Thank you!
     
  2. bgolus

    Can you post your shader code? How are you correcting for the aspect ratio?
    What shader is being used to display the render texture on the object, and is that using screen space UVs too?
    What resolution is your custom render texture, are you modifying it at runtime to match the screen aspect ratio?
    What’s the reason you’re using a custom render texture for this instead of doing the calculations on the object’s material like you already were?
     
  3. Voodoomedia

    This is the shader. I correct for the aspect using ratio = _ScreenParams.x / _ScreenParams.y;

    Code (CSharp):

    Shader "Unlit/ScreenspaceBrush"
    {
        Properties
        {
            _MainTex ("Texture", 2D) = "white" {}
            _Radius ("Radius", Range(0,1)) = 0.25
            _Radius_Feather ("Radius feather", Range(0,1)) = 0.25
            _U ("U", Range(0,1)) = 0.5
            _V ("V", Range(0,1)) = 0.5
            [Toggle] _Reverse ("Reverse", Float) = 0
        }
        SubShader
        {
            Tags
            {
                "Queue"="Transparent"
                "IgnoreProjector"="True"
                "RenderType"="Transparent"
                "PreviewType"="Plane"
            }

            LOD 100
            Cull Off
            Lighting Off
            ZWrite Off
            Blend SrcAlpha OneMinusSrcAlpha

            Pass
            {
                CGPROGRAM
                #include "UnityCustomRenderTexture.cginc"
                #pragma vertex vert
                #pragma fragment frag
                #pragma target 3.0

                struct appdata
                {
                    float4 vertex : POSITION;
                    float2 uv : TEXCOORD0;
                };

                struct v2f
                {
                    float2 uv : TEXCOORD0;
                    float4 vertex : SV_POSITION;
                    float4 screenPos : TEXCOORD1;
                };

                sampler2D _MainTex;
                float4 _MainTex_ST;
                float _Radius;
                float _Radius_Feather;
                float _Reverse;
                float _U;
                float _V;

                v2f vert (appdata v)
                {
                    v2f o;
                    o.vertex = UnityObjectToClipPos(v.vertex);
                    o.uv = TRANSFORM_TEX(v.uv, _MainTex);
                    o.screenPos = ComputeScreenPos(o.vertex);
                    return o;
                }

                // Draws a feathered ellipse centered at (_U, _V), aspect-corrected via _ScreenParams
                inline float4 Ellipse (float4 c, float2 uv)
                {
                    float ratio = _ScreenParams.x / _ScreenParams.y;
                    float a = 0.7;         // semimajor axis
                    float b = 0.7 * ratio; // semiminor axis

                    float fOutlineWidth = 0.5;

                    float h = _U; // center X coordinate
                    float k = _V; // center Y coordinate

                    float x = uv.x;
                    float y = uv.y;

                    float fEllipse = pow(x - h, 2) / (a * a * _Radius) + pow(y - k, 2) / (b * b * _Radius);
                    float fDistance = abs(1 - fEllipse);

                    if (fEllipse > fOutlineWidth) { c.rgb = 0; }
                    else if (fDistance <= fOutlineWidth + _Radius_Feather)
                        c.rgb = max(c.rgb, smoothstep(0, 1, (fDistance - fOutlineWidth) / _Radius_Feather));
                    else c.rgb += 1;

                    return c;
                }

                fixed4 frag (v2f i) : SV_Target
                {
                    fixed4 col = fixed4(0, 0, 0, 1);
                    col = Ellipse(col, i.screenPos.xy);
                    if (_Reverse) { col.rgb = 1 - col.rgb; }
                    return col;
                }
                ENDCG
            }
        }
    }
    This is how it looks when applied to a simple unlit material, and it works as it should:

    unity1.jpg

    The second material's shader has two images: the main one is used as the color image, and the second one [the custom render texture] is used as an alpha layer:
    unity 3.jpg

    Updating in real time, it tracks the mouse position and allows me to compute further calculations, like a brush stroke in my case.
    BUT the problem is that the position is not in screen space anymore and, even stranger, the ratio randomly "blinks" from the correct to a squeezed form as I change the position [not at runtime for this test], as you can see in the following image:

    unity2.jpg

    In both examples the ratio of the texture itself is not important, since the ellipse is drawn in screen coordinates [I need to obtain a brush size not related to the texture size].

    As you can see, the second image layer is used as a temporary buffer where I paint an alpha mask. Maybe there are more efficient and faster ways, but at the moment I tried to invent my own solution [before this one I used Graphics.DrawTexture() on an image, but it was not that fast, and the ratio not being related to texture space was painful to manage].
    I find CustomRenderTexture a good opportunity to perform a brush stroke without caring about Blit() or other [maybe] heavy solutions..
    I can't find a good tutorial that shows how to paint on a texture at runtime..

    Thanks and sorry for this big reply!
     
  4. bgolus

    Is the alpha layer using screen space UVs to sample the custom render texture, i.e., not the mesh UVs? If you're using mesh UVs it's guaranteed to be wrong since, as you said, it's converting the "screen space" of the render texture to mesh UV space. The custom render texture knows nothing of your object and is just rendering your original shader at "full screen", where the "screen" is its target render texture.

    If you're using screen space UVs to sample the alpha layer, then it might be a bug with custom render textures running before the internal systems have properly set global shader values. You could work around it by setting the aspect ratio manually, as a material property or as your own global shader value, using the same C# script you're setting your cursor position with.
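
    Something along these lines would do it (an untested sketch; the _ScreenAspect name is just an example, and the shader would read it in place of the _ScreenParams ratio):

    Code (CSharp):

    using UnityEngine;

    public class BrushAspect : MonoBehaviour
    {
        void Update()
        {
            // Push the real screen aspect ratio to all shaders every frame,
            // alongside the cursor position updates the script already does.
            Shader.SetGlobalFloat("_ScreenAspect", (float)Screen.width / Screen.height);
        }
    }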

    Know that the custom render texture is implemented as a Blit(). It's basically a "do a Blit() without having to write C#" asset. You're not really avoiding any "heavy" solutions by using them, at least in terms of rendering cost, but you are giving up a lot of flexibility and control for the benefit of simplicity and a handful of bugs.
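
    For reference, the manual equivalent is only a few lines of C# (a sketch, assuming a brushMaterial that uses the shader above and a persistent RenderTexture rt):

    Code (CSharp):

    // Manual equivalent of what a custom render texture does internally:
    // run the brush material over the render texture with a Blit().
    RenderTexture temp = RenderTexture.GetTemporary(rt.width, rt.height, 0, rt.format);
    Graphics.Blit(rt, temp);                // copy the current contents
    Graphics.Blit(temp, rt, brushMaterial); // render the brush pass back into rt
    RenderTexture.ReleaseTemporary(temp);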
     
  5. Voodoomedia

    You're right; in the Unity manual there are no properties exposed other than these two:
    localTexcoord [Texture coordinates relative to the update zone being currently processed.]
    globalTexcoord [Texture coordinates relative to the Custom Render Texture itself]

    So I guess no screen parameter is accepted..

    I definitely agree with you, and I think I'll step back to Blit(), with fewer problems and the useful screen space, which is the most important thing in my project.

    Thank you very much for your support!
     
  6. bgolus

    Technically both of those are in "screen space". The issue is whether or not the global _ScreenParams value has been properly updated by the time it does its Blit(). Testing in the editor, there are all sorts of weird reasons for global values to get changed or even reset at times, in ways that won't happen in a built project. _ScreenParams especially, since the editor can have 3 or more views to render (Scene view, Game view, and the Inspector preview most commonly, but also potentially additional camera views if selecting the camera in the scene view, as well as a few others I'm probably forgetting). Because custom render textures are handled internally, and not necessarily triggered by any one particular camera view, the currently set _ScreenParams for it might be for any one of those views.

    Hence why I suggested modifying your custom render texture shader to use a screen value you define manually. Or do it by setting the resolution of your custom render texture to match the current screen resolution's aspect ratio and use _MainTex_TexelSize instead of _ScreenParams. Also, manually calling Blit() won't necessarily fix the problem if you're not being mindful of when you run it.
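
    If the render texture's aspect ratio already matches the screen, the shader-side change is tiny (a sketch; _MainTex_TexelSize holds (1/width, 1/height, width, height)):

    Code (csharp):

    // Aspect ratio from the texture being processed, rather than the unreliable _ScreenParams
    float ratio = _MainTex_TexelSize.z / _MainTex_TexelSize.w; // width / height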
     
  7. Voodoomedia

    I tried to use a RenderTexture, and at baking time with Blit() I had the same problem.

    I've also tried to play with DrawTexture(), following this useful answer: http://answers.unity.com/answers/849754/view.html, but there was no way to accomplish it.

    Since I need to paint/project brush strokes over a mesh, I can't use _MainTex_TexelSize, because the brush needs to be constant in size [zooming the mesh in and out but painting with the same brush size].

    I have to find a new solution.. It's incredible that such a simple concept turns out to be such a pain!
     
  8. bgolus

    Oh, wait, you want a fully 3D mesh painting system?

    Yeah, what you're doing right now really isn't even going to be useful for that endeavor. What you're "painting" right now is a screen space texture, not the model, so if you move the camera after you paint, the paint won't stay "on" the model; instead, what you painted will just move with the camera, since that's where you're painting. If you want the paint to stick to the model, you need a uniquely UV'd mesh with no overlaps and a fairly consistent UV scale, like a lightmap UV, and a way to map from the 2D UV space to the 3D model space. Then for the brushes you'd need to calculate the appropriate 3D projection to map from your current 2D view into the 3D space your model is in to match that. Stuff like _ScreenParams or the render texture size isn't even going to be all that important.

    Then you’ll need to deal with issues with seams, and back face painting, and all sorts of other potential problems.

    This is hard. This is hard even for people with years of senior graphics programming experience.
     
  9. Voodoomedia

    Yes, it can be challenging, but I can't understand how difficult a simple UV projection can be! I'm using only a quad (sometimes skewed, with an appropriate shader for that).
    I'll keep studying, thanks!
     
  10. bgolus

    The difficulty is in mapping from the 2D UV space of an arbitrary mesh to 2D screen space when they don't line up perfectly to start with.
     
  11. Egad_McDad

    While I certainly don't have years of experience under my belt, I wanted to help out and take a stab at what this would look like.

    In this setup I would use two render textures, one for capturing input from the brush on screen and a second for the "paint" on the model.

    For the brush input, I would draw the brush strokes in screen space. Additionally, it should be cleared every frame.

    Now, to "paint" the model I would use a custom shader which:
    • Gets clip-space positions and UVs in the vertex shader
    • In the fragment shader, samples the brush input texture using that clip-space position
    • Writes back to the model's "paint" texture using the UV
    This custom shader would require that you set the brush input texture at runtime. Also, you would need to make sure the brush input texture gets written to before the model's shader runs, either by placing it earlier in the render queue or by using a command buffer to add it to a camera event that is triggered earlier. A rough sketch of such a pass follows below.
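
    Something like this, as an untested sketch (_BrushTex is a hypothetical name for the screen-space brush input; per the edit note below, the UVs, not the clip-space vertices, go to SV_Position so the pass rasterizes into the model's UV space):

    Code (CSharp):

    sampler2D _BrushTex; // screen-space brush input, set from script

    struct v2f
    {
        float4 vertex : SV_POSITION;
        float4 screenPos : TEXCOORD0;
    };

    v2f vert (appdata_base v)
    {
        v2f o;
        // Output the mesh UVs as the rasterized position, so this pass
        // writes into the model's "paint" texture in UV space...
        float2 uvRemap = v.texcoord.xy * 2.0 - 1.0;
        #if UNITY_UV_STARTS_AT_TOP
        uvRemap.y = -uvRemap.y;
        #endif
        o.vertex = float4(uvRemap, 0.0, 1.0);
        // ...and carry the regular clip-space position along to find
        // where this point appears on screen.
        o.screenPos = ComputeScreenPos(UnityObjectToClipPos(v.vertex));
        return o;
    }

    fixed4 frag (v2f i) : SV_Target
    {
        // Sample the screen-space brush input at this texel's on-screen location.
        float2 screenUV = i.screenPos.xy / i.screenPos.w;
        return tex2D(_BrushTex, screenUV);
    }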

    I'm almost positive that backfaces shouldn't be a problem in this case as they'll be discarded before the fragment shader. UV seams on the other hand would still pose issues and I'm not sure how you would tackle them.

    Edit: Realized in hindsight that SV_Position would need to be applied to the UVs and not the clip-space vertices. This means backfacing triangles won't be discarded.
     
    Last edited: May 8, 2020
  12. Voodoomedia

    brushstroke.jpg

    I think I'm near the solution, but I'm missing something..
    In the image you can see the object, and the brush computed by the shader in screen space and applied to the alpha channel.
    Now, I don't understand well enough how to manipulate the UVs and vertices to stop the image from being normalized to UV space; instead, I would like to planar-project my brush shader.

    I think in the vert function I should compute something different from UnityObjectToClipPos,
    maybe UnityObjectToViewPos? Is it possible to define UVs in screen coordinates in order to avoid fitting a normalized UV space? I feel I'm very near the solution, but I'm stuck :D

    Code (CSharp):

    v2f vert (appdata v)
    {
        v2f o;
        o.vertex = UnityObjectToClipPos(v.vertex);
        o.uv = TRANSFORM_TEX(v.uv, _MainTex);
        o.screenPos = ComputeScreenPos(o.vertex);
        return o;
    }
     
  13. Egad_McDad

    Just want to make sure I understand the problem: in the third image, the baked brush stroke, you want the brush to appear round and not stretched?
     
  14. Voodoomedia

    Not only that: I would also like the brush to hit only a small part (H7 and H8; as you can see in the 3rd image, the purple hits only a little portion of the image), as if it were projected.
    In the third image the entire screen is "compressed" into my quad's UV space, and so the brush is squashed...
    I would like to transform the UVs in some way (if it's possible!)
     
  15. bgolus

    Again, the render texture knows nothing about your mesh or its orientation to the screen. You need to pass the necessary information to the material used in your Blit() for it to be able to reconstruct the on-mesh UV space position relative to the main camera's projection. "Screen space" for the Blit() is the render texture itself, not the camera.

    Focusing on "screen space" here is a dead end; it only works in the simple case because the camera's projection space UVs and the Blit() line up implicitly.
     
  16. bgolus

    Because this is a quad, you can skip some of the nastiness of doing the full 3D position to UV position mapping. You can simplify it down to essentially just passing an object space to projection space matrix, adjusting the input UV position from the 0.0 to 1.0 range to a -1.0 to 1.0 range, doing the w divide on the resulting value, and adjusting that back to 0.0 to 1.0.

    Code (csharp):

    float2 objectSpacePos = i.uv.xy * 2.0 - 1.0;
    float4 clipSpacePos = mul(_ObjectToScreen, float4(objectSpacePos, 0.0, 1.0));
    float2 screenPos = (clipSpacePos.xy / clipSpacePos.w) * 0.5 + 0.5;
    You need to calculate that _ObjectToScreen matrix in code yourself and set it on the material every update.
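
    A minimal sketch of that script (untested; the class and field names are just examples, and depending on platform you may need GL.GetGPUProjectionMatrix for the projection matrix):

    Code (CSharp):

    using UnityEngine;

    public class ObjectToScreen : MonoBehaviour
    {
        public Camera cam;            // the camera you're painting through
        public Material blitMaterial; // the material used in the Blit()

        void Update()
        {
            // Same model-view-projection chain UnityObjectToClipPos uses:
            // projection * view * model.
            Matrix4x4 objectToScreen =
                GL.GetGPUProjectionMatrix(cam.projectionMatrix, true)
                * cam.worldToCameraMatrix
                * transform.localToWorldMatrix;
            blitMaterial.SetMatrix("_ObjectToScreen", objectToScreen);
        }
    }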
     
  17. Voodoomedia

    OK, I'll try that! Thank you!