Search Unity

Question How to properly take a screenshot using RenderTexture?

Discussion in 'General Graphics' started by Alaadel, Apr 26, 2021.

  1. Alaadel

    Alaadel

    Joined:
    Apr 6, 2013
    Posts:
    30
    Hello,

    I am using this plugin to take a 360 screen capture of the scene. This is the core of the capturing code:
    Code (CSharp):
    1. public static class I360Render
    2. {
    3.     private static Material equirectangularConverter = null;
    4.     private static int paddingX;
    5.  
    // Captures a 360° equirectangular screenshot of the scene as seen from renderCam.
    //
    // Pipeline (all visible below): render the camera into a temporary cubemap
    // RenderTexture, blit that cubemap through the "Hidden/I360CubemapToEquirectangular"
    // material into a 2:1 equirectangular RenderTexture, read the pixels back into a
    // Texture2D, and return the encoded bytes produced by InsertXMPIntoTexture2D_JPEG /
    // InsertXMPIntoTexture2D_PNG (defined elsewhere in this class; presumably they
    // inject 360-photo XMP metadata — confirm against the rest of the plugin).
    //
    // Parameters:
    //   width               - requested output width; rounded up to the next power of
    //                         two and capped at 8192 (see cubemapSize below).
    //   encodeAsJPEG        - true -> JPEG bytes, false -> PNG bytes.
    //   renderCam           - camera to capture from; falls back to Camera.main when null.
    //   faceCameraDirection - when true, the camera's yaw (normalized to 0..1) is fed to
    //                         the converter shader's _PaddingX property.
    //
    // Returns: encoded image bytes, or null on failure (no camera available, cubemap
    // rendering unsupported on the device, or any exception — which is logged).
    6.     public static byte[] Capture( int width = 1024, bool encodeAsJPEG = true, Camera renderCam = null, bool faceCameraDirection = true )
    7.     {
        // Fall back to the main camera; bail out early if no camera exists at all.
    8.         if( renderCam == null )
    9.         {
    10.             renderCam = Camera.main;
    11.             if( renderCam == null )
    12.             {
    13.                 Debug.LogError( "Error: no camera detected" );
    14.                 return null;
    15.             }
    16.         }
    17.  
        // Remembered so the camera's original target texture can be restored in finally.
    18.         RenderTexture camTarget = renderCam.targetTexture;
    19.  
        // The converter material is created lazily once and cached in the static field.
    20.         if( equirectangularConverter == null )
    21.         {
    22.             equirectangularConverter = new Material( Shader.Find( "Hidden/I360CubemapToEquirectangular" ) );
        // Cache the shader property ID so later captures skip the string lookup.
    23.             paddingX = Shader.PropertyToID( "_PaddingX" );
    24.         }
    25.  
        // Round the requested width up to a power of two, capped at 8192.
    26.         int cubemapSize = Mathf.Min( Mathf.NextPowerOfTwo( width ), 8192 );
        // Remembered so the previously active RenderTexture can be restored in finally.
    27.         RenderTexture activeRT = RenderTexture.active;
    28.         RenderTexture cubemap = null, equirectangularTexture = null;
    29.         Texture2D output = null;
    30.         try
    31.         {
        // Temporary cubemap target; its dimension is switched to Cube after acquisition.
    32.             cubemap = RenderTexture.GetTemporary( cubemapSize, cubemapSize, 0 );
    33.             cubemap.dimension = UnityEngine.Rendering.TextureDimension.Cube;
    34.  
    35. // Trying to write cubemap shows an error
    36. //Texture2D outputC = new Texture2D(cubemap.width, cubemap.height, TextureFormat.ARGB32, false);
    37.            //outputC.ReadPixels(new Rect(0, 0, cubemap.width, cubemap.height), 0, 0);
    38.            //File.WriteAllBytes("D:/c.png", outputC.EncodeToPNG());
    39.  
        // 2:1 (width x width/2) target for the equirectangular projection.
    40.             equirectangularTexture = RenderTexture.GetTemporary( cubemapSize, cubemapSize / 2, 0 );
    41.             equirectangularTexture.dimension = UnityEngine.Rendering.TextureDimension.Tex2D;
    42.  
        // faceMask 63 == 0b111111: render all six cubemap faces.
    43.             if( !renderCam.RenderToCubemap( cubemap, 63 ) )
    44.             {
    45.                 Debug.LogError( "Rendering to cubemap is not supported on device/platform!" );
    46.                 return null;
    47.             }
    48.  
        // _PaddingX shifts the output horizontally; passing the camera's yaw / 360
        // presumably aligns the image with the camera's view direction — the exact
        // semantics live in the converter shader (not visible here).
    49.             equirectangularConverter.SetFloat( paddingX, faceCameraDirection ? ( renderCam.transform.eulerAngles.y / 360f ) : 0f );
    50.             Graphics.Blit( cubemap, equirectangularTexture, equirectangularConverter );
    51.  
        // ReadPixels below reads from RenderTexture.active, so activate the blit result first.
    52.             RenderTexture.active = equirectangularTexture;
    53.  
    54. // I only changed RGB to ARGB32
    55.             output = new Texture2D( equirectangularTexture.width, equirectangularTexture.height, TextureFormat.ARGB32, false );
    56.             output.ReadPixels( new Rect( 0, 0, equirectangularTexture.width, equirectangularTexture.height ), 0, 0 );
    57.  
    58. // I did some tests, and the following line does not affect the rendering. I wrote output directly to a file here and still got wrong results.
        // The byte[] is fully computed by this expression before the finally block runs,
        // so destroying 'output' in finally does not invalidate the returned data.
    59.             return encodeAsJPEG ? InsertXMPIntoTexture2D_JPEG( output ) : InsertXMPIntoTexture2D_PNG( output );
    60.         }
    61.         catch( Exception e )
    62.         {
    63.             Debug.LogException( e );
    64.             return null;
    65.         }
    66.         finally
    67.         {
        // Restore camera/GPU state and release all temporaries on every exit path.
    68.             renderCam.targetTexture = camTarget;
    69.             RenderTexture.active = activeRT;
    70.  
    71.             if( cubemap != null )
    72.                 RenderTexture.ReleaseTemporary( cubemap );
    73.  
    74.             if( equirectangularTexture != null )
    75.                 RenderTexture.ReleaseTemporary( equirectangularTexture );
    76.  
    77.             if (output != null)
    78.                 UnityEngine.Object.DestroyImmediate(output);
    79.         }
    80.     }
    I got correct results so far.

    Then I used this shader/script for Reflections (drag & drop the script onto the object you want; explanation here).

    When the reflections script is Off, I get correct capture.
    When the reflections script is On, normal objects turn black, and the floor (which I attached the reflection script to) is not visible. The floor is supposed to be a plane under the sphere, but it doesn't show up in the output capture:
    img.png

    Some people suggested using OnPostRender(). So instead of calling Capture from a button, I tried using OnPostRender(): the button sets a flag to True, and when OnPostRender() finds it is True, it calls Capture and resets the flag.
    Didn't work at all (no output file was written).

    I am using a normal 3D project. The Game window shows correct results and reflections. When I attach a RenderTexture to the camera, it shows correct results.
    When I use the Capture function, it shows black objects when the Reflection script is active.

    Thank you.
     
  2. Alaadel

    Alaadel

    Joined:
    Apr 6, 2013
    Posts:
    30
    Anyone, please :)?