
iOS Dual Displays (Airplay) with EdgeDetectNormals Shader (ImageEffect)

Discussion in 'Image Effects' started by wxxhrt, Oct 10, 2018.

  1. wxxhrt

    Joined: Mar 18, 2014
    Posts: 163
    I'm using a modified EdgeDetection image-effect shader to give outlines to my geometry. This has worked fine on a single screen, and also with "Mirrored" dual displays over AirPlay.

    But now I want to render two cameras to two separate screens on iOS (so I can have a UI on one screen but not the other), using the following code from GitHub:

    Code (CSharp):
    using UnityEngine;
    using System.Collections;

    namespace UnityAssets
    {
        public class DualDisplay : MonoBehaviour
        {
            public Camera mainCamera, controlsCamera;
            public bool autoEnable = true;

            // True when this component is enabled and a second display is connected.
            public bool Available
            {
                get
                {
                    return enabled && Display.displays.Length > 1;
                }
            }

            public bool Active
            {
                get
                {
                    return enabled && controlsCamera.enabled;
                }
                set
                {
                    if (!value || !Available)
                    {
                        // Single-display mode: both cameras target the main display.
                        controlsCamera.enabled = false;

                        controlsCamera.SetTargetBuffers (Display.main.colorBuffer, Display.main.depthBuffer);
                        mainCamera.SetTargetBuffers (Display.main.colorBuffer, Display.main.depthBuffer);
                    }
                    else
                    {
                        // Dual-display mode: main camera goes to the external display,
                        // controls camera to the device screen.
                        Display secondDisplay = Display.displays[1];

                        mainCamera.SetTargetBuffers (secondDisplay.colorBuffer, secondDisplay.depthBuffer);
                        controlsCamera.SetTargetBuffers (Display.main.colorBuffer, Display.main.depthBuffer);

                        controlsCamera.enabled = true;
                    }
                }
            }

            void Reset ()
            {
                mainCamera = Camera.main;
            }

            void Start ()
            {
                if (mainCamera == null || controlsCamera == null)
                {
                    Debug.LogError ("DualDisplay missing a camera reference");
                    Destroy (this);
                    return;
                }

                controlsCamera.enabled = false;
            }

            void Update ()
            {
                if (Available)
                {
                    if (autoEnable)
                    {
                        Active = true;
                    }
                }
                else
                {
                    Active = false;
                }
            }
        }
    }
    This renders to two separate displays (the iPad and the AirPlay screen), but it breaks the edge detection image effect; as soon as I disconnect from AirPlay, the image effect starts working again. I have tried putting the EdgeDetection script on each camera, but no luck there.

    I think it must be something to do with the depth buffers, but I'm all at sea and could do with a pointer in the right direction.
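
    For what it's worth, one untested guess I keep coming back to is that the cameras' depth texture flag needs re-applying after SetTargetBuffers switches displays. Something along these lines (mainCamera and controlsCamera being the fields from the script above):

    Code (CSharp):
    // Untested guess: re-request the depth + normals texture whenever the
    // target buffers are switched, in case the flag doesn't survive the switch.
    void ReapplyDepthTextureMode ()
    {
        mainCamera.depthTextureMode |= DepthTextureMode.DepthNormals;
        controlsCamera.depthTextureMode |= DepthTextureMode.DepthNormals;
    }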

    Modified EdgeDetection script:

    Code (CSharp):
    using System;
    using UnityEngine;

    namespace UnityStandardAssets.ImageEffects
    {
        [ExecuteInEditMode]
        [RequireComponent (typeof (Camera))]
        [AddComponentMenu ("Image Effects/Edge Detection/Edge Detection Color")]
        public class EdgeDetectionColor : PostEffectsBase
        {
            public enum EdgeDetectMode
            {
                TriangleDepthNormals = 0,
                RobertsCrossDepthNormals = 1,
            }

            public EdgeDetectMode mode = EdgeDetectMode.RobertsCrossDepthNormals;
            public float sensitivityDepth = 1.0f;
            public float sensitivityNormals = 1.0f;
            public float lumThreshold = 0.2f;
            public float edgeExp = 1.0f;
            public float sampleDist = 1.0f;
            public float edgesOnly = 0.0f;
            public Color edgesOnlyBgColor = Color.black;
            public Color edgesColor = Color.red;

            public Shader edgeDetectShader;
            public Material edgeDetectMaterial = null;
            private EdgeDetectMode oldMode = EdgeDetectMode.RobertsCrossDepthNormals;

            // Gradient background parameters added to the stock effect.
            public Color gradientTop = Color.white;
            public Color gradientBot = Color.blue;
            public Color gradientTop2 = Color.white;
            public Color gradientBot2 = Color.blue;
            public float gradientHorizon = 1.0f;
            public float screenHeight = 3840.0f;

            public override bool CheckResources ()
            {
                CheckSupport (true);

                edgeDetectMaterial = CheckShaderAndCreateMaterial (edgeDetectShader, edgeDetectMaterial);
                if (mode != oldMode)
                    SetCameraFlag ();

                oldMode = mode;

                if (!isSupported)
                    ReportAutoDisable ();
                return isSupported;
            }

            new void Start ()
            {
                oldMode = mode;
            }

            void SetCameraFlag ()
            {
                // Both modes need the camera to generate a depth + normals texture.
                if (mode == EdgeDetectMode.TriangleDepthNormals || mode == EdgeDetectMode.RobertsCrossDepthNormals)
                    GetComponent<Camera>().depthTextureMode |= DepthTextureMode.DepthNormals;
            }

            void OnEnable ()
            {
                SetCameraFlag ();
            }

            [ImageEffectOpaque]
            void OnRenderImage (RenderTexture source, RenderTexture destination)
            {
                if (CheckResources () == false)
                {
                    Graphics.Blit (source, destination);
                    return;
                }
                if (edgeDetectMaterial == null)
                {
                    edgeDetectShader = Shader.Find ("Hidden/EdgeDetectColors");
                    edgeDetectMaterial = CheckShaderAndCreateMaterial (edgeDetectShader, edgeDetectMaterial);
                }
                Vector2 sensitivity = new Vector2 (sensitivityDepth, sensitivityNormals);
                edgeDetectMaterial.SetVector ("_Sensitivity", new Vector4 (sensitivity.x, sensitivity.y, 1.0f, sensitivity.y));
                edgeDetectMaterial.SetFloat ("_BgFade", edgesOnly);
                edgeDetectMaterial.SetFloat ("_SampleDistance", sampleDist);
                edgeDetectMaterial.SetVector ("_BgColor", edgesOnlyBgColor);
                edgeDetectMaterial.SetFloat ("_Exponent", edgeExp);
                edgeDetectMaterial.SetFloat ("_Threshold", lumThreshold);
                edgeDetectMaterial.SetVector ("_Color", edgesColor);

                edgeDetectMaterial.SetVector ("_GradientTop", gradientTop);
                edgeDetectMaterial.SetVector ("_GradientBot", gradientBot);
                edgeDetectMaterial.SetVector ("_GradientTop2", gradientTop2);
                edgeDetectMaterial.SetVector ("_GradientBot2", gradientBot2);
                edgeDetectMaterial.SetFloat ("_GradientHorizon", gradientHorizon);
                edgeDetectMaterial.SetFloat ("_ScreenHeight", screenHeight);

                Graphics.Blit (source, destination, edgeDetectMaterial, (int) mode);
            }
        }
    }
  2. wxxhrt

    Joined: Mar 18, 2014
    Posts: 163
    I've been trying lots of different things and am getting a little closer, so here's a more concise question.

    I'm outputting to two screens on iOS.

    The “iPad screen” is 4:3 (2224x1668); the “AirPlay screen” is 16:9 (1920x1080).

    I want to end up with the scene and the UI on the “iPad screen”, and the scene only on the “AirPlay screen”.

    Both screens should display the EdgeDetection image effect.
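
    (For the UI split itself, I'm assuming a layer-based culling mask is the way to go; a minimal sketch, untested, with the class name made up:)

    Code (CSharp):
    using UnityEngine;

    // Untested sketch: keep the UI on its own layer and cull that layer
    // from the AirPlay camera only, so the UI shows up on the iPad alone.
    public class SplitUiBetweenDisplays : MonoBehaviour
    {
        public Camera iPadCamera, airPlayCamera;

        void Start ()
        {
            iPadCamera.cullingMask |= LayerMask.GetMask ("UI");     // iPad sees the UI layer
            airPlayCamera.cullingMask &= ~LayerMask.GetMask ("UI"); // AirPlay doesn't
        }
    }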




    My current setup has two cameras.

    The “iPad camera” renders into a RenderTexture and then back to the camera, using this code in a script on the “iPad camera”:
    Code (CSharp):
    void OnEnable ()
    {
        SetCameraFlag ();
        // useDynamicScale has to be set before Create(), so set the flag
        // first and then actually create the texture.
        mainRenderTexture = new RenderTexture (Screen.width, Screen.height, 16, RenderTextureFormat.ARGB32);
        mainRenderTexture.useDynamicScale = true;
        mainRenderTexture.Create ();
    }

    void OnRenderImage (RenderTexture source, RenderTexture destination)
    {
        iPadCamera.targetTexture = null;
        // Run the edge detect pass into the shared texture,
        // then copy the result to this camera's own output.
        Graphics.Blit (source, mainRenderTexture, edgeDetectMaterial, (int) mode);
        Graphics.Blit (mainRenderTexture, destination);
    }



    The “AirPlay camera” grabs a reference to the iPad camera's mainRenderTexture and displays it, using this code in a script on the “AirPlay camera”:



    Code (CSharp):
    private void OnPreRender ()
    {
        // Point this camera at the second display once AirPlay is connected.
        if (Display.displays.Length > 1)
        {
            airPlayCamera.SetTargetBuffers (Display.displays[1].colorBuffer, Display.displays[1].depthBuffer);
        }
    }

    private void OnRenderImage (RenderTexture source, RenderTexture destination)
    {
        // Copy the iPad camera's processed image straight to the AirPlay display.
        Graphics.Blit (mainRenderTexture, destination, new Vector2 (1f, 1f), new Vector2 (0f, 0f));
    }
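
    Since the two screens have different aspect ratios (4:3 vs 16:9), I suspect the scale/offset arguments will eventually need to do a real crop rather than a stretch. A rough sketch of what I mean (untested; the aspect values just come from the resolutions above):

    Code (CSharp):
    private void OnRenderImage (RenderTexture source, RenderTexture destination)
    {
        // Untested sketch: crop the 4:3 iPad render vertically so it fills
        // the 16:9 AirPlay screen instead of being stretched.
        const float srcAspect = 2224f / 1668f; // 4:3 iPad render
        const float dstAspect = 1920f / 1080f; // 16:9 AirPlay screen

        float scaleY = srcAspect / dstAspect;                    // 0.75: sample a 16:9 band
        Vector2 scale = new Vector2 (1f, scaleY);
        Vector2 offset = new Vector2 (0f, (1f - scaleY) * 0.5f); // centre the band vertically

        Graphics.Blit (mainRenderTexture, destination, scale, offset);
    }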





    Both cameras are set to render everything and use the “Legacy Vertex Lit” rendering path. The only difference is that the AirPlay camera is set to depth -2 instead of -1, which seems to be what draws the UI on the iPad screen instead of the AirPlay screen.
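
    In code form, the depth setup amounts to this (just restating the inspector values):

    Code (CSharp):
    void ConfigureCameraDepths ()
    {
        // Lower depth renders first; the UI seems to follow the
        // higher-depth camera, i.e. the iPad screen in this setup.
        iPadCamera.depth = -1f;
        airPlayCamera.depth = -2f;
    }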



    The iPad camera displays the UI and the scene with the image effect. The AirPlay camera displays the scene as a black-and-white silhouette, without the image effect.

    Anything I’m missing?