Mesh shaders support?

Discussion in 'Graphics Experimental Previews' started by tswierkot, Sep 19, 2018.

  1. tswierkot

    tswierkot

    Joined:
    Feb 15, 2017
    Posts:
    25
    Cynicat and grizzly like this.
  2. neoshaman

    neoshaman

    Joined:
    Feb 11, 2011
    Posts:
    6,493
    always
     
    tallyblotto likes this.
  3. Tim-C

    Tim-C

    Unity Technologies

    Joined:
    Feb 6, 2010
    Posts:
    2,225
    They are really cool... but no roadmap or anything yet. So much new gfx stuff to implement this year! :confused:
     
    LooperVFX, Ruberta and hippocoder like this.
  4. recursive

    recursive

    Joined:
    Jul 12, 2012
    Posts:
    669
    So this is an effective replacement for vtx/geom/tess shaders, based more on modern compute capabilities?

    Forget raytracing, this is way more interesting for some of the experiments I've been tinkering with.
     
    VirtualPierogi likes this.
  5. hippocoder

    hippocoder

    Digital Ape

    Joined:
    Apr 11, 2010
    Posts:
    29,723
    I think it would be wiser for HDRP to look into comparable compute shader equivalents. For example, geometry shaders are a bad design, and replacing them with a couple of compute shaders scales better, performs better, and works on Metal and any compute-capable hardware... just an example.

    Seems time-consuming to build, but I see some AAA studios making noise in that direction, so it seems more practical than getting locked into supporting a single GPU company.
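
    A minimal C# sketch of the compute-plus-indirect-draw idea described above, for illustration only: a compute shader expands source points into triangles in a buffer, then an indirect draw renders them with no geometry shader stage. The compute shader asset, its "Expand" kernel and the material whose shader reads _Triangles are hypothetical placeholders, not anything shipped with Unity.

    Code (CSharp):
    using UnityEngine;

    public class ComputeGeometryExample : MonoBehaviour
    {
        public ComputeShader expandShader;   // hypothetical asset with a kernel named "Expand" (numthreads 64)
        public Material drawMaterial;        // hypothetical shader that reads _Triangles by SV_VertexID
        const int pointCount = 1024;

        ComputeBuffer triangleBuffer;        // 3 output vertices (float3) per source point
        ComputeBuffer argsBuffer;            // indirect draw arguments

        void OnEnable()
        {
            triangleBuffer = new ComputeBuffer(pointCount * 3, 3 * sizeof(float));
            argsBuffer = new ComputeBuffer(4, sizeof(uint), ComputeBufferType.IndirectArguments);
            // vertex count per instance, instance count, start vertex, start instance
            argsBuffer.SetData(new uint[] { pointCount * 3, 1, 0, 0 });
        }

        void Update()
        {
            // Compute pass: expand each source point into a triangle (the work a
            // geometry shader would have done), writing the result into _Triangles.
            int kernel = expandShader.FindKernel("Expand");
            expandShader.SetBuffer(kernel, "_Triangles", triangleBuffer);
            expandShader.Dispatch(kernel, pointCount / 64, 1, 1);

            // Draw the generated triangles indirectly; no geometry shader stage involved.
            drawMaterial.SetBuffer("_Triangles", triangleBuffer);
            Graphics.DrawProceduralIndirect(drawMaterial,
                new Bounds(Vector3.zero, Vector3.one * 1000f),
                MeshTopology.Triangles, argsBuffer);
        }

        void OnDisable()
        {
            triangleBuffer.Release();
            argsBuffer.Release();
        }
    }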
     
    NotaNaN and ParovozVR like this.
  6. Przemyslaw_Zaworski

    Przemyslaw_Zaworski

    Joined:
    Jun 9, 2017
    Posts:
    328
    I made a plugin with a simple example of an NVIDIA mesh shader working with Unity on OpenGL 4.5:

    [Screenshot attachment: upload_2019-7-8_17-52-10.png]


    Source code:
    MeshShaderPlugin.cs
    Code (CSharp):
    using UnityEngine;
    using System;
    using System.Collections;
    using System.Runtime.InteropServices;

    public class MeshShaderPlugin : MonoBehaviour
    {
        // Native plugin entry point returning the render-event callback.
        [DllImport("MeshShaderPlugin")]
        static extern IntPtr Execute();

        IEnumerator Start()
        {
            yield return StartCoroutine("CallNativePlugin");
        }

        IEnumerator CallNativePlugin()
        {
            // Issue the plugin event on the render thread once per frame.
            while (true)
            {
                yield return new WaitForEndOfFrame();
                GL.IssuePluginEvent(Execute(), 1);
            }
        }
    }
    MeshShaderPlugin.cpp
    Code (C++):
    // For x64 Visual Studio command line:  cl.exe /LD MeshShaderPlugin.cpp opengl32.lib
    #include <windows.h>
    #include <GL/gl.h>

    typedef GLuint(WINAPI *PFNGLCREATEPROGRAMPROC) ();
    typedef GLuint(WINAPI *PFNGLCREATESHADERPROC) (GLenum t);
    typedef void(WINAPI *PFNGLSHADERSOURCEPROC) (GLuint s, GLsizei c, const char*const*string, const GLint* i);
    typedef void(WINAPI *PFNGLCOMPILESHADERPROC) (GLuint s);
    typedef void(WINAPI *PFNGLATTACHSHADERPROC) (GLuint p, GLuint s);
    typedef void(WINAPI *PFNGLLINKPROGRAMPROC) (GLuint p);
    typedef void(WINAPI *PFNGLUSEPROGRAMPROC) (GLuint p);
    typedef void(WINAPI *PFNGLGETSHADERIVPROC) (GLuint s, GLenum v, GLint *p);
    typedef void(WINAPI *PFNGLGETSHADERINFOLOGPROC) (GLuint s, GLsizei b, GLsizei *l, char *i);
    typedef void(WINAPI *PFNGLDRAWMESHTASKSNVPROC) (GLuint f, GLuint c);

    unsigned int PS;

    // Mesh shader source: emits a single triangle via the GL_NV_mesh_shader extension.
    static const char* MeshShader = \
        "#version 450 \n"
        "#extension GL_NV_mesh_shader : enable\n"
        "layout(local_size_x = 3) in;"
        "layout(max_vertices = 64) out;"
        "layout(max_primitives = 126) out;"
        "layout(triangles) out;"
        "const vec3 vertices[3] = {vec3(-1,-1,0), vec3(1,-1,0), vec3(0,1,0)};"
        "void main()"
        "{"
            "uint id = gl_LocalInvocationID.x;"
            "gl_MeshVerticesNV[id].gl_Position = vec4(vertices[id], 2);"
            "gl_PrimitiveIndicesNV[id] = id;"
            "gl_PrimitiveCountNV = 1;"
        "}";

    // Fragment shader source: colors the triangle with its barycentric coordinates.
    static const char* FragmentShader = \
        "#version 450 \n"
        "#extension GL_NV_fragment_shader_barycentric : enable\n"
        "out vec4 color;"
        "void main()"
        "{"
            "color = vec4(gl_BaryCoordNV, 1.0);"
        "}";

    // Compiles the mesh and fragment shaders and links them into a program.
    int MakeShaders(const char* MS, const char* FS)
    {
        int p = ((PFNGLCREATEPROGRAMPROC)wglGetProcAddress("glCreateProgram"))();
        int sm = ((PFNGLCREATESHADERPROC)wglGetProcAddress("glCreateShader"))(0x9559);   // GL_MESH_SHADER_NV
        int sf = ((PFNGLCREATESHADERPROC)wglGetProcAddress("glCreateShader"))(0x8B30);   // GL_FRAGMENT_SHADER
        ((PFNGLSHADERSOURCEPROC)wglGetProcAddress("glShaderSource"))(sm,1,&MS,0);
        ((PFNGLSHADERSOURCEPROC)wglGetProcAddress("glShaderSource"))(sf,1,&FS,0);
        ((PFNGLCOMPILESHADERPROC)wglGetProcAddress("glCompileShader"))(sm);
        ((PFNGLCOMPILESHADERPROC)wglGetProcAddress("glCompileShader"))(sf);
        ((PFNGLATTACHSHADERPROC)wglGetProcAddress("glAttachShader"))(p,sm);
        ((PFNGLATTACHSHADERPROC)wglGetProcAddress("glAttachShader"))(p,sf);
        ((PFNGLLINKPROGRAMPROC)wglGetProcAddress("glLinkProgram"))(p);
        return p;
    }

    // Render callback: binds the program and launches one mesh task.
    void Rendering()
    {
        glDisable(GL_CULL_FACE);
        glDisable(GL_BLEND);
        glDepthFunc(GL_LEQUAL);
        glEnable(GL_DEPTH_TEST);
        glDepthMask(GL_FALSE);
        ((PFNGLUSEPROGRAMPROC)wglGetProcAddress("glUseProgram"))(PS);
        ((PFNGLDRAWMESHTASKSNVPROC)wglGetProcAddress("glDrawMeshTasksNV"))(0,1);
    }

    typedef enum UnityGfxRenderer
    {
        kUnityGfxRendererNull = 4,
        kUnityGfxRendererOpenGLCore = 17,
    } UnityGfxRenderer;

    typedef enum UnityGfxDeviceEventType
    {
        kUnityGfxDeviceEventInitialize = 0,
        kUnityGfxDeviceEventShutdown = 1,
        kUnityGfxDeviceEventBeforeReset = 2,
        kUnityGfxDeviceEventAfterReset = 3,
    } UnityGfxDeviceEventType;

    struct UnityInterfaceGUID
    {
        UnityInterfaceGUID(unsigned long long high, unsigned long long low) : m_GUIDHigh(high) , m_GUIDLow(low) { }
        unsigned long long m_GUIDHigh;
        unsigned long long m_GUIDLow;
    };

    struct IUnityInterface {};
    typedef void (__stdcall * IUnityGraphicsDeviceEventCallback)(UnityGfxDeviceEventType eventType);

    struct IUnityInterfaces
    {
        IUnityInterface* (__stdcall* GetInterface)(UnityInterfaceGUID guid);
        void(__stdcall* RegisterInterface)(UnityInterfaceGUID guid, IUnityInterface * ptr);
        template<typename INTERFACE>
        INTERFACE* Get()
        {
            return static_cast<INTERFACE*>(GetInterface(UnityInterfaceGUID(0x7CBA0A9CA4DDB544ULL, 0x8C5AD4926EB17B11ULL)));
        }
        void Register(IUnityInterface* ptr)
        {
            RegisterInterface(UnityInterfaceGUID(0x7CBA0A9CA4DDB544ULL, 0x8C5AD4926EB17B11ULL), ptr);
        }
    };

    struct IUnityGraphics : IUnityInterface
    {
        void(__stdcall* RegisterDeviceEventCallback)(IUnityGraphicsDeviceEventCallback callback);
    };

    typedef void (__stdcall* UnityRenderingEvent)(int eventId);
    typedef void(__stdcall* UnregisterDeviceEventCallback)(IUnityGraphicsDeviceEventCallback callback);
    static UnityGfxRenderer DeviceType = kUnityGfxRendererNull;

    static void __stdcall OnGraphicsDeviceEvent(UnityGfxDeviceEventType eventType)
    {
        if (eventType == kUnityGfxDeviceEventInitialize)
        {
            DeviceType = kUnityGfxRendererOpenGLCore;
            PS = MakeShaders(MeshShader,FragmentShader);
        }
        if (eventType == kUnityGfxDeviceEventShutdown)
        {
            DeviceType = kUnityGfxRendererNull;
        }
    }

    static void __stdcall OnRenderEvent(int eventID)
    {
        Rendering();
    }

    extern "C" void __declspec(dllexport) __stdcall UnityPluginLoad(IUnityInterfaces* unityInterfaces)
    {
        IUnityInterfaces* s_UnityInterfaces = unityInterfaces;
        IUnityGraphics* s_Graphics = s_UnityInterfaces->Get<IUnityGraphics>();
        s_Graphics->RegisterDeviceEventCallback(OnGraphicsDeviceEvent);
        OnGraphicsDeviceEvent(kUnityGfxDeviceEventInitialize);
    }

    extern "C" void __declspec(dllexport) __stdcall UnityPluginUnload()
    {
        UnregisterDeviceEventCallback(OnGraphicsDeviceEvent);   // functional-style cast; the example never actually unregisters the callback
    }

    extern "C" UnityRenderingEvent __declspec(dllexport) __stdcall Execute()
    {
        return OnRenderEvent;
    }
    [Screenshot attachment: upload_2019-7-8_18-0-20.png]


    Copy the compiled DLL into Assets/Plugins, reload the project, attach MeshShaderPlugin.cs to the camera and press Play.
     
  7. Przemyslaw_Zaworski

    Przemyslaw_Zaworski

    Joined:
    Jun 9, 2017
    Posts:
    328
    Update to the previous post:
    This is a second version of the C# script for the situation where we don't want to render the output directly to the screen, but to a Render Texture:

    Code (CSharp):
    using UnityEngine;
    using System;
    using System.Collections;
    using System.Runtime.InteropServices;

    public class MeshShaderPlugin : MonoBehaviour
    {
        [DllImport("MeshShaderPlugin")]
        static extern IntPtr Execute();

        public RenderTexture RT;
        private Material InternalMaterial;

        void Awake()
        {
            InternalMaterial = new Material(Shader.Find("Sprites/Default"));
        }

        // Makes the Render Texture the active target, draws a full-screen quad with the
        // placeholder material, clears the target, then issues the native plugin event
        // so the mesh shader renders into the same Render Texture.
        void RenderToBuffer(RenderTexture destination, Material material)
        {
            RenderTexture.active = destination;
            GL.PushMatrix();
            GL.LoadOrtho();
            material.SetPass(0);
            GL.Begin(GL.QUADS);
            GL.MultiTexCoord2(0, 0.0f, 0.0f);
            GL.Vertex3(0.0f, 0.0f, 0.0f);
            GL.MultiTexCoord2(0, 1.0f, 0.0f);
            GL.Vertex3(1.0f, 0.0f, 0.0f);
            GL.MultiTexCoord2(0, 1.0f, 1.0f);
            GL.Vertex3(1.0f, 1.0f, 0.0f);
            GL.MultiTexCoord2(0, 0.0f, 1.0f);
            GL.Vertex3(0.0f, 1.0f, 0.0f);
            GL.End();
            GL.Clear(false, true, Color.black);
            GL.IssuePluginEvent(Execute(), 1);
            GL.PopMatrix();
        }

        void Update()
        {
            RenderToBuffer(RT, InternalMaterial);
        }
    }
     
    bb8_1 likes this.
  8. LooperVFX

    LooperVFX

    Joined:
    Dec 3, 2018
    Posts:
    179
    Once Microsoft adds Mesh Shaders to DirectX 12, it will be a shoo-in for Unity to add support. Vulkan and OpenGL also already have preliminary Mesh Shader support (as extensions, which will presumably be rolled into the main specifications). After the graphics APIs have standardised Mesh Shaders, AMD and any other GPU vendors just need to implement driver and hardware support for the latest API features, as AMD is already doing for Real-time Raytracing support.

    More details:
    https://devblogs.microsoft.com/directx/dev-preview-of-new-directx-12-features/
    https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/chap24.html
     
    bb8_1, cecarlsen and hippocoder like this.
  9. Iman_col

    Iman_col

    Joined:
    Mar 24, 2018
    Posts:
    27
    Looks great. Could you add support for this?
     
  10. ROBYER1

    ROBYER1

    Joined:
    Oct 9, 2015
    Posts:
    1,454
    After watching the latest UE5 demo, can I assume Unity is implementing this too?
     
  11. Cynicat

    Cynicat

    Joined:
    Jun 12, 2013
    Posts:
    290
    UE5 is using a software rasterizer and a whole bunch of custom internal mesh formatting to render its Nanite geometry. Don't hold your breath on Unity building a system like that. Honestly, I'd prefer if Unity focused on more general-purpose systems rather than specific use-case systems like that. Though eventually hardware will be up to a standard to handle that kind of geometry out of the box, without all the specific hacks UE5 is doing.
     
    ROBYER1 likes this.
  12. Coroknight

    Coroknight

    Joined:
    Jul 10, 2012
    Posts:
    26
    I wouldn't necessarily call what UE5 is doing a "hack", since it seems general purpose and not just a cool demo. From what I understand, they have written algorithms that convert a mesh into a texture and use virtual texturing to stream these large textures from your hard drive to the graphics card. I'm not sure if mesh shaders really let you push the same level of detail.

    The key here is that they don't have to push millions of polygons to the graphics card, just normal textures. My original model can have a billion polygons, but it doesn't matter because they'll all get converted to 8K textures (or smaller).
     
  13. LooperVFX

    LooperVFX

    Joined:
    Dec 3, 2018
    Posts:
    179
    @Cynicat @Coroknight I think a citation is needed here. So first, before I dive into this topic: was there some insider knowledge shared by Epic devs, or a technical breakdown released, that I missed? Please share if so. If not... this seems to be pure speculation as to the exact implementation Epic is using in the UE5 demo. (And now I will share some of my own speculation as well, ha.) Admittedly, to your credit, whether a mesh shader pipeline or a custom compute pipeline is being used (or both, or something else), it's true that there is still a lot for the CPU-side software to handle in order to efficiently stream these meshes from disk to CPU memory to GPU memory, etc., before the GPU can even do any work on them in a "shader" or kernel. Just so it's clear this isn't a matter of "just add Mesh Shaders."

    What is known is that the UE5 demo was running on PS5 or PS5 dev kit hardware, which is AMD's Navi RDNA2 GPU architecture... This supports new GPU hardware/driver features such as Mesh Shaders and Sampler Feedback, which enables texture-space shading (Nvidia RTX supports these as well, but is not used in any current or future game consoles)... Anyway, these features are likely candidates to have been used for the UE5 demo in some capacity. If this demo is supposed to showcase what UE5 is capable of on next-gen hardware, why would they not take advantage of what the new GPU architecture has to offer for this exact purpose? That is exactly what makes this new hardware "next gen." I find it hard to believe that Epic would choose not to take advantage of these capabilities, though it's also possible they are so new that Epic wouldn't have been able to if the development cycle started x months/years ago, they didn't yet have these new features in mind, and they didn't have time to refactor to support them for this demo. It's anyone's guess. Though I do believe these features will be utilized sooner or later for the known performance benefits.

    @Cynicat While Unity's approach to formatting and streaming mesh data (to get it to the GPU) will not be exactly the same as UE5's, I *would* and will in fact hold my breath on Unity generally keeping pace with these features. Unity is still aiming for an eventual IPO, and part of that has been focusing quite aggressively, with HDRP, on parity with Epic's offerings in terms of state-of-the-art render pipeline features. Not to mention all the other major Unity features / packages released in the past few years. Unity will definitely support DirectX 12 Ultimate's features, as they were already early adopters of DXR / Real-time Raytracing (now part of DirectX 12 Ultimate). It's very unlikely Unity is now just going to stop implementing this API specification halfway. Vulkan will soon support all the same features as DirectX 12 Ultimate as well, so you can think of DirectX/Vulkan interchangeably in this regard. I'm just using DirectX as the example here since they are slightly ahead in releasing these API features.

    @Coroknight What you are describing is (in an abstracted way) more or less the essence of how a Mesh Shader pipeline is designed to work: it offloads more of the work on mesh data from the CPU to the GPU in a highly parallelized fashion, using a specially optimized GPU memory buffer format. The GPU buffers holding the mesh data aren't strictly "textures", though; they are specialized buffers that are actually even more performant than if the data were needlessly pushed through texture / rasterization hardware as traditional "textures", which has been a common technique of the past and still today (packing arbitrary data into a texture buffer). Similar to how structured buffers / compute buffers are used with compute shaders, mesh shaders are really just compute shaders that have been specialized for mesh data and can take advantage of new GPU features like early GPU culling that compute-only implementations cannot. So, to answer your question about whether mesh shaders really let you push the same level of detail: from what I understand, yes, Mesh Shaders can and in fact will let you take advantage of more performant GPU architecture features than if you were not using Mesh Shaders. That said, we don't know if UE5 or Unity (or the API developers, Microsoft, Khronos... or the GPU manufacturers, at a driver level) will provide some GPU-compute-based solution to emulate this sort of functionality, in a similar way that the DXR fallback compiler for non-raytracing-capable GPUs was made available by Microsoft, or how Nvidia updated drivers on last generation's GTX 1060+ GPUs to support hardware accelerated raytracing / DXR.
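
    To make the point above about mesh data living in plain structured buffers (rather than being packed into textures) concrete, here is a small Unity-side sketch under stated assumptions: vertex and index data are uploaded as ComputeBuffers and drawn procedurally by a hypothetical shader that indexes them by SV_VertexID. The shader, the "_Vertices" / "_Indices" names and the struct layout are illustrative assumptions only, not an existing Unity mesh shader API.

    Code (CSharp):
    using UnityEngine;

    public class StructuredMeshBufferExample : MonoBehaviour
    {
        // Plain vertex record stored in a structured buffer (24 bytes: position + normal).
        struct VertexData
        {
            public Vector3 position;
            public Vector3 normal;
        }

        public Mesh sourceMesh;               // any readable mesh
        public Material bufferMaterial;       // hypothetical shader reading _Vertices / _Indices

        ComputeBuffer vertexBuffer;
        ComputeBuffer indexBuffer;
        int indexCount;

        void OnEnable()
        {
            Vector3[] positions = sourceMesh.vertices;
            Vector3[] normals = sourceMesh.normals;
            var vertices = new VertexData[positions.Length];
            for (int i = 0; i < positions.Length; i++)
                vertices[i] = new VertexData { position = positions[i], normal = normals[i] };

            int[] indices = sourceMesh.triangles;
            indexCount = indices.Length;

            vertexBuffer = new ComputeBuffer(vertices.Length, 6 * sizeof(float));
            vertexBuffer.SetData(vertices);
            indexBuffer = new ComputeBuffer(indexCount, sizeof(int));
            indexBuffer.SetData(indices);

            bufferMaterial.SetBuffer("_Vertices", vertexBuffer);
            bufferMaterial.SetBuffer("_Indices", indexBuffer);
        }

        void Update()
        {
            // The shader is expected to fetch: v = _Vertices[_Indices[vertexID]].
            Graphics.DrawProcedural(bufferMaterial,
                new Bounds(transform.position, Vector3.one * 100f),
                MeshTopology.Triangles, indexCount);
        }

        void OnDisable()
        {
            vertexBuffer.Release();
            indexBuffer.Release();
        }
    }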

    I didn't really get into some of the other new graphics API features for the latest GPUs like Sampler Feedback or Texture-space shading, though you can find an overview on that here: https://www.guru3d.com/news-story/a...hitecture-to-support-directx-12-ultimate.html

    And you can find out more about what Mesh Shaders are / how they will be used / why they are important from a recent video this year by Microsoft here:


    There's also an informative article from Nvidia on Mesh Shaders from 2018:
    https://devblogs.nvidia.com/introduction-turing-mesh-shaders/
    And the associated talk at SIGGRAPH 2018:
     
  14. Cynicat

    Cynicat

    Joined:
    Jun 12, 2013
    Posts:
    290

    Sourced from an interview with Karis, who is the lead graphics engineer on UE5 Nanite: https://www.eurogamer.net/articles/...eal-engine-5-playstation-5-tech-demo-analysis

    Quote:

    We were also really curious about exactly how geometry is processed, whether Nanite uses a fully software-based raw compute approach (which would work well across all systems, including PC GPUs that aren't certified with the full DirectX 12 Ultimate) or whether Epic taps into the power of mesh shaders, or primitive shaders as Sony describes them for PlayStation 5. The answer is intriguing.

    "The vast majority of triangles are software rasterised using hyper-optimised compute shaders specifically designed for the advantages we can exploit," explains Brian Karis. "As a result, we've been able to leave hardware rasterisers in the dust at this specific task. Software rasterisation is a core component of Nanite that allows it to achieve what it does. We can't beat hardware rasterisers in all cases though so we'll use hardware when we've determined it's the faster path. On PlayStation 5 we use primitive shaders for that path which is considerably faster than using the old pipeline we had before with vertex shaders."

    End Quote

    Also an explanation from him about it here on his twitter:
    https://twitter.com/BrianKaris/status/1261098487279579136

    Also worth reading Brian's Twitter and blog to see some of the groundwork for what later became Nanite:
    https://twitter.com/briankaris
    http://graphicrants.blogspot.com/

    While it's a neat system, let's wait and see how many structural assumptions Epic had to make to get it working before we start pushing Unity to invest even more time in experimental, half-finished features. =/
     
    NotaNaN and LooperVFX like this.
  15. Cynicat

    Cynicat

    Joined:
    Jun 12, 2013
    Posts:
    290
    PS: I do really want Unity to support mesh shaders though; those are going to become the new standard and will be hella useful. I'm just not sure about mimicking Nanite specifically until we know more about how it works internally and what the trade-offs are. =3
     
    NotaNaN, LooperVFX and Lars-Steenhoff like this.
  16. Arowx

    Arowx

    Joined:
    Nov 12, 2009
    Posts:
    8,194
    This is the only post on the topic I could find. Mesh Shaders are now in DX12 Ultimate, so only on those unaffordable GPUs that no one can find, e.g. the 3000 and 6000 series.

    But according to the new 3DMark benchmark, they can give some really good performance boosts, as they are designed to work better with parallelism than the older shader pipelines and also provide better culling.

    3DMark Adds Interactive Mesh Shader Benchmark To Test DX12 Ultimate Ready Graphics Cards | HotHardware

    Odd that they seem limited to DX12 Ultimate only, when this thread mentions them as far back as 2018?

    So, any news on Unity support for this? It sounds like a great performance booster that could really make scenes like the Mega City demo fly.
     
    Last edited: Feb 15, 2021
  17. hippocoder

    hippocoder

    Digital Ape

    Joined:
    Apr 11, 2010
    Posts:
    29,723
    Everyone and their donut is going on about how mesh shaders are the next big thing for everything. But I'm more concerned about Unity's chances of handling the existing engine at this point, since they aren't at feature parity with built-in yet.

    Honestly had quite enough of the run-around in recent years. When I see both HDRP and URP capable of all built-in game needs and stable as heck, then sure count me in for DX12/Vulkan shenanigans.

    I doubt drivers are stable for those still.
     
  18. Arowx

    Arowx

    Joined:
    Nov 12, 2009
    Posts:
    8,194
    Isn't that what the alpha and beta versions are for (working on adding new features), with the LTS versions for bug fixes?

    Unity used to have a roadmap of new technology it was planning to work on. Is there still an updated roadmap?
     
  19. AlanMattano

    AlanMattano

    Joined:
    Aug 22, 2013
    Posts:
    1,501
    Now that mesh shaders are implemented in 3DMark and GPUs ship with new drivers, this new tech is becoming more visible to the general gaming public. Unity will be more attractive if they implement it somehow.

    Any roadmap news?
     
    Last edited: Feb 15, 2021
  20. LooperVFX

    LooperVFX

    Joined:
    Dec 3, 2018
    Posts:
    179
    Actually, Nvidia RTX *2000 series (and up)... and AMD Radeon RX 6000 series.
     
    laurentlavigne and vx4 like this.
  21. Arowx

    Arowx

    Joined:
    Nov 12, 2009
    Posts:
    8,194
    With the originally posted tech mentioned back in 2018, I would have thought some older AMD GPUs would have it.

    Does it need raytracing to work, e.g. for occlusion culling?
     
  22. hippocoder

    hippocoder

    Digital Ape

    Joined:
    Apr 11, 2010
    Posts:
    29,723
    You can get a GPU that costs 100 bucks or less that supports it, basically any DX12-class GPU. It's not tied to raytracing; it's about reducing the number of stages while making the new stages a bit fatter (so you can reason about mesh data and the hardware can optimise the stages better).

    Think of it as a replacement for the old stages, which are too many and don't work well together. Basically, it replaces the old geometry pipeline: tessellation, verts not having local data and so on, all those problems.
     
    LooperVFX and AlanMattano like this.
  23. LooperVFX

    LooperVFX

    Joined:
    Dec 3, 2018
    Posts:
    179
    I think it's important to clarify that mesh shaders are supported by any DX12 *Ultimate* class GPU. They are not (to my current knowledge) supported on baseline DX12 (non-Ultimate) class GPUs. And while Mesh Shaders are not explicitly tied to Real-time Raytracing in the way they function, both features are classed at the same level of API support. So @Arowx that is why you commonly see them mentioned together: they were just developed around the same time and grouped together in this new API feature set.

    Whether you can find a DirectX 12 Ultimate class GPU for $100, or whether the DX12 and Vulkan APIs add some sort of fallback or subset of Mesh Shader support for slightly older (DX12 "non-Ultimate") architectures in the future, remains to be seen. I wish they had just called DirectX 12 Ultimate "DirectX 12.5" --what Microsoft would typically do when naming API advancements. But I guess their marketing dept didn't think that was nearly as EXCITING as calling it ULTIMATE lol. Typical post-Moore's-Law accelerationist marketing desperation: hype up new features (not just raw performance) to sell products, since they aren't getting faster and cheaper as quickly as they used to when consistently shrinking silicon fabs upheld Moore's law --that just isn't feasible any more due to the limits of physics and our material science.

    I do hope there is some fallback support to emulate mesh shaders on any compute-capable GPU, with reduced performance and no hardware GPU culling. But that remains to be seen. Currently the DirectX docs just advise devs to write an old-fashioned vertex shader as the fallback on older hardware, which doesn't really cover the vast middle ground that graphics programmers have been working in for a number of years now with compute shaders and indirect draw / rendering --which is what inspired the creation of Mesh Shaders in the first place. Mesh Shaders are a formalization / standardization of state-of-the-art methods for replacing the aging vertex / geometry / tessellation shader stages with much more flexible compute shaders in the graphics pipeline... plus some added hardware optimizations for GPU-side culling which are not possible with general-purpose GPU compute alone.
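
    A minimal sketch of that fallback-path idea, assuming only what Unity exposes: there is no built-in mesh shader capability query here, so the script just distinguishes the compute + indirect middle ground from the plain vertex-shader path via SystemInfo.supportsComputeShaders. The renderer components it mentions are hypothetical placeholders.

    Code (CSharp):
    using UnityEngine;

    public class GeometryPathSelector : MonoBehaviour
    {
        void Start()
        {
            if (SystemInfo.supportsComputeShaders)
            {
                // Middle-ground path: GPU culling / geometry expansion in compute shaders,
                // rendered with indirect draws.
                Debug.Log("Using compute + indirect draw path.");
                // e.g. gameObject.AddComponent<ComputeIndirectRenderer>();   // hypothetical component
            }
            else
            {
                // Old-fashioned fallback, as the DirectX docs suggest: plain vertex shaders.
                Debug.Log("Falling back to the traditional vertex-shader pipeline.");
                // e.g. gameObject.AddComponent<ClassicMeshRenderer>();       // hypothetical component
            }
        }
    }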

    Why older AMD GPUs did not implement Mesh Shader support: the Mesh Shader pipeline was spearheaded by Nvidia, so they pre-emptively implemented early support for the Turing / RTX 2000 series launch in 2018, and Mesh Shaders are now being adopted in the major graphics APIs. So, similar to real-time raytracing adoption, Nvidia is about a generation ahead here due to the massive amount of investment in R&D.
    Actually, AMD had a similar (in some ways, but not directly comparable) feature called Primitive Shaders, but that never caught on with cross-vendor architecture and API support. If it had... Nvidia would have been the one late to the party. It's a moot point now. Though you'll see mention of them in Epic developer talks on UE5's "Nanite" tech, because the PS5's AMD GPU is apparently a slightly earlier design than the Xbox's; it doesn't have full DX12 Ultimate Mesh Shader support, so they used the AMD-specific APIs to get similar performance gains.

    As for AMD GPUs newer than the current RDNA 2 architecture (RX 6000 series): they will almost certainly support Mesh Shaders as well, they just don't exist yet, so I didn't mention them.

    Additional info:
    https://www.pcgamer.com/amd-confirm...cards-will-fully-support-directx-12-ultimate/
     
    Last edited: Feb 19, 2021
  24. Arowx

    Arowx

    Joined:
    Nov 12, 2009
    Posts:
    8,194
    Sounds like it could be a very good optimisation that would work the hardware smarter, not faster.

    Mind you, there are in-memory processing chips (Smart RAM, allowing massive parallelism nearer to the data) coming to market in the near future. Will we end up with displays that are the GPU?

    Or the big AI upscale cheat of DLSS.

    On the other hand, I bumped into the "racing the beam" concept of retro game programming, where they could do better graphics with their limited hardware as long as they did the work in the time it took for the next line of pixels to be drawn by the CRT display.

    Even LED displays still draw the screen a line at a time from top to bottom. Maybe we could get more out of our GHz GPUs and CPUs if we had a graphics API that wasn't fixed to drawing a frame at a time?

    I suppose tile-based rendering could probably take advantage of a racing-the-beam approach.
     
    LooperVFX likes this.
  25. hippocoder

    hippocoder

    Digital Ape

    Joined:
    Apr 11, 2010
    Posts:
    29,723
    I don't really think it's a cheat; I think it improves the quality over base rendering in a lot of cases. The reason is that it reasons about the final pixel better than a basic rasteriser would in lossy scenarios (which is pretty much everything as far as a rasteriser is concerned).

    You can see evidence of this in Digital Foundry's tests: the DLSS versions retain detail at conservative settings that the original renders do not.

    As we are human, perceptual detail is in fact more important for entertainment than anything else.
     
  26. AlanMattano

    AlanMattano

    Joined:
    Aug 22, 2013
    Posts:
    1,501
    Could it be used for the terrain (including the terrain collider)?
    Or is it more for grass without a collider?

    The Unity 2021 HDRP DLSS toggle is super awesome!

    Is the "mesh shader" similar to the "Enable GPU Instancing" shader toggle,
    or
    do we need the Unreal 5 release before it gets implemented in Unity?
     
  27. Arowx

    Arowx

    Joined:
    Nov 12, 2009
    Posts:
    8,194

    Mesh Shading in an Actual Game with more info on the technology.

     
  28. hippocoder

    hippocoder

    Digital Ape

    Joined:
    Apr 11, 2010
    Posts:
    29,723
    I'm actually fairly sure this is a safe bet for Unity to support when the time comes. It's not a difficult add, and it will substantially improve performance on certain platforms. If Unity doesn't, there are a fair few similar custom culling options, like per-triangle culling with compute shaders replacing the traditional vertex shader pipeline. That approach is more work, but EA has it in games today; in fact, they began work on their versions years ago.

    I'd expect it on HDRP before anything else, I suppose. All of this is guesswork from me.
     
    FernandoMK and LooperVFX like this.
  29. JoNax97

    JoNax97

    Joined:
    Feb 4, 2016
    Posts:
    611
    Excuse me if this is a dumb question, but can this work as an analogue for Nanite, or some of it? If yes, how much?
     
  30. hippocoder

    hippocoder

    Digital Ape

    Joined:
    Apr 11, 2010
    Posts:
    29,723
    No, they're very different. The end result is the same with Nanite or any other rendering concept: in the end you want more polygons where they matter and fewer where they can't be seen.

    Workflows are Nanite's biggest strength by far, which all these other solutions are lacking. It's simply not enough to throw millions of triangles at the screen and cull them. You want to be able to get this stuff into the engine to begin with, without bringing the workstation to its knees. That's something Nanite actually solved completely.

    It'll require substantial tooling and editing work to benefit from this technology on the same scale as Nanite.
     
    LooperVFX and JoNax97 like this.
  31. JoNax97

    JoNax97

    Joined:
    Feb 4, 2016
    Posts:
    611
    Thank you for the insight
     
  32. FernandoMK

    FernandoMK

    Joined:
    Feb 21, 2017
    Posts:
    178
    The topic covered here is very interesting...

    I hope that Unity eventually implements this in the core of the SRP so that HDRP and URP can get a performance gain :)
     
  33. alexandre-fiset

    alexandre-fiset

    Joined:
    Mar 19, 2012
    Posts:
    715
    Any update on Mesh Shader support? :)
     
    BradZoob and bb8_1 like this.
  34. Grimreaper358

    Grimreaper358

    Joined:
    Apr 8, 2013
    Posts:
    789
  35. BradZoob

    BradZoob

    Joined:
    Feb 12, 2014
    Posts:
    66
    Any progress?
     
  36. LooperVFX

    LooperVFX

    Joined:
    Dec 3, 2018
    Posts:
    179
    Not sure what Unity has to say, but related good news is that Apple's Metal 3 has been released, which adds Mesh Shader support to Metal. So now Nvidia, AMD and Apple's latest GPUs, drivers, and supported graphics APIs all support Mesh Shaders. This is the type of broad cross-vendor support Unity tends to look for when implementing new features like this.
     
    Lars-Steenhoff and seldemirov like this.
  37. BradZoob

    BradZoob

    Joined:
    Feb 12, 2014
    Posts:
    66
    So there's no HLSL interface to these APIs in compute shaders presently? If so, can we solve that with a compilation step of some kind, as per Przemyslaw_Zaworski's amazing example for GLSL above?
     
  38. BradZoob

    BradZoob

    Joined:
    Feb 12, 2014
    Posts:
    66
    Sorry, surface shaders, though we would need cbuffers that can be accessed from a surface shader.
     
  39. Mariusz-Born7

    Mariusz-Born7

    Joined:
    Sep 4, 2015
    Posts:
    40
  40. Przemyslaw_Zaworski

    Przemyslaw_Zaworski

    Joined:
    Jun 9, 2017
    Posts:
    328
    A 3D scene with standard GameObjects and a Bezier surface rendered with a task / mesh shader, with the Model-View-Projection matrix passed in:

    [Screenshot attachment: upload_2023-10-25_4-54-7.png]

    Code (CSharp):
    using UnityEngine;
    using System.Runtime.InteropServices;

    public class MeshShader : MonoBehaviour
    {
        [DllImport("MeshShader")]
        static extern void SetMatrix(float[] floatArray);

        [DllImport("MeshShader")]
        static extern System.IntPtr Execute();

        // Flattens a Matrix4x4 into a column-major float array for OpenGL.
        float[] MatrixToArray(Matrix4x4 matrix)
        {
            return new float[]
            {
                matrix.m00, matrix.m10, matrix.m20, matrix.m30,
                matrix.m01, matrix.m11, matrix.m21, matrix.m31,
                matrix.m02, matrix.m12, matrix.m22, matrix.m32,
                matrix.m03, matrix.m13, matrix.m23, matrix.m33
            };
        }

        void Start() {}

        void OnRenderObject()
        {
            // Build the MVP matrix, pass it to the plugin, then trigger the native draw.
            Matrix4x4 mvp = GL.GetGPUProjectionMatrix(Camera.main.projectionMatrix, true) * Camera.main.worldToCameraMatrix * transform.localToWorldMatrix;
            SetMatrix(MatrixToArray(mvp));
            GL.IssuePluginEvent(Execute(), 1);
        }
    }
    Code (C++):
    // For x64 Visual Studio command line:  cl.exe /LD MeshShader.cpp opengl32.lib
    #include <windows.h>
    #include <GL/gl.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define GL_TASK_SHADER_NV  0x955A
    #define GL_MESH_SHADER_NV  0x9559
    #define GL_FRAGMENT_SHADER 0x8B30

    typedef unsigned int(__stdcall *PFNGLCREATEPROGRAMPROC) ();
    typedef unsigned int(__stdcall *PFNGLCREATESHADERPROC) (unsigned int type);
    typedef void(__stdcall *PFNGLSHADERSOURCEPROC) (unsigned int shader, int count, const char* const* string, const int* length);
    typedef void(__stdcall *PFNGLCOMPILESHADERPROC) (unsigned int shader);
    typedef void(__stdcall *PFNGLATTACHSHADERPROC) (unsigned int program, unsigned int shader);
    typedef void(__stdcall *PFNGLLINKPROGRAMPROC) (unsigned int program);
    typedef void(__stdcall *PFNGLUSEPROGRAMPROC) (unsigned int program);
    typedef void(__stdcall *PFNGLDRAWMESHTASKSNVPROC) (unsigned int first, unsigned int count);
    typedef void (__stdcall *PFNGLUNIFORMMATRIX4FVPROC) (int location, int count, unsigned char transpose, const float *value);
    typedef int (__stdcall *PFNGLGETUNIFORMLOCATIONPROC) (unsigned int program, const char *name);
    typedef void (__stdcall *PFNGLGENVERTEXARRAYSPROC) (int n, unsigned int *arrays);
    typedef void (__stdcall *PFNGLBINDVERTEXARRAYPROC) (unsigned int array);

    PFNGLCREATEPROGRAMPROC glCreateProgram;
    PFNGLCREATESHADERPROC glCreateShader;
    PFNGLSHADERSOURCEPROC glShaderSource;
    PFNGLCOMPILESHADERPROC glCompileShader;
    PFNGLATTACHSHADERPROC glAttachShader;
    PFNGLLINKPROGRAMPROC glLinkProgram;
    PFNGLUSEPROGRAMPROC glUseProgram;
    PFNGLDRAWMESHTASKSNVPROC glDrawMeshTasksNV;
    PFNGLUNIFORMMATRIX4FVPROC glUniformMatrix4fv;
    PFNGLGETUNIFORMLOCATIONPROC glGetUniformLocation;
    PFNGLGENVERTEXARRAYSPROC glGenVertexArrays;
    PFNGLBINDVERTEXARRAYPROC glBindVertexArray;

    // Loads the OpenGL entry points that are not exposed by opengl32.lib.
    void glInit()
    {
        glCreateProgram = (PFNGLCREATEPROGRAMPROC)wglGetProcAddress("glCreateProgram");
        glCreateShader = (PFNGLCREATESHADERPROC)wglGetProcAddress("glCreateShader");
        glShaderSource = (PFNGLSHADERSOURCEPROC)wglGetProcAddress("glShaderSource");
        glCompileShader = (PFNGLCOMPILESHADERPROC)wglGetProcAddress("glCompileShader");
        glAttachShader = (PFNGLATTACHSHADERPROC)wglGetProcAddress("glAttachShader");
        glLinkProgram = (PFNGLLINKPROGRAMPROC)wglGetProcAddress("glLinkProgram");
        glUseProgram = (PFNGLUSEPROGRAMPROC)wglGetProcAddress("glUseProgram");
        glDrawMeshTasksNV = (PFNGLDRAWMESHTASKSNVPROC)wglGetProcAddress("glDrawMeshTasksNV");
        glUniformMatrix4fv = (PFNGLUNIFORMMATRIX4FVPROC)wglGetProcAddress("glUniformMatrix4fv");
        glGetUniformLocation = (PFNGLGETUNIFORMLOCATIONPROC)wglGetProcAddress("glGetUniformLocation");
        glGenVertexArrays = (PFNGLGENVERTEXARRAYSPROC)wglGetProcAddress("glGenVertexArrays");
        glBindVertexArray = (PFNGLBINDVERTEXARRAYPROC)wglGetProcAddress("glBindVertexArray");
    }

    typedef enum UnityGfxRenderer
    {
        kUnityGfxRendererNull = 4,
        kUnityGfxRendererOpenGLCore = 17,
    } UnityGfxRenderer;
    typedef enum UnityGfxDeviceEventType
    {
        kUnityGfxDeviceEventInitialize = 0,
        kUnityGfxDeviceEventShutdown = 1,
        kUnityGfxDeviceEventBeforeReset = 2,
        kUnityGfxDeviceEventAfterReset = 3,
    } UnityGfxDeviceEventType;

    struct UnityInterfaceGUID
    {
        UnityInterfaceGUID(unsigned long long high, unsigned long long low) : m_GUIDHigh(high) , m_GUIDLow(low) { }
        unsigned long long m_GUIDHigh;
        unsigned long long m_GUIDLow;
    };
    struct IUnityInterface {};
    typedef void (__stdcall * IUnityGraphicsDeviceEventCallback)(UnityGfxDeviceEventType eventType);

    struct IUnityInterfaces
    {
        IUnityInterface* (__stdcall* GetInterface)(UnityInterfaceGUID guid);
        void(__stdcall* RegisterInterface)(UnityInterfaceGUID guid, IUnityInterface * ptr);
        template<typename INTERFACE>
        INTERFACE* Get()
        {
            return static_cast<INTERFACE*>(GetInterface(UnityInterfaceGUID(0x7CBA0A9CA4DDB544ULL, 0x8C5AD4926EB17B11ULL)));
        }
        void Register(IUnityInterface* ptr)
        {
            RegisterInterface(UnityInterfaceGUID(0x7CBA0A9CA4DDB544ULL, 0x8C5AD4926EB17B11ULL), ptr);
        }
    };

    struct IUnityGraphics : IUnityInterface
    {
        void(__stdcall* RegisterDeviceEventCallback)(IUnityGraphicsDeviceEventCallback callback);
    };

    typedef void (__stdcall* UnityRenderingEvent)(int eventId);
    typedef void(__stdcall* UnregisterDeviceEventCallback)(IUnityGraphicsDeviceEventCallback callback);
    static UnityGfxRenderer DeviceType = kUnityGfxRendererNull;

    unsigned int Program;
    float ModelViewProjection[16];

    // Reads a whole shader source file into a null-terminated string.
    char* LoadSource(const char* filename)
    {
        FILE *file = fopen(filename, "r");
        fseek(file, 0, SEEK_END);
        size_t size = ftell(file);
        rewind(file);
        char* source = (char*) malloc(size + 1);
        size_t count = fread(source, sizeof(char), size, file);
        source[count] = '\0';   // terminate at the number of bytes actually read
        fclose(file);
        return source;
    }

    // Compiles and links the task, mesh and fragment shaders into one program.
    int BuildShaders(const char* TS, const char* MS, const char* FS)
    {
        int program = glCreateProgram();
        int taskShader = glCreateShader(GL_TASK_SHADER_NV);
        int meshShader = glCreateShader(GL_MESH_SHADER_NV);
        int fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
        glShaderSource(taskShader, 1, &TS, 0);
        glShaderSource(meshShader, 1, &MS, 0);
        glShaderSource(fragmentShader, 1, &FS, 0);
        glCompileShader(taskShader);
        glCompileShader(meshShader);
        glCompileShader(fragmentShader);
        glAttachShader(program, taskShader);
        glAttachShader(program, meshShader);
        glAttachShader(program, fragmentShader);
        glLinkProgram(program);
        return program;
    }

    void Start()
    {
        glInit();
        unsigned int vertexArrayObject;
        glGenVertexArrays(1, &vertexArrayObject);
        glBindVertexArray(vertexArrayObject);
        DeviceType = kUnityGfxRendererOpenGLCore;
        Program = BuildShaders(LoadSource("D:\\task.glsl"), LoadSource("D:\\mesh.glsl"), LoadSource("D:\\fragment.glsl"));
    }

    // Per-frame render callback: uploads the MVP matrix and launches one task shader workgroup.
    void Update()
    {
        glEnable(GL_CULL_FACE);
        glCullFace(GL_FRONT);
        glDisable(GL_BLEND);
        glDepthFunc(GL_LEQUAL);
        glEnable(GL_DEPTH_TEST);
        glDepthMask(GL_FALSE);
        glUseProgram(Program);
        glUniformMatrix4fv(glGetUniformLocation(Program, "MVP"), 1, GL_FALSE, &ModelViewProjection[0]);
        glDrawMeshTasksNV(0, 1);
    }

    extern "C" void __declspec(dllexport) __stdcall SetMatrix(float* floatArray)
    {
        memcpy(ModelViewProjection, floatArray, sizeof(float) * 16);
    }

    static void __stdcall OnGraphicsDeviceEvent(UnityGfxDeviceEventType eventType)
    {
        if (eventType == kUnityGfxDeviceEventInitialize)
        {
            Start();
        }
        if (eventType == kUnityGfxDeviceEventShutdown)
        {
            DeviceType = kUnityGfxRendererNull;
        }
    }

    static void __stdcall OnRenderEvent(int eventID)
    {
        Update();
    }

    extern "C" void __declspec(dllexport) __stdcall UnityPluginLoad(IUnityInterfaces* unityInterfaces)
    {
        IUnityInterfaces* s_UnityInterfaces = unityInterfaces;
        IUnityGraphics* s_Graphics = s_UnityInterfaces->Get<IUnityGraphics>();
        s_Graphics->RegisterDeviceEventCallback(OnGraphicsDeviceEvent);
        OnGraphicsDeviceEvent(kUnityGfxDeviceEventInitialize);
    }

    extern "C" void __declspec(dllexport) __stdcall UnityPluginUnload()
    {
        UnregisterDeviceEventCallback(OnGraphicsDeviceEvent);   // functional-style cast; the example never actually unregisters the callback
    }

    extern "C" UnityRenderingEvent __declspec(dllexport) __stdcall Execute()
    {
        return OnRenderEvent;
    }
    task.glsl:

    Code (GLSL):
    #version 450
    #extension GL_NV_mesh_shader : enable

    layout(local_size_x = 1) in;

    void main()
    {
        // Launch enough mesh shader workgroups to cover the tessellated Bezier patch
        // (two triangles per grid cell).
        int tessellationFactor = 64;
        gl_TaskCountNV = tessellationFactor * tessellationFactor * 2;
    }
    mesh.glsl:

    Code (GLSL):
    #version 450
    #extension GL_NV_mesh_shader : enable
    layout(local_size_x = 3) in;
    layout(max_vertices = 3) out;
    layout(max_primitives = 1) out;
    layout(triangles) out;
    layout(location = 0) out Interpolants{vec3 v_color;} OUT[];
    uniform mat4 MVP;

    // Control points of the bicubic Bezier patch.
    vec3 ControlPoints[16] = vec3[16]
    (
        vec3(00.0, 00.0, 00.0), vec3(10.0, 00.0, 00.0), vec3(20.0, 00.0, 00.0), vec3(30.0, 00.0, 00.0),
        vec3(00.0, 00.0, 10.0), vec3(10.0, 10.0, 10.0), vec3(20.0, 10.0, 10.0), vec3(30.0, 00.0, 10.0),
        vec3(00.0, 00.0, 20.0), vec3(10.0, 10.0, 20.0), vec3(20.0, 10.0, 20.0), vec3(30.0, 00.0, 20.0),
        vec3(00.0, 00.0, 30.0), vec3(10.0, 00.0, 30.0), vec3(20.0, 00.0, 30.0), vec3(30.0, 00.0, 30.0)
    );

    vec3 BezierCurve (vec3 a, vec3 b, vec3 c, vec3 d, float t)
    {
        return mix(mix(mix(a, b, t), mix(b, c, t), t), mix(mix(b, c, t), mix(c, d, t), t), t);
    }

    vec3 BezierPatch (vec3 cp[16], float u, float v)
    {
        vec3 a = BezierCurve( cp[0],  cp[1],  cp[2],  cp[3], u);
        vec3 b = BezierCurve( cp[4],  cp[5],  cp[6],  cp[7], u);
        vec3 c = BezierCurve( cp[8],  cp[9], cp[10], cp[11], u);
        vec3 d = BezierCurve(cp[12], cp[13], cp[14], cp[15], u);
        return BezierCurve(a, b, c, d, v);
    }

    // Analytic surface normal from the partial derivatives of the patch.
    vec3 BezierPatchNormal (vec3 cp[16], float u, float v)
    {
        vec3 a = BezierCurve( cp[0],  cp[1],  cp[2],  cp[3], u);
        vec3 b = BezierCurve( cp[4],  cp[5],  cp[6],  cp[7], u);
        vec3 c = BezierCurve( cp[8],  cp[9], cp[10], cp[11], u);
        vec3 d = BezierCurve(cp[12], cp[13], cp[14], cp[15], u);
        vec3 dv = -3.0 * (1.0 - v) * (1.0 - v) * a + (3.0 * (1.0 - v) * (1.0 - v) - 6.0 * v * (1.0 - v)) * b + (6.0 * v * (1.0 - v) - 3.0 * v * v) * c + 3.0 * v * v * d;
        vec3 e = BezierCurve( cp[0],  cp[4],  cp[8], cp[12], v);
        vec3 f = BezierCurve( cp[1],  cp[5],  cp[9], cp[13], v);
        vec3 g = BezierCurve( cp[2],  cp[6], cp[10], cp[14], v);
        vec3 h = BezierCurve( cp[3],  cp[7], cp[11], cp[15], v);
        vec3 du = -3.0 * (1.0 - u) * (1.0 - u) * e + (3.0 * (1.0 - u) * (1.0 - u) - 6.0 * u * (1.0 - u)) * f + (6.0 * u * (1.0 - u) - 3.0 * u * u) * g + 3.0 * u * u * h;
        return normalize(cross(dv, du));
    }

    vec3 Hash(float p)
    {
        vec3 p3 = fract(vec3(p) * vec3(.1031, .1030, .0973));
        p3 += dot(p3, p3.yzx+33.33);
        return fract((p3.xxy+p3.yzz)*p3.zyx);
    }

    // Each workgroup emits one triangle of the tessellated patch.
    void main()
    {
        uint laneID = gl_LocalInvocationID.x;
        uint baseID = gl_GlobalInvocationID.x;
        int vertexId = int(baseID);
        int tessellationFactor = 64;
        int instance = int(floor(float(vertexId) / 6.0));
        float x = sign(mod(20.0, mod(float(vertexId), 6.0) + 2.0));
        float y = sign(mod(18.0, mod(float(vertexId), 6.0) + 2.0));
        float u = (float(instance / tessellationFactor) + x) / float(tessellationFactor);
        float v = (mod(float(instance), float(tessellationFactor)) + y) / float(tessellationFactor);
        vec3 lightDirection = normalize(vec3(50, 50, -50));
        vec3 normalDirection = BezierPatchNormal(ControlPoints, u, v);
        float diffuse = max(dot(lightDirection, normalDirection), 0.2);
        gl_MeshVerticesNV[laneID].gl_Position = MVP * vec4(BezierPatch(ControlPoints, u, v), 1);
        gl_PrimitiveIndicesNV[laneID] = laneID;
        gl_PrimitiveCountNV = 1;
        OUT[laneID].v_color = Hash(float(instance + 123)) * vec3(diffuse);
    }
    fragment.glsl:

    Code (GLSL):
    #version 450

    layout(location = 0) in Interpolants{vec3 v_color;} IN;
    out vec4 fragColor;

    void main()
    {
        fragColor = vec4(IN.v_color, 1.0);
    }
     
    bb8_1, OCASM and LooperVFX like this.
  41. Przemyslaw_Zaworski

    Przemyslaw_Zaworski

    Joined:
    Jun 9, 2017
    Posts:
    328