In Unity, a Compute Buffer is a buffer of memory that lives on the CPU which can hold arbitrary data. That memory is then uploaded to the GPU when it is bound to a shader. You can then use a Structured Buffer in your shader to access the data. Compute Buffers/Structured Buffers are very powerful as they easily allow you to upload a list of arbitrary input data to the GPU in easy to use structures that can be utilized by a shader.
The following C# script and shader will demonstrate how to use a geometry shader with instancing. We will submit a single draw call with 4 instanced points. The shader will then index our uploaded compute buffer, via the current instance id, to pass along the individual point data to the geometry shader. From there, the geometry shader will expand each point into a quad composed of 2 triangles. The geometry shader will also procedurally generate uv coordinates for each new vertex so that the fragment shader can generate a vertical gradient.
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 |
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UnityEngine;

/// <summary>
/// Demonstrates geometry-shader quad expansion driven by a ComputeBuffer.
/// Uploads 4 points (position, color, scale) to the GPU and issues a single
/// instanced draw call; the shader indexes the buffer by SV_InstanceID and
/// the geometry stage expands each point into a quad.
/// </summary>
public class GeometryShaderExample : MonoBehaviour
{
    const int TotalPoints = 4;

    // Material using the "Unlit/GeometryShaderExample" shader.
    public Material geometryMaterial;

    ComputeBuffer computeBuffer;

    // Layout must match the ComputeBufferData struct declared in the shader.
    [StructLayout(LayoutKind.Sequential)]
    struct ComputeBufferData
    {
        // 3 floats position + 3 floats color + 1 float scale = 28 bytes per element.
        public const int size = sizeof(float) * 3 + sizeof(float) * 3 + sizeof(float) * 1;

        public Vector3 centerPosition;
        public Vector3 color;
        public float scale;
    }

    // Use this for initialization
    void Start()
    {
        // Generate 4 individual points. Each point will be expanded into a quad made up
        // of two triangles via the Geometry Shader. Each quad will have its own position,
        // color, and scale. We will be using instancing to submit a single draw call with all 4 points.
        ComputeBufferData[] computeBufferData = new ComputeBufferData[TotalPoints];

        computeBufferData[0] = new ComputeBufferData();
        computeBufferData[0].centerPosition = new Vector3(-2, 0, 3);
        computeBufferData[0].color = new Vector3(1, 0, 0);
        computeBufferData[0].scale = 0.5f;

        computeBufferData[1] = new ComputeBufferData();
        computeBufferData[1].centerPosition = new Vector3(0, 1, 3);
        computeBufferData[1].color = new Vector3(0, 1, 0);
        computeBufferData[1].scale = 0.3f;

        computeBufferData[2] = new ComputeBufferData();
        computeBufferData[2].centerPosition = new Vector3(0, -1, 3);
        computeBufferData[2].color = new Vector3(0, 0, 1);
        computeBufferData[2].scale = 0.3f;

        computeBufferData[3] = new ComputeBufferData();
        computeBufferData[3].centerPosition = new Vector3(2, 0, 3);
        computeBufferData[3].color = new Vector3(1, 1, 0);
        computeBufferData[3].scale = 0.5f;

        // Create the compute buffer and assign it our custom data.
        // SetData will only copy our data to CPU memory.
        // The data will be uploaded to the GPU later when
        // we bind the compute buffer to the shader via the material.
        computeBuffer = new ComputeBuffer(TotalPoints, ComputeBufferData.size);
        computeBuffer.SetData(computeBufferData);
    }

    void OnPostRender()
    {
        // Bind the compute buffer data to the shader as a structured buffer
        // resource that can be indexed via the current instance id.
        // Bind BEFORE SetPass so the buffer is part of the activated pass state.
        geometryMaterial.SetBuffer("_ComputeBufferData", computeBuffer);
        geometryMaterial.SetPass(0);

        // Instanced draw call which will contain all 4 point's data.
        Graphics.DrawProcedural(MeshTopology.Points, 1, TotalPoints);
    }

    void OnDestroy()
    {
        // ComputeBuffers wrap native GPU memory and are NOT garbage collected;
        // they must be released explicitly or Unity reports a leak warning.
        if (computeBuffer != null)
        {
            computeBuffer.Release();
            computeBuffer = null;
        }
    }
}
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 |
Shader "Unlit/GeometryShaderExample"
{
    Properties
    {
    }
    SubShader
    {
        Tags { "RenderType"="Opaque" }

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma geometry geom
            #pragma fragment frag
            #pragma enable_d3d11_debug_symbols
            // Geometry shaders + StructuredBuffers require shader model 5.0.
            #pragma target 5.0

            #include "UnityCG.cginc"

            // Vertex -> geometry stage payload: one point per instance.
            struct v2g
            {
                float4 position : SV_POSITION;
                float3 color : TEXCOORD0;
                float scale : TEXCOORD1;
            };

            // Geometry -> fragment stage payload: expanded quad vertices.
            struct g2f
            {
                float4 position : SV_POSITION;
                float3 color : TEXCOORD0;
                float2 uv : TEXCOORD1;
            };

            // Must match the C# ComputeBufferData layout (float3 + float3 + float).
            struct ComputeBufferData
            {
                float3 centerPosition;
                float3 color;
                float scale;
            };

            // A structured buffer allows us to have a list or array
            // of custom buffer data that we can index.
            // FIX: StructuredBuffer requires an explicit element type argument;
            // an untyped "StructuredBuffer" does not compile in HLSL.
            StructuredBuffer<ComputeBufferData> _ComputeBufferData;

            v2g vert (uint instanceId : SV_InstanceID)
            {
                v2g o;

                // Index our compute buffer data via the current instance id
                // since we used an instanced draw call.
                ComputeBufferData computeBufferData = _ComputeBufferData[instanceId];
                o.position = UnityObjectToClipPos(computeBufferData.centerPosition);
                o.color = computeBufferData.color;
                o.scale = computeBufferData.scale;
                return o;
            }

            // Each input point emits at most 6 vertices (two triangles).
            // FIX: TriangleStream also requires its output vertex type <g2f>.
            [maxvertexcount(6)]
            void geom(point v2g i[1], inout TriangleStream<g2f> triangleStream)
            {
                g2f o;

                // Make sure we multiply our scale's x component by the aspect ratio
                // so our quad won't be squashed.
                const float aspectRatio = _ScreenParams.y / _ScreenParams.x;
                float2 scale = float2(i[0].scale * aspectRatio, i[0].scale);

                // Currently, the point's position is in clip space. I would like to get it
                // in normalized device coordinate space which will be between -1 and 1 for the x and y components.
                // This makes the math a little easier. So we need to divide each component by w.
                const float OneOverW = 1.0 / i[0].position.w;
                float3 ndc = float3(i[0].position.x * OneOverW,
                                    i[0].position.y * OneOverW,
                                    i[0].position.z * OneOverW);

                // We will generate 2 triangles to build up our quad.
                // We will add a scale offset and then multiply by w to put the position back into
                // clip space as that is the space that the fragment shader expects the position to be in.
                // I will also procedurally generate uvs for each vertex so that we can take advantage of
                // uvs in the fragment shader.

                // Left Triangle
                o.position = float4((ndc + float3(scale.x, -scale.y, 0)) * i[0].position.w, i[0].position.w);
                o.color = i[0].color;
                o.uv = float2(1, 1);
                triangleStream.Append(o);

                o.position = float4((ndc + float3(-scale.x, scale.y, 0)) * i[0].position.w, i[0].position.w);
                o.color = i[0].color;
                o.uv = float2(0, 0);
                triangleStream.Append(o);

                o.position = float4((ndc + float3(-scale.x, -scale.y, 0)) * i[0].position.w, i[0].position.w);
                o.color = i[0].color;
                o.uv = float2(0, 1);
                triangleStream.Append(o);

                triangleStream.RestartStrip();

                // Right Triangle
                o.position = float4((ndc + float3(-scale.x, scale.y, 0)) * i[0].position.w, i[0].position.w);
                o.color = i[0].color;
                o.uv = float2(0, 0);
                triangleStream.Append(o);

                o.position = float4((ndc + float3(scale.x, -scale.y, 0)) * i[0].position.w, i[0].position.w);
                o.color = i[0].color;
                o.uv = float2(1, 1);
                triangleStream.Append(o);

                o.position = float4((ndc + float3(scale.x, scale.y, 0)) * i[0].position.w, i[0].position.w);
                o.color = i[0].color;
                o.uv = float2(1, 0);
                triangleStream.Append(o);

                triangleStream.RestartStrip();
            }

            fixed4 frag (g2f i) : SV_Target
            {
                // To show off the uvs we procedurally generated in the geometry shader,
                // we will make the quad fade from top to bottom.
                fixed4 col = fixed4(i.uv.yyy, 1) * float4(i.color.rgb, 1.0);
                return col;
            }
            ENDCG
        }
    }
}