Vertex shaders and pixel shaders operate on individual vertices and individual pixels, respectively, whereas a geometry shader operates on whole primitives. Recall the three basic primitive types discussed previously: points, lines, and triangles (these can be arranged into lists and strips, and can also carry adjacency data; see Chapter 1). When you write a geometry shader, you specify the type of primitive it processes with one of the keywords point, line, triangle, lineadj, or triangleadj. This is one of the syntactic differences between geometry shaders and vertex or pixel shaders. The following is an example geometry shader declaration:
[maxvertexcount(3)]
void geometry_shader(point VS_OUTPUT IN[1], inout TriangleStream<GS_OUTPUT> triStream)
{
    /* shader body */
}
The geometry shader's first parameter uses [n] array-style syntax, and the required array size depends on the primitive type being processed, as shown in Table 21.1.
Table 21.1 The Array Size of Geometry Shader Input, According to Primitive Type

Primitive Type    Input Array Size
point             1
line              2
triangle          3
lineadj           4
triangleadj       6
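For example, a geometry shader that accepts triangles declares a three-element input array, while one that accepts lines with adjacency declares four elements. The following is a minimal sketch of such declarations; MY_VERTEX, triangle_gs, and lineadj_gs are placeholder names for illustration and are not part of the chapter's demo:

struct MY_VERTEX
{
    float4 Position : SV_Position;
};

[maxvertexcount(3)]
void triangle_gs(triangle MY_VERTEX IN[3], inout TriangleStream<MY_VERTEX> triStream)
{
    // Three input vertices: one per triangle corner
}

[maxvertexcount(2)]
void lineadj_gs(lineadj MY_VERTEX IN[4], inout LineStream<MY_VERTEX> lineStream)
{
    // Four input vertices: the line segment plus its two adjacent vertices
}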
The fundamental job of a geometry shader is to process the incoming primitives and append vertices to a stream-output object. You add vertices by calling StreamOutputObject<T>::Append(). Building on the declaration just shown, we extend the shader so that it accepts point primitives and outputs a triangle stream containing at most three vertices (each invocation of the geometry shader emits a single triangle to the stream). The following code shows the basic skeleton of this geometry shader:
struct VS_OUTPUT
{
    float4 ObjectPosition : POSITION;
};

struct GS_OUTPUT
{
    float4 Position : SV_Position;
};

[maxvertexcount(3)]
void geometry_shader(point VS_OUTPUT IN[1], inout TriangleStream<GS_OUTPUT> triStream)
{
    GS_OUTPUT OUT = (GS_OUTPUT)0;

    for (int i = 0; i < 3; i++)
    {
        // Some modification of the input point, followed by
        // transformation into homogeneous clip space
        OUT.Position = /* transformed position */;

        triStream.Append(OUT);
    }
}
Listing 21.1 A Spherical Billboarding Point Sprite Shader
/************* Resources *************/

static const float2 QuadUVs[4] =
{
    float2(0.0f, 1.0f), // v0, lower-left
    float2(0.0f, 0.0f), // v1, upper-left
    float2(1.0f, 0.0f), // v2, upper-right
    float2(1.0f, 1.0f)  // v3, lower-right
};

cbuffer CBufferPerFrame
{
    float3 CameraPosition : CAMERAPOSITION;
    float3 CameraUp;
}

cbuffer CBufferPerObject
{
    float4x4 ViewProjection;
}

Texture2D ColorTexture;

SamplerState ColorSampler
{
    Filter = MIN_MAG_MIP_LINEAR;
    AddressU = WRAP;
    AddressV = WRAP;
};

/************* Data Structures *************/

struct VS_INPUT
{
    float4 Position : POSITION;
    float2 Size : SIZE;
};

struct VS_OUTPUT
{
    float4 Position : POSITION;
    float2 Size : SIZE;
};

struct GS_OUTPUT
{
    float4 Position : SV_Position;
    float2 TextureCoordinate : TEXCOORD;
};

/************* Vertex Shader *************/

VS_OUTPUT vertex_shader(VS_INPUT IN)
{
    VS_OUTPUT OUT = (VS_OUTPUT)0;

    OUT.Position = IN.Position;
    OUT.Size = IN.Size;

    return OUT;
}

/************* Geometry Shader *************/

[maxvertexcount(6)]
void geometry_shader(point VS_OUTPUT IN[1], inout TriangleStream<GS_OUTPUT> triStream)
{
    GS_OUTPUT OUT = (GS_OUTPUT)0;

    float2 halfSize = IN[0].Size / 2.0f;
    float3 direction = CameraPosition - IN[0].Position.xyz;
    float3 right = cross(normalize(direction), CameraUp);

    float3 offsetX = halfSize.x * right;
    float3 offsetY = halfSize.y * CameraUp;

    float4 vertices[4];
    vertices[0] = float4(IN[0].Position.xyz + offsetX - offsetY, 1.0f); // lower-left
    vertices[1] = float4(IN[0].Position.xyz + offsetX + offsetY, 1.0f); // upper-left
    vertices[2] = float4(IN[0].Position.xyz - offsetX + offsetY, 1.0f); // upper-right
    vertices[3] = float4(IN[0].Position.xyz - offsetX - offsetY, 1.0f); // lower-right

    // tri: 0, 1, 2
    OUT.Position = mul(vertices[0], ViewProjection);
    OUT.TextureCoordinate = QuadUVs[0];
    triStream.Append(OUT);

    OUT.Position = mul(vertices[1], ViewProjection);
    OUT.TextureCoordinate = QuadUVs[1];
    triStream.Append(OUT);

    OUT.Position = mul(vertices[2], ViewProjection);
    OUT.TextureCoordinate = QuadUVs[2];
    triStream.Append(OUT);

    triStream.RestartStrip();

    // tri: 0, 2, 3
    OUT.Position = mul(vertices[0], ViewProjection);
    OUT.TextureCoordinate = QuadUVs[0];
    triStream.Append(OUT);

    OUT.Position = mul(vertices[2], ViewProjection);
    OUT.TextureCoordinate = QuadUVs[2];
    triStream.Append(OUT);

    OUT.Position = mul(vertices[3], ViewProjection);
    OUT.TextureCoordinate = QuadUVs[3];
    triStream.Append(OUT);
}

/************* Pixel Shader *************/

float4 pixel_shader(GS_OUTPUT IN) : SV_Target
{
    return ColorTexture.Sample(ColorSampler, IN.TextureCoordinate);
}

/************* Techniques *************/

technique11 main11
{
    pass p0
    {
        SetVertexShader(CompileShader(vs_5_0, vertex_shader()));
        SetGeometryShader(CompileShader(gs_5_0, geometry_shader()));
        SetPixelShader(CompileShader(ps_5_0, pixel_shader()));
    }
}
Listing 21.2 Updated Point Sprite Geometry Shader Using Triangle Strips
[maxvertexcount(4)]
void geometry_shader_strip(point VS_OUTPUT IN[1], inout TriangleStream<GS_OUTPUT> triStream)
{
    GS_OUTPUT OUT = (GS_OUTPUT)0;

    float2 halfSize = IN[0].Size / 2.0f;
    float3 direction = CameraPosition - IN[0].Position.xyz;
    float3 right = cross(normalize(direction), CameraUp);

    float3 offsetX = halfSize.x * right;
    float3 offsetY = halfSize.y * CameraUp;

    float4 vertices[4];
    vertices[0] = float4(IN[0].Position.xyz + offsetX - offsetY, 1.0f); // lower-left
    vertices[1] = float4(IN[0].Position.xyz + offsetX + offsetY, 1.0f); // upper-left
    vertices[2] = float4(IN[0].Position.xyz - offsetX - offsetY, 1.0f); // lower-right
    vertices[3] = float4(IN[0].Position.xyz - offsetX + offsetY, 1.0f); // upper-right

    [unroll]
    for (int i = 0; i < 4; i++)
    {
        OUT.Position = mul(vertices[i], ViewProjection);
        OUT.TextureCoordinate = QuadStripUVs[i];
        triStream.Append(OUT);
    }
}
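Listing 21.2 (and Listing 21.4 below) indexes a QuadStripUVs array that is not declared among the resources of Listing 21.1. A minimal sketch of that declaration, assuming the texture coordinates follow the same ordering as the strip's vertices (lower-left, upper-left, lower-right, upper-right):

static const float2 QuadStripUVs[4] =
{
    float2(0.0f, 1.0f), // lower-left
    float2(0.0f, 0.0f), // upper-left
    float2(1.0f, 1.0f), // lower-right
    float2(1.0f, 0.0f)  // upper-right
};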
Listing 21.3 Initialization and Rendering for the Geometry Shader Demo
void GeometryShaderDemo::Initialize()
{
    SetCurrentDirectory(Utility::ExecutableDirectory().c_str());

    // Initialize the material
    mEffect = new Effect(*mGame);
    mEffect->LoadCompiledEffect(L"Assets\\Effects\\PointSprite.cso");

    mMaterial = new PointSpriteMaterial();
    mMaterial->Initialize(mEffect);

    Technique* technique = mEffect->TechniquesByName().at("main11");
    mMaterial->SetCurrentTechnique(technique);

    mPass = mMaterial->CurrentTechnique()->Passes().at(0);
    mInputLayout = mMaterial->InputLayouts().at(mPass);

    UINT maxPoints = 100;
    float maxDistance = 10;
    float minSize = 2;
    float maxSize = 2;

    std::random_device randomDevice;
    std::default_random_engine randomGenerator(randomDevice());
    std::uniform_real_distribution<float> distanceDistribution(-maxDistance, maxDistance);
    std::uniform_real_distribution<float> sizeDistribution(minSize, maxSize);

    // Randomly generate points
    std::vector<VertexPositionSize> vertices;
    vertices.reserve(maxPoints);
    for (UINT i = 0; i < maxPoints; i++)
    {
        float x = distanceDistribution(randomGenerator);
        float y = distanceDistribution(randomGenerator);
        float z = distanceDistribution(randomGenerator);
        float size = sizeDistribution(randomGenerator);

        vertices.push_back(VertexPositionSize(XMFLOAT4(x, y, z, 1.0f), XMFLOAT2(size, size)));
    }

    mVertexCount = vertices.size();
    ReleaseObject(mVertexBuffer);
    mMaterial->CreateVertexBuffer(mGame->Direct3DDevice(), &vertices[0], mVertexCount, &mVertexBuffer);

    std::wstring textureName = L"Assets\\Textures\\BookCover.png";
    HRESULT hr = DirectX::CreateWICTextureFromFile(mGame->Direct3DDevice(), mGame->Direct3DDeviceContext(), textureName.c_str(), nullptr, &mColorTexture);
    if (FAILED(hr))
    {
        throw GameException("CreateWICTextureFromFile() failed.", hr);
    }

    mKeyboard = (Keyboard*)mGame->Services().GetService(Keyboard::TypeIdClass());
    assert(mKeyboard != nullptr);

    mSpriteBatch = new SpriteBatch(mGame->Direct3DDeviceContext());
    mSpriteFont = new SpriteFont(mGame->Direct3DDevice(), L"Assets\\Fonts\\Arial_14_Regular.spritefont");
}

void GeometryShaderDemo::Draw(const GameTime& gameTime)
{
    ID3D11DeviceContext* direct3DDeviceContext = mGame->Direct3DDeviceContext();
    direct3DDeviceContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_POINTLIST);
    direct3DDeviceContext->IASetInputLayout(mInputLayout);

    UINT stride = mMaterial->VertexSize();
    UINT offset = 0;
    direct3DDeviceContext->IASetVertexBuffers(0, 1, &mVertexBuffer, &stride, &offset);

    mMaterial->ViewProjection() << mCamera->ViewMatrix() * mCamera->ProjectionMatrix();
    mMaterial->CameraPosition() << mCamera->PositionVector();
    mMaterial->CameraUp() << mCamera->UpVector();
    mMaterial->ColorTexture() << mColorTexture;

    mPass->Apply(0, direct3DDeviceContext);

    direct3DDeviceContext->Draw(mVertexCount, 0);

    direct3DDeviceContext->GSSetShader(nullptr, nullptr, 0);
}
Listing 21.4 Example of SV_PrimitiveID Usage
[maxvertexcount(4)]
void geometry_shader_nosize(point VS_NOSIZE_OUTPUT IN[1], uint primitiveID : SV_PrimitiveID, inout TriangleStream<GS_OUTPUT> triStream)
{
    GS_OUTPUT OUT = (GS_OUTPUT)0;

    float size = primitiveID + 1.0f;
    float2 halfSize = size / 2.0f;
    float3 direction = CameraPosition - IN[0].Position.xyz;
    float3 right = cross(normalize(direction), CameraUp);

    float3 offsetX = halfSize.x * right;
    float3 offsetY = halfSize.y * CameraUp;

    float4 vertices[4];
    vertices[0] = float4(IN[0].Position.xyz + offsetX - offsetY, 1.0f); // lower-left
    vertices[1] = float4(IN[0].Position.xyz + offsetX + offsetY, 1.0f); // upper-left
    vertices[2] = float4(IN[0].Position.xyz - offsetX - offsetY, 1.0f); // lower-right
    vertices[3] = float4(IN[0].Position.xyz - offsetX + offsetY, 1.0f); // upper-right

    [unroll]
    for (int i = 0; i < 4; i++)
    {
        OUT.Position = mul(vertices[i], ViewProjection);
        OUT.TextureCoordinate = QuadStripUVs[i];
        triStream.Append(OUT);
    }
}
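Listing 21.4 consumes a VS_NOSIZE_OUTPUT structure whose definition, along with its matching vertex shader, is not shown in this section. A minimal sketch follows, assuming the vertex carries only a position that is passed through unchanged; the names VS_NOSIZE_INPUT and vertex_shader_nosize are assumptions made for illustration:

struct VS_NOSIZE_INPUT
{
    float4 Position : POSITION;
};

struct VS_NOSIZE_OUTPUT
{
    float4 Position : POSITION;
};

VS_NOSIZE_OUTPUT vertex_shader_nosize(VS_NOSIZE_INPUT IN)
{
    VS_NOSIZE_OUTPUT OUT = (VS_NOSIZE_OUTPUT)0;

    // Pass the position through unchanged; the geometry shader
    // derives the sprite size from SV_PrimitiveID instead.
    OUT.Position = IN.Position;

    return OUT;
}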
If you update the application's vertex buffer creation code so that it generates points at fixed intervals along the x-axis, this shader produces the output shown in Figure 21.2.
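As a rough sketch of that change (assuming a position-only vertex type, shown here as a hypothetical VertexPosition; the spacing value is an arbitrary choice, not taken from the demo), the random-point loop of Listing 21.3 could be replaced with:

// Generate points at a fixed interval along the x-axis; each sprite's size
// now comes from SV_PrimitiveID in the geometry shader, not a vertex attribute.
std::vector<VertexPosition> vertices;
vertices.reserve(maxPoints);
float spacing = 5.0f; // arbitrary fixed interval for illustration
for (UINT i = 0; i < maxPoints; i++)
{
    float x = i * spacing;
    vertices.push_back(VertexPosition(XMFLOAT4(x, 0.0f, 0.0f, 1.0f)));
}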