Here is the Triangle.cpp code:
#include "Triangle.h"
// Constructor: nulls every owned resource so the destructor is safe even if
// Initialize() is never called or fails part-way through.
// BUG FIX: _vertices, _indices, _constantBuffer and _graphics were previously
// left uninitialized, which is undefined behavior if they are read (or the
// destructor runs) before Initialize() completes.
Triangle::Triangle()
{
	_vertexBuffer = NULL;
	_indexBuffer = NULL;
	_constantBuffer = NULL;
	_vertexShader = NULL;
	_pixelShader = NULL;
	_inputLayout = NULL;
	_vertices = NULL;
	_indices = NULL;
	_graphics = NULL;
	_stride = sizeof(Vertex);   // bytes per vertex, used by IASetVertexBuffers
	_offset = 0;
}
// Destructor: releases every COM resource this object acquired in
// Initialize(). Each pointer is null-checked and re-nulled so a partially
// initialized object tears down cleanly.
// NOTE(review): the CPU-side _vertices/_indices arrays are NOT freed here;
// they are deleted at the end of InitializeVertexAndIndexBuffers(), and leak
// if buffer creation fails before that point.
Triangle::~Triangle()
{
if (_vertexBuffer)
{
_vertexBuffer->Release();
_vertexBuffer = NULL;
}
if (_indexBuffer)
{
_indexBuffer->Release();
_indexBuffer = NULL;
}
if (_vertexShader)
{
_vertexShader->Release();
_vertexShader = NULL;
}
if (_pixelShader)
{
_pixelShader->Release();
_pixelShader = NULL;
}
if (_inputLayout)
{
_inputLayout->Release();
_inputLayout = NULL;
}
}
// Stores the graphics wrapper, builds the CPU-side triangle data, then
// creates the GPU buffers, shaders and input layout in order.
// Returns false as soon as any step fails, true when everything succeeded.
const bool Triangle::Initialize(DirectX * graphics)
{
	_graphics = graphics;
	InitTriangleData();

	// Short-circuit: the shader step only runs if the buffers were created.
	return InitializeVertexAndIndexBuffers(graphics->GetDevice())
		&& InitializeShadersAndinputLayout(graphics->GetDevice());
}
void Triangle::Render()
{
_graphics->GetDeviceContext()->IASetVertexBuffers(0, 1, &_vertexBuffer, &_stride, &_offset);
_graphics->GetDeviceContext()->IASetIndexBuffer(_indexBuffer, DXGI_FORMAT_D32_FLOAT, 0);
_graphics->GetDeviceContext()->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST);
_graphics->GetDeviceContext()->PSSetShader(_pixelShader, 0, 1);
_graphics->GetDeviceContext()->VSSetShader(_vertexShader, 0, 1);
_graphics->GetDeviceContext()->DrawIndexed(3, 0, 0);
_graphics->GetDeviceContext()->PSSetShader(0, 0, 0);
_graphics->GetDeviceContext()->VSSetShader(0, 0, 0);
}
// Fills the CPU-side vertex and index arrays for a single 100x100 triangle
// centered on the origin (apex up). These arrays are uploaded and then freed
// by InitializeVertexAndIndexBuffers().
void Triangle::InitTriangleData()
{
	_vertices = new Vertex[3];
	_indices = new unsigned long[3];

	const float halfX = 50.0f;
	const float halfY = 50.0f;

	// Clockwise winding: top, bottom-right, bottom-left.
	_vertices[0].position = D3DXVECTOR3(0.0f, halfY, 0.0f);
	_vertices[0].uv = D3DXVECTOR2(0.0f, 0.0f);
	_vertices[1].position = D3DXVECTOR3(halfX, -halfY, 0.0f);
	_vertices[1].uv = D3DXVECTOR2(0.0f, 0.0f);
	_vertices[2].position = D3DXVECTOR3(-halfX, -halfY, 0.0f);
	_vertices[2].uv = D3DXVECTOR2(0.0f, 0.0f);

	// BUG FIX: the index array was previously allocated but never written,
	// so the index buffer was created from garbage memory and DrawIndexed
	// fetched random vertices - another cause of the GPU hang.
	_indices[0] = 0;
	_indices[1] = 1;
	_indices[2] = 2;
}
// Creates the immutable-usage vertex and index buffers from the CPU arrays
// built by InitTriangleData(), then frees those arrays.
// Returns false if either CreateBuffer call fails.
// BUG FIX: the CPU-side arrays previously leaked on the failure paths; they
// are now freed on every exit via the single-exit structure below.
const bool Triangle::InitializeVertexAndIndexBuffers(ID3D11Device * device)
{
	D3D11_BUFFER_DESC bufferDesc;
	D3D11_SUBRESOURCE_DATA bufferData;

	// --- Vertex buffer: 3 vertices, GPU-only (DEFAULT usage, no CPU access).
	ZeroMemory(&bufferDesc, sizeof bufferDesc);
	ZeroMemory(&bufferData, sizeof bufferData);
	bufferDesc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
	bufferDesc.ByteWidth = _stride * 3;
	bufferDesc.Usage = D3D11_USAGE_DEFAULT;
	bufferData.pSysMem = _vertices;
	const bool vertexOk =
		SUCCEEDED(device->CreateBuffer(&bufferDesc, &bufferData, &_vertexBuffer));

	// --- Index buffer: 3 x 32-bit index, same usage flags.
	bool indexOk = false;
	if (vertexOk)
	{
		ZeroMemory(&bufferDesc, sizeof bufferDesc);
		ZeroMemory(&bufferData, sizeof bufferData);
		bufferDesc.BindFlags = D3D11_BIND_INDEX_BUFFER;
		bufferDesc.ByteWidth = sizeof(unsigned long) * 3;
		bufferDesc.Usage = D3D11_USAGE_DEFAULT;
		bufferData.pSysMem = _indices;
		indexOk =
			SUCCEEDED(device->CreateBuffer(&bufferDesc, &bufferData, &_indexBuffer));
	}

	// The GPU copies already hold the data (or creation failed); either way
	// the CPU-side arrays are no longer needed.
	delete[] _vertices;
	_vertices = 0;
	delete[] _indices;
	_indices = 0;

	return vertexOk && indexOk;
}
// Compiles the pixel and vertex shaders from disk, creates the shader
// objects, and builds the input layout (POSITION float3 + TEXCOORD float2,
// matching the Vertex struct). Compile errors are dumped via
// PrintOutMessage(). Returns false on the first failure.
// BUG FIXES: the bytecode blob previously leaked whenever
// CreatePixelShader/CreateVertexShader/CreateInputLayout failed, and the
// error blob (which can hold warnings even on success) was only released on
// failure. Both blobs also started uninitialized.
const bool Triangle::InitializeShadersAndinputLayout(ID3D11Device * device)
{
	HRESULT result;
	ID3D10Blob * data = NULL;
	ID3D10Blob * error = NULL;

	// --- Pixel shader ------------------------------------------------------
	std::string name = ".//Resources//Shaders//BasicTextureShader//color.ps";
	result = D3DX11CompileFromFileA(name.c_str(), NULL, NULL, "ColorPixelShader", "ps_5_0", D3D10_SHADER_ENABLE_STRICTNESS, 0, NULL, &data, &error, NULL);
	if (FAILED(result))
	{
		if (error)
		{
			PrintOutMessage(error);
			error->Release();
			error = NULL;
		}
		return false;
	}
	if (error)   // warnings on a successful compile: release, don't leak
	{
		error->Release();
		error = NULL;
	}
	result = device->CreatePixelShader(data->GetBufferPointer(), data->GetBufferSize(), NULL, &_pixelShader);
	data->Release();   // pixel bytecode is no longer needed either way
	data = NULL;
	if (FAILED(result))
		return false;

	// --- Vertex shader -----------------------------------------------------
	name = ".//Resources//Shaders//BasicTextureShader//color.vs";
	result = D3DX11CompileFromFileA(name.c_str(), NULL, NULL, "ColorVertexShader", "vs_5_0", D3D10_SHADER_ENABLE_STRICTNESS, 0, NULL, &data, &error, NULL);
	if (FAILED(result))
	{
		if (error)
		{
			PrintOutMessage(error);
			error->Release();
			error = NULL;
		}
		return false;
	}
	if (error)
	{
		error->Release();
		error = NULL;
	}
	result = device->CreateVertexShader(data->GetBufferPointer(), data->GetBufferSize(), NULL, &_vertexShader);
	if (FAILED(result))
	{
		data->Release();
		return false;
	}

	// --- Input layout: must match both the Vertex struct and the VS input.
	// Fields: SemanticName, SemanticIndex, Format, InputSlot,
	//         AlignedByteOffset, InputSlotClass, InstanceDataStepRate.
	const D3D11_INPUT_ELEMENT_DESC elements[] =
	{
		{ "POSITION", 0, DXGI_FORMAT_R32G32B32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 },
		{ "TEXCOORD", 0, DXGI_FORMAT_R32G32_FLOAT, 0, D3D11_APPEND_ALIGNED_ELEMENT, D3D11_INPUT_PER_VERTEX_DATA, 0 },
	};
	result = device->CreateInputLayout(elements, 2, data->GetBufferPointer(), data->GetBufferSize(), &_inputLayout);
	data->Release();   // vertex bytecode released on success AND failure
	data = NULL;
	return SUCCEEDED(result);
}
// Dumps a shader-compiler error/warning blob to ShaderErrorReport.txt,
// truncating any previous report. The blob is only read; ownership (and the
// Release() call) stays with the caller.
// BUG FIX: the old code allocated `new char[GetBufferSize()]` and then
// immediately overwrote the pointer with the blob's own buffer, leaking the
// allocation on every call.
void Triangle::PrintOutMessage(ID3D10Blob * error)
{
	if (!error)
		return;

	const char * message = static_cast<const char *>(error->GetBufferPointer());

	std::ofstream file("ShaderErrorReport.txt", std::ofstream::trunc);
	if (!file.is_open())
		return;   // best-effort: silently skip if the report can't be written

	// Write the whole buffer in one call instead of char-by-char.
	file.write(message, static_cast<std::streamsize>(error->GetBufferSize()));
}
And the Triangle.h code:
#ifndef TRIANGLE_H
#define TRIANGLE_H
#include "Engine\DirectX.h"
#include <D3DX10math.h>
#include <string>
#include <fstream>
// Renders a single hard-coded triangle through the engine's DirectX wrapper.
// Owns its GPU resources (buffers, shaders, input layout) and releases them
// in the destructor; _graphics is a non-owning back-pointer.
class Triangle
{
private:
	// World/view/projection matrices intended for a constant buffer
	// (declared but not yet uploaded anywhere visible in Triangle.cpp).
	struct ConstantBuffer {
		D3DXMATRIX world;
		D3DXMATRIX view;
		D3DXMATRIX proj;
	};
	// CPU-side vertex layout; must stay in sync with the input layout
	// created in InitializeShadersAndinputLayout (POSITION, TEXCOORD).
	struct Vertex
	{
		D3DXVECTOR3 position;
		D3DXVECTOR2 uv;
	};
public:
	Triangle();
	~Triangle();
	// Creates all GPU resources via the wrapper; false on any failure.
	const bool Initialize(DirectX* graphics);
	// Binds state and issues the draw call; Initialize() must have succeeded.
	void Render();
private:
	void InitTriangleData();
	const bool InitializeVertexAndIndexBuffers(ID3D11Device* device);
	const bool InitializeShadersAndinputLayout(ID3D11Device* device);
	void PrintOutMessage(ID3D10Blob * error);
private:
	// Transient CPU-side geometry, freed once the GPU buffers exist.
	// In-class null initializers keep the destructor safe even if the
	// constructor implementation misses a member.
	Vertex* _vertices = nullptr;
	unsigned long * _indices = nullptr;
	unsigned int _vertexCount = 3;
	unsigned int _indexCount = 3;   // renamed from 'indexCount' to match the _-prefix convention
	unsigned int _stride = 0;
	unsigned int _offset = 0;
	ID3D11Buffer* _vertexBuffer = nullptr;
	ID3D11Buffer* _indexBuffer = nullptr;
	ID3D11Buffer* _constantBuffer = nullptr;
	ID3D11InputLayout *_inputLayout = nullptr;
	ID3D11PixelShader* _pixelShader = nullptr;
	ID3D11VertexShader* _vertexShader = nullptr;
	DirectX* _graphics = nullptr;   // non-owning
};
#endif
I have an Nvidia GTX 760 GPU, and when I run this the graphics driver crashes and recovers... and my window goes white. [The 'clear' color is black.]
The second and third arguments of ID3D11DeviceContext::PSSetShader and ID3D11DeviceContext::VSSetShader are used for dynamic shader linkage; in your case they should be nullptr and 0. By passing (0, 1) you claim one class instance but supply a null pointer for it. The device rejects that invalid shader binding, and when you attempt to render, the GPU hangs because it has no proper shader to run. (Note also that IASetIndexBuffer is being given DXGI_FORMAT_D32_FLOAT, a depth-buffer format — 32-bit indices require DXGI_FORMAT_R32_UINT.)
In your code, replace
_graphics->GetDeviceContext()->PSSetShader(_pixelShader, 0, 1);
_graphics->GetDeviceContext()->VSSetShader(_vertexShader, 0, 1);
by
_graphics->GetDeviceContext()->PSSetShader(_pixelShader, nullptr, 0);
_graphics->GetDeviceContext()->VSSetShader(_vertexShader, nullptr, 0);
If you love us? You can donate to us via Paypal or buy me a coffee so we can maintain and grow! Thank you!
Donate Us With