View-projection matrix calculation using SlimDX - C#

I'm using SlimDX to build a small visualizer; however, I recently stumbled into a problem. When I transform the triangle with the MVP matrix, it disappears.
The constant buffers are loaded properly, because I can see the right color coming through them.
The triangle used as a test is visible if I don't transform it in the vertex shader.
So I suppose the problem is in either the view matrix or the projection matrix.
Moreover, I don't know whether I should transpose them.
vertices = new DataStream(12 * 3, true, true);
vertices.Write(new Vector3(0.0f, 0.5f, 0.5f));
vertices.Write(new Vector3(0.5f, -0.5f, 0.5f));
vertices.Write(new Vector3(-0.5f, -0.5f, 0.5f));
vertices.Position = 0;
vertexBuffer = new Buffer(device, vertices, 12 * 3, ResourceUsage.Default, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
// configure the Input Assembler portion of the pipeline with the vertex data
dc3D.InputAssembler.InputLayout = baseShaders.GetInputLayout();
dc3D.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
dc3D.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertexBuffer, 12, 0));
// set the shaders
dc3D.VertexShader.Set(baseShaders.GetVertexShader());
dc3D.PixelShader.Set(baseShaders.GetPixelShader());
cbufferData = new Buffer(device, new BufferDescription
{
Usage = ResourceUsage.Default,
SizeInBytes = System.Runtime.InteropServices.Marshal.SizeOf(typeof(BaseShaders.ConstantBuffer)),
BindFlags = BindFlags.ConstantBuffer
});
dc3D.VertexShader.SetConstantBuffer(cbufferData, 0);
Vector3 eye = new Vector3(4, 4, 4);
Vector3 target = new Vector3(0, 0, 0);
Vector3 up = new Vector3(0, 1, 0);
Matrix.LookAtLH(ref eye, ref target, ref up, out cbuffer.view) ;
//for now width and height are hardcoded.
Matrix.PerspectiveFovLH((float)Math.PI / 4, 617/643.0f, 1.0f, 100.0f, out cbuffer.projection);
cbuffer.color = new Vector4(0.1f, 1.0f, 1.0f, 1.0f);
//Matrix.Transpose(cbuffer.view);
//Matrix.Transpose(cbuffer.projection);
// update constant buffers.
var data = new DataStream(System.Runtime.InteropServices.Marshal.SizeOf(typeof(BaseShaders.ConstantBuffer)), true, true);
data.Write(cbuffer);
data.Position = 0;
dc3D.UpdateSubresource(new DataBox(0, 0, data), cbufferData, 0);
It's been some hours now, and I haven't found a solution.
Oh, here is the vertex shader code:
cbuffer ConstantBuffer : register(b0)
{
matrix view;
matrix projection;
float4 color;
}
struct VOut
{
float4 position : SV_POSITION;
float4 color : COLOR;
};
VOut main(float4 position : POSITION)
{
VOut output;
output.position = mul(mul(position, view), projection);
output.color = color;
return output;
}
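For reference, the BaseShaders.ConstantBuffer struct used on the C# side is not shown in the post; a layout matching this cbuffer would presumably look something like the following sketch (an assumption, since only the type name appears above):
// Assumed C#-side mirror of the cbuffer above (not shown in the original post):
// two 4x4 matrices followed by a float4, laid out sequentially.
[StructLayout(LayoutKind.Sequential)]
public struct ConstantBuffer
{
    public Matrix view;
    public Matrix projection;
    public Vector4 color;
}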

By making some small changes, and in fact transposing the matrix, I managed to get the code to work.
Here is the vertex shader:
cbuffer ConstantBuffer : register(b0)
{
float4x4 mvp;
float4 color;
}
struct VOut
{
float4 position : SV_POSITION;
float4 color : COLOR;
};
VOut main(float4 position : POSITION)
{
VOut output;
output.position = mul(position, mvp);
output.color = color;
return output;
}
Here is the changed code:
Vector3 eye = new Vector3(1, 1, 1);
Vector3 target = new Vector3(0, 0, 0);
Vector3 up = new Vector3(0, 1, 0);
Matrix view = new Matrix();
Matrix projection = new Matrix();
view = Matrix.LookAtLH(eye, target, up) ;
projection = Matrix.PerspectiveFovLH((float)Math.PI / 4, 617/643.0f, 0.1f, 100.0f);
cbuffer.color = new Vector4(0.1f, 1.0f, 1.0f, 1.0f);
cbuffer.mvp = Matrix.Multiply(view, projection);
Matrix.Transpose(ref cbuffer.mvp, out cbuffer.mvp);
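The underlying reason, stated explicitly: HLSL packs constant-buffer matrices as column-major by default, while SlimDX produces row-major matrices, so the matrices have to be transposed on the CPU before upload (or the shader compiled with row-major packing). Under that assumption, the original two-matrix layout from the question should also work if each matrix is transposed individually, along the lines of this untested sketch:
// Sketch (untested), keeping the question's original view/projection cbuffer layout:
// transpose each matrix before writing it into the constant buffer.
Matrix view = Matrix.LookAtLH(eye, target, up);
Matrix projection = Matrix.PerspectiveFovLH((float)Math.PI / 4, 617 / 643.0f, 0.1f, 100.0f);
cbuffer.view = Matrix.Transpose(view);
cbuffer.projection = Matrix.Transpose(projection);
cbuffer.color = new Vector4(0.1f, 1.0f, 1.0f, 1.0f);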

Related

C# OpenTK - Draw string on window

I have a big problem: I have an OpenTK window where I draw textures, images, etc. I have to make a little video game in this manner for a test, and I'd like to show text on it that displays game info.
So far I've only been able to open a Windows Form with text, and that's not what I need.
Is there a way to show text in an OpenTK window?
I can't use OpenTK 3.0, so QuickFont has to be excluded.
I can use the GL class.
Thank you very much!
One possibility would be to use the FreeType library to load a TrueType font into texture objects.
SharpFont provides cross-platform FreeType bindings for C#.
The source can be found at GitHub - Robmaister/SharpFont.
(x64 SharpFont.dll and freetype6.dll from MonoGame.Dependencies)
A full example can be found at GitHub - Rabbid76/c_sharp_opengl.
The example is based on LearnOpenGL - Text Rendering.
Load the font and glyph information for the characters and create a texture object for each character:
public struct Character
{
public int TextureID { get; set; }
public Vector2 Size { get; set; }
public Vector2 Bearing { get; set; }
public int Advance { get; set; }
}
// initialize library
Library lib = new Library();
Face face = new Face(lib, "FreeSans.ttf");
face.SetPixelSizes(0, 32);
// set 1 byte pixel alignment
GL.PixelStore(PixelStoreParameter.UnpackAlignment, 1);
// Load first 128 characters of ASCII set
for (uint c = 0; c < 128; c++)
{
try
{
// load glyph
//face.LoadGlyph(c, LoadFlags.Render, LoadTarget.Normal);
face.LoadChar(c, LoadFlags.Render, LoadTarget.Normal);
GlyphSlot glyph = face.Glyph;
FTBitmap bitmap = glyph.Bitmap;
// create glyph texture
int texObj = GL.GenTexture();
GL.BindTexture(TextureTarget.Texture2D, texObj);
GL.TexImage2D(TextureTarget.Texture2D, 0,
PixelInternalFormat.R8, bitmap.Width, bitmap.Rows, 0,
PixelFormat.Red, PixelType.UnsignedByte, bitmap.Buffer);
// set texture parameters
GL.TextureParameter(texObj, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
GL.TextureParameter(texObj, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
GL.TextureParameter(texObj, TextureParameterName.TextureWrapS, (int)TextureWrapMode.ClampToEdge);
GL.TextureParameter(texObj, TextureParameterName.TextureWrapT, (int)TextureWrapMode.ClampToEdge);
// add character
Character ch = new Character();
ch.TextureID = texObj;
ch.Size = new Vector2(bitmap.Width, bitmap.Rows);
ch.Bearing = new Vector2(glyph.BitmapLeft, glyph.BitmapTop);
ch.Advance = (int)glyph.Advance.X.Value;
_characters.Add(c, ch);
}
catch (Exception ex)
{
Console.WriteLine(ex);
}
}
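The loop stores each glyph in a _characters dictionary keyed by the character code; its declaration is not part of the snippet, but a field along these lines is assumed:
// Assumed field (not shown in the snippet above): maps a character code to its
// glyph texture and metrics.
private readonly Dictionary<uint, Character> _characters = new Dictionary<uint, Character>();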
Create a Vertex Array Object which draws a quad made of 2 triangles:
// bind default texture
GL.BindTexture(TextureTarget.Texture2D, 0);
// set default (4 byte) pixel alignment
GL.PixelStore(PixelStoreParameter.UnpackAlignment, 4);
float[] vquad =
{
// x y u v
0.0f, -1.0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f,
1.0f, 0.0f, 1.0f, 1.0f,
0.0f, -1.0f, 0.0f, 0.0f,
1.0f, 0.0f, 1.0f, 1.0f,
1.0f, -1.0f, 1.0f, 0.0f
};
// Create [Vertex Buffer Object](https://www.khronos.org/opengl/wiki/Vertex_Specification#Vertex_Buffer_Object)
_vbo = GL.GenBuffer();
GL.BindBuffer(BufferTarget.ArrayBuffer, _vbo);
GL.BufferData(BufferTarget.ArrayBuffer, 4 * 6 * 4, vquad, BufferUsageHint.StaticDraw);
// [Vertex Array Object](https://www.khronos.org/opengl/wiki/Vertex_Specification#Vertex_Array_Object)
_vao = GL.GenVertexArray();
GL.BindVertexArray(_vao);
GL.EnableVertexAttribArray(0);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, false, 4 * 4, 0);
GL.EnableVertexAttribArray(1);
GL.VertexAttribPointer(1, 2, VertexAttribPointerType.Float, false, 4 * 4, 2 * 4);
Furthermore, create a method which draws a string at a specified position in a given direction:
public void RenderText(string text, float x, float y, float scale, Vector2 dir)
{
GL.ActiveTexture(TextureUnit.Texture0);
GL.BindVertexArray(_vao);
float angle_rad = (float)Math.Atan2(dir.Y, dir.X);
Matrix4 rotateM = Matrix4.CreateRotationZ(angle_rad);
Matrix4 transOriginM = Matrix4.CreateTranslation(new Vector3(x, y, 0f));
// Iterate through all characters
float char_x = 0.0f;
foreach (var c in text)
{
if (_characters.ContainsKey(c) == false)
continue;
Character ch = _characters[c];
float w = ch.Size.X * scale;
float h = ch.Size.Y * scale;
float xrel = char_x + ch.Bearing.X * scale;
float yrel = (ch.Size.Y - ch.Bearing.Y) * scale;
// Now advance cursors for next glyph (note that advance is number of 1/64 pixels)
char_x += (ch.Advance >> 6) * scale; // Bitshift by 6 to get value in pixels (2^6 = 64 (divide amount of 1/64th pixels by 64 to get amount of pixels))
Matrix4 scaleM = Matrix4.CreateScale(new Vector3(w, h, 1.0f));
Matrix4 transRelM = Matrix4.CreateTranslation(new Vector3(xrel, yrel, 0.0f));
Matrix4 modelM = scaleM * transRelM * rotateM * transOriginM; // OpenTK `*`-operator is reversed
GL.UniformMatrix4(0, false, ref modelM);
// Render glyph texture over quad
GL.BindTexture(TextureTarget.Texture2D, ch.TextureID);
// Render quad
GL.DrawArrays(PrimitiveType.Triangles, 0, 6);
}
GL.BindVertexArray(0);
GL.BindTexture(TextureTarget.Texture2D, 0);
}
Vertex shader:
#version 460
layout (location = 0) in vec2 in_pos;
layout (location = 1) in vec2 in_uv;
out vec2 vUV;
layout (location = 0) uniform mat4 model;
layout (location = 1) uniform mat4 projection;
void main()
{
vUV = in_uv.xy;
gl_Position = projection * model * vec4(in_pos.xy, 0.0, 1.0);
}
Fragment shader:
#version 460
in vec2 vUV;
layout (binding=0) uniform sampler2D u_texture;
layout (location = 2) uniform vec3 textColor;
out vec4 fragColor;
void main()
{
vec2 uv = vUV.xy;
float text = texture(u_texture, uv).r;
fragColor = vec4(textColor.rgb*text, text);
}
See the example:
Matrix4 projectionM = Matrix4.CreateScale(new Vector3(1f/this.Width, 1f/this.Height, 1.0f));
projectionM = Matrix4.CreateOrthographicOffCenter(0.0f, this.Width, this.Height, 0.0f, -1.0f, 1.0f);
GL.ClearColor(0.2f, 0.3f, 0.3f, 1.0f);
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.Enable(EnableCap.Blend);
GL.BlendFunc(BlendingFactor.SrcAlpha, BlendingFactor.OneMinusSrcAlpha);
text_prog.Use();
GL.UniformMatrix4(1, false, ref projectionM);
GL.Uniform3(2, new Vector3(0.5f, 0.8f, 0.2f));
font.RenderText("This is sample text", 25.0f, 50.0f, 1.2f, new Vector2(1f, 0f));
GL.Uniform3(2, new Vector3(0.3f, 0.7f, 0.9f));
font.RenderText("(C) LearnOpenGL.com", 50.0f, 200.0f, 0.9f, new Vector2(1.0f, -0.25f));

Texture not filling the entire render target view on resizing

I'm trying to implement resizing of the render target when the window/control is resized.
However, it is not working as expected (maybe because I'm not doing it correctly): the rendered texture is not filling my entire render target view.
Whenever the window is resized, I reset my render target view and any other render targets (textures) [please see the code below]:
this.ImgSource.SetRenderTargetDX11(null);
Disposer.SafeDispose(ref this.m_RenderTargetView);
Disposer.SafeDispose(ref this.m_d11Factory);
Disposer.SafeDispose(ref this.RenderTarget);
int width = (int)sizeInfo.Width;
int height = (int)sizeInfo.Height;
Texture2DDescription colordesc = new Texture2DDescription
{
BindFlags = BindFlags.RenderTarget | BindFlags.ShaderResource,
Format = PIXEL_FORMAT,
Width = width,
Height = height,
MipLevels = 1,
SampleDescription = new SampleDescription(1, 0),
Usage = ResourceUsage.Default,
OptionFlags = ResourceOptionFlags.Shared,
CpuAccessFlags = CpuAccessFlags.None,
ArraySize = 1
};
this.RenderTarget = new Texture2D(this.Device, colordesc);
m_RenderTargetView = new RenderTargetView(this.Device, this.RenderTarget);
m_depthStencil = CreateTexture2D(this.Device, width, height, BindFlags.DepthStencil, Format.D24_UNorm_S8_UInt);
m_depthStencilView = new DepthStencilView(this.Device, m_depthStencil);
Device.ImmediateContext.Rasterizer.SetViewport(0, 0, width, height, 0.0f, 1.0f);
Device.ImmediateContext.OutputMerger.SetTargets(m_depthStencilView, m_RenderTargetView);
SetShaderAndVertices(sizeInfo);
The SetShaderAndVertices method (sizeInfo is the size of the render target):
protected void SetShaderAndVertices(Size rendersize)
{
var device = this.Device;
var context = device.ImmediateContext;
ShaderBytecode shaderCode = GetShaderByteCode(eEffectType.Texture);
layout = new InputLayout(device, shaderCode, new[] {
new InputElement("SV_Position", 0, Format.R32G32B32A32_Float, 0, 0),
new InputElement("TEXCOORD", 0, Format.R32G32_Float, 32, 0),
});
// Write vertex data to a datastream
var stream = new DataStream(Utilities.SizeOf<VertexPositionTexture>() * 4, true, true);
stream.WriteRange(new[]
{
new VertexPositionTexture(
new Vector4(-1, 1, 0.0f, 1.0f), // position top-left
new Vector2(0f,0f)
),
new VertexPositionTexture(
new Vector4(1, 1, 0.0f, 1.0f), // position top-right
new Vector2(1,0)
),
new VertexPositionTexture(
new Vector4(-1, -1, 0.0f, 1.0f), // position bottom-left
new Vector2(0,1)
),
new VertexPositionTexture(
new Vector4(1, -1, 0.0f, 1.0f), // position bottom-right
new Vector2(1,1)
),
});
stream.Position = 0;
// Instantiate VertexPositionTexture buffer from vertex data
//
vertices = new SharpDX.Direct3D11.Buffer(device, stream, new BufferDescription()
{
BindFlags = BindFlags.VertexBuffer,
CpuAccessFlags = CpuAccessFlags.None,
OptionFlags = ResourceOptionFlags.None,
SizeInBytes = Utilities.SizeOf<VertexPositionTexture>() * 4,
Usage = ResourceUsage.Default,
StructureByteStride = 0
});
stream.Dispose();
// Prepare All the stages
// for primitive topology https://msdn.microsoft.com/en-us/library/bb196414.aspx#ID4E2BAC
context.InputAssembler.InputLayout = (layout);
context.InputAssembler.PrimitiveTopology = (PrimitiveTopology.TriangleStrip);
context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertices, Utilities.SizeOf<VertexPositionTexture>(), 0));
context.OutputMerger.SetTargets(m_RenderTargetView);
}
Shader File:
Texture2D ShaderTexture : register(t0);
SamplerState Sampler : register(s0);
cbuffer PerObject: register(b0)
{
float4x4 WorldViewProj;
};
// ------------------------------------------------------
// A shader that accepts Position and Texture
// ------------------------------------------------------
struct VertexShaderInput
{
float4 Position : SV_Position;
float2 TextureUV : TEXCOORD0;
};
struct VertexShaderOutput
{
float4 Position : SV_Position;
float2 TextureUV : TEXCOORD0;
};
VertexShaderOutput VSMain(VertexShaderInput input)
{
VertexShaderOutput output = (VertexShaderOutput)0;
output.Position = input.Position;
output.TextureUV = input.TextureUV;
return output;
}
float4 PSMain(VertexShaderOutput input) : SV_Target
{
return ShaderTexture.Sample(Sampler, input.TextureUV);
}
// ------------------------------------------------------
// A shader that accepts Position and Color
// ------------------------------------------------------
struct ColorVS_IN
{
float4 pos : SV_Position;
float4 col : COLOR;
};
struct ColorPS_IN
{
float4 pos : SV_Position;
float4 col : COLOR;
};
ColorPS_IN ColorVS(ColorVS_IN input)
{
ColorPS_IN output = (ColorPS_IN)0;
output.pos = input.pos;
output.col = input.col;
return output;
}
float4 ColorPS(ColorPS_IN input) : SV_Target
{
return input.col;
}
// ------------------------------------------------------
// Techniques
// ------------------------------------------------------
technique11 Color
{
pass P0
{
SetGeometryShader(0);
SetVertexShader(CompileShader(vs_5_0, ColorVS()));
SetPixelShader(CompileShader(ps_5_0, ColorPS()));
}
}
technique11 TextureLayer
{
pass P0
{
SetGeometryShader(0);
SetVertexShader(CompileShader(vs_5_0, VSMain()));
SetPixelShader(CompileShader(ps_5_0, PSMain()));
}
}
I would like to be able to either stretch the image or maintain the aspect ratio, depending on my requirement.
Also, my texture data is updated from another thread by mapping the bitmap data to my render target.
Note: the texture fills the entire render target view if the mapped image is the same size as my render target view.
Screenshots (not reproduced here):
1st screen dump: display image size (835, 626) on a render target of size (720, 576).
2nd screen dump: display image size (899, 674) on a render target of size (899, 676).
If you need any more information, let me know and I will happily provide it.
Thanks.
P.S.: I have also posted this question in another forum but had no luck, so I'm posting here hoping someone can point me in the right direction.
Using C#, SharpDX with DirectX 11 and D3DImage, and not using swap chains.
Also, below is the code used to map the texture data:
Device.ImmediateContext.ClearRenderTargetView(this.m_RenderTargetView, Color4.Black);
Texture2DDescription colordesc = new Texture2DDescription
{
BindFlags = BindFlags.ShaderResource,
Format = PIXEL_FORMAT,
Width = iWidthOfImage,
Height = iHeightOfImage,
MipLevels = 1,
SampleDescription = new SampleDescription(1, 0),
Usage = ResourceUsage.Dynamic,
OptionFlags = ResourceOptionFlags.None,
CpuAccessFlags = CpuAccessFlags.Write,
ArraySize = 1
};
Texture2D newFrameTexture = new Texture2D(this.Device, colordesc);
DataStream dtStream = null;
DataBox dBox = Device.ImmediateContext.MapSubresource(newFrameTexture, 0, MapMode.WriteDiscard, 0, out dtStream);
if (dtStream != null)
{
int iRowPitch = dBox.RowPitch;
for (int iHeightIndex = 0; iHeightIndex < iHeightOfImage; iHeightIndex++)
{
//Copy the image bytes to Texture
// we write row strides multiplies by bytes per pixel
// as our case is bgra32 which is 4 bytes
dtStream.Position = iHeightIndex * iRowPitch;
Marshal.Copy(decodedData, iHeightIndex * iWidthOfImage * 4, new IntPtr(dtStream.DataPointer.ToInt64() + iHeightIndex * iRowPitch), iWidthOfImage * 4);
}
}
Device.ImmediateContext.UnmapSubresource(newFrameTexture, 0);
Device.ImmediateContext.CopySubresourceRegion(newFrameTexture, 0, null, this.RenderTarget, 0);
After resizing, your new window's aspect ratio doesn't match your texture's, so the texture can't cover the entire client area of the window. You'll have to enlarge the textured geometry so that it covers the entire client area when projected onto the camera plane; this necessarily means that a portion of the texture is clipped.
Basically, this is the same problem as watching 4:3 TV content on a 16:9 monitor. You can either zoom it to fill the screen, in which case the 4:3 content is cropped or stretched, or you can letterbox the 4:3 content with black bars on either side.
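As a rough illustration of the two options (a sketch, not part of the original answer; ComputeQuadScale is a hypothetical helper), the full-screen quad built in SetShaderAndVertices could be scaled by factors derived from the image and target aspect ratios, for example through the WorldViewProj constant that the shader declares but VSMain never applies:
// Hypothetical helper (illustrative sketch): X/Y scale factors for the -1..1
// full-screen quad. "fit" letterboxes (whole image visible, bars on two sides);
// "fill" crops (whole target covered, image edges clipped).
static Vector2 ComputeQuadScale(float imageW, float imageH, float targetW, float targetH, bool fit)
{
    // How much wider, relatively, the image is than the render target.
    float ratio = (imageW / imageH) / (targetW / targetH);
    if (fit)
        // Shrink the axis that would overflow so the whole image stays visible.
        return ratio >= 1.0f ? new Vector2(1.0f, 1.0f / ratio) : new Vector2(ratio, 1.0f);
    // Enlarge the other axis so the target is fully covered; the overflow is clipped.
    return ratio >= 1.0f ? new Vector2(ratio, 1.0f) : new Vector2(1.0f, 1.0f / ratio);
}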

OpenTK triangle is always white

I want to draw a colored triangle in OpenTK using a simple fragment shader with a fixed output color. However, the triangle always stays white. What causes this problem?
Here is the init and render code:
game.Load += (sender, e) =>
{
game.VSync = VSyncMode.On;
float[] vertexPositions = {
0.75f, 0.75f, 0.0f, 1.0f,
0.75f, -0.75f, 0.0f, 1.0f,
-0.75f, -0.75f, 0.0f, 1.0f,
};
GL.GenBuffers(1, out vbo);
GL.BindBuffer(BufferTarget.ArrayBuffer, vbo);
GL.BufferData(BufferTarget.ArrayBuffer, new IntPtr(
vertexPositions.Length * sizeof(float)), vertexPositions, BufferUsageHint.StaticDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
// load shaders, create and link shader program
string shaderPath = @"K:\VisualProjects\ArcSynthesis\Tut2\Tut2\shaders\";
shVertex = new VertexShader(new System.IO.FileInfo(shaderPath + "vertex.glsl"));
shFragment = new FragShader(new System.IO.FileInfo(shaderPath + "fragment.glsl"));
spMain = new ShaderProgram(shVertex, shFragment);
spMain.Link();
};
and
game.RenderFrame += (sender, e) =>
{
GL.ClearColor(0f, 0f, 0f, 0f);
GL.Clear(ClearBufferMask.ColorBufferBit);
spMain.Use();
GL.BindBuffer(BufferTarget.ArrayBuffer, vbo);
GL.EnableVertexAttribArray(0);
GL.VertexAttribPointer(0, 4, VertexAttribPointerType.Float, false, 0, 0);
GL.DrawArrays(PrimitiveType.Triangles, 0, 3);
GL.DisableVertexAttribArray(0);
GL.UseProgram(0);
game.SwapBuffers();
};
Vertex shader:
#version 130
in vec4 position;
void main()
{
gl_Position = position;
}
Fragment shader:
#version 130
out vec4 outputColor;
void main()
{
outputColor = vec4(0.2f, 0.5f, 0.8f, 1.0f);
}
So I found out that the problem was the line
GL.UseProgram(0);
The shader program class I use (found in some forum) implements the glUseProgram() function like this:
public IDisposable Use()
{
IDisposable r = new ShaderProgram.Handle(curr_program);
if (curr_program != ID)
{
GL.UseProgram(ID);
curr_program = ID;
}
return r;
}
It only calls glUseProgram() when the cached static int curr_program differs from its own ID. So if the actual GL program has been reset to 0 manually via GL.UseProgram(0), curr_program still holds the old ID and the next Use() call does nothing.
I simply removed the line and everything is working now.
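An alternative to removing the call (a sketch, assuming the same static curr_program field from that class) is to route the unbind through the wrapper so the cached ID stays in sync with the actual GL state:
// Hypothetical addition to the ShaderProgram class: unbind through the wrapper so
// curr_program keeps matching what OpenGL actually has bound.
public static void UseNone()
{
    if (curr_program != 0)
    {
        GL.UseProgram(0);
        curr_program = 0;
    }
}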

OpenGL 2d sprite not rendering (OpenTK)

I have been working on a renderer, and it was working OK for one texture but would not render a second one. I seem to have changed something, and now it stops rendering anything but the background color. I am not sure what I changed, and I cannot get it back to the way it was. I try not to post lots of code at once here, but I do not know enough OpenGL to isolate the issue. If you can offer any help or hints, I would greatly appreciate it!
My guess is that the problem comes either from the way I am binding the texture coordinates or from the shader.
The following is the code:
Shaders:
string vertexShaderSource = @"
#version 330
layout (location = 0) in vec3 Position;
uniform mat4 projectionmatrix;
uniform mat4 ModelMatrix;
uniform mat4 ViewMatrix;
attribute vec2 texcoord;
varying vec2 f_texcoord;
uniform vec2 pos;
void main()
{
f_texcoord = texcoord;
gl_Position = projectionmatrix * vec4(Position, 1);
//gl_Position = projectionmatrix * vec4(Position.xyz, 1.0);
}
";
string fragmentShaderSource = @"
#version 330
out vec4 FragColor;
varying vec2 f_texcoord;
uniform sampler2D mytexture;
void main()
{
FragColor = texture2D(mytexture, f_texcoord);
//FragColor = Vec4(0,0,0, 1);
}";
Vertexes:
Vector2[] g_vertex_buffer_data ={
new Vector2(-1.0f, 1.0f),
new Vector2(1.0f, 1.0f),
new Vector2(1.0f, -1.0f),
new Vector2(-1.0f, -1.0f)
};
Vector2[] g_texture_coords = {
new Vector2(0.0f, 0.0f),
new Vector2(1.0f, 0.0f),
new Vector2(1.0f, -1.0f),
new Vector2(0.0f, -1.0f)
};
Shader setup:
shaderProgramHandle = GL.CreateProgram();
vertexShaderHandle = GL.CreateShader(ShaderType.VertexShader);
fragmentShaderHandle = GL.CreateShader(ShaderType.FragmentShader);
GL.ShaderSource(vertexShaderHandle, vertexShaderSource);
GL.ShaderSource(fragmentShaderHandle, fragmentShaderSource);
GL.CompileShader(vertexShaderHandle);
GL.CompileShader(fragmentShaderHandle);
GL.AttachShader(shaderProgramHandle, vertexShaderHandle);
GL.AttachShader(shaderProgramHandle, fragmentShaderHandle);
GL.LinkProgram(shaderProgramHandle);
GL.UseProgram(shaderProgramHandle);
Basic setup and binding:
GL.ClearColor(Color4.Red);
//GL.LoadMatrix(ref projectionMatrix);
GL.GenBuffers(2, out vertexbuffer);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexbuffer);
GL.BufferData<Vector2>(BufferTarget.ArrayBuffer,
new IntPtr(g_vertex_buffer_data.Length * Vector2.SizeInBytes),
g_vertex_buffer_data, BufferUsageHint.StaticDraw);
//Shader Setup
CreateShaders();
Matrix4 projectionMatrix = Matrix4.CreateOrthographic(control.Width, control.Height, -1, 1);
vertexShaderProjectionHandle = GL.GetUniformLocation(shaderProgramHandle, "projectionmatrix");
GL.UniformMatrix4(vertexShaderProjectionHandle, false, ref projectionMatrix);
GL.EnableVertexAttribArray(0);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexbuffer);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, false, 0, 0);
Loading and binding the texture:
GL.BlendFunc(BlendingFactorSrc.SrcAlpha, BlendingFactorDest.OneMinusSrcAlpha);
GL.Enable(EnableCap.Blend);
GL.ActiveTexture(TextureUnit.Texture0 + texture.textureID);
GL.BindTexture(TextureTarget.Texture2D, texture.textureID);
textureHandle = GL.GetAttribLocation(shaderProgramHandle, "texcoord");
GL.GenBuffers(1, out textureBufferHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, textureBufferHandle);
GL.BufferData<Vector2>(BufferTarget.ArrayBuffer, new IntPtr(Vector2.SizeInBytes * 4), g_texture_coords, BufferUsageHint.StaticDraw);
Matrix Setup:
//rotation += MathHelper.DegreesToRadians(1);
float displayRatio = ((float)control.Height / (float)control.Width);
Matrix4 ViewMatrix = Matrix4.Identity;
int ViewMatrixHandle = GL.GetUniformLocation(shaderProgramHandle, "ViewMatrix");
GL.UniformMatrix4(ViewMatrixHandle, true, ref ViewMatrix);
Matrix4 ModelMatrix = Matrix4.Identity;
int modelMatrixHandle = GL.GetUniformLocation(shaderProgramHandle, "ModelMatrix");
GL.UniformMatrix4(modelMatrixHandle, true, ref ModelMatrix);
int posHandle = GL.GetUniformLocation(shaderProgramHandle, "pos");
GL.Uniform2(posHandle, ref offset);
Rendering
GL.Viewport(0, 0, control.Width, control.Height);
//GL.Enable(EnableCap.Texture2D);
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
GL.BindVertexArray(0);
GL.EnableVertexAttribArray(textureHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, textureBufferHandle);
GL.VertexAttribPointer(textureHandle, 2, VertexAttribPointerType.Float, false, 0, 0);
GL.DrawArrays(BeginMode.Quads, 0, 4);
GL.Flush();
control.SwapBuffers();
You are using the old attribute qualifier to declare texcoord in your vertex shader. This is invalid in GLSL 330, and I suspect that if you read the program/shader info logs when you compile/link your GLSL program, they include this information.
To correct this, replace attribute vec2 texcoord with in vec2 texcoord. Then you should get a valid location when you query the attribute location, which is required to set your vertex attribute pointer correctly.
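As a quick illustration (not from the original answer), the returned location can be validated before it is used:
// Illustrative check: GetAttribLocation returns -1 for an unknown or inactive attribute.
textureHandle = GL.GetAttribLocation(shaderProgramHandle, "texcoord");
if (textureHandle == -1)
    Console.WriteLine("'texcoord' is not an active attribute - check the shader info log.");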
varying is also invalid in GLSL 330. You need to declare f_texcoord as out in your vertex shader and in in your fragment shader for your program to properly link.
There is no error detecting code at all in your code listings. You should read the manual pages for glValidateProgram (...), glGetProgramInfoLog (...) and glGetShaderInfoLog (...), because I am pretty sure the GLSL compiler would have told you your exact problem if you read its output log.
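For reference, a minimal OpenTK sketch of that kind of checking (added for illustration, not part of the original answer) could be placed right after the CompileShader and LinkProgram calls; depending on the OpenTK version, the program enum is named ProgramParameter or GetProgramParameterName:
// Illustrative error checking after compiling and linking.
int status;
GL.GetShader(vertexShaderHandle, ShaderParameter.CompileStatus, out status);
if (status == 0)
    Console.WriteLine(GL.GetShaderInfoLog(vertexShaderHandle));
GL.GetShader(fragmentShaderHandle, ShaderParameter.CompileStatus, out status);
if (status == 0)
    Console.WriteLine(GL.GetShaderInfoLog(fragmentShaderHandle));
GL.GetProgram(shaderProgramHandle, GetProgramParameterName.LinkStatus, out status);
if (status == 0)
    Console.WriteLine(GL.GetProgramInfoLog(shaderProgramHandle));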

How to prevent triangles in OpenGL from rendering on top of triangles in front of themselves

I've been working with OpenGL using the OpenTK library for .NET, writing my own engine. I placed 3 different objects: one spinning cube and 2 adjacent cubes. Everything seemed to work fine until I changed the color of the quad on top of the objects.
I'm rendering cubes with a green top; on the left, the block in the back is being rendered over the block in the front. I can't seem to find out where I'm going wrong with this; when the camera is set to look from the other side, it renders correctly.
The following is the related code in classes with irrelevant or unrelated methods, properties and attributes omitted:
GameState.cs
class GameState : State
{
// TEMP: Test Block
SimpleBlock block;
int i = 0;
public override void Render()
{
base.Render();
// Set OpenGL Settings
GL.Viewport(0, 0, 1024, 768);
GL.Enable(EnableCap.CullFace);
// Reset the Matrices
Matrices.ClearMatrices();
// Set Camera Settings (Field of view in radians)
Matrices.ProjectionMatrix = Matrix4.CreatePerspectiveFieldOfView((float)Math.PI / 2, (1024.0f / 768.0f), 1, 1000);
// Create the Camera
// this has to be in reverse
Matrix4 viewMatrix = Matrix4.CreateRotationX((float)Math.PI/8);
viewMatrix = viewMatrix.Translate(0, -2, -4);
// Multiply it with the ModelView (Which at this point is set to a value that we can just use = and it has the same result)
Matrices.ModelViewMatrix = viewMatrix;
// Render the Block
Matrices.Push();
Matrices.ModelViewMatrix = Matrices.ModelViewMatrix.Translate(2, 0, 0);
Matrices.ModelViewMatrix = Matrices.ModelViewMatrix.Translate(0.5f, 0, 0.5f);
Matrices.ModelViewMatrix = Matrices.ModelViewMatrix.Rotate(0, i / 40.0f, 0);
block.Render();
Matrices.Pop();
// Render the Block Again Twice
Matrices.Push();
Matrices.ModelViewMatrix = Matrices.ModelViewMatrix.Translate(-2, 0, 0);
Matrices.ModelViewMatrix = Matrices.ModelViewMatrix.Translate(0.5f, 0, 0.5f);
block.Render();
Matrices.ModelViewMatrix = Matrices.ModelViewMatrix.Translate(0, 0, -1);
block.Render();
Matrices.Pop();
// Increment Rotation Test Variable
i++;
}
}
SimpleBlock.cs
class SimpleBlock : IBlock
{
public void Render()
{
// Send the Shader Parameters to the GPU
Shader.Bind();
Shader.SendMatrices();
// Begin Rendering the Polys
GL.Begin(BeginMode.Triangles);
// Front Quad
Shader.SetColor(Color4.SaddleBrown);
GL.Normal3(0, 0, 1);
GLUtils.QuadVertices(
new Vector3(-0.5f, 1, 0.5f),
new Vector3(-0.5f, 0, 0.5f),
new Vector3( 0.5f, 1, 0.5f),
new Vector3( 0.5f, 0, 0.5f));
// Right Quad
GL.Normal3(1, 0, 0);
GLUtils.QuadVertices(
new Vector3(0.5f, 1, 0.5f),
new Vector3(0.5f, 0, 0.5f),
new Vector3(0.5f, 1, -0.5f),
new Vector3(0.5f, 0, -0.5f));
// Back Quad
GL.Normal3(0, 0, -1);
GLUtils.QuadVertices(
new Vector3( 0.5f, 1, -0.5f),
new Vector3( 0.5f, 0, -0.5f),
new Vector3(-0.5f, 1, -0.5f),
new Vector3(-0.5f, 0, -0.5f));
// Left Quad
GL.Normal3(-1, 0, 0);
GLUtils.QuadVertices(
new Vector3(-0.5f, 1, -0.5f),
new Vector3(-0.5f, 0, -0.5f),
new Vector3(-0.5f, 1, 0.5f),
new Vector3(-0.5f, 0, 0.5f));
// Bottom Quad
GL.Normal3(0, -1, 0);
GLUtils.QuadVertices(
new Vector3(-0.5f, 0, 0.5f),
new Vector3(-0.5f, 0, -0.5f),
new Vector3( 0.5f, 0, 0.5f),
new Vector3( 0.5f, 0, -0.5f));
// Top Quad
Shader.SetColor(Color4.Green);
GL.Normal3(0, 1, 0);
GLUtils.QuadVertices(
new Vector3(-0.5f, 1, -0.5f),
new Vector3(-0.5f, 1, 0.5f),
new Vector3(0.5f, 1, -0.5f),
new Vector3(0.5f, 1, 0.5f));
// Done!
GL.End();
}
}
BasicFragment.glfs
#version 130
// MultiColor Attribute
in vec4 multiColor;
// Output color
out vec4 gl_FragColor;
void main()
{
// Set fragment
gl_FragColor = multiColor;
}
BasicVertex.glvs
#version 130
// Transformation Matrices
uniform mat4 ProjectionMatrix;
uniform mat4 ModelViewMatrix;
// Vertex Position Attribute
in vec3 VertexPos;
// MultiColor Attributes
in vec4 MultiColor;
out vec4 multiColor;
void main()
{
// Process Colors
multiColor = MultiColor;
// Process Vertex
gl_Position = ProjectionMatrix * ModelViewMatrix * vec4(VertexPos.x, VertexPos.y, VertexPos.z, 1);
}
MainWindow.cs
// Extends OpenTK's GameWindow Class
class MainWindow : GameWindow
{
public MainWindow()
: base(1024, 768, new GraphicsMode(32, 0, 0, 4))
{
this.Title = "Trench Wars";
this.WindowBorder = WindowBorder.Fixed;
this.ClientSize = new Size(1024, 768);
// Set VSync On
this.VSync = VSyncMode.Adaptive;
}
protected override void OnRenderFrame(FrameEventArgs e)
{
base.OnRenderFrame(e);
// Clear Screen
GL.ClearColor(Color4.CornflowerBlue);
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
// Do State-Specific Rendering
StateEngine.Render();
// Pull a Wicked Bluffing move in Poker
GL.Flush();
// Swap Buffers
this.SwapBuffers();
}
}
It seems like you forgot to enable depth testing. glEnable(GL_DEPTH_TEST) before rendering the geometry is your friend (or, given the language bindings you're using, GL.Enable(EnableCap.DepthTest);).
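A minimal sketch of the change (illustrative):
// Enable depth testing once during setup, or at the start of Render(), so fragments
// behind already-drawn geometry are rejected; the depth buffer is already being
// cleared each frame in OnRenderFrame.
GL.Enable(EnableCap.DepthTest);
GL.DepthFunc(DepthFunction.Less); // the default comparison, shown for clarity
One caveat, stated as an assumption about the GraphicsMode(color, depth, stencil, samples) constructor: new GraphicsMode(32, 0, 0, 4) in MainWindow.cs appears to request 0 depth-buffer bits, so the context may also need a depth buffer (for example new GraphicsMode(32, 24, 0, 4)) for the depth test to have anything to test against.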
