I have a problem: I have an OpenTK window open in which I draw textures, images, etc. I have to make a little video game this way for a test, and I'd like to show text on it that displays game info.
So far I've only been able to open a Windows Forms window with text, and that's not what I need.
Is there a way to show text in an OpenTK window?
I can't use OpenTK 3.0, so QuickFont has to be excluded.
I can use the GL class.
Thank you very much!
One possibility would be to use the FreeType library to load a TrueType font into texture objects.
SharpFont provides cross-platform FreeType bindings for C#.
The source can be found at GitHub - Robmaister/SharpFont.
(x64 SharpFont.dll and freetype6.dll from MonoGame.Dependencies)
A full example can be found at GitHub - Rabbid76/c_sharp_opengl.
The example is based on LearnOpenGL - Text Rendering.
Load the font and glyph information for the characters and create a texture object for each character:
public struct Character
{
public int TextureID { get; set; }
public Vector2 Size { get; set; }
public Vector2 Bearing { get; set; }
public int Advance { get; set; }
}
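// Dictionary that maps each character code to its glyph data (an assumed field:
// the snippet below uses _characters but does not show its declaration; requires System.Collections.Generic).
private readonly Dictionary<uint, Character> _characters = new Dictionary<uint, Character>();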
// initialize library
Library lib = new Library();
Face face = new Face(lib, "FreeSans.ttf");
face.SetPixelSizes(0, 32);
// set 1 byte pixel alignment
GL.PixelStore(PixelStoreParameter.UnpackAlignment, 1);
// Load first 128 characters of ASCII set
for (uint c = 0; c < 128; c++)
{
try
{
// load glyph
//face.LoadGlyph(c, LoadFlags.Render, LoadTarget.Normal);
face.LoadChar(c, LoadFlags.Render, LoadTarget.Normal);
GlyphSlot glyph = face.Glyph;
FTBitmap bitmap = glyph.Bitmap;
// create glyph texture
int texObj = GL.GenTexture();
GL.BindTexture(TextureTarget.Texture2D, texObj);
GL.TexImage2D(TextureTarget.Texture2D, 0,
PixelInternalFormat.R8, bitmap.Width, bitmap.Rows, 0,
PixelFormat.Red, PixelType.UnsignedByte, bitmap.Buffer);
// set texture parameters
GL.TextureParameter(texObj, TextureParameterName.TextureMinFilter, (int)TextureMinFilter.Linear);
GL.TextureParameter(texObj, TextureParameterName.TextureMagFilter, (int)TextureMagFilter.Linear);
GL.TextureParameter(texObj, TextureParameterName.TextureWrapS, (int)TextureWrapMode.ClampToEdge);
GL.TextureParameter(texObj, TextureParameterName.TextureWrapT, (int)TextureWrapMode.ClampToEdge);
// add character
Character ch = new Character();
ch.TextureID = texObj;
ch.Size = new Vector2(bitmap.Width, bitmap.Rows);
ch.Bearing = new Vector2(glyph.BitmapLeft, glyph.BitmapTop);
ch.Advance = (int)glyph.Advance.X.Value;
_characters.Add(c, ch);
}
catch (Exception ex)
{
Console.WriteLine(ex);
}
}
Create a Vertex Array Object that draws a quad from 2 triangles:
// bind default texture
GL.BindTexture(TextureTarget.Texture2D, 0);
// set default (4 byte) pixel alignment
GL.PixelStore(PixelStoreParameter.UnpackAlignment, 4);
float[] vquad =
{
// x y u v
0.0f, -1.0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f,
1.0f, 0.0f, 1.0f, 1.0f,
0.0f, -1.0f, 0.0f, 0.0f,
1.0f, 0.0f, 1.0f, 1.0f,
1.0f, -1.0f, 1.0f, 0.0f
};
// Create Vertex Buffer Object (https://www.khronos.org/opengl/wiki/Vertex_Specification#Vertex_Buffer_Object)
_vbo = GL.GenBuffer();
GL.BindBuffer(BufferTarget.ArrayBuffer, _vbo);
GL.BufferData(BufferTarget.ArrayBuffer, 4 * 6 * 4, vquad, BufferUsageHint.StaticDraw); // 6 vertices * 4 floats * 4 bytes
// Vertex Array Object (https://www.khronos.org/opengl/wiki/Vertex_Specification#Vertex_Array_Object)
_vao = GL.GenVertexArray();
GL.BindVertexArray(_vao);
GL.EnableVertexAttribArray(0);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, false, 4 * 4, 0);
GL.EnableVertexAttribArray(1);
GL.VertexAttribPointer(1, 2, VertexAttribPointerType.Float, false, 4 * 4, 2 * 4);
Furthermore, create a method that draws a string at a specified position with a given direction:
public void RenderText(string text, float x, float y, float scale, Vector2 dir)
{
GL.ActiveTexture(TextureUnit.Texture0);
GL.BindVertexArray(_vao);
float angle_rad = (float)Math.Atan2(dir.Y, dir.X);
Matrix4 rotateM = Matrix4.CreateRotationZ(angle_rad);
Matrix4 transOriginM = Matrix4.CreateTranslation(new Vector3(x, y, 0f));
// Iterate through all characters
float char_x = 0.0f;
foreach (var c in text)
{
if (_characters.ContainsKey(c) == false)
continue;
Character ch = _characters[c];
float w = ch.Size.X * scale;
float h = ch.Size.Y * scale;
float xrel = char_x + ch.Bearing.X * scale;
float yrel = (ch.Size.Y - ch.Bearing.Y) * scale;
// Now advance cursors for next glyph (note that advance is number of 1/64 pixels)
char_x += (ch.Advance >> 6) * scale; // Bitshift by 6 to get value in pixels (2^6 = 64 (divide amount of 1/64th pixels by 64 to get amount of pixels))
Matrix4 scaleM = Matrix4.CreateScale(new Vector3(w, h, 1.0f));
Matrix4 transRelM = Matrix4.CreateTranslation(new Vector3(xrel, yrel, 0.0f));
Matrix4 modelM = scaleM * transRelM * rotateM * transOriginM; // OpenTK `*`-operator is reversed
GL.UniformMatrix4(0, false, ref modelM);
// Render glyph texture over quad
GL.BindTexture(TextureTarget.Texture2D, ch.TextureID);
// Render quad
GL.DrawArrays(PrimitiveType.Triangles, 0, 6);
}
GL.BindVertexArray(0);
GL.BindTexture(TextureTarget.Texture2D, 0);
}
Vertex shader:
#version 460
layout (location = 0) in vec2 in_pos;
layout (location = 1) in vec2 in_uv;
out vec2 vUV;
layout (location = 0) uniform mat4 model;
layout (location = 1) uniform mat4 projection;
void main()
{
vUV = in_uv.xy;
gl_Position = projection * model * vec4(in_pos.xy, 0.0, 1.0);
}
Fragment shader:
#version 460
in vec2 vUV;
layout (binding=0) uniform sampler2D u_texture;
layout (location = 2) uniform vec3 textColor;
out vec4 fragColor;
void main()
{
vec2 uv = vUV.xy;
float text = texture(u_texture, uv).r;
fragColor = vec4(textColor.rgb*text, text);
}
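The example below uses a small helper called text_prog to bind the shader program. A minimal sketch of compiling and linking the two shaders above with plain OpenTK calls, assuming vertexSource and fragmentSource hold the GLSL shown, could look like this:
int vs = GL.CreateShader(ShaderType.VertexShader);
GL.ShaderSource(vs, vertexSource);
GL.CompileShader(vs);
Console.WriteLine(GL.GetShaderInfoLog(vs)); // empty if compilation succeeded
int fs = GL.CreateShader(ShaderType.FragmentShader);
GL.ShaderSource(fs, fragmentSource);
GL.CompileShader(fs);
Console.WriteLine(GL.GetShaderInfoLog(fs));
int program = GL.CreateProgram();
GL.AttachShader(program, vs);
GL.AttachShader(program, fs);
GL.LinkProgram(program);
Console.WriteLine(GL.GetProgramInfoLog(program));
// text_prog.Use() in the example then simply wraps:
GL.UseProgram(program);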
See the example:
Matrix4 projectionM = Matrix4.CreateOrthographicOffCenter(0.0f, this.Width, this.Height, 0.0f, -1.0f, 1.0f);
GL.ClearColor(0.2f, 0.3f, 0.3f, 1.0f);
GL.Clear(ClearBufferMask.ColorBufferBit);
GL.Enable(EnableCap.Blend);
GL.BlendFunc(BlendingFactor.SrcAlpha, BlendingFactor.OneMinusSrcAlpha);
text_prog.Use();
GL.UniformMatrix4(1, false, ref projectionM);
GL.Uniform3(2, new Vector3(0.5f, 0.8f, 0.2f));
font.RenderText("This is sample text", 25.0f, 50.0f, 1.2f, new Vector2(1f, 0f));
GL.Uniform3(2, new Vector3(0.3f, 0.7f, 0.9f));
font.RenderText("(C) LearnOpenGL.com", 50.0f, 200.0f, 0.9f, new Vector2(1.0f, -0.25f));
Related
I want to rotate an image shown in my GLControl by 10 degrees. I rotated the bitmap in C# code and passed the rotated bitmap to the OpenGL shader code, but in the resulting image the rotated part appears to be hidden/cut off. Do I need to change the viewport while rotating, or is it better to rotate the image in the shader code itself?
public void DrawImage(int image, int glcontrolWidth, int glcontrolHeight, Matrix4 transformMatrix)
{
GL.Viewport(new Rectangle(0, 0, glcontrolWidth, glcontrolHeight));
GL.MatrixMode(MatrixMode.Projection);
GL.PushMatrix();
GL.LoadIdentity();
GL.MatrixMode(MatrixMode.Modelview);
GL.PushMatrix();
GL.LoadIdentity();
GL.Disable(EnableCap.Lighting);
GL.Enable(EnableCap.Texture2D);
GL.ActiveTexture(TextureUnit.Texture0);
GL.BindTexture(TextureTarget.Texture2D, image);
GL.Uniform1(positionLocation1, 0);
RunShaders();
GL.Disable(EnableCap.Texture2D);
GL.PopMatrix();
GL.MatrixMode(MatrixMode.Projection);
GL.PopMatrix();
GL.MatrixMode(MatrixMode.Modelview);
}
public void RunShaders()
{
GL.UseProgram(program);
GL.UniformMatrix4(transformLocation, false, ref transformMatrix);
GL.DrawArrays(PrimitiveType.Triangles, 0, vertices.Length / 3);
ErrorCode ec = GL.GetError();
if (ec != 0)
System.Console.WriteLine(ec.ToString());
Console.Read();
}
public void Init()
{
CreateShaders();
CreateProgram();
InitBuffers();
}
public void CreateProgram()
{
program = GL.CreateProgram();
GL.AttachShader(program, vertShader);
GL.AttachShader(program, fragShader);
GL.LinkProgram(program);
}
public void InitBuffers()
{
buffer = GL.GenBuffer();
positionLocation = GL.GetAttribLocation(program, "a_position");
positionLocation1 = GL.GetUniformLocation(program, "sTexture");
transformLocation = GL.GetUniformLocation(program, "u_transform");
GL.EnableVertexAttribArray(positionLocation);
GL.BindBuffer(BufferTarget.ArrayBuffer, buffer);
GL.BufferData(BufferTarget.ArrayBuffer, (IntPtr)(vertices.Length * sizeof(float)), vertices, BufferUsageHint.StaticDraw);
GL.VertexAttribPointer(positionLocation, 3, VertexAttribPointerType.Float, false, 0, 0);
}
public void CreateShaders()
{
/***********Vert Shader********************/
vertShader = GL.CreateShader(ShaderType.VertexShader);
GL.ShaderSource(vertShader, @"attribute vec3 a_position;
varying vec2 vTexCoord;
uniform mat4 u_transform;
void main() {
vTexCoord = (a_position.xy+1)/2;
gl_Position = u_transform * vec4(a_position, 1);
}");
GL.CompileShader(vertShader);
/***********Frag Shader ****************/
fragShader = GL.CreateShader(ShaderType.FragmentShader);
GL.ShaderSource(fragShader, @"precision highp float;
uniform sampler2D sTexture_2;
varying vec2 vTexCoord;
void main ()
{
vec4 color = texture2D (sTexture_2, vec2(vTexCoord.x, vTexCoord.y));
gl_FragColor =color;
}"); GL.CompileShader(fragShader);
}
Do not rotate the image, but rotate and scale the vertex coordinates.
Add a transformation matrix to the vertex shader:
attribute vec3 a_position;
varying vec2 vTexCoord;
uniform mat4 u_transform;
void main()
{
vTexCoord = (a_position.xy+1)/2;
gl_Position = u_transform * vec4(a_position, 1);
}
Get the location of the transformation matrix uniform (`u_transform`) after the program is linked.
int transformLocation = GL.GetUniformLocation(program, "u_transform");
Compute the scale dependent on the angle:
double diagonal = Math.Sqrt(bmp.Width * bmp.Width + bmp.Height * bmp.Height);
double dia_angle1 = Math.Atan2(bmp.Height, bmp.Width) + angle * Math.PI / 180;
double dia_angle2 = Math.Atan2(bmp.Height, -bmp.Width) + angle * Math.PI / 180;
double rot_w = Math.Max(Math.Abs(diagonal * Math.Cos(dia_angle1)), Math.Abs(diagonal * Math.Cos(dia_angle2)));
double rot_h = Math.Max(Math.Abs(diagonal * Math.Sin(dia_angle1)), Math.Abs(diagonal * Math.Sin(dia_angle2)));
double scale = Math.Min(bmp.Width / rot_w, bmp.Height / rot_h);
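(This computes the axis-aligned bounding box of the rotated image and then scales the image down so that the rotated box still fits within the original width and height.)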
Define a transformation matrix that scales and rotates the image taking into account the aspect ratio:
Matrix4 transformMatrix =
Matrix4.CreateScale((float)scale) *
Matrix4.CreateScale(this.Width, this.Height, 1.0f) *
Matrix4.CreateRotationZ((float)(angle * Math.PI / 180)) *
Matrix4.CreateScale(1.0f / this.Width, 1.0f / this.Height, 1.0f);
Set the matrix uniform after the program is made current (after GL.UseProgram):
GL.UniformMatrix4(transformLocation, false, ref transformMatrix);
This question was closed as a duplicate of: OpenGL 4.2 LookAt matrix only works with -z value for eye position.
I am currently trying to add shadows with Shadow Mapping to my 3D Engine.
First I render the scene from the light's point of view and save the depth values in a texture. Then I use the default FBO to draw from that texture, just like in this tutorial.
The problem is that my screen stays white, no matter where I move.
GL.GetError() outputs NoError, the SSBOs I use in the vertex shader have the right values, and GL.CheckFramebufferStatus() returns FramebufferCompleteExt.
This is how I create the FBO for depth values:
_depthMapFBO = GL.GenFramebuffer();
_depthMapFBOColorBuffer = BufferObjects.FBO_TextureAttachment(_depthMapFBO, PixelInternalFormat.DepthComponent, PixelFormat.DepthComponent, FramebufferAttachment.DepthAttachment, 1024, 1024);
GL.BindFramebuffer(FramebufferTarget.Framebuffer, _depthMapFBO);
GL.DrawBuffer(DrawBufferMode.None);
GL.ReadBuffer(ReadBufferMode.None);
====================================
public static int FBO_TextureAttachment(int FrameBuffer, PixelInternalFormat PixelInternalFormat, PixelFormat PixelFormat, FramebufferAttachment FramebufferAttachment, int Width, int Height)
{
// PixelInternalFormat = DepthComponent && PixelFormat = DepthComponent && FramebufferAttachment = DepthAttachment && Width, Height = 1024,
GL.BindFramebuffer(FramebufferTarget.Framebuffer, FrameBuffer);
int _texture = GL.GenTexture();
GL.BindTexture(TextureTarget.Texture2D, _texture);
GL.TexImage2D(TextureTarget.Texture2D, 0, PixelInternalFormat, Width, Height, 0, PixelFormat, PixelType.Float, IntPtr.Zero);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMinFilter, (int)All.Nearest);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureMagFilter, (int)All.Nearest);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapS, (int)All.Repeat);
GL.TexParameter(TextureTarget.Texture2D, TextureParameterName.TextureWrapT, (int)All.Repeat);
GL.FramebufferTexture2D(FramebufferTarget.Framebuffer, FramebufferAttachment, TextureTarget.Texture2D, _texture, 0);
return _texture;
}
In my Render function it looks like this:
GL.BindFramebuffer(FramebufferTarget.Framebuffer, _depthMapFBO);
GL.Clear(ClearBufferMask.DepthBufferBit);
GL.Viewport(0, 0, 1024, 1024);
_simpleDepthProgram.Use();
float _nearPlane = 1.0f, _farPlane = 100f;
_lightProjection = Matrix4.CreateOrthographicOffCenter(-100.0f, 100.0f, -100.0f, 100.0f, _nearPlane, _farPlane);
_ligthView = Matrix4.LookAt(_allLamps[0].Position, new Vector3(0f), new Vector3(0.0f, 1.0f, 0.0f));
_lightSpaceMatrix = _lightProjection * _ligthView;
GL.UniformMatrix4(21, false, ref _lightSpaceMatrix);
// Copy all SSBO's
GL.ActiveTexture(TextureUnit.Texture2);
GL.BindTexture(TextureTarget.Texture2D, _depthMapFBOColorBuffer);
Scene();
And the shader where I draw the depthMap:
#version 450 core
out vec4 FragColor;
uniform sampler2D scene;
uniform sampler2D bloomed;
uniform sampler2D depthMap;
uniform float zNear;
uniform float zFar;
float LinearizeDepth(float depth)
{
float z = depth * 2.0 - 1.0; // Back to NDC
return (2.0 * zNear * zFar) / (zFar + zNear - z * (zFar - zNear));
}
in vec2 TexCoord;
void main()
{
float depthValue = texture(depthMap, TexCoord).r;
//float depth = LinearizeDepth(gl_FragCoord.z) / far; // only for perspective
FragColor = vec4(vec3(depthValue), 1.0);
}
The computation of the _lightSpaceMatrix is wrong: the OpenTK matrix multiplication order is reversed. See Problem with matrices #687:
Because of how matrices are treated in C# and OpenTK, multiplication order is inverted from what you might expect in C/C++ and GLSL. This is an old artefact in the library, and it's too late to change now, unfortunately.
Swap the _ligthView and _lightProjection when you multiply the matrices:
_lightSpaceMatrix = _lightProjection * _ligthView; // wrong
_lightSpaceMatrix = _ligthView * _lightProjection; // correct
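In general, because OpenTK matrices are row-major and operate on row vectors, transformations compose from left to right, so a combined transform is written model * view * projection rather than projection * view * model.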
I'm using SlimDX to build a small visualizer; however, I recently stumbled into a problem: when I transform the triangle with the MVP matrix, it disappears.
Constant buffers are loaded properly, because I can see the right color loaded through them.
The triangle used as a test is visible if I don't transform it in the vertex shader.
So I suppose the problem is in either the view matrix or the projection matrix.
Moreover, I don't know whether I should transpose them.
vertices = new DataStream(12 * 3, true, true);
vertices.Write(new Vector3(0.0f, 0.5f, 0.5f));
vertices.Write(new Vector3(0.5f, -0.5f, 0.5f));
vertices.Write(new Vector3(-0.5f, -0.5f, 0.5f));
vertices.Position = 0;
vertexBuffer = new Buffer(device, vertices, 12 * 3, ResourceUsage.Default, BindFlags.VertexBuffer, CpuAccessFlags.None, ResourceOptionFlags.None, 0);
// configure the Input Assembler portion of the pipeline with the vertex data
dc3D.InputAssembler.InputLayout = baseShaders.GetInputLayout();
dc3D.InputAssembler.PrimitiveTopology = PrimitiveTopology.TriangleList;
dc3D.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertexBuffer, 12, 0));
// set the shaders
dc3D.VertexShader.Set(baseShaders.GetVertexShader());
dc3D.PixelShader.Set(baseShaders.GetPixelShader());
cbufferData = new Buffer(device, new BufferDescription
{
Usage = ResourceUsage.Default,
SizeInBytes = System.Runtime.InteropServices.Marshal.SizeOf(typeof(BaseShaders.ConstantBuffer)),
BindFlags = BindFlags.ConstantBuffer
});
dc3D.VertexShader.SetConstantBuffer(cbufferData, 0);
Vector3 eye = new Vector3(4, 4, 4);
Vector3 target = new Vector3(0, 0, 0);
Vector3 up = new Vector3(0, 1, 0);
Matrix.LookAtLH(ref eye, ref target, ref up, out cbuffer.view) ;
//for now width and height are hardcoded.
Matrix.PerspectiveFovLH((float)Math.PI / 4, 617/643.0f, 1.0f, 100.0f, out cbuffer.projection);
cbuffer.color = new Vector4(0.1f, 1.0f, 1.0f, 1.0f);
//Matrix.Transpose(cbuffer.view);
//Matrix.Transpose(cbuffer.projection);
// update constant buffers.
var data = new DataStream(System.Runtime.InteropServices.Marshal.SizeOf(typeof(BaseShaders.ConstantBuffer)), true, true);
data.Write(cbuffer);
data.Position = 0;
dc3D.UpdateSubresource(new DataBox(0, 0, data), cbufferData, 0);
It's been some hours now and I haven't found a solution.
Here is the vertex shader code:
cbuffer ConstantBuffer : register(b0)
{
matrix view;
matrix projection;
float4 color;
}
struct VOut
{
float4 position : SV_POSITION;
float4 color : COLOR;
};
VOut main(float4 position : POSITION)
{
VOut output;
output.position = mul(mul(position, view), projection);
output.color = color;
return output;
}
By making some small changes, and in fact transposing the matrix, I managed to get the code to work.
Here is the vertex shader:
cbuffer ConstantBuffer : register(b0)
{
float4x4 mvp;
float4 color;
}
struct VOut
{
float4 position : SV_POSITION;
float4 color : COLOR;
};
VOut main(float4 position : POSITION)
{
VOut output;
output.position = mul(position, mvp);
output.color = color;
return output;
}
Here is the changed code:
Vector3 eye = new Vector3(1, 1, 1);
Vector3 target = new Vector3(0, 0, 0);
Vector3 up = new Vector3(0, 1, 0);
Matrix view = new Matrix();
Matrix projection = new Matrix();
view = Matrix.LookAtLH(eye, target, up) ;
projection = Matrix.PerspectiveFovLH((float)Math.PI / 4, 617/643.0f, 0.1f, 100.0f);
cbuffer.color = new Vector4(0.1f, 1.0f, 1.0f, 1.0f);
cbuffer.mvp = Matrix.Multiply(view, projection);
Matrix.Transpose(ref cbuffer.mvp, out cbuffer.mvp);
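A likely reason the transpose is needed: HLSL packs constant-buffer matrices in column-major order by default, while SlimDX's Matrix is row-major in memory, so the data has to be transposed on upload (alternatively, the matrix could be declared row_major in the cbuffer).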
I want to draw a colored triangle in OpenTK using a simple fragment shader with a fixed output color. However, the triangle always stays white. What causes this problem?
Here is the init and render code:
game.Load += (sender, e) =>
{
game.VSync = VSyncMode.On;
float[] vertexPositions = {
0.75f, 0.75f, 0.0f, 1.0f,
0.75f, -0.75f, 0.0f, 1.0f,
-0.75f, -0.75f, 0.0f, 1.0f,
};
GL.GenBuffers(1, out vbo);
GL.BindBuffer(BufferTarget.ArrayBuffer, vbo);
GL.BufferData(BufferTarget.ArrayBuffer, new IntPtr(
vertexPositions.Length * sizeof(float)), vertexPositions, BufferUsageHint.StaticDraw);
GL.BindBuffer(BufferTarget.ArrayBuffer, 0);
// load shaders, create and link shader program
string shaderPath = @"K:\VisualProjects\ArcSynthesis\Tut2\Tut2\shaders\";
shVertex = new VertexShader(new System.IO.FileInfo(shaderPath + "vertex.glsl"));
shFragment = new FragShader(new System.IO.FileInfo(shaderPath + "fragment.glsl"));
spMain = new ShaderProgram(shVertex, shFragment);
spMain.Link();
};
and
game.RenderFrame += (sender, e) =>
{
GL.ClearColor(0f, 0f, 0f, 0f);
GL.Clear(ClearBufferMask.ColorBufferBit);
spMain.Use();
GL.BindBuffer(BufferTarget.ArrayBuffer, vbo);
GL.EnableVertexAttribArray(0);
GL.VertexAttribPointer(0, 4, VertexAttribPointerType.Float, false, 0, 0);
GL.DrawArrays(PrimitiveType.Triangles, 0, 3);
GL.DisableVertexAttribArray(0);
GL.UseProgram(0);
game.SwapBuffers();
};
Vertex shader:
#version 130
in vec4 position;
void main()
{
gl_Position = position;
}
Fragment shader:
#version 130
out vec4 outputColor;
void main()
{
outputColor = vec4(0.2f, 0.5f, 0.8f, 1.0f);
}
So I found out that the problem was the line
GL.UseProgram(0);
The shader program class I use (found in some forum) implements the glUseProgram() function like this:
public IDisposable Use()
{
IDisposable r = new ShaderProgram.Handle(curr_program);
if (curr_program != ID)
{
GL.UseProgram(ID);
curr_program = ID;
}
return r;
}
It only calls glUseProgram() when the static int curr_program differs from its own ID, i.e. when another ShaderProgram was previously bound through this wrapper. It does nothing if the current GL program has been set to 0 manually by calling GL.UseProgram(0), because curr_program still holds the program's ID and the program is never rebound.
I simply removed the line and everything is working now.
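An alternative to removing the line would be to make Use() immune to external binds (a sketch that keeps the same static curr_program cache but always rebinds):
public IDisposable Use()
{
    IDisposable r = new ShaderProgram.Handle(curr_program);
    // Always bind, so the cache cannot get out of sync when GL.UseProgram(0)
    // (or any other raw GL call) changes the current program behind its back.
    GL.UseProgram(ID);
    curr_program = ID;
    return r;
}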
I have been working on a renderer and it was working OK for one texture, but it would not render a second one. I seem to have changed something, and now it renders nothing but the background color. I am not sure what I changed and I cannot get it back to the way it was. I try not to post lots of code at once here, but I do not know enough OpenGL to isolate the issue. If you can offer any help or hints, I would greatly appreciate it!
My guess is that the problem comes either from the way I am binding the texture coordinates or from the shader.
The following is the code:
Shaders:
string vertexShaderSource = @"
#version 330
layout (location = 0) in vec3 Position;
uniform mat4 projectionmatrix;
uniform mat4 ModelMatrix;
uniform mat4 ViewMatrix;
attribute vec2 texcoord;
varying vec2 f_texcoord;
uniform vec2 pos;
void main()
{
f_texcoord = texcoord;
gl_Position = projectionmatrix * vec4(Position, 1);
//gl_Position = projectionmatrix * vec4(Position.xyz, 1.0);
}
";
string fragmentShaderSource = @"
#version 330
out vec4 FragColor;
varying vec2 f_texcoord;
uniform sampler2D mytexture;
void main()
{
FragColor = texture2D(mytexture, f_texcoord);
//FragColor = Vec4(0,0,0, 1);
}";
Vertexes:
Vector2[] g_vertex_buffer_data ={
new Vector2(-1.0f, 1.0f),
new Vector2(1.0f, 1.0f),
new Vector2(1.0f, -1.0f),
new Vector2(-1.0f, -1.0f)
};
Vector2[] g_texture_coords = {
new Vector2(0.0f, 0.0f),
new Vector2(1.0f, 0.0f),
new Vector2(1.0f, -1.0f),
new Vector2(0.0f, -1.0f)
};
Shader setup:
shaderProgramHandle = GL.CreateProgram();
vertexShaderHandle = GL.CreateShader(ShaderType.VertexShader);
fragmentShaderHandle = GL.CreateShader(ShaderType.FragmentShader);
GL.ShaderSource(vertexShaderHandle, vertexShaderSource);
GL.ShaderSource(fragmentShaderHandle, fragmentShaderSource);
GL.CompileShader(vertexShaderHandle);
GL.CompileShader(fragmentShaderHandle);
GL.AttachShader(shaderProgramHandle, vertexShaderHandle);
GL.AttachShader(shaderProgramHandle, fragmentShaderHandle);
GL.LinkProgram(shaderProgramHandle);
GL.UseProgram(shaderProgramHandle);
Basic setup and binding:
GL.ClearColor(Color4.Red);
//GL.LoadMatrix(ref projectionMatrix);
GL.GenBuffers(2, out vertexbuffer);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexbuffer);
GL.BufferData<Vector2>(BufferTarget.ArrayBuffer,
new IntPtr(g_vertex_buffer_data.Length * Vector2.SizeInBytes),
g_vertex_buffer_data, BufferUsageHint.StaticDraw);
//Shader Setup
CreateShaders();
Matrix4 projectionMatrix = Matrix4.CreateOrthographic(control.Width, control.Height, -1, 1);
vertexShaderProjectionHandle = GL.GetUniformLocation(shaderProgramHandle, "projectionmatrix");
GL.UniformMatrix4(vertexShaderProjectionHandle, false, ref projectionMatrix);
GL.EnableVertexAttribArray(0);
GL.BindBuffer(BufferTarget.ArrayBuffer, vertexbuffer);
GL.VertexAttribPointer(0, 2, VertexAttribPointerType.Float, false, 0, 0);
Loading and binding the texture:
GL.BlendFunc(BlendingFactorSrc.SrcAlpha, BlendingFactorDest.OneMinusSrcAlpha);
GL.Enable(EnableCap.Blend);
GL.ActiveTexture(TextureUnit.Texture0 + texture.textureID);
GL.BindTexture(TextureTarget.Texture2D, texture.textureID);
textureHandle = GL.GetAttribLocation(shaderProgramHandle, "texcoord");
GL.GenBuffers(1, out textureBufferHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, textureBufferHandle);
GL.BufferData<Vector2>(BufferTarget.ArrayBuffer, new IntPtr(Vector2.SizeInBytes * 4), g_texture_coords, BufferUsageHint.StaticDraw);
Matrix Setup:
//rotation += MathHelper.DegreesToRadians(1);
float displayRatio = ((float)control.Height / (float)control.Width);
Matrix4 ViewMatrix = Matrix4.Identity;
int ViewMatrixHandle = GL.GetUniformLocation(shaderProgramHandle, "ViewMatrix");
GL.UniformMatrix4(ViewMatrixHandle, true, ref ViewMatrix);
Matrix4 ModelMatrix = Matrix4.Identity;
int modelMatrixHandle = GL.GetUniformLocation(shaderProgramHandle, "ModelMatrix");
GL.UniformMatrix4(modelMatrixHandle, true, ref ModelMatrix);
int posHandle = GL.GetUniformLocation(shaderProgramHandle, "pos");
GL.Uniform2(posHandle, ref offset);
Rendering
GL.Viewport(0, 0, control.Width, control.Height);
//GL.Enable(EnableCap.Texture2D);
GL.Clear(ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);
GL.BindVertexArray(0);
GL.EnableVertexAttribArray(textureHandle);
GL.BindBuffer(BufferTarget.ArrayBuffer, textureBufferHandle);
GL.VertexAttribPointer(textureHandle, 2, VertexAttribPointerType.Float, false, 0, 0);
GL.DrawArrays(BeginMode.Quads, 0, 4);
GL.Flush();
control.SwapBuffers();
You are using the old attribute qualifier to declare texcoord in your vertex shader. This is invalid in GLSL 330, and I suspect if you read the program/shader info logs when you compile/link your GLSL program it includes this information in the log.
To correct this, replace attribute vec2 texcoord with in vec2 texcoord. Then you should get a valid location when you query the attribute location, which is required to set your vertex attribute pointer correctly.
varying is also invalid in GLSL 330. You need to declare f_texcoord as out in your vertex shader and in in your fragment shader for your program to properly link.
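For example, the corrected shader sources could look like this (a sketch in the question's embedded-string style; the unused ModelMatrix, ViewMatrix and pos uniforms are omitted for brevity):
string vertexShaderSource = @"
#version 330
layout (location = 0) in vec3 Position;
in vec2 texcoord;        // was: attribute vec2 texcoord
out vec2 f_texcoord;     // was: varying vec2 f_texcoord
uniform mat4 projectionmatrix;
void main()
{
    f_texcoord = texcoord;
    gl_Position = projectionmatrix * vec4(Position, 1.0);
}";
string fragmentShaderSource = @"
#version 330
in vec2 f_texcoord;      // was: varying vec2 f_texcoord
out vec4 FragColor;
uniform sampler2D mytexture;
void main()
{
    FragColor = texture(mytexture, f_texcoord);   // texture() replaces texture2D() in GLSL 330
}";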
There is no error-detection code at all in your code listings. You should read the manual pages for glValidateProgram (...), glGetProgramInfoLog (...) and glGetShaderInfoLog (...), because I am pretty sure the GLSL compiler would have told you the exact problem if you read its output log.
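A minimal sketch of that kind of check, using the handles from the question's setup code:
GL.CompileShader(vertexShaderHandle);
Console.WriteLine(GL.GetShaderInfoLog(vertexShaderHandle));   // compiler errors/warnings, if any
GL.CompileShader(fragmentShaderHandle);
Console.WriteLine(GL.GetShaderInfoLog(fragmentShaderHandle));
GL.LinkProgram(shaderProgramHandle);
Console.WriteLine(GL.GetProgramInfoLog(shaderProgramHandle)); // linker errors, if any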