Hey, I'm making a game out of primitive objects. To build these objects I'm using a vertex buffer. The problem I'm having is that when I render a cube it looks perfect, but when I scale it up the cube deforms, leaving holes and jagged edges. I've tried numerous ways to build the cube and I can't figure it out. The reason for scaling the cube up is to make a skybox.
Here are some images:
Deformed Cube:
http://i.imgur.com/5aGu7mF.png
Perfect small cube:
http://i.imgur.com/FmMBb4X.png
My code:
Vertex data for the skybox:
private void loadSkyboxData(Color color)
{
VertexPositionColorTexture[] verts = new VertexPositionColorTexture[25];
//Back face (z = -500)
verts[0] = new VertexPositionColorTexture(new Vector3(-500, 500, -500), color, new Vector2(0, 0));
verts[1] = new VertexPositionColorTexture(new Vector3(500, 500, -500), color, new Vector2(0, 0));
verts[2] = new VertexPositionColorTexture(new Vector3(-500, -500, -500), color, new Vector2(0, 0));
verts[3] = new VertexPositionColorTexture(new Vector3(500, -500, -500), color, new Vector2(0, 0));
//Right face (x = 500)
verts[4] = new VertexPositionColorTexture(new Vector3(500, 500, -500), color, new Vector2(0, 0));
verts[5] = new VertexPositionColorTexture(new Vector3(500, 500, 500), color, new Vector2(0, 0));
verts[6] = new VertexPositionColorTexture(new Vector3(500, -500, -500), color, new Vector2(0, 0));
verts[7] = new VertexPositionColorTexture(new Vector3(500, -500, 500), color, new Vector2(0, 0));
//Top face (y = 500)
verts[8] = new VertexPositionColorTexture(new Vector3(-500, 500, 500), color, new Vector2(0, 0));
verts[9] = new VertexPositionColorTexture(new Vector3(500, 500, 500), color, new Vector2(0, 0));
verts[10] = new VertexPositionColorTexture(new Vector3(-500, 500, 500), color, new Vector2(0, 0));
verts[11] = new VertexPositionColorTexture(new Vector3(500, 500, -500), color, new Vector2(0, 0));
//Front face (z = 500)
verts[12] = new VertexPositionColorTexture(new Vector3(500, 500, 500), color, new Vector2(0, 0));
verts[13] = new VertexPositionColorTexture(new Vector3(-500, 500, 500), color, new Vector2(0, 0));
verts[14] = new VertexPositionColorTexture(new Vector3(500, -500, 500), color, new Vector2(0, 0));
verts[15] = new VertexPositionColorTexture(new Vector3(-500, -500, 500), color, new Vector2(0, 0));
//Left face (x = -500)
verts[16] = new VertexPositionColorTexture(new Vector3(-500, 500, 500), color, new Vector2(0, 0));
verts[17] = new VertexPositionColorTexture(new Vector3(-500, 500, -500), color, new Vector2(0, 0));
verts[18] = new VertexPositionColorTexture(new Vector3(-500, -500, 500), color, new Vector2(0, 0));
verts[19] = new VertexPositionColorTexture(new Vector3(500, -500, -500), color, new Vector2(0, 0));
//Bottom face (y = -500)
verts[20] = new VertexPositionColorTexture(new Vector3(-500, -500 ,-500), color, new Vector2(0, 0));
verts[21] = new VertexPositionColorTexture(new Vector3(500, -500, -500), color, new Vector2(0, 0));
verts[23] = new VertexPositionColorTexture(new Vector3(-500, -500, 500), color, new Vector2(0, 0));
verts[24] = new VertexPositionColorTexture(new Vector3(500, -500, 500), color, new Vector2(0, 0));
vertexDictionary.Add("Skybox", new BufferedVertexTextureData(verts, PrimitiveType.TriangleStrip, 21));
}
BufferedVertexTextureData.cs:
public class BufferedVertexTextureData : VertexTextureData
{
protected VertexBuffer vertsBuffer;
public BufferedVertexTextureData(VertexPositionColorTexture[] verts, PrimitiveType primitiveType, int primitiveCount)
: base(verts, primitiveType, primitiveCount)
{
this.vertsBuffer = new VertexBuffer(Object3D.Game.GraphicsDevice, typeof(VertexPositionColorTexture), verts.Length, BufferUsage.WriteOnly);
this.vertsBuffer.SetData(verts, 0, verts.Length);
}
public override void Draw(BasicEffect effect)
{
//apply all changes
effect.CurrentTechnique.Passes[0].Apply();
//instruct gfx card to use vertsbuffer
Game.GraphicsDevice.SetVertexBuffer(vertsBuffer);
//draw
Game.GraphicsDevice.DrawPrimitives(PrimitiveType, 0, PrimitiveCount);
}
}
Thanks for the help in advance. It's greatly appreciated.
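For comparison, one layout that can't open up gaps when scaled is an indexed triangle list: eight shared corner vertices and 36 indices, so every face reuses exactly the same corners. This is a minimal sketch, not the project's actual method; the helper name, the size parameter, and the draw comment are illustrative only:
private void loadSkyboxIndexed(Color color, float s)
{
// 8 shared corners of a cube spanning -s..+s on each axis.
Vector3[] p =
{
new Vector3(-s, -s, -s), new Vector3(s, -s, -s),
new Vector3(s, s, -s), new Vector3(-s, s, -s),
new Vector3(-s, -s, s), new Vector3(s, -s, s),
new Vector3(s, s, s), new Vector3(-s, s, s)
};
VertexPositionColorTexture[] verts = new VertexPositionColorTexture[8];
for (int i = 0; i < 8; i++)
verts[i] = new VertexPositionColorTexture(p[i], color, Vector2.Zero);
// 12 triangles, two per face, all wound the same way (outward-facing here).
// For a skybox seen from the inside, reverse the winding or use RasterizerState.CullNone.
short[] indices =
{
0, 2, 1,  0, 3, 2,   // back   (z = -s)
1, 2, 6,  1, 6, 5,   // right  (x = +s)
3, 7, 6,  3, 6, 2,   // top    (y = +s)
4, 5, 6,  4, 6, 7,   // front  (z = +s)
0, 4, 7,  0, 7, 3,   // left   (x = -s)
0, 1, 5,  0, 5, 4    // bottom (y = -s)
};
VertexBuffer vb = new VertexBuffer(Object3D.Game.GraphicsDevice, typeof(VertexPositionColorTexture), verts.Length, BufferUsage.WriteOnly);
vb.SetData(verts);
IndexBuffer ib = new IndexBuffer(Object3D.Game.GraphicsDevice, IndexElementSize.SixteenBits, indices.Length, BufferUsage.WriteOnly);
ib.SetData(indices);
// Bind both buffers and draw 12 primitives with DrawIndexedPrimitives(PrimitiveType.TriangleList, ...).
}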
I need to create a framed GraphicsPath that self-intersects, giving a sense of z-order:
The code that I used to obtain the image is the following:
private void Example1(PaintEventArgs e) {
Brush brush = new SolidBrush(Color.FromArgb(200, Color.LightBlue));
GraphicsPath path1 = new GraphicsPath(FillMode.Winding);
path1.AddLines(new Point[] {
new Point(400, 200),
new Point(400, 300),
new Point(100, 300),
new Point(100, 400),
new Point(500, 400),
new Point(500, 100)
});
e.Graphics.FillPath(brush, path1);
e.Graphics.DrawPath(Pens.Blue, path1);
GraphicsPath path2 = new GraphicsPath(FillMode.Winding);
path2.AddLines(new Point[] {
new Point(500, 100),
new Point(200, 100),
new Point(200, 500),
new Point(300, 500),
new Point(300, 200),
new Point(400, 200)
});
e.Graphics.FillPath(brush, path2);
e.Graphics.DrawPath(Pens.Blue, path2);
}
in which I draw the two paths independently.
I need to handle it as a single graphic object, but if I join the paths I get this image:
Example code:
private void Example2(PaintEventArgs e) {
Brush brush = new SolidBrush(Color.FromArgb(200, Color.LightBlue));
GraphicsPath path1 = new GraphicsPath(FillMode.Winding);
path1.AddLines(new Point[] {
new Point(400, 200),
new Point(400, 300),
new Point(100, 300),
new Point(100, 400),
new Point(500, 400),
new Point(500, 100)
});
GraphicsPath path2 = new GraphicsPath(FillMode.Winding);
path2.AddLines(new Point[] {
new Point(500, 100),
new Point(200, 100),
new Point(200, 500),
new Point(300, 500),
new Point(300, 200),
new Point(400, 200)
});
path1.AddPath(path2, true);
e.Graphics.FillPath(brush, path1);
e.Graphics.DrawPath(Pens.Blue, path1);
}
I get the same problem if I use StartFigure/CloseFigure. Maybe I could solve it using the SetMarkers method together with a GraphicsPathIterator, but that seems overly complicated.
The simplest way I found is to use a GraphicsPathIterator. This way I can store multiple figures in a single path and still have the flexibility I need while painting. The only drawback is that the paint method has to be modified accordingly.
Here is an example that defines the path and also does the painting:
private void Example4(PaintEventArgs e) {
Brush brush = new SolidBrush(Color.FromArgb(200, Color.LightBlue));
GraphicsPath path = new GraphicsPath(FillMode.Winding);
path.StartFigure();
path.AddLines(new Point[] {
new Point(400, 200),
new Point(400, 300),
new Point(100, 300),
new Point(100, 400),
new Point(500, 400),
new Point(500, 100)
});
path.StartFigure();
path.AddLines(new Point[] {
new Point(500, 100),
new Point(200, 100),
new Point(200, 500),
new Point(300, 500),
new Point(300, 200),
new Point(400, 200)
});
GraphicsPathIterator pathIterator = new GraphicsPathIterator(path);
GraphicsPath p = new GraphicsPath();
while (pathIterator.NextSubpath(p, out bool isClosed) > 0) {
e.Graphics.FillPath(brush, p);
e.Graphics.DrawPath(Pens.Blue, p);
}
}
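As a side note, the SetMarkers route mentioned earlier is not much heavier than this: mark the end of each figure and iterate with NextMarker instead of NextSubpath. A rough sketch along the lines of Example4 (untested, same point data):
private void Example5(PaintEventArgs e) {
Brush brush = new SolidBrush(Color.FromArgb(200, Color.LightBlue));
GraphicsPath path = new GraphicsPath(FillMode.Winding);
path.StartFigure();
path.AddLines(new Point[] { new Point(400, 200), new Point(400, 300), new Point(100, 300),
new Point(100, 400), new Point(500, 400), new Point(500, 100) });
path.SetMarkers();   // marks the end of the first figure
path.StartFigure();
path.AddLines(new Point[] { new Point(500, 100), new Point(200, 100), new Point(200, 500),
new Point(300, 500), new Point(300, 200), new Point(400, 200) });
path.SetMarkers();   // marks the end of the second figure
GraphicsPathIterator pathIterator = new GraphicsPathIterator(path);
GraphicsPath p = new GraphicsPath();
while (pathIterator.NextMarker(p) > 0) {
e.Graphics.FillPath(brush, p);
e.Graphics.DrawPath(Pens.Blue, p);
}
}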
I have a little problem with rendering in my SharpDX Direct3D 11 app.
I have been testing rendering the scene to a texture and then drawing that texture onto the back buffer, but unfortunately the render texture does not contain the primitives that should be drawn; it is only filled with the clear color.
Whole project on github: https://github.com/Kordi3112/SharpDXTest11
Here is the main code with the rendering methods:
public override void Render()
{
//Camera
var proj = Matrix.OrthoLH(3 * Form.Bounds.Width / Form.Bounds.Height, 3, 0.01f, 100f);
var view = Matrix.LookAtLH(new Vector3(0, 0, -10), new Vector3(0, 0, 20), Vector3.UnitY);
var viewProj = Matrix.Multiply(view, proj);
var world = Matrix.Identity;
var worldViewProj = world * viewProj;
worldViewProj.Transpose();
//Update wvp matrix
Context.UpdateSubresource(ref worldViewProj, ContantBuffer);
DrawOnTexture();
//Set BackBuffer as render target
Context.OutputMerger.SetTargets(depthView, renderView);
// Clear views
Context.ClearDepthStencilView(depthView, DepthStencilClearFlags.Depth, 1.0f, 0);
Context.ClearRenderTargetView(renderView, Color.Pink);
//Set TextureColor Shader
Effect2.ApplyShader(Context);
//Set Buffers
Context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(VertexBuffer2, Utilities.SizeOf<VertexPositionColorTexture>(), 0));
Context.InputAssembler.SetIndexBuffer(IndexBuffer, Format.R32_UInt, 0);
//Set Texture to Shader
Context.PixelShader.SetShaderResource(0, RenderTexture.ShaderResourceView);
//Draw
Context.DrawIndexed(6, 0, 0);
// Present!
SwapChain.Present(0, PresentFlags.None);
}
private void DrawOnTexture()
{
//Set Color Shader
Effect1.ApplyShader(Context);
//Set Buffers
Context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(VertexBuffer, Utilities.SizeOf<VertexPositionColor>(), 0));
Context.InputAssembler.SetIndexBuffer(IndexBuffer, Format.R32_UInt, 0);
//Set Target
RenderTexture.SetRenderTarget(Context, depthView);
//Clear targets - green background
RenderTexture.ClearRenderTarget(Context, depthView, 0, 1, 0, 1);
//Draw on RenderTarget
Context.DrawIndexed(6, 0, 0);
}
After the call to Context.DrawIndexed(6, 0, 0) in DrawOnTexture(), the primitive should be drawn on the texture.
What the code above does:
What I wanted to get:
What's wrong with my code?
I'm sure the problem is not the matrix or the camera. When I modify the code to render the primitive directly to the back buffer, it draws normally.
public override void Render()
{
//Camera
var proj = Matrix.OrthoLH(3 * Form.Bounds.Width / Form.Bounds.Height, 3, 0.01f, 100f);
var view = Matrix.LookAtLH(new Vector3(0, 0, -10), new Vector3(0, 0, 20), Vector3.UnitY);
var viewProj = Matrix.Multiply(view, proj);
var world = Matrix.Identity;
var worldViewProj = world * viewProj;
worldViewProj.Transpose();
//Update wvp matrix
Context.UpdateSubresource(ref worldViewProj, ContantBuffer);
//DrawOnTexture();
//Set BackBuffer as render target
Context.OutputMerger.SetTargets(depthView, renderView);
// Clear views
Context.ClearDepthStencilView(depthView, DepthStencilClearFlags.Depth, 1.0f, 0);
Context.ClearRenderTargetView(renderView, Color.Pink);
//Set Color Shader
Effect1.ApplyShader(Context);
//Set Buffers
Context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(VertexBuffer, Utilities.SizeOf<VertexPositionColor>(), 0));
Context.InputAssembler.SetIndexBuffer(IndexBuffer, Format.R32_UInt, 0);
//Set Texture to Shader
//Context.PixelShader.SetShaderResource(0, RenderTexture.ShaderResourceView);
//Draw
Context.DrawIndexed(6, 0, 0);
// Present!
SwapChain.Present(0, PresentFlags.None);
}
Output:
Vertex buffer declarations:
//Position Color
VertexBuffer = Buffer.Create(Device, BindFlags.VertexBuffer, new[] {
new VertexPositionColor(new Vector4(-1, -1, 0, 1), Color.Red.ToVector4()),
new VertexPositionColor(new Vector4(-1, 1, 0, 1), Color.Green.ToVector4()),
new VertexPositionColor(new Vector4(1, 1, 0, 1), Color.Blue.ToVector4()),
new VertexPositionColor(new Vector4(1, -1, 0, 1), Color.Yellow.ToVector4())
});
//Position Color Texture
VertexBuffer2 = Buffer.Create(Device, BindFlags.VertexBuffer, new[] {
new VertexPositionColorTexture(new Vector4(-1, -1, 0, 1), Color.White.ToVector4(), new Vector2(0,1)),
new VertexPositionColorTexture(new Vector4(-1, 1, 0, 1), Color.White.ToVector4(),new Vector2(0,0)),
new VertexPositionColorTexture(new Vector4(1, 1, 0, 1), Color.White.ToVector4(),new Vector2(1,0)),
new VertexPositionColorTexture(new Vector4(1, -1, 0, 1), Color.White.ToVector4(),new Vector2(1,1))
});
IndexBuffer = Buffer.Create(Device, BindFlags.IndexBuffer, new[] {
0,1,2,
0,2,3
});
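Not a diagnosis, but one thing worth checking when an off-screen pass comes out as nothing but clear color is that a viewport matching the texture is set before drawing to it, and that the target is bound before the state and draw calls. A sketch of that ordering, reusing the project's fields; textureWidth and textureHeight are placeholders for the render texture's size:
private void DrawOnTexture()
{
//Bind the off-screen target (and depth) first so everything below applies to it
RenderTexture.SetRenderTarget(Context, depthView);
RenderTexture.ClearRenderTarget(Context, depthView, 0, 1, 0, 1);
//Viewport sized to the render texture (textureWidth/textureHeight are placeholders here)
Context.Rasterizer.SetViewport(0, 0, textureWidth, textureHeight);
//Shaders and geometry for the colored quad
Effect1.ApplyShader(Context);
Context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(VertexBuffer, Utilities.SizeOf<VertexPositionColor>(), 0));
Context.InputAssembler.SetIndexBuffer(IndexBuffer, Format.R32_UInt, 0);
Context.DrawIndexed(6, 0, 0);
}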
I'm making a 2.5D game with MonoGame. I have 2D meshes in 3D space and I want them to be lit, but if I enable default lighting they render pitch black.
Here is the render code:
BasicEffect effect = new BasicEffect(graphicsDevice);
VertexPositionTexture[] vertices =
{
new VertexPositionTexture(new Vector3(-.5f + Position.X, 0.5f + Position.Y, 0.0f), new Vector2(0, 0)),
new VertexPositionTexture(new Vector3(-.5f + Position.X, -.5f + Position.Y, 0.0f), new Vector2(0, 1)),
new VertexPositionTexture(new Vector3(0.5f + Position.X, 0.5f + Position.Y, 0.0f), new Vector2(1, 0)),
new VertexPositionTexture(new Vector3(0.5f + Position.X, -.5f + Position.Y, 0.0f), new Vector2(1, 1)),
};
graphicsDevice.BlendState = BlendState.AlphaBlend;
effect.EnableDefaultLighting();
effect.LightingEnabled = true;
//effect.AmbientLightColor = new Vector3(.75f, .75f, .75f);
effect.DirectionalLight0.DiffuseColor = new Vector3(.75f, .75f, .75f);
effect.DirectionalLight0.Direction = new Vector3(0, 0, -1);
effect.DirectionalLight0.SpecularColor = new Vector3(.75f, .60f, .60f);
effect.TextureEnabled = true;
effect.Texture = Subtexture;
effect.Projection = camera.Fov;
effect.View = camera.ViewMatrix;
effect.World = camera.WorldMatrix;
VertexBuffer buffer = new VertexBuffer(graphicsDevice, typeof(VertexPositionTexture), vertices.Length, BufferUsage.WriteOnly);
buffer.SetData(vertices);
graphicsDevice.SetVertexBuffer(buffer);
graphicsDevice.RasterizerState = RasterizerState.CullNone;
foreach (EffectPass pass in effect.CurrentTechnique.Passes)
{
pass.Apply();
graphicsDevice.DrawUserPrimitives(PrimitiveType.TriangleStrip, vertices, 0, 2);
}
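One detail that commonly causes exactly this with BasicEffect: its lighting path shades against per-vertex normals, and VertexPositionTexture has no normal element, so there is nothing for DirectionalLight0 to illuminate. A minimal sketch of the same quad using VertexPositionNormalTexture instead (the normal pointing along +Z is an assumption about which way the quad faces):
// Same quad, but each vertex carries a normal for the lighting to use.
VertexPositionNormalTexture[] vertices =
{
new VertexPositionNormalTexture(new Vector3(-.5f + Position.X, 0.5f + Position.Y, 0.0f), Vector3.UnitZ, new Vector2(0, 0)),
new VertexPositionNormalTexture(new Vector3(-.5f + Position.X, -.5f + Position.Y, 0.0f), Vector3.UnitZ, new Vector2(0, 1)),
new VertexPositionNormalTexture(new Vector3(0.5f + Position.X, 0.5f + Position.Y, 0.0f), Vector3.UnitZ, new Vector2(1, 0)),
new VertexPositionNormalTexture(new Vector3(0.5f + Position.X, -.5f + Position.Y, 0.0f), Vector3.UnitZ, new Vector2(1, 1)),
};
VertexBuffer buffer = new VertexBuffer(graphicsDevice, typeof(VertexPositionNormalTexture), vertices.Length, BufferUsage.WriteOnly);
buffer.SetData(vertices);
// The rest of the effect setup stays the same; DrawUserPrimitives then takes
// the VertexPositionNormalTexture array instead.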
I'm having a problem with primitives in XNA. As you can see in the link, http://imgur.com/12UTd2s, some of the textured walls are see-through and some aren't. Can someone explain why this happens and help me come up with a solution?
Here's the declaration of a see-through wall:
testWall.Add(new VertexPositionNormalTexture(new Vector3(x2, 0, z2), new Vector3(1, 0, 0), new Vector2(0,0)));
testWall.Add(new VertexPositionNormalTexture(new Vector3(x2, 0, z2 - 50), new Vector3(1, 0, 0), new Vector2(1,0)));
testWall.Add(new VertexPositionNormalTexture(new Vector3(x2, 20, z2), new Vector3(1, 0, 0), new Vector2(0,1)));
testWall.Add(new VertexPositionNormalTexture(new Vector3(x2, 20, z2), new Vector3(1, 0, 0), new Vector2(0,1)));
testWall.Add(new VertexPositionNormalTexture(new Vector3(x2, 20, z2 - 50), new Vector3(1, 0, 0), new Vector2(1,1)));
testWall.Add(new VertexPositionNormalTexture(new Vector3(x2, 0, z2 - 50), new Vector3(1, 0, 0), new Vector2(1,0)));
And here's the declaration of a wall I can't see through:
testWall.Add(new VertexPositionNormalTexture(new Vector3(x1, 0, z1 - 50), new Vector3(-1, 0, 0), new Vector2(0,1)));
testWall.Add(new VertexPositionNormalTexture(new Vector3(x1, 0, z1), new Vector3(-1, 0, 0), new Vector2(0,0)));
testWall.Add(new VertexPositionNormalTexture(new Vector3(x1, 20, z1 - 50), new Vector3(-1, 0, 0), new Vector2(1,1)));
testWall.Add(new VertexPositionNormalTexture(new Vector3(x1, 20, z1 - 50), new Vector3(-1, 0, 0), new Vector2(1,1)));
testWall.Add(new VertexPositionNormalTexture(new Vector3(x1, 20, z1), new Vector3(-1, 0, 0), new Vector2(1,0)));
testWall.Add(new VertexPositionNormalTexture(new Vector3(x1, 0, z1), new Vector3(-1, 0, 0), new Vector2(0,0)));
Culling is set to None, and x1, x2, z1, z2 are vertex positions. Any thoughts?
Thank you
The reason is not that the walls are see-through; it's that you don't have depth buffering enabled, so they appear in the order they're drawn (bottom to top).
Prior to rendering you'll want to set the render state:
Renderer.GraphicsDevice.DepthStencilState = DepthStencilState.Default;
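For context, that state needs to be set before the 3D geometry is drawn each frame, because other code, SpriteBatch.Begin in particular, switches depth testing off again (it sets DepthStencilState.None). A typical Draw method; DrawWalls is a hypothetical stand-in for whatever issues the wall primitives:
protected override void Draw(GameTime gameTime)
{
GraphicsDevice.Clear(Color.CornflowerBlue);
// Re-enable depth testing every frame before drawing the 3D geometry.
GraphicsDevice.DepthStencilState = DepthStencilState.Default;
GraphicsDevice.BlendState = BlendState.Opaque;
DrawWalls();   // hypothetical: draws the textured wall primitives
base.Draw(gameTime);
}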
I'm trying to draw a polygon with more than one hole. I tried the following code and it does not work correctly. Please advise.
PointF[] mypoly = new PointF[6 + 5 + 5];
mypoly[0] = new PointF(0, 0);
mypoly[1] = new PointF(100, 0);
mypoly[2] = new PointF(100, 100);
mypoly[3] = new PointF(0, 100);
mypoly[4] = new PointF(10, 80);
mypoly[5] = new PointF(0, 0);
mypoly[6] = new PointF(10, 10);
mypoly[7] = new PointF(10, 20);
mypoly[8] = new PointF(20, 20);
mypoly[9] = new PointF(20, 10);
mypoly[10] = new PointF(10, 10);
mypoly[11] = new PointF(40, 10);
mypoly[12] = new PointF(40, 20);
mypoly[13] = new PointF(60, 20);
mypoly[14] = new PointF(60, 10);
mypoly[15] = new PointF(40, 10);
g.FillPolygon(new SolidBrush(Color.Red), mypoly, FillMode.Winding);
The first part is the outer polygon; the second and third parts are the two holes inside it.
Use a GraphicsPath instead. You can draw it with Graphics.FillPath, like this:
using System.Drawing.Drawing2D;
...
using (var gp = new GraphicsPath()) {
PointF[] outer = new PointF[] { new PointF(0, 0), new PointF(100, 0),
new PointF(100, 100), new PointF(0, 100), new PointF(10, 80),new PointF(0, 0) };
gp.AddPolygon(outer);
PointF[] inner1 = new PointF[] { new PointF(10, 10), new PointF(10, 20),
new PointF(20, 20), new PointF(20, 10), new PointF(10, 10) };
gp.AddPolygon(inner1);
PointF[] inner2 = new PointF[] { new PointF(40, 10), new PointF(40, 20),
new PointF(60, 20), new PointF(60, 10), new PointF(40, 10) };
gp.AddPolygon(inner2);
e.Graphics.FillPath(Brushes.Black, gp);
}
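As a quick usage check, GraphicsPath.IsVisible honours the holes as well. These lines would go inside the using block above, before the path is disposed; the specific points are just examples:
bool inHole = gp.IsVisible(15, 15);    // false: the point lies inside the first hole
bool inShape = gp.IsVisible(50, 50);   // true: inside the outer polygon, outside both holes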