Why are my Textures in SharpDX 11 not working - C#

So I'm making a game right now, and I want textures on my cubes.
I have my shaders ready, but not my code (I think).
So what's happening right now is: nothing. The cube stays the default white I set.
Any idea why?
Shader:
Texture2D Texture0 : register(t0);
SamplerState Sampler : register(s0);
float4 PSMain(PixelShaderInput pixel) : SV_Target
{
float3 normal = normalize(pixel.WorldNormal);
float3 toEye = normalize(CameraPosition - pixel.WorldPosition);
float3 toLight = normalize(-Light.Direction);
float4 sample = (float4)1.0f;
if (HasTexture)
sample = Texture0.Sample(Sampler, pixel.TextureUV);
float3 ambient = MaterialAmbient.rgb;
float3 emissive = MaterialEmissive.rgb;
float3 diffuse = Lambert(pixel.Diffuse, normal, toLight);
float3 specular = SpecularPhong(normal, toLight, toEye);
float3 color = (saturate(ambient+diffuse) * sample.rgb + specular) * Light.Color.rgb + emissive;
float alpha = pixel.Diffuse.a * sample.a;
return float4(color, alpha);
}
Renderer:
var srv = ShaderResourceView.FromFile(device, "texture.png");
var material = new ConstantBuffers.PerMaterial()
{
Ambient = mat.Ambient,
Diffuse = mat.Diffuse,
Emissive = mat.Emissive,
Specular = mat.Specular,
SpecularPower = mat.SpecularPower,
UVTransform = Matrix.Translation(1f, 0f, 0f)
};
int texIndxOffset = mIndx * Mesh.MaxTextures;
material.HasTexture = (uint)(text != null && text.texture != null ? 1 : 0);
context.PixelShader.SetShaderResource(0, (text != null && text.texture != null ? text.texture : null));
context.PixelShader.SetSampler(0, samplerState);
context.UpdateSubresource(ref material, PerMaterialBuffer);
context.InputAssembler.SetVertexBuffers(0, new VertexBufferBinding(vertexBuffer, Utilities.SizeOf<Vertex>(), 0));
context.InputAssembler.SetIndexBuffer(indexBuffers[(int)subMesh.IndexBufferIndex], Format.R16_UInt, 0);
context.InputAssembler.PrimitiveTopology = SharpDX.Direct3D.PrimitiveTopology.TriangleList;
context.DrawIndexed((int)subMesh.PrimCount * 3, (int)subMesh.StartIndex, 0);

So I fixed it. I am not really sure what fixed it, but it's working now.
It was something with the shader and my code.
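For anyone who lands here with the same symptom, two likely culprits worth checking (educated guesses from the code above, not a confirmed diagnosis): the C# PerMaterial struct must match the HLSL cbuffer's 16-byte packing exactly, otherwise HasTexture can read as 0 on the GPU and the sample is silently skipped; and UVTransform = Matrix.Translation(1f, 0f, 0f) shifts U by a whole tile, which is a no-op with wrap addressing but pins sampling to the border with clamp. A quick way to isolate the texture path is to bypass the lighting entirely:
// Debug variant of PSMain (a sketch): output the raw sample so the
// lighting, material and light-color terms cannot mask a texture
// problem. If the cube is still white, suspect the SRV/sampler binding
// or the HasTexture constant; if the texture shows up, suspect the
// lighting math.
float4 PSMain(PixelShaderInput pixel) : SV_Target
{
return Texture0.Sample(Sampler, pixel.TextureUV);
}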

Related

How to make a shader (Unity) interact with lights of a specific intensity?

I am working on a cel shader for a uni project that I would like to only be affected by scene lights over x intensity, and I am struggling to figure out how to implement this in the code. The code I have been working with I found online, and I have been using it to test and understand how shaders work. I am unsure whether I need to implement some kind of conditional statement. Some of the information I have read says that using conditional statements within shaders makes for poor performance.
I am new to shaders and any help would be greatly appreciated.
Thank you in advance :)
P.S. I am limited to using Unity 2018.3.8.
Properties {
[Header(Base Parameters)]
_Color ("Tint", Color) = (0, 0, 0, 1)
_MainTex ("Texture", 2D) = "white" {}
[HDR] _Emission ("Emission", color) = (0, 0, 0, 1)
[HDR]
_SpecularColor("Specular Color", Color) = (0.9, 0.9, 0.9, 1)
// Controls the size of the specular reflection.
_Glossiness("Glossiness", Float) = 32
[HDR]
_RimColor("Rim Color", Color) = (1,1,1,1)
_RimAmount("Rim Amount", Range(0, 1)) = 0.716
// Control how smoothly the rim blends when approaching unlit
// parts of the surface.
_RimThreshold("Rim Threshold", Range(0, 1)) = 0.1
}
SubShader {
Tags{ "RenderType"="Opaque" "Queue"="Geometry"}
CGPROGRAM
#pragma surface surf Stepped fullforwardshadows
#pragma target 3.0
sampler2D _MainTex;
fixed4 _Color;
half3 _Emission;
float4 _SpecularColor;
float _Glossiness;
float4 _RimColor;
float _RimAmount;
float _RimThreshold;
float3 worldPos;
float4 LightingStepped(SurfaceOutput s, float3 lightDir, half3 viewDir, float shadowAttenuation){
float shadow = shadowAttenuation;
s.Normal=normalize(s.Normal);
float diff = dot(s.Normal, lightDir);
float towardsLightChange = fwidth(diff);
float lightIntensity = smoothstep(0, towardsLightChange, diff);
float3 diffuse = _LightColor0.rgb * lightIntensity * s.Albedo;
float diffussAvg = (diffuse.r + diffuse.g + diffuse.b) / 3;
float3 halfVector = normalize(viewDir + lightDir);
float NdotH = dot(s.Normal, halfVector);
float specularIntensity = pow(NdotH * lightIntensity, _Glossiness * _Glossiness);
float specularIntensitySmooth = smoothstep(0.005, 0.01, specularIntensity);
float3 specular = specularIntensitySmooth * _SpecularColor.rgb * diffussAvg;
float rimDot = 1 - dot(viewDir, s.Normal);
float rimIntensity = rimDot * pow(dot(lightDir, s.Normal), _RimThreshold);
rimIntensity = smoothstep(_RimAmount - 0.01, _RimAmount + 0.01, rimIntensity);
float3 rim = rimIntensity * _RimColor.rgb * diffussAvg;
float4 color;
color.rgb = (diffuse + specular + rim) * shadow;
color.a = s.Alpha;
return color;
}
struct Input {
float2 uv_MainTex;
float3 worldPos;
};
void surf (Input i, inout SurfaceOutput o) {
worldPos = i.worldPos;
fixed4 col = tex2D(_MainTex, i.uv_MainTex);
col *= _Color;
o.Albedo = col.rgb;
o.Alpha = col.a;
o.Emission = _Emission;
}
ENDCG
}
FallBack "Standard"
}
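One way to approach the intensity cutoff, sketched against the shader above (untested in 2018.3.8): Unity premultiplies _LightColor0 by the light's intensity, so its luminance is a workable stand-in for the intensity value set in the inspector, and gating with step() keeps the code branchless, which sidesteps the conditional-performance worry. _MinLightIntensity is an assumed new property; it would also need _MinLightIntensity("Min Light Intensity", Float) = 0.5 added to the Properties block.
float _MinLightIntensity;
float4 LightingStepped(SurfaceOutput s, float3 lightDir, half3 viewDir, float shadowAttenuation){
// _LightColor0 arrives premultiplied by the light's intensity, so its
// luminance approximates the intensity set in the inspector.
float intensity = dot(_LightColor0.rgb, float3(0.299, 0.587, 0.114));
// step() is branchless: 0 below the threshold, 1 at or above it.
float gate = step(_MinLightIntensity, intensity);
float shadow = shadowAttenuation;
s.Normal = normalize(s.Normal);
float diff = dot(s.Normal, lightDir);
float towardsLightChange = fwidth(diff);
float lightIntensity = smoothstep(0, towardsLightChange, diff);
float3 diffuse = _LightColor0.rgb * lightIntensity * s.Albedo;
float diffuseAvg = (diffuse.r + diffuse.g + diffuse.b) / 3;
float3 halfVector = normalize(viewDir + lightDir);
float NdotH = dot(s.Normal, halfVector);
float specularIntensity = pow(NdotH * lightIntensity, _Glossiness * _Glossiness);
float specularIntensitySmooth = smoothstep(0.005, 0.01, specularIntensity);
float3 specular = specularIntensitySmooth * _SpecularColor.rgb * diffuseAvg;
float rimDot = 1 - dot(viewDir, s.Normal);
float rimIntensity = rimDot * pow(dot(lightDir, s.Normal), _RimThreshold);
rimIntensity = smoothstep(_RimAmount - 0.01, _RimAmount + 0.01, rimIntensity);
float3 rim = rimIntensity * _RimColor.rgb * diffuseAvg;
float4 color;
// Sub-threshold lights contribute nothing; emission is added by the
// surface shader outside this lighting function and stays unaffected.
color.rgb = (diffuse + specular + rim) * shadow * gate;
color.a = s.Alpha;
return color;
}
This is a drop-in replacement for the LightingStepped function in the shader above; everything below the gate is unchanged from the original.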

How to create a shader that can display a texture made out of many small images in Unity

So what I'm trying to do is load satellite images from an SQL table and wrap them around a sphere to create a globe. I know I've got loading the images covered, I'm just not sure how to make my shader display the images in the correct orientation.
I've gone to the Unity Forums as well as checked out this code from the Unity Docs.
Using the linked shader code and the help I received on the forums, here's the code I've ended up with:
Properties
{
_MainTexArray("Tex", 2DArray) = "" {}
_SliceRange("Slices", Range(0,32)) = 6
_UVScale("UVScale", Float) = 1
_COLUMNS("Columns", Range(0, 5)) = 1
_ROWS("Rows", Range(0, 5)) = 1
_CELLS("Cells", Range(0, 32)) = 16
}
SubShader
{
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
// texture arrays are not available everywhere,
// only compile shader on platforms where they are
#pragma require 2darray
#include "UnityCG.cginc"
struct v2f
{
float3 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
float _SliceRange;
float _UVScale;
v2f vert(float4 vertex : POSITION)
{
v2f o;
o.vertex = UnityObjectToClipPos(vertex);
o.uv.xy = (vertex.xy + 0.5) * _UVScale;
o.uv.z = (vertex.z + 0.5) * _SliceRange;
return o;
}
float _COLUMNS; //Columns and rows only go between 0 and 1
float _ROWS;
float _CELLS;
UNITY_DECLARE_TEX2DARRAY(_MainTexArray);
half4 frag(v2f i) : SV_Target
{
float3 uv = float3(i.uv.x * _CELLS, i.uv.y * _CELLS, 0);
uv.z = floor(i.uv.x / _COLUMNS) * floor(i.uv.y / _ROWS);
return UNITY_SAMPLE_TEX2DARRAY(_MainTexArray, uv / _CELLS);
}
ENDCG
}
}
Using that I've gotten my materials to look like this:
Here's the code that I'm using to load the SQL images:
textures = new Texture2D[size];
for (int x = 0; x <= 7; x++)
{
for (int y = 0; y <= 3; y++)
{
textures[count] = tiler.Read(x, y, 2); //The z determines the zoom level, so I wouldn't want them all loaded at once
if (textures[count] != null) TextureScale.Bilinear(textures[count], 256, 256);
count++;
}
}
texArr = new Texture2DArray(256, 256, textures.Length, TextureFormat.RGBA32, true, true);
texArr.filterMode = FilterMode.Bilinear;
texArr.wrapMode = TextureWrapMode.Repeat;
for (int i = 0; i < textures.Length; i++)
{
if (textures[i] == null) continue;
texArr.SetPixels(textures[i].GetPixels(0), i, 0);
}
texArr.Apply();
mat.SetTexture("_MainTexArray", texArr);
In the SQL Table, the x and y determines the position of the tile and the z determines the zoom level. I'm just working with one zoom level for now.
Sorry for linking the whole shader class, but I'm not very experienced with shaders so I don't quite know where the problem lies.
If you can index into the array of photos such that you effectively have an equirectangular projection of the globe, you could try using a modified form of the shader code by Farfarer from the Unity forums copied and modified slightly below:
Shader "Custom/Equirectangular" {
Properties{
_MainTexArray("Tex", 2DArray) = "" {}
_COLUMNS("Columns", Int) = 2
_ROWS("Rows", Int) = 2
}
SubShader{
Pass {
Tags {"LightMode" = "Always"}
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma require 2darray
#include "UnityCG.cginc"
struct appdata {
float4 vertex : POSITION;
float3 normal : NORMAL;
};
struct v2f
{
float4 pos : SV_POSITION;
float3 normal : TEXCOORD0;
};
v2f vert(appdata v)
{
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);
o.normal = v.normal;
return o;
}
UNITY_DECLARE_TEX2DARRAY(_MainTexArray);
int _ROWS;
int _COLUMNS;
#define PI 3.141592653589793
inline float2 RadialCoords(float3 a_coords)
{
float3 a_coords_n = normalize(a_coords);
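// lon is the angle around the y axis in [-PI, PI]; lat is the angle
// down from the +y pole in [0, PI]. Dividing by PI and remapping below
// puts (lon, lat) into the [0,1] equirectangular UV square, with
// v = 1 at the +y pole.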
float lon = atan2(a_coords_n.z, a_coords_n.x);
float lat = acos(a_coords_n.y);
float2 sphereCoords = float2(lon, lat) * (1.0 / PI);
return float2(sphereCoords.x * 0.5 + 0.5, 1 - sphereCoords.y);
}
float4 frag(v2f IN) : COLOR
{
float2 equiUV = RadialCoords(IN.normal);
float2 texIndex;
float2 uvInTex = modf(equiUV * float2(_COLUMNS,_ROWS), texIndex);
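// modf splits the scaled UV: the integer part (written to texIndex)
// selects the tile, and the returned fractional part is the UV within
// that tile.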
int flatTexIndex = texIndex.x * _ROWS + texIndex.y;
return UNITY_SAMPLE_TEX2DARRAY(_MainTexArray,
float3(uvInTex, flatTexIndex));
}
ENDCG
}
}
FallBack "VertexLit"
}
You also need to use
texArr = new Texture2DArray(256, 256, textures.Length, TextureFormat.RGBA32, false, true);
instead of
texArr = new Texture2DArray(256, 256, textures.Length, TextureFormat.RGBA32, true, true);
It works for me if I attach this script to a sphere:
Material myMat;
public List<Texture2D> texes;
IEnumerator Start()
{
yield return null;
myMat = GetComponent<Renderer>().material;
Texture2DArray texArr = new Texture2DArray(256, 256, 9,
TextureFormat.RGBA32, false, true);
texArr.filterMode = FilterMode.Bilinear;
texArr.wrapMode = TextureWrapMode.Clamp;
for (int i = 0 ; i < texes.Count ; i++)
{
texArr.SetPixels(texes[i].GetPixels(), i, 0);
}
texArr.Apply();
myMat.SetTexture("_MainTexArray", texArr);
}
and in texes I add these nine textures in order (the tile images, numbered 0 through 8, are omitted here);
and set 3 for Rows and Columns, it produces decent results:
If bilinear filtering is enabled, there are still some artifacts at the borders of the textures, though you have to zoom in quite close to see them. Due to the lack of adjacent pixels for bilinear filtering, they mostly appear as improperly blended or missing pixels:
Of course this example doesn't properly tile so there is an obvious seam along one longitude line:
Since this setup expects normals from a sphere, this only works on things with normals that approximate those of a sphere. So it would not render properly on a plane, for instance.
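If the border artifacts matter, one common mitigation (my suggestion, not something the shader above does) is to inset each tile's UVs by half a texel so bilinear filtering never reads past the tile edge. In frag above, just before sampling:
// Shrink the per-tile UV range from [0,1] to [texel/2, 1 - texel/2];
// 256.0 matches the 256x256 tiles used here.
float texel = 1.0 / 256.0;
uvInTex = uvInTex * (1.0 - texel) + 0.5 * texel;
return UNITY_SAMPLE_TEX2DARRAY(_MainTexArray, float3(uvInTex, flatTexIndex));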

Normal mapping giving weird results (C# XNA)

Okay, so this problem is kinda huge, and for that reason I would rather post as little code as possible and instead have you come with ideas as to what it could be. I will post the code where I feel the problem could be. If you would like to see more, just ask and I will provide it.
So, I just "stole" a shader for my game. By stole I mean I found a tutorial that I had done before, and just copied the end result. Thus I know the shader should work, because I've used it before.
I have a custom mesh class, and also a custom vertex struct. I have never made a vertex struct before, so initially I thought that was where the problem was.
But I have some counterarguments I later found:
All of their variables seem to be right, and everything works except the bump mapping.
Changing the tangent and/or binormal seems to have no effect on the shading whatsoever. So I think the mistake is not in how they are calculated, but rather in how they are used.
http://imageshack.us/photo/my-images/838/w6kv.png/
This is the output I get. Keep in mind that this is a voxel engine. As you can see, all the boxes have the same weird normal-map shadow. However, this is the normal map:
http://imageshack.us/photo/my-images/268/r7jt.jpg/
As you can see, they don't fit whatsoever. Now, this could be one of three things as I see it:
It could be the way I set up the shader in XNA.
It could also be something in the vertex struct.
It could also be the way I call the actual drawing function.
So here's the code for those three things (and the shader as well):
Shader Setup:
((Here I set up the data for the shader, and then draw the mesh))
// Bind the parameters with the shader.
BBS.Parameters["World"].SetValue(Matrix.Identity);
BBS.Parameters["View"].SetValue(camera.viewMatrix);
BBS.Parameters["Projection"].SetValue(camera.projectionMatrix);
BBS.Parameters["AmbientColor"].SetValue(Color.White.ToVector4());
BBS.Parameters["AmbientIntensity"].SetValue(0.5f);
Vector3 LD = new Vector3(0, 1, -1);
LD.Normalize();
BBS.Parameters["DiffuseColor"].SetValue(Color.White.ToVector4());
BBS.Parameters["DiffuseIntensity"].SetValue(0);
BBS.Parameters["LightDirection"].SetValue(LD);
BBS.Parameters["EyePosition"].SetValue(new Vector3(0.0f, 2.0f, 5.0f));
BBS.Parameters["SpecularColor"].SetValue(Color.White.ToVector4());
BBS.Parameters["ColorMap"].SetValue(cubeTexture);
BBS.Parameters["NormalMap"].SetValue(Content.Load<Texture2D>("images"));
BBS.CurrentTechnique = BBS.Techniques["Technique1"];
for (int i = 0; i < BBS.CurrentTechnique.Passes.Count; i++)
{
//EffectPass.Apply will update the device to
//begin using the state information defined in the current pass
BBS.CurrentTechnique.Passes[i].Apply();
//theMesh contains all of the information required to draw
//the current mesh
graphics.DrawUserPrimitives(PrimitiveType.TriangleList, Mesh.Vertices, 0, Mesh.NUM_TRIANGLES);
}
Vertex struct:
public struct VertexPositionNormalTangentBinormalTexture : IVertexType
{
public Vector3 Position;
public Vector3 Normal;
public Vector2 TextureCoordinate;
public Vector3 Tangent;
public Vector3 Binormal;
public static readonly VertexDeclaration VertexElements = new VertexDeclaration
(
new VertexElement(0, VertexElementFormat.Vector3, VertexElementUsage.Position, 0),
new VertexElement(12, VertexElementFormat.Vector3, VertexElementUsage.Normal, 0),
new VertexElement(24, VertexElementFormat.Vector2, VertexElementUsage.TextureCoordinate, 0),
new VertexElement(32, VertexElementFormat.Vector3, VertexElementUsage.Tangent, 0),
new VertexElement(44, VertexElementFormat.Vector3, VertexElementUsage.Binormal, 0)
);
VertexDeclaration IVertexType.VertexDeclaration { get { return VertexElements; } }
public static readonly int SizeInBytes = sizeof(float) * (3 + 3 + 2 + 3 + 3);
}
Shader:
// XNA 4.0 Shader Programming #4 - Normal Mapping
// Matrix
float4x4 World;
float4x4 View;
float4x4 Projection;
// Light related
float4 AmbientColor;
float AmbientIntensity;
float3 LightDirection;
float4 DiffuseColor;
float DiffuseIntensity;
float4 SpecularColor;
float3 EyePosition;
texture2D ColorMap;
sampler2D ColorMapSampler = sampler_state
{
Texture = <ColorMap>;
MinFilter = linear;
MagFilter = linear;
MipFilter = linear;
};
texture2D NormalMap;
sampler2D NormalMapSampler = sampler_state
{
Texture = <NormalMap>;
MinFilter = linear;
MagFilter = linear;
MipFilter = linear;
};
// The input for the VertexShader
struct VertexShaderInput
{
float4 Position : POSITION0;
float2 TexCoord : TEXCOORD0;
float3 Normal : NORMAL0;
float3 Binormal : BINORMAL0;
float3 Tangent : TANGENT0;
};
// The output from the vertex shader, used for later processing
struct VertexShaderOutput
{
float4 Position : POSITION0;
float2 TexCoord : TEXCOORD0;
float3 View : TEXCOORD1;
float3x3 WorldToTangentSpace : TEXCOORD2;
};
// The VertexShader.
VertexShaderOutput VertexShaderFunction(VertexShaderInput input)
{
VertexShaderOutput output;
float4 worldPosition = mul(input.Position, World);
float4 viewPosition = mul(worldPosition, View);
output.Position = mul(viewPosition, Projection);
output.TexCoord = input.TexCoord;
output.WorldToTangentSpace[0] = mul(normalize(input.Tangent), World);
output.WorldToTangentSpace[1] = mul(normalize(input.Binormal), World);
output.WorldToTangentSpace[2] = mul(normalize(input.Normal), World);
output.View = normalize(float4(EyePosition,1.0) - worldPosition);
return output;
}
// The Pixel Shader
float4 PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
float4 color = tex2D(ColorMapSampler, input.TexCoord);
float3 normalMap = 2.0 *(tex2D(NormalMapSampler, input.TexCoord)) - 1.0;
normalMap = normalize(mul(normalMap, input.WorldToTangentSpace));
float4 normal = float4(normalMap,1.0);
float4 diffuse = saturate(dot(-LightDirection,normal));
float4 reflect = normalize(2*diffuse*normal-float4(LightDirection,1.0));
float4 specular = pow(saturate(dot(reflect,input.View)),32);
return color * AmbientColor * AmbientIntensity +
color * DiffuseIntensity * DiffuseColor * diffuse +
color * SpecularColor * specular;
}
// Our Techinique
technique Technique1
{
pass Pass1
{
VertexShader = compile vs_2_0 VertexShaderFunction();
PixelShader = compile ps_2_0 PixelShaderFunction();
}
}
This is not done in the correct order:
output.WorldToTangentSpace[0] = mul(normalize(input.Tangent), World);
output.WorldToTangentSpace[1] = mul(normalize(input.Binormal), World);
output.WorldToTangentSpace[2] = mul(normalize(input.Normal), World);
It should be like this:
output.WorldToTangentSpace[0] = normalize(mul(input.Tangent, World));
output.WorldToTangentSpace[1] = normalize(mul(input.Binormal, World));
output.WorldToTangentSpace[2] = normalize(mul(input.Normal, World));
Otherwise, your normals will get scaled by the world-space transformation and will result in very bright and very dark patches (which looks like your problem). BTW, seeing as you're interested in normal mapping for voxel engines, check out the following videos that I made:
http://www.youtube.com/watch?v=roMlOmNgr_w
http://www.youtube.com/watch?v=qkfHoGzQ8ZY
Hope you get inspired and that you complete your project.
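A small follow-up (my note, not part of the fix above): interpolation across a triangle denormalizes those basis vectors, so renormalizing them at the top of PixelShaderFunction before the matrix multiply keeps the result stable. On a voxel engine's flat faces the difference is small, but it costs little:
// Rebuild a normalized basis after interpolation, then take the
// normal-map vector into world space as before.
float3x3 tbn = input.WorldToTangentSpace;
tbn[0] = normalize(tbn[0]); // tangent row
tbn[1] = normalize(tbn[1]); // binormal row
tbn[2] = normalize(tbn[2]); // normal row
float3 worldNormal = normalize(mul(normalMap, tbn));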

Direct3D9 / HLSL Depth semantic not working

I have a problem with my shader code (HLSL). I use "DirectX for Managed Code" and Shader Model 3.0. I try to write a custom depth value into the depth buffer by using the DEPTH semantic in the pixel shader output struct:
struct PSOutput
{
float4 col : COLOR0;
float dept : DEPTH;
};
and I use this struct as the return value in my pixel shader:
PSOutput PSFunction(VertexShaderOutput input)
{
PSOutput output;
...
output.col = float4(...);
output.dept = ...;
return output;
}
DirectX throws an exception when I try to compile this shader, but gives no detailed information why. But when I remove the depth variable from the output struct, it works! I also tried writing DEPTH0 as the semantic, but with no success. I hope someone can help me with this.
EDIT:
If I write the following, it fails:
PSOutput PSFunction(VertexShaderOutput input)
{
PSOutput output;
float resDepth = input.Position[2] / input.Position[3];
if(...)
{
resDepth = ...;
}
output.col = float4(...);
output.dept = resDepth;
return output;
}
but if I write this code, it compiles:
PSOutput PSFunction(VertexShaderOutput input)
{
PSOutput output;
float resDepth = input.Position[2] / input.Position[3];
if(...)
{
resDepth = ...;
}
output.col = float4(...);
output.dept = 0.5;
return output;
}
Any ideas?
Here's the full code:
float4x4 World;
float4x4 View;
float4x4 Projection;
float4 CamPos;
float4 LightDir;
float4 ObjColor;
static const float PI = 3.14159265f;
static const int MAX_FU = 32;
float fuPercent[MAX_FU];
float4 fuColor[MAX_FU];
int buildDir;
static int fuCount = 2;
float4 boxMin;
float4 boxMax;
struct VertexShaderInput
{
float4 Position : POSITION0;
float3 Normal : NORMAL;
};
struct VertexShaderOutput
{
float4 Position : POSITION0;
float3 Normal : NORMAL;
float3 ExactPos : TEXCOORD1;
};
struct PSOutput
{
float4 col : COLOR0;
//float dept : DEPTH;
};
VertexShaderOutput VSFunction(VertexShaderInput input)
{
VertexShaderOutput output;
float4 worldPosition = mul(input.Position, World);
float4 viewPosition = mul(worldPosition, View);
output.Position = mul(viewPosition, Projection);
output.Normal = mul(input.Normal, World);
output.ExactPos = input.Position;
return output;
}
PSOutput PSFunction(VertexShaderOutput input)
{
PSOutput output;
float4 resColor = ObjColor;
float resDepth = input.Position[2] / input.Position[3];
float prpos = 0;
if (buildDir == 0)
{
prpos = (input.ExactPos[1] - boxMin[1]) / (boxMax[1] - boxMin[1]);
}
else if (buildDir == 1)
{
prpos = 1.0 - ((input.ExactPos[1] - boxMin[1]) / (boxMax[1] - boxMin[1]));
}
else if (buildDir == 2)
{
prpos = (input.ExactPos[2] - boxMin[2]) / (boxMax[2] - boxMin[2]);
}
else if (buildDir == 3)
{
prpos = 1.0 - ((input.ExactPos[2] - boxMin[2]) / (boxMax[2] - boxMin[2]));
}
else if (buildDir == 4)
{
prpos = (input.ExactPos[0] - boxMin[0]) / (boxMax[0] - boxMin[0]);
}
else if (buildDir == 5)
{
prpos = 1.0 - ((input.ExactPos[0] - boxMin[0]) / (boxMax[0] - boxMin[0]));
}
float currPerc = 1.1;
for (int i = 0; i < fuCount; i++)
{
if (prpos - 0.0001 <= fuPercent[i])
{
if (fuPercent[i] < currPerc)
{
currPerc = fuPercent[i];
resColor = fuColor[i];
}
}
else
{
resDepth = 1.0;
resColor[3] = 0.0;
}
}
float3 nor = input.Normal;
float3 pos = input.ExactPos;
float glo = 0.5;
float id = (acos(dot(LightDir,nor) / pow(dot(LightDir,LightDir) * dot(nor, nor), 0.5)) / PI );
id = pow(id,2);
float3 look = reflect(normalize(pos - CamPos), nor);
float gl = (acos(dot(LightDir,look) / pow(dot(LightDir,LightDir) * dot(look, look), 0.5)) / PI );
gl = max(gl * 10.0 - 9.0, 0.0);
gl = pow(gl,2) * glo;
output.col = float4(resColor[0] * id + gl, resColor[1] * id + gl, resColor[2] * id + gl, resColor[3]);
//output.dept = resDepth;
return output;
}
technique MyTechnique
{
pass Pass1
{
VertexShader = compile vs_3_0 VSFunction();
PixelShader = compile ps_3_0 PSFunction();
}
}
If FXC is throwing an exception during compilation rather than giving you a compilation error, it's probably not anything you've done wrong.
If you're using the DirectX SDK, make sure you're using the most recent version (June 2010). If you're using the Windows Kit 8.0 SDK, then you may have found a compiler bug. What version of the SDK / fxc are you using?
Can you post a shader that actually compiles (one that includes the missing VertexShaderOutput struct and doesn't have ...'s in place of actual code)? I've filled in the missing code and have no problem compiling it using fxc from Windows Kit 8.0.
EDIT:
Nope, I hadn't spotted that you'd commented out the code that made it not compile.
Sure enough, it doesn't compile, but that's because it's not valid code (as reported by the compiler errors). You're using the POSITION semantic as an input to your pixel shader, which is not valid: the vertex shader's POSITION output is consumed by the rasterizer and is not visible to the pixel shader in Shader Model 3.0. If you want to use the position output from the vertex shader as input to a pixel shader, copy it into a second attribute and use that instead. If I substitute the following code into your shader, it then compiles:
struct VertexShaderOutput
{
float4 ClipPosition : POSITION; // Renamed this to ClipPosition.
float4 Position : TEXCOORD0; // This is valid to use as an input to the pixel shader.
float3 Normal : NORMAL;
float3 ExactPos : TEXCOORD1;
};
struct PSOutput
{
float4 col : COLOR0;
float dept : DEPTH;
};
VertexShaderOutput VSFunction(VertexShaderInput input)
{
VertexShaderOutput output;
float4 worldPosition = mul(input.Position, World);
float4 viewPosition = mul(worldPosition, View);
output.ClipPosition = mul(viewPosition, Projection);
output.Position = output.ClipPosition; // Copy output position to our other attribute.
output.Normal = mul(input.Normal, World);
output.ExactPos = input.Position;
return output;
}
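With those substitutions in place, the pixel shader can derive the custom depth from the copied attribute. A minimal sketch of the matching pixel-shader side (the original color math in PSFunction stays as it was; only the depth lines matter here):
PSOutput PSFunction(VertexShaderOutput input)
{
PSOutput output;
// Do the z/w divide per pixel: TEXCOORD attributes are interpolated
// perspective-correctly, so dividing in the vertex shader instead
// would give a subtly different depth across the triangle.
float resDepth = input.Position.z / input.Position.w;
output.col = float4(1.0, 1.0, 1.0, 1.0); // placeholder for the original color math
output.dept = resDepth;
return output;
}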

How could I implement a bleach bypass shader effect for WPF?

How could I implement a bleach bypass shader effect for WPF?
I'm also interested in the possibility of implementing the first two Technicolor film processes, or any variety that would result in an old-film look.
Check out: http://developer.download.nvidia.com/shaderlibrary/packages/post_bleach_bypass.fx.zip
The important part is:
float4 bypassPS(QuadVertexOutput IN, uniform sampler2D SceneSampler) : COLOR
{
float4 base = tex2D(SceneSampler, IN.UV);
float3 lumCoeff = float3(0.25,0.65,0.1);
float lum = dot(lumCoeff,base.rgb);
float3 blend = lum.rrr;
float L = min(1,max(0,10*(lum- 0.45)));
float3 result1 = 2.0f * base.rgb * blend;
float3 result2 = 1.0f - 2.0f*(1.0f-blend)*(1.0f-base.rgb);
float3 newColor = lerp(result1,result2,L);
float A2 = Opacity * base.a;
float3 mixRGB = A2 * newColor.rgb;
mixRGB += ((1.0f-A2) * base.rgb);
return float4(mixRGB,base.a);
}
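To use this in WPF, the pixel shader has to be arranged the way ShaderEffect expects: the effect's implicit Input brush arrives in sampler register s0, and a float parameter such as the Opacity used above comes through a constant register bound to a DependencyProperty via ShaderEffect.PixelShaderConstantCallback(0). A minimal sketch of the shader rearranged for those conventions (compile with, e.g., fxc /T ps_2_0 /E main /Fo BleachBypass.ps and point a PixelShader's UriSource at the result):
// Bleach-bypass pixel shader arranged for a WPF ShaderEffect (a sketch;
// the register layout is the assumption here, the math is unchanged).
sampler2D Input : register(s0); // the effect's implicit input brush
float Opacity : register(c0); // mapped from a DependencyProperty
float4 main(float2 uv : TEXCOORD) : COLOR
{
float4 base = tex2D(Input, uv);
float lum = dot(float3(0.25, 0.65, 0.1), base.rgb); // luminance
float3 blend = lum.rrr;
float L = saturate(10 * (lum - 0.45)); // highlight mask
float3 result1 = 2.0f * base.rgb * blend; // multiply blend (shadows)
float3 result2 = 1.0f - 2.0f * (1.0f - blend) * (1.0f - base.rgb); // screen blend (highlights)
float3 newColor = lerp(result1, result2, L);
float A2 = Opacity * base.a;
return float4(A2 * newColor + (1.0f - A2) * base.rgb, base.a);
}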
