- Home /
worldPos not correct in surface shader
Hello!
I am writing a custom Unity Shader where the mesh data is constructed in a Compute Shader and then drawn using Graphics.DrawProceduralIndirect.
The problem I have is, that the shader I want to write needs the worldPos of the current fragment, but when visualizing the worldPos it is always (1, 0, 0). I have checked the generated Shader code and what I understand it seems to be not properly set. The x-component is the only component which is assigned to 1:
// fragment shader
fixed4 frag_surf_shader (v2f_surf_shader IN) : SV_Target {
UNITY_SETUP_INSTANCE_ID(IN);
// prepare and unpack data
Input surfIN;
#ifdef FOG_COMBINED_WITH_TSPACE
UNITY_EXTRACT_FOG_FROM_TSPACE(IN);
#elif defined FOG_COMBINED_WITH_WORLD_POS
UNITY_EXTRACT_FOG_FROM_WORLD_POS(IN);
#else
UNITY_EXTRACT_FOG(IN);
#endif
UNITY_INITIALIZE_OUTPUT(Input,surfIN);
surfIN.col.x = 1.0;
surfIN.worldPos.x = 1.0;
surfIN.col = IN.custompack0.xyzw;
float3 worldPos = IN.worldPos.xyz;
#ifndef USING_DIRECTIONAL_LIGHT
fixed3 lightDir = normalize(UnityWorldSpaceLightDir(worldPos));
#else
fixed3 lightDir = _WorldSpaceLightPos0.xyz;
#endif
#ifdef UNITY_COMPILER_HLSL
SurfaceOutput o = (SurfaceOutput)0;
#else
SurfaceOutput o;
#endif
o.Albedo = 0.0;
o.Emission = 0.0;
o.Specular = 0.0;
o.Alpha = 0.0;
o.Gloss = 0.0;
fixed3 normalWorldVertex = fixed3(0,0,1);
o.Normal = IN.worldNormal;
normalWorldVertex = IN.worldNormal;
// call surface function
surf_shader (surfIN, o);
And this is the original shader:
Shader "Game/Map Shader" {
Properties
{
_MainTex ("Texture", 2D) = "white" {}
}
SubShader {
LOD 200
Tags {"RenderType"="Opaque" }
CGPROGRAM
#include "UnityCG.cginc"
#include "MapShared.cginc"
#pragma target 4.5
#pragma surface surf_shader Lambert vertex:vertex_shader addshadow fullforwardshadows
#ifdef SHADER_API_D3D11
StructuredBuffer<RenderData> renderData;
#endif
struct Input {
float3 worldPos;
};
struct appdata_id
{
float4 vertex : POSITION;
float3 normal : NORMAL;
float4 texcoord : TEXCOORD0;
uint id : SV_VertexID;
uint inst : SV_InstanceID;
};
void vertex_shader(inout appdata_id v, out Input i)
{
#ifdef SHADER_API_D3D11
UNITY_INITIALIZE_OUTPUT(Input,i);
RenderData rd = renderData[v.id];
v.vertex = float4(rd.position.xyz, 1.0f);
v.normal = normalize(mul(float4(rd.normal.xyz, 0.0), unity_WorldToObject).xyz);
#endif
}
void surf_shader(Input IN, inout SurfaceOutput o)
{
#ifdef SHADER_API_D3D11
o.Albedo = float4(clamp(abs(IN.worldPos.xyz),0,1), 1.0f);
#endif
}
ENDCG
}
}
Does anyone have an idea why this happens? I would really appreciate it if anyone could point me into the right direction.
Answer by metzzzo · Feb 21, 2021 at 09:59 PM
Okay after some debugging I found out, that the issue was that for some reason the "#ifdef SHADER_API_D3D11" does not work inside the "void surf_shader(Input IN, inout SurfaceOutput o)" function. Anyone knows why this is the case? Why can I use this #ifdef inside the vertex shader, but not inside the surface shader?
Your answer
Follow this Question
Related Questions
Scene Color Node in Shader Graph not working with Unity's 2D Renderer and URP 5 Answers
Distortion Shader does not render transparent objects (Shader Graph) 2 Answers
How to get the value of a pixel of a noise node if it's greater than some threshold? 0 Answers