- Home /
Stop "offscreen" sprites from being culled
I've made a shader that makes my game world curved like you're going round a tower.
Unfortunately, the sprites are getting culled too early. For example, if a sprite would be outside the view frustum on a flat rectangular plane, it gets culled — even though it SHOULD be visible once the world is bent back in 3D. How can I make my camera's "effective" FOV larger than its actual FOV? Or how can I write my shader to account for this?
Here's my shader:
Shader "Custom/CurvedWorld" {
    Properties
    {
        [PerRendererData] _MainTex ("Sprite Texture", 2D) = "white" {}
        _Color ("Tint", Color) = (1,1,1,1)
        [MaterialToggle] PixelSnap ("Pixel snap", Float) = 0
        // Degree of curvature. NOTE: [PerRendererData] removed — with it, the
        // property is hidden from the material inspector and the material never
        // serializes a value, which produces the "Material doesn't have a float
        // or range property '_Curvature'" warning. Drive it per-renderer via
        // MaterialPropertyBlock if needed; it still works without the attribute.
        _Curvature ("Curvature", Float) = 0.01
    }
    SubShader {
        Tags
        {
            "Queue"="Transparent"
            "IgnoreProjector"="True"
            "RenderType"="Transparent"
            "PreviewType"="Plane"
            "CanUseSpriteAtlas"="True"
        }
        //LOD 200
        Cull Off
        Lighting Off
        ZWrite Off
        Fog { Mode Off }
        Blend SrcAlpha OneMinusSrcAlpha

        CGPROGRAM
        // Lambert surface shader with alpha blending; vertex:vert hooks in the
        // curved-world displacement below.
        #pragma surface surf Lambert alpha vertex:vert
        #pragma multi_compile DUMMY PIXELSNAP_ON

        sampler2D _MainTex;
        fixed4 _Color;
        float _Curvature;

        // Surface-shader input: the sprite UVs plus an interpolated tint color
        // that the vertex function fills in. The original declared a member
        // literally named "_Color" here, which shadowed nothing useful and
        // could never carry the tint into surf.
        struct Input {
            float2 uv_MainTex;
            fixed4 color;
        };

        // Bends the world around the camera and forwards the tint to surf.
        //
        // FIX: a custom vertex function that fills a custom Input struct MUST
        // take "out Input o" as its second parameter. The original signature
        // was "void vert(inout appdata_full v)" only, so the compiler had no
        // "o" in scope — that is why UNITY_INITIALIZE_OUTPUT(Input, o) and
        // "o.color = _Color" failed and turned the shader pink.
        void vert (inout appdata_full v, out Input o)
        {
            UNITY_INITIALIZE_OUTPUT(Input, o);

            #if defined(PIXELSNAP_ON) && !defined(SHADER_API_FLASH)
            v.vertex = UnityPixelSnap (v.vertex);
            #endif
            v.normal = float3(0,0,-1);

            // Combine the per-vertex color (SpriteRenderer.color arrives in
            // v.color) with the material tint so both tinting paths work.
            o.color = v.color * _Color;

            // Transform the vertex from object space into world space...
            float4 vv = mul( _Object2World, v.vertex );
            // ...make it relative to the camera position...
            vv.xyz -= _WorldSpaceCameraPos.xyz;
            // ...then build a pure z offset proportional to the square of the
            // horizontal (x) distance from the camera — this is the "around
            // the tower" bend. Only z is displaced; x/y/w stay zero so the
            // offset adds cleanly below.
            vv = float4( 0.0f, 0.0f, (vv.x * vv.x) * _Curvature, 0.0f );
            // Apply the offset back in object space.
            v.vertex += mul(_World2Object, vv);
        }

        // Default textured surface, modulated by the interpolated tint.
        void surf (Input IN, inout SurfaceOutput o) {
            fixed4 c = tex2D(_MainTex, IN.uv_MainTex) * IN.color;
            o.Albedo = c.rgb;
            o.Alpha = c.a;
        }
        ENDCG
    }
    Fallback "Transparent/VertexLit"
}
I'm also having another problem with my shader: I want the sprites to be coloured, but if I uncomment the line "o.color = _Color", it claims _Color doesn't exist. I copied that line out of Unity's sprites-diffuse shader, and likewise I can't uncomment "fixed4 c = tex2D(_MainTex, IN.uv_MainTex) * IN.color;". Doing either of those things breaks the shader and renders everything pink.
(It claims Material doesn't have a color property '_Color', Material doesn't have a float or range property '_Curvature' and Material doesn't have a float or range property 'PixelSnap')
But yeah, if anyone could point me in the right direction on either of these things I'd be rather grateful.
The only solutions I can think of are either using sprites with a huge transparent border around them so they don't get culled (which seems dumb), or something like repeatedly switching the camera's field of view between a high value (for culling) and a low value (for rendering) in LateUpdate() — though I really don't know if that's workable. Or can this be fixed in the shader itself? That is, can I force it to render things that are off screen?
Cheers.