- Home /
Can't render shader on iOS although it works in OS X and the simulator
I have a custom shader that I apply to the camera to get a night-vision effect. There is something weird going on with Unity3D. I have encountered this problem before but can't remember how I fixed it. I am using Unity3D 3.5.
I run my game targeting Mac OS X and the shader works just fine. When I target iOS and export a build, I get a black screen (black because the shader is applied to the camera view; I can still see GUITextures). The thing is that the shader isn't using anything that iOS can't handle, and based on my previous experience (from when I fixed the problem before) I'm sure it's something in Unity3D's settings.
Here is my shader:
// Night-vision post effect: amplifies dark colors, tints green, overlays
// animated noise, and masks the result (binocular vignette). The effect is
// applied to the left `_effectCoverage` fraction of the screen; the rest
// shows the unmodified camera image.
Shader "Custom/nightvisionShader" {
    Properties {
        _MainTex ("Texture ", 2D) = "white" {}
        _noiseTex ("noiseTex ", 2D) = "white" {}
        _maskTex ("mask tex",2D) = "white" {}
        _elapsedTime("elapsed time",Float) = 0 // seconds
        _luminanceThreshold("luminance threshold",Float) = .2 // 0.2
        _colorAmplification("color amplification",Float) = 4 // 4.0
        _effectCoverage("effect coverage",Float) = .5 // 0.5
    }
    SubShader {
        Tags { "RenderType"="Transparent" }
        //LOD 200
        CGPROGRAM
        #pragma surface surf Lambert

        sampler2D _MainTex;
        sampler2D _noiseTex;
        sampler2D _maskTex;
        float _elapsedTime;
        float _luminanceThreshold;
        float _colorAmplification;
        float _effectCoverage;

        struct Input {
            float2 uv_MainTex;
            float2 uv_noiseTex;
            float2 uv_maskTex;
        };

        void surf (Input IN, inout SurfaceOutput o)
        {
            // FIX: initialize all four components. The original declared
            // `half4 finalColor;` and only ever wrote .rgb, leaving .a
            // uninitialized. Desktop GL drivers typically zero-init, but on
            // iOS/GLES reading an uninitialized value is undefined and can
            // produce garbage or a black output.
            half4 finalColor = half4(0.0, 0.0, 0.0, 1.0);
            if (IN.uv_MainTex.x < _effectCoverage)
            {
                // Scroll the noise texture on a circular path over time.
                float2 uv;
                uv.x = 0.4*sin(_elapsedTime*50.0);
                uv.y = 0.4*cos(_elapsedTime*50.0);
                // Vignette mask (e.g. binocular shape), red channel only.
                float m = tex2D(_maskTex, IN.uv_maskTex).r;
                half3 n = tex2D(_noiseTex,(IN.uv_noiseTex*3.5) + uv).rgb;
                // Sample the camera image, slightly displaced by the noise.
                half3 c = tex2D(_MainTex, IN.uv_MainTex + (n.xy*0.005)).rgb;
                // Amplify dark areas (classic night-vision gain).
                float lum = dot(float3(0.30, 0.59, 0.11), c);
                if (lum < _luminanceThreshold)
                    c *= _colorAmplification;
                half3 visionColor = half3(0.1, 0.95, 0.2); // green tint
                finalColor.rgb = (c + (n*0.2)) * visionColor * m;
                // FIX: explicit .rgb swizzle — Albedo is half3, and the
                // implicit half4 -> half3 truncation the original relied on
                // is rejected by strict GLSL ES compilers on iOS.
                o.Albedo = finalColor.rgb;
            }
            else
            {
                // Outside the effect area: pass the camera image through.
                // FIX: explicit .rgb for the same truncation reason as above.
                o.Albedo = tex2D(_MainTex, IN.uv_MainTex).rgb;
                // o.Emission = half4(.5,.5,.5,1);
            }
        }
        ENDCG
    }
    FallBack "Diffuse"
}
And here is the code that renders the material to which the shader is attached:
// Image-effect hook: Unity calls this after the camera has rendered,
// letting us run the night-vision material over the frame.
void OnRenderImage (RenderTexture source, RenderTexture destination)
{
    // Graphics.Blit binds `source` to the material's _MainTex itself,
    // and the noise/mask textures are assigned in the editor, so the
    // explicit SetTexture calls below are not needed:
    //overlayMaterial.SetTexture ("_MainTex", source);
    //overlayMaterial.SetTexture ("_noiseTex", Resources.Load("nightvision/noise_tex6") as Texture2D);
    //overlayMaterial.SetTexture ("_maskTex", Resources.Load("nightvision/binoculars_mask") as Texture2D);

    // Feed the clock into the shader so the noise UVs animate each frame.
    overlayMaterial.SetFloat ("_elapsedTime", Time.time);

    // Run every pass of the material (pass index defaults to -1 = all).
    Graphics.Blit (source, destination, overlayMaterial);
}
Again, the code works well in the simulator and on OS X (even in the web player).
What am I setting wrong?
Your answer
Follow this Question
Related Questions
How to use Camera.RenderWithShader() to modify rendered image? 0 Answers
Unlit "diffuse" shader 2 Answers
angrybot water shader lagging problem 0 Answers
Clamp cone shape from shader alpha 0 Answers
Unity 3D fragment shader - distance from pixel to vertex? 0 Answers