- Home /
Capture rendered scene to PNG with background transparent
I am trying to capture a rendered object to a PNG with the background transparent. I have no problem capturing the image in general, and currently I render it onto a green background and cutout the object using a custom color key algorithm and some basic logic so that the object can still contain the color green without issue. The process I use currently works but it is slower than I would like, takes about 200 MS for the scene to render, be captured, and then the background set to transparent.
I am looking for a way to capture the scene directly with a transparent background; I use the GetPixels() function on the texture currently. Can anyone think of another way to get the rendered object into a PNG and preserve background transparency?
I'm also interested in how to do this! (my topic here: http://forum.unity3d.com/viewtopic.php?t=48185)
Were you able to find a way to directly capture the alpha channel? I've been searching around and can't find anything that works. I've tried rendering to a render texture and reading from it with ReadPixels and I've tried reading directly from the main screen, but it definitely isn't reading any alpha values. My camera is set to have a clear color of (0,0,0,0) and when I use ReadPixels into a texture and use EncodeToPNG and File.WriteAllBytes to save it out I get a png with an alpha channel set to 1 with a black background. Not sure if it matters, but I'm on a MacBook Pro retina display.
@jallwine Hi, see my answer at the bottom. I was able to do it to create http://videohive.net/item/spiky-red-virus-balls/4719773?WT.ac=search_thumb&WT.seg_1=search_thumb&WT.z_author=srmojuze with alpha channel. Cheers...!
Answer by idbrii · Mar 15, 2019 at 06:42 PM
AnimationToPNG and TransparentBackgroundScreenshotRecorder are two solutions that take two captures with different background colours (black and white) to determine which pixels should be alpha=0. At first, I thought they didn't work for me, but after some hacking around I got them working.
In a blank Unity project, it's also possible to just clearFlags = SolidColor and backgroundColor = Color.clear, but that doesn't work in my project. I don't know why.
Here's my simplified version of AnimationToPNG that just takes a single screenshot and demonstrates both the simple clear color method and black/white method:
// The MIT License (MIT)
// Copyright (c) 2014 Brad Nelson and Play-Em Inc.
// CaptureScreenshot is based on Brad Nelson's MIT-licensed AnimationToPng: http://wiki.unity3d.com/index.php/AnimationToPNG
// AnimationToPng is based on Twinfox and bitbutter's Render Particle to Animated Texture Scripts.
using UnityEngine;
using System.IO;
using System;
public class TriggerCapture : MonoBehaviour {
    // When true, use the single-pass clear-color capture; otherwise use the
    // more reliable black/white double-render capture.
    public bool UseSimple = false;

    void OnEnable() {
        Camera captureCamera = Camera.main;
        // Restrict rendering to the "Player" layer so only those objects are
        // captured against the transparent background.
        captureCamera.cullingMask = LayerMask.GetMask("Player");

        // Timestamped output path so repeated captures never collide.
        string timestamp = DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss-fff");
        string outputPath = string.Format("Screenshots/capture_{0}.png", timestamp);

        int captureWidth = Screen.width;
        int captureHeight = Screen.height;

        if (UseSimple) {
            CaptureScreenshot.SimpleCaptureTransparentScreenshot(captureCamera, captureWidth, captureHeight, outputPath);
        } else {
            CaptureScreenshot.CaptureTransparentScreenshot(captureCamera, captureWidth, captureHeight, outputPath);
        }
    }
}
public static class CaptureScreenshot {
    // Encodes the texture to PNG and writes it out, creating the destination
    // directory first (File.WriteAllBytes throws if it does not exist).
    static void WritePng(Texture2D tex, string path) {
        string dir = Path.GetDirectoryName(path);
        if (!string.IsNullOrEmpty(dir)) {
            Directory.CreateDirectory(dir);
        }
        byte[] pngShot = ImageConversion.EncodeToPNG(tex);
        File.WriteAllBytes(path, pngShot);
    }

    /// <summary>
    /// Captures a PNG with per-pixel alpha by rendering the scene twice —
    /// once over a black background and once over white — and recovering
    /// alpha from the difference between the two renders. Slower than
    /// <see cref="SimpleCaptureTransparentScreenshot"/>, but works even when
    /// the pipeline does not write a usable alpha channel.
    /// </summary>
    /// <param name="cam">Camera to capture from; its state is restored afterwards.</param>
    /// <param name="width">Capture width in pixels.</param>
    /// <param name="height">Capture height in pixels.</param>
    /// <param name="screengrabfile_path">Destination PNG path.</param>
    public static void CaptureTransparentScreenshot(Camera cam, int width, int height, string screengrabfile_path) {
        // This is slower, but seems more reliable.
        // Back up every piece of camera/render state we mutate so the capture
        // has no lasting side effects.
        var bak_cam_targetTexture = cam.targetTexture;
        var bak_cam_clearFlags = cam.clearFlags;
        var bak_cam_backgroundColor = cam.backgroundColor; // BUGFIX: was clobbered, never restored
        var bak_RenderTexture_active = RenderTexture.active;
        var tex_white = new Texture2D(width, height, TextureFormat.ARGB32, false);
        var tex_black = new Texture2D(width, height, TextureFormat.ARGB32, false);
        var tex_transparent = new Texture2D(width, height, TextureFormat.ARGB32, false);
        // Must use 24-bit depth buffer to be able to fill background.
        var render_texture = RenderTexture.GetTemporary(width, height, 24, RenderTextureFormat.ARGB32);
        var grab_area = new Rect(0, 0, width, height);
        RenderTexture.active = render_texture;
        cam.targetTexture = render_texture;
        cam.clearFlags = CameraClearFlags.SolidColor;
        // Render once over black, once over white.
        cam.backgroundColor = Color.black;
        cam.Render();
        tex_black.ReadPixels(grab_area, 0, 0);
        tex_black.Apply();
        cam.backgroundColor = Color.white;
        cam.Render();
        tex_white.ReadPixels(grab_area, 0, 0);
        tex_white.Apply();
        // Create alpha from the difference between the black and white renders:
        // an opaque pixel looks identical over both backgrounds (diff 0 =>
        // alpha 1) while a fully transparent pixel shows the background itself
        // (diff 1 => alpha 0). Dividing the black render by alpha
        // un-premultiplies to recover the original color.
        for (int y = 0; y < tex_transparent.height; ++y) {
            for (int x = 0; x < tex_transparent.width; ++x) {
                float alpha = tex_white.GetPixel(x, y).r - tex_black.GetPixel(x, y).r;
                alpha = 1.0f - alpha;
                Color color;
                if (alpha == 0) {
                    color = Color.clear;
                }
                else {
                    color = tex_black.GetPixel(x, y) / alpha;
                }
                color.a = alpha;
                tex_transparent.SetPixel(x, y, color);
            }
        }
        WritePng(tex_transparent, screengrabfile_path);
        // Restore camera and render-texture state, then release temporaries.
        cam.backgroundColor = bak_cam_backgroundColor;
        cam.clearFlags = bak_cam_clearFlags;
        cam.targetTexture = bak_cam_targetTexture;
        RenderTexture.active = bak_RenderTexture_active;
        RenderTexture.ReleaseTemporary(render_texture);
        Texture2D.Destroy(tex_black);
        Texture2D.Destroy(tex_white);
        Texture2D.Destroy(tex_transparent);
    }

    /// <summary>
    /// Captures a PNG by clearing the camera to a fully transparent solid
    /// color and reading the result back in one pass. Depending on your
    /// render pipeline, this may not preserve alpha — use
    /// <see cref="CaptureTransparentScreenshot"/> if it doesn't.
    /// </summary>
    /// <param name="cam">Camera to capture from; its state is restored afterwards.</param>
    /// <param name="width">Capture width in pixels.</param>
    /// <param name="height">Capture height in pixels.</param>
    /// <param name="screengrabfile_path">Destination PNG path.</param>
    public static void SimpleCaptureTransparentScreenshot(Camera cam, int width, int height, string screengrabfile_path) {
        // Depending on your render pipeline, this may not work.
        var bak_cam_targetTexture = cam.targetTexture;
        var bak_cam_clearFlags = cam.clearFlags;
        var bak_cam_backgroundColor = cam.backgroundColor; // BUGFIX: was clobbered, never restored
        var bak_RenderTexture_active = RenderTexture.active;
        var tex_transparent = new Texture2D(width, height, TextureFormat.ARGB32, false);
        // Must use 24-bit depth buffer to be able to fill background.
        var render_texture = RenderTexture.GetTemporary(width, height, 24, RenderTextureFormat.ARGB32);
        var grab_area = new Rect(0, 0, width, height);
        RenderTexture.active = render_texture;
        cam.targetTexture = render_texture;
        cam.clearFlags = CameraClearFlags.SolidColor;
        // Simple: use a clear background
        cam.backgroundColor = Color.clear;
        cam.Render();
        tex_transparent.ReadPixels(grab_area, 0, 0);
        tex_transparent.Apply();
        WritePng(tex_transparent, screengrabfile_path);
        cam.backgroundColor = bak_cam_backgroundColor;
        cam.clearFlags = bak_cam_clearFlags;
        cam.targetTexture = bak_cam_targetTexture;
        RenderTexture.active = bak_RenderTexture_active;
        RenderTexture.ReleaseTemporary(render_texture);
        Texture2D.Destroy(tex_transparent);
    }
}
This comment solved my problem on transparent camera rendering. Black + White background camera with diff calculation is pretty smart.
Answer by MaDDoX · Aug 30, 2015 at 01:23 PM
Set capture camera’s clear flags property to “Depth Only” (that option is under Clear Flags, not Culling Mask); in your render to texture method (I use Unity Wiki’s async method) make sure to set the texture to 32 bits like so:
// Render target sized to the screen; the third constructor argument is the
// depth buffer bit count. NOTE(review): the documented values are 0, 16, and
// 24 — 32 is undocumented but appears to behave like 24 (see the comment
// below this answer). TODO confirm on your Unity version.
RenderTexture rt = new RenderTexture(Screen.width, Screen.height, 32);
// ARGB32 so the read-back texture keeps an alpha channel; no mipmaps.
Texture2D screenShot = new Texture2D(Screen.width, Screen.height, TextureFormat.ARGB32, false);
Works flawlessly :)
RenderTexture constructor's third argument is depth which doesn't support 32:
depth: Number of bits in depth buffer (0, 16 or 24). Note that only 24 bit depth has stencil buffer.
(Although it seems that 24 and 32 have the same behavior.)
Answer by Erik Harg · Apr 03, 2010 at 12:40 PM
Using Texture2D.ReadPixels(), you should be able to get the current render texture (or view) with the alpha component intact, into you Texture2D.
See: Script Reference - Texture2D - ReadPixels
We do the following, and get PNGs with alpha channel (variable screenshot is set by a key combo):
// Capture the current render target to a PNG once the 'screenshot' flag has
// been set (by a key combo, per the surrounding answer). NOTE(review):
// 'screenshot', 'sshotnum' and 'destination' are declared outside this excerpt.
if(screenshot) { int ssn = sshotnum++;
// Texture2D created without an explicit format — presumably the default
// format includes an alpha channel on this Unity version; TODO confirm.
Texture2D sshot = new Texture2D(destination.width, destination.height);
// ReadPixels copies from the currently active render target into the texture.
sshot.ReadPixels(new Rect(0, 0, destination.width, destination.height), 0, 0);
sshot.Apply();
// Encode to PNG, then free the temporary texture before writing the bytes.
byte[] pngShot = sshot.EncodeToPNG();
Destroy(sshot);
// Write next to the project folder with a random suffix to avoid collisions.
File.WriteAllBytes(Application.dataPath + "/../screenshot_" + ssn.ToString() + "_" + Random.Range(0, 1024).ToString() + ".png", pngShot);
screenshot = false;
}
Disclaimer/YMMV: I have only used this on scenes with non-transparent background, so I can't guarantee it does indeed give you the right alpha info, though there is no reason it shouldn't.
This is the method I currently use but even when clearing the screen with a color that has 0 alpha the background is NOT transparent.
Didn't you say you used GetPixels() instead of ReadPixels()? The documentation for GetPixels() says nothing about alpha, while ReadPixels' doc. explicitly says it should return ARGB32 or RGB24, which indicates support for alpha channel.
We just discussed this, and it may be that the standard framebuffer doesn't have an alpha channel you can read. You could try rendering to a render texture, and ReadPixels from that. Also, if you have fancy shaders using special alpha blending, you should probably read this: http://forum.unity3d.com/viewtopic.php?t=32089
ReadPixels is good, in relation to applying a "global"(?) shader that allows ReadPixels ARGB32 to "detect" the transparency accordingly.
Answer by andrew_pearce · Nov 15, 2020 at 07:42 AM
I was searching for a while and it's really easy to solve. I believe everyone uses the same code to get a texture from camera. So there are two important things to remember:
1) Make sure that Camera's "Clear Flag" is "Skybox" or "Solid Color". If "Solid Color" is selected, make sure that its alpha channel is set to 0
2) When you create texture, use TextureFormat.ARGB32 instead of TextureFormat.ARGB16
// Render the camera into a temporary RenderTexture and read it back with the
// alpha channel intact. NOTE(review): 'camera', 'width' and 'height' are
// defined outside this excerpt.
Texture2D scrTexture = new Texture2D(width, height, TextureFormat.ARGB32, false);
// 24-bit depth buffer; the color format uses the constructor default.
RenderTexture scrRenderTexture = new RenderTexture(scrTexture.width, scrTexture.height, 24);
// Back up the camera's target texture, render into ours, then restore it.
RenderTexture camRenderTexture = camera.targetTexture;
camera.targetTexture = scrRenderTexture;
camera.Render();
camera.targetTexture = camRenderTexture;
// ReadPixels reads from RenderTexture.active, so make ours active first.
// NOTE(review): RenderTexture.active is not restored and scrRenderTexture is
// never released in this excerpt — callers should clean both up.
RenderTexture.active = scrRenderTexture;
scrTexture.ReadPixels(new Rect(0, 0, scrTexture.width, scrTexture.height), 0, 0);
scrTexture.Apply();
I hope my answer will help someone
I really wish it was this easy for me. I've tried off and on for a year to get transparency into RenderTextures but as much as I've heard "it's possible, just set the texture format to allow for transparency" I have tried every permutation and setting on the cameras to get it to retain the transparency. I use 2020 LTS and URP.
So I wish this solution worked, but it just doesn't for me. If anyone ever has a definitive answer on whether "RenderTextures allow transparency" I'd love to hear it, with proof of course. I've heard both yes and no but I've never been able to get it to work, so I'm in the 'no' camp.
Answer by srmojuze · May 11, 2013 at 06:32 PM
http://forum.unity3d.com/viewtopic.php?t=48185 is a good thread to start. What I learnt is this. You have two cameras. One does ReadPixels of the "normal" scene. The 2nd camera does ReadPixels using the "XRay" shader in the thread mentioned.
Then, what I do is take the color (RGB) from the first camera and then take the alpha of the 2nd camera, combining it so that it is ARGB32.
You have to use RendererTexture so that the cameras can render separately.
I'm sorry I can't provide more information I have been up for 12 hours and achieved a lot but have to wind down now.
Here's the code dump. A lot of parts have been commented in or out because I was doing other stuff with it but hope it gives you an insight into how it can be done.
// Captures one frame with alpha by rendering the scene through two cameras:
// the main camera supplies the RGB image and a second "PPCamera" (running the
// "XRay" alpha shader mentioned in the answer) supplies the alpha channel.
// The scene is frozen (timeScale = 0) while both renders and the per-pixel
// merge run, then slowed back down. NOTE(review): only tex2 — the alpha
// camera's pixels with the merged alpha — is actually written to disk; the
// tex1 RGB merge result is computed but discarded. Presumably leftover
// experiments, as the author notes; verify before reusing.
function renderToScreenTempFunction(theFrame : int)
{
//Wait for framebuffer to be rendered
//NEEDED?? yield WaitForEndOfFrame();
// Create a texture the size of the screen, ARGB32 format
//FREEZE THE SCENE!
Time.timeScale = 0;
//Create a texture of main camera
// var mainCamTexture = new Texture2D(1280,720,TextureFormat.RGB24,false);
// Locate the two cameras: the project's default camera and the alpha camera.
// NOTE(review): Global_ApplicationData is project-specific and not shown here.
var mainCam : Camera = Global_ApplicationData.getTheEnvironment().getDefaultCamera().camera;
var theAlphaCam : Camera = GameObject.Find("PPCamera").camera;
//Initialize and render
// Hard-coded 2048x1152 capture size with 24-bit depth buffers.
var mainCamRenderTexture : RenderTexture = new RenderTexture(2048,1152,24);
var alphaCamRenderTexture : RenderTexture = new RenderTexture(2048,1152,24);
// Render the main (RGB) camera and read its pixels back into tex1.
mainCam.targetTexture = mainCamRenderTexture;
mainCam.Render();
RenderTexture.active = mainCamRenderTexture;
//var width = Screen.width;
//var height = Screen.height;
var tex1 = new Texture2D (2048, 1152, TextureFormat.RGB24, false); //OR ARGB32 AS WORKS PREVIOUSLY
// Read screen contents into the texture
tex1.ReadPixels (Rect(0, 0, 2048, 1152), 0, 0);
tex1.Apply ();
// Encode texture into PNG
//var bytes = tex.EncodeToPNG();
//Now do it for Alpha Camera
// Render the alpha camera and read its pixels into tex2 (ARGB32, has alpha).
theAlphaCam.targetTexture = alphaCamRenderTexture;
theAlphaCam.Render();
RenderTexture.active = alphaCamRenderTexture;
var tex2 = new Texture2D (2048, 1152, TextureFormat.ARGB32, false);
tex2.ReadPixels (Rect(0, 0, 2048, 1152), 0, 0);
tex2.Apply ();
// For every pixel with non-zero alpha in the alpha-camera render: take the
// main camera's color (color2), attach the alpha-camera's alpha (+0.05 to
// reduce transparency), and write it back into tex2.
for ( var x: int = 0; x < tex2.width; x++)
{
for ( var y: int = 0; y < tex2.height; y++)
{
var color:Color = tex2.GetPixel(x, y);
//let's get tex1 color
var color2:Color = tex1.GetPixel(x,y);
var alpha:float = color.a;
if (alpha != 0)
{
// Un-premultiply the alpha-camera color; result is unused below.
color /= alpha;
color.a = alpha;
//color2.a = alpha;
color2.a = alpha+0.05; //attempt transparency reduction
tex2.SetPixel(x, y, color2);
}
}
}
//attempt merge
// NOTE(review): this additive merge into tex1 is never saved — only tex2 is
// encoded below — and the per-pixel Debug.Log is extremely slow.
var cols1 = tex1.GetPixels();
var cols2 = tex2.GetPixels();
for(var i = 0; i < cols1.Length; ++i)
{
cols1[i] += cols2[i];
Debug.Log(cols1[i]);
}
tex1.SetPixels(cols1);
tex1.Apply();
// Encode texture into PNG
var bytes2 = tex2.EncodeToPNG();
// For testing purposes, also write to a file in the project folder
File.WriteAllBytes("D:/SavedAlpha_"+theFrame+".png", bytes2);
Destroy (tex1);
Destroy (tex2);
// For testing purposes, also write to a file in the project folder
//File.WriteAllBytes("D:/SavedScreen_"+theFrame+".png", bytes);
//Clean Up
theAlphaCam.targetTexture = null;
RenderTexture.active = null;
DestroyImmediate(alphaCamRenderTexture);
//Clean Up
mainCam.targetTexture = null;
RenderTexture.active = null;
DestroyImmediate(mainCamRenderTexture);
//UNFREEZE THE SCENE!
// NOTE(review): timeScale is restored to 0.05 (slow motion), not 1.0 — the
// author's intent, per the commented-out line below; confirm before reuse.
Time.timeScale = 0.05;
//Time.timeScale = 1.0;
Time.fixedDeltaTime = 0.02 * Time.timeScale;
}