Optimize RPC-streamed video sent as a byte array (frame-by-frame JPG)
I'm working on networking a webcam over Photon so that I can send one player's camera view over the network and project it onto a screen in the environment of the other players. So far the process works and the image appears over the network, but the frame rate slowly decreases until the connection eventually crashes. I've done a lot of reading, and it seems fairly clear this is because of how large the byte array being sent is and how slow tex.LoadImage() and tex.EncodeToJPG() are. The byte array is generally around 90,000 bytes per frame.
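Before touching the architecture, the cheapest win I can see is just shrinking the payload. EncodeToJPG takes an optional quality argument (Unity's default is 75), so something like this minimal sketch should cut the ~90,000 bytes per frame considerably; "EncodeSmall" is just an illustrative name and the value 40 is something I'd tune by eye:

    // Sketch: encode at reduced JPG quality instead of the default.
    byte[] EncodeSmall(Texture2D source) {
        return source.EncodeToJPG(40);   // quality 1-100; lower = smaller payload
    }

Requesting a smaller resolution from WebCamTexture would compound this, since the payload scales with pixel count.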
So, I need the best way to optimize this. I've considered A) somehow using OpenGL to pass the byte array to a shader and doing the conversion in the shader, which would bypass tex.LoadImage(). From what I've read this seems unlikely to work; I've done some shader programming, but I'd need more information on the byte format EncodeToJPG() produces (a related idea is in the first sketch below);
or B), which I think is the more likely route: write something similar to the algorithm MPEG uses, so frames are only sent when the image actually changes, possibly even sending only the changed data (see the second sketch below);
or C) whatever else you can suggest.
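For A), decoding JPG bytes inside a shader looks like a dead end to me, but a neighbouring idea would skip both EncodeToJPG() and LoadImage() entirely: send the raw RGB24 bytes and upload them with LoadRawTextureData, letting the material's shader sample the texture as normal. A rough sketch of that trade; raw frames are bigger than JPG ones, so this swaps bandwidth for CPU time, OnRawFrame is a hypothetical receiver method, and tex must already match the sender's size and TextureFormat.RGB24:

    // Sender side: raw pixel bytes, 3 bytes per pixel for RGB24, no JPG encode.
    byte[] raw = MyTexture.GetRawTextureData();

    // Receiver side: upload straight to the GPU, no JPG decode on the CPU.
    void OnRawFrame(byte[] raw) {
        tex.LoadRawTextureData(raw);     // requires matching width/height/format
        tex.Apply();                     // push the new pixels to the GPU
        renderer.material.mainTexture = tex;
    }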
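For B), a real MPEG-style codec is a big job, but the "only send when something changed" half is easy to prototype: keep the last sent frame's Color32s and count how many pixels moved past a threshold before bothering to encode. A naive sketch, where prevFrame, ChangedEnough, and both thresholds are made-up names/values to tune:

    Color32[] prevFrame;   // last frame actually sent (hypothetical extra field)

    // True when enough pixels differ from the last sent frame to justify a send.
    bool ChangedEnough(Color32[] current) {
        if (prevFrame == null) return true;          // always send the first frame
        int changed = 0;
        for (int i = 0; i < current.Length; i++) {
            // Per-channel delta; 16 is an arbitrary noise threshold.
            if (Mathf.Abs(current[i].r - prevFrame[i].r) > 16 ||
                Mathf.Abs(current[i].g - prevFrame[i].g) > 16 ||
                Mathf.Abs(current[i].b - prevFrame[i].b) > 16) {
                changed++;
            }
        }
        return changed > current.Length / 20;        // more than ~5% of pixels moved
    }

When it returns true I'd copy current into prevFrame and send; otherwise skip that frame's RPC entirely. Sending only the changed blocks would be the next step.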
This is the code I'm using to send the byte array over the RPC:
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System;
using System.IO;
//using UnityEditor;
public class CamCaptureStream : Photon.MonoBehaviour {

    public Renderer renderer;
    public WebCamTexture webCamTexture;
    public string deviceName;
    Color32[] data;                // reusable pixel buffer for GetPixels32
    public Texture2D tex;          // texture remote clients decode into
    byte[] bytesToSend;            // latest encoded JPG frame
    public Texture2D MyTexture;    // texture the owner captures into
    int count;
    //static BytesCompression compress;
    void Start() {
        deviceName = WebCamTexture.devices[0].name;
        // Requested size/fps; the driver is free to hand back a larger
        // resolution than the 20x20 asked for here.
        webCamTexture = new WebCamTexture(deviceName, 20, 20, 20);
        GetComponent<VoiceChat>().enabled = true;
        GetComponent<VoiceChat>().ready = true;
        GetComponent<AudioSource>().enabled = true;
        GetComponent<AudioListener>().enabled = true;
        count = 0;
        if (photonView.isMine) {
            webCamTexture.Play();
        }
    }
    void Update() {
        if (webCamTexture.width < 100) {
            Debug.Log("Waiting for Camera to Load");
            return;
        }
        // Allocate the buffer and textures once; doing this every frame
        // churns the garbage collector and compounds the slowdown.
        if (data == null) {
            data = new Color32[webCamTexture.width * webCamTexture.height];
            Debug.Log(data.Length);
            MyTexture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGB24, false);
            tex = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGB24, false);
        }
        if (photonView.isMine) {
            // Fires an RPC every rendered frame; see the throttling sketch
            // after the class for a way to cap the send rate.
            StartCoroutine(GetPixels());
            //UDPSend.sendString(bytesToSend);
        }
        // Remote clients get their frames through the CameraStream RPC below.
        // (An earlier version called tex.LoadImage(bytesToSend) here, but
        // bytesToSend is never assigned on remote clients, so it threw.)
    }
    [PunRPC]
    void CameraStream(byte[] jpgBytes) {
        // LoadImage decodes the JPG and uploads the result to the GPU itself,
        // so no extra Apply() call is needed afterwards.
        tex.LoadImage(jpgBytes);
        renderer.material.mainTexture = tex;
    }
    private static byte[] Color32ArrayToByteArray(Color32[] colors)
    {
        byte[] bytes = new byte[colors.Length * 4];
        // Index pixels and bytes separately; the original loop stepped one
        // index by 4 and used it for both arrays, so most pixels were skipped.
        for (int i = 0; i < colors.Length; i++) {
            bytes[i * 4]     = colors[i].r;
            bytes[i * 4 + 1] = colors[i].g;
            bytes[i * 4 + 2] = colors[i].b;
            bytes[i * 4 + 3] = colors[i].a;
        }
        return bytes;
    }
    IEnumerator GetPixels() {
        // Wait until rendering finishes so the webcam texture holds the
        // current image before reading it back.
        yield return new WaitForEndOfFrame();
        MyTexture.SetPixels32(webCamTexture.GetPixels32(data));
        //MyTexture.Compress(false);
        //bytesToSend = MyTexture.GetRawTextureData();
        bytesToSend = MyTexture.EncodeToJPG();
        print("Bytes to Send Size: " + bytesToSend.Length);
        photonView.RPC("CameraStream", PhotonTargets.All, bytesToSend);
    }
}
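One more suspicion about the gradual slowdown: Update() fires an RPC every rendered frame, so Photon's reliable queue probably backs up faster than it drains. Capping the send rate is a small change; here's a sketch, where sendInterval, nextSendTime, and MaybeSend are made-up names:

    float nextSendTime;                       // hypothetical extra field
    const float sendInterval = 0.1f;          // ~10 sends per second

    void MaybeSend() {
        if (Time.time < nextSendTime) return; // too soon since the last send
        nextSendTime = Time.time + sendInterval;
        StartCoroutine(GetPixels());          // the existing capture/send coroutine
    }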