VirtualCamManager.cs — @jakep84, created June 24, 2020
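VirtualCamManager drives an Agora external video source from inside a Unity VR scene: each frame it renders either a perspective camera or a 360° cubemap into a RenderTexture, reads the pixels back to the CPU, and pushes them through IRtcEngine.PushVideoFrame, while remote users' streams are laid out as VideoSurface quads in a grid. It assumes the Agora Video SDK for Unity and the Oculus Integration sample framework (DistanceGrabbable, OVRInput) are present in the project.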
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using agora_gaming_rtc;
using OculusSampleFramework;
#if UNITY_EDITOR
using UnityEditor;
#endif

public class VirtualCamManager : MonoBehaviour
{
    // Pairs a remote user's uid with the GameObject displaying their video
    class VideoUser
    {
        public uint uid;
        public GameObject go;
    }
    Texture2D mTexture;
    Rect mRect;
    [SerializeField]
    private string appId = "Your_AppID";
    [SerializeField]
    private string channelName = "agora";
    public bool send360;
    public bool disableAudio;
    public IRtcEngine mRtcEngine;
    // Monotonically increasing timestamp for pushed video frames
    int i = 100;

    //-- Virtual cam
    public GameObject virtualCamPrefab;
    private DistanceGrabbable grabbable;
    private GameObject remoteVideo;
    private bool inChannel;
    public GameObject remoteVideoPrefab;
    public Transform remoteVideoSpawnPoint;
    private Transform currentSpawnPoint;
    public float remoteVideoHorizontalSpace = 0.2f;
    private List<VideoUser> userList = new List<VideoUser>();

    //--- perspective cam
    [SerializeField]
    private Camera perspectiveCam;
    [SerializeField]
    private RenderTexture perspectiveRT;

    //--- 360 cam
    [SerializeField]
    private Camera _360Cam;
    [SerializeField]
    private RenderTexture _360RT;
    [SerializeField]
    private RenderTexture equirectRT;
    private int width;
    private int height;
    public static VirtualCamManager Instance { get; private set; }

    private void Awake()
    {
        if (Instance == null)
        {
            Instance = this;
        }
        else
        {
            Destroy(gameObject);
        }
    }
    void Start()
    {
        currentSpawnPoint = remoteVideoSpawnPoint;
        Debug.Log("ScreenShare Activated");
        mRtcEngine = IRtcEngine.getEngine(appId);
        // Enable verbose SDK logging
        mRtcEngine.SetLogFilter(LOG_FILTER.DEBUG | LOG_FILTER.INFO | LOG_FILTER.WARNING | LOG_FILTER.ERROR | LOG_FILTER.CRITICAL);
        mRtcEngine.SetParameters("{\"rtc.log_filter\": 65535}");
        // Configure the external video source: frames are pushed manually instead of captured from a device camera
        mRtcEngine.SetExternalVideoSource(true, false);
        // Start video mode
        mRtcEngine.EnableVideo();
        // Set callbacks
        mRtcEngine.OnJoinChannelSuccess += Agora_OnJoinChannelSuccess;
        mRtcEngine.OnUserJoined += Agora_OnUserJoined;
        mRtcEngine.OnUserOffline += Agora_OnUserOffline;
        //mRtcEngine.OnLeaveChannel += Agora_OnLeaveChannel;
        JoinChannel(channelName);

        // Create a rectangle matching the capture source's dimensions
        if (!send360)
        {
            width = perspectiveRT.width;
            height = perspectiveRT.height;
        }
        else
        {
            width = 512;
            height = 256;
        }
        mRect = new Rect(0, 0, width, height);
        // Create a texture the size of that rectangle; BGRA32 matches the VIDEO_PIXEL_BGRA format pushed to the SDK
        mTexture = new Texture2D((int)mRect.width, (int)mRect.height, TextureFormat.BGRA32, false);
        grabbable = virtualCamPrefab.GetComponent<DistanceGrabbable>();
#if UNITY_EDITOR
        EditorApplication.quitting += Quit;
#endif
    }
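    // Per-frame pipeline (see shareScreen below): render the chosen camera into a
    // RenderTexture, ReadPixels into mTexture, flip it vertically, then hand the raw
    // BGRA bytes to the SDK via PushVideoFrame.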
    private void Quit()
    {
        // Hook for editor-quit cleanup (currently empty)
    }

    private void JoinChannel(string channelName)
    {
        // Allow camera output callback
        mRtcEngine.EnableVideoObserver();
        // Join the channel; a null token works only for App IDs without a certificate
        mRtcEngine.JoinChannel(channelName, null, 0);
    }
    private void Agora_OnJoinChannelSuccess(string channelName, uint uid, int elapsed)
    {
        inChannel = true;
    }
    private void Agora_OnUserOffline(uint uid, USER_OFFLINE_REASON reason)
    {
        var user = userList.Find((u) => u.uid == uid);
        if (user == null)
        {
            return;
        }
        var index = userList.IndexOf(user);
        userList.Remove(user);
        // Reposition the remaining quads, recomputing each offset from the spawn
        // point with the same formula used in Agora_OnUserJoined
        for (var i = index; i < userList.Count; i++)
        {
            var go = userList[i].go;
            var position = currentSpawnPoint.position;
            if (i > 5)
            {
                position.y += 2;
            }
            position.x -= (i % 6) * (1 + remoteVideoHorizontalSpace);
            go.transform.position = position;
        }
        Destroy(user.go);
    }
    private void Agora_OnUserJoined(uint uid, int elapsed)
    {
        // Lay out remote video quads in a grid: six per row, moving left from the spawn point
        var position = currentSpawnPoint.position;
        if (userList.Count > 5)
        {
            position.y += 2;
        }
        position.x -= (userList.Count % 6) * (1 + remoteVideoHorizontalSpace);
        remoteVideo = Instantiate(remoteVideoPrefab, position, Quaternion.Euler(180, 0, 0));
        var vs = remoteVideo.GetComponent<VideoSurface>();
        vs.SetForUser(uid);
        vs.SetEnable(true);
        // "Filp" is the SDK's own spelling of this flip helper
        vs.EnableFilpTextureApply(true, false);
        userList.Add(new VideoUser() { uid = uid, go = remoteVideo });
    }
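    // Layout example: with remoteVideoHorizontalSpace = 0.2, the user at index 3 sits
    // 3 * 1.2 = 3.6 units left of the spawn point; index 6 wraps back to the spawn
    // column (6 % 6 == 0) on the second row.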
    public void SwitchChannel(string channelName = "", Transform spawn = null)
    {
        ClearUsers();
        if (string.IsNullOrEmpty(channelName))
            channelName = this.channelName;
        currentSpawnPoint = spawn ?? remoteVideoSpawnPoint;
        mRtcEngine.SwitchChannel(null, channelName);
    }

    void ClearUsers()
    {
        userList.ForEach(u => { Destroy(u.go); });
        userList.Clear();
    }
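    // Channel hopping: the remote quads are cleared first because uids from the old
    // channel no longer apply; the engine instance itself is kept alive across the switch.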
    void Update()
    {
        if (mRtcEngine == null)
        {
            return;
        }
        if (disableAudio)
        {
            mRtcEngine.DisableAudio();
        }
        else
        {
            mRtcEngine.EnableAudio();
        }
        // Press B on the right Touch controller to leave the channel and tear the engine down
        if (OVRInput.GetDown(OVRInput.Button.Two) && inChannel)
        {
            inChannel = false;
            mRtcEngine.LeaveChannel();
            mRtcEngine.DisableVideoObserver();
            ClearUsers();
            IRtcEngine.Destroy();
            mRtcEngine = null;
            return;
        }
        if (grabbable)
        {
            virtualCamPrefab.GetComponent<CapsuleCollider>().isTrigger = grabbable.isGrabbed;
        }
        // Capture and push one video frame at the end of this frame
        StartCoroutine(shareScreen());
    }
    public void toggle360()
    {
        send360 = !send360;
        // Rebuild the capture rect and texture to match the new source (equirectRT is assumed to be 512x256)
        width = send360 ? 512 : perspectiveRT.width;
        height = send360 ? 256 : perspectiveRT.height;
        mRect = new Rect(0, 0, width, height);
        mTexture = new Texture2D(width, height, TextureFormat.BGRA32, false);
    }
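    // ReadPixels requires mTexture and mRect to match the active render texture's
    // size, so both are rebuilt whenever the capture source changes.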
    private void FlipTextureVertically(Texture2D original)
    {
        var originalPixels = original.GetPixels();
        Color[] newPixels = new Color[originalPixels.Length];
        int width = original.width;
        int rows = original.height;
        for (int x = 0; x < width; x++)
        {
            for (int y = 0; y < rows; y++)
            {
                newPixels[x + y * width] = originalPixels[x + (rows - y - 1) * width];
            }
        }
        original.SetPixels(newPixels);
    }
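    // The flip compensates for ReadPixels filling the texture bottom-up (the GPU
    // origin is bottom-left), while the raw buffer pushed to Agora is consumed top-down.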
    // Screen share: capture into the active render texture and push it to Agora
    IEnumerator shareScreen()
    {
        yield return new WaitForEndOfFrame();
        if (!send360)
        {
            perspectiveCam.targetTexture = perspectiveRT;
            perspectiveCam.Render();
            RenderTexture.active = perspectiveRT;
        }
        else
        {
            // Render a cubemap, then project it to an equirectangular texture
            _360Cam.RenderToCubemap(_360RT);
            _360RT.ConvertToEquirect(equirectRT);
            RenderTexture.active = equirectRT;
        }
        // Read the pixels inside the rectangle from the active render texture
        mTexture.ReadPixels(mRect, 0, 0);
        // Apply the pixels read from the rectangle to the texture
        mTexture.Apply();
        // Flip the texture vertically before sending
        FlipTextureVertically(mTexture);
        // Get the raw texture data as a byte array
        byte[] bytes = mTexture.GetRawTextureData();
        // Check whether an engine instance has already been created
        IRtcEngine rtc = IRtcEngine.QueryEngine();
        if (rtc != null)
        {
            // Create a new external video frame
            ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
            // Set the buffer type of the video frame
            externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
            // Set the video pixel format to match the BGRA32 texture
            externalVideoFrame.format = ExternalVideoFrame.VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_BGRA;
            // Hand the raw texture bytes to the video frame
            externalVideoFrame.buffer = bytes;
            // Width of the video frame in pixels
            externalVideoFrame.stride = (int)mRect.width;
            // Height of the video frame in pixels
            externalVideoFrame.height = (int)mRect.height;
            // Crop 10 pixels from each side of the frame
            externalVideoFrame.cropLeft = 10;
            externalVideoFrame.cropTop = 10;
            externalVideoFrame.cropRight = 10;
            externalVideoFrame.cropBottom = 10;
            // Rotation of the video frame (0, 90, 180, or 270)
            externalVideoFrame.rotation = 0;
            // Increment the timestamp so each pushed frame is newer than the last
            externalVideoFrame.timestamp = i++;
            // Push the frame to the SDK
            int a = rtc.PushVideoFrame(externalVideoFrame);
            Debug.Log("pushVideoFrame = " + a);
        }
        RenderTexture.active = null;
        perspectiveCam.targetTexture = null;
        _360Cam.targetTexture = null;
    }
}
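
// Rough scene wiring (assumptions, not part of the original gist): attach this script to a
// manager object, assign perspectiveCam/perspectiveRT for flat capture and _360Cam/_360RT/
// equirectRT (512x256) for 360 capture, point remoteVideoPrefab at a quad carrying the
// Agora VideoSurface component, and set appId to a valid Agora App ID. toggle360() can be
// bound to a UI button or controller input to switch capture modes at runtime.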