By default, the Agora Unity SDK enables the default video module to capture and render video during real-time communications.
However, the default modules may not always meet your development requirements — for example, when your app needs to use a custom video capture device or process video data before rendering.
This article describes how to use the Agora Unity SDK to customize the video source and renderer.
Before customizing the video source or renderer, ensure that you have implemented the basic real-time communication functions in your project. See Start a Video Call or Start Interactive Video Streaming for details.
Refer to the following steps to customize the video source and renderer in your project:
Call SetExternalVideoSource before JoinChannelByKey to enable an external video source, then call PushVideoFrame to send the video data to the SDK. The following steps show how to implement screen sharing by customizing the video source or renderer.
Specify the external video source by calling SetExternalVideoSource before JoinChannelByKey.
mRtcEngine.SetExternalVideoSource(true, false);
Define the Texture2D, and use Texture2D to read screen pixels as an external video source.
mRect = new Rect(0, 0, Screen.width, Screen.height);
mTexture = new Texture2D((int)mRect.width, (int)mRect.height, TextureFormat.BGRA32, false);
mTexture.ReadPixels(mRect, 0, 0);
mTexture.Apply();
Call PushVideoFrame to send the video source to the SDK and implement screen sharing.
int a = rtc.PushVideoFrame(externalVideoFrame);
The following diagram shows the API call sequence for customizing the video source and renderer.
Refer to the following code to customize the video source in your project, for functions such as screen sharing.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using agora_gaming_rtc;
using UnityEngine.UI;
using System.Globalization;
using System.Runtime.InteropServices;
using System;
/// <summary>
/// Captures the screen every rendered frame and pushes it to the Agora SDK
/// as an external video source, implementing screen sharing in Unity.
/// Attach to a GameObject in the scene; requires a valid Agora App ID.
/// </summary>
public class ShareScreen : MonoBehaviour
{
    // Texture that receives the screen pixels each frame (BGRA32 to match
    // the pixel format pushed to the SDK below).
    Texture2D mTexture;
    // Full-screen rectangle describing the capture region.
    Rect mRect;
    [SerializeField]
    private string appId = "Your_AppID";
    [SerializeField]
    private string channelName = "agora";
    public IRtcEngine mRtcEngine;

    void Start()
    {
        Debug.Log("ScreenShare Activated");
        mRtcEngine = IRtcEngine.GetEngine(appId);
        // Sets the output log level of the SDK.
        mRtcEngine.SetLogFilter(LOG_FILTER.DEBUG | LOG_FILTER.INFO | LOG_FILTER.WARNING | LOG_FILTER.ERROR | LOG_FILTER.CRITICAL);
        // Enables the video module.
        mRtcEngine.EnableVideo();
        // Enables the video observer.
        mRtcEngine.EnableVideoObserver();
        // Configures the external video source. Must be called before joining a channel.
        mRtcEngine.SetExternalVideoSource(true, false);
        // Joins a channel.
        mRtcEngine.JoinChannel(channelName, null, 0);
        // Creates a rectangular region covering the whole screen.
        mRect = new Rect(0, 0, Screen.width, Screen.height);
        // Creates a texture sized to the capture region.
        mTexture = new Texture2D((int)mRect.width, (int)mRect.height, TextureFormat.BGRA32, false);
    }

    void Update()
    {
        // Starts one capture coroutine per rendered frame so that exactly one
        // video frame is pushed to the SDK per Unity frame.
        StartCoroutine(shareScreen());
    }

    /// <summary>
    /// Captures the screen at the end of the current frame and pushes it to
    /// the Agora SDK as an external video frame.
    /// </summary>
    IEnumerator shareScreen()
    {
        // Wait until rendering has finished so ReadPixels sees the completed frame.
        yield return new WaitForEndOfFrame();
        // Reads the pixels of the capture rectangle into the texture.
        mTexture.ReadPixels(mRect, 0, 0);
        mTexture.Apply();
        // Gets the raw texture data as an array of bytes.
        byte[] bytes = mTexture.GetRawTextureData();
        // Checks whether the IRtcEngine instance exists.
        IRtcEngine rtc = IRtcEngine.QueryEngine();
        if (rtc != null)
        {
            // Creates a new external video frame.
            ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
            // Sets the buffer type of the video frame.
            externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
            // Sets the pixel format; must match the texture format used above.
            externalVideoFrame.format = ExternalVideoFrame.VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_BGRA;
            // Applies the raw pixel data.
            externalVideoFrame.buffer = bytes;
            // Sets the width (in pixels) of the video frame.
            externalVideoFrame.stride = (int)mRect.width;
            // Sets the height (in pixels) of the video frame.
            externalVideoFrame.height = (int)mRect.height;
            // Removes 10 pixels from each side of the frame.
            externalVideoFrame.cropLeft = 10;
            externalVideoFrame.cropTop = 10;
            externalVideoFrame.cropRight = 10;
            externalVideoFrame.cropBottom = 10;
            // Rotates the video frame (0, 90, 180, or 270).
            externalVideoFrame.rotation = 180;
            // Calculates the video timestamp in milliseconds from the system time.
            externalVideoFrame.timestamp = System.DateTime.Now.Ticks / 10000;
            // Pushes the external video frame to the SDK; a non-zero return
            // value indicates failure, so surface it instead of discarding it.
            int result = rtc.PushVideoFrame(externalVideoFrame);
            if (result != 0)
            {
                Debug.LogWarning("PushVideoFrame failed with error code: " + result);
            }
        }
    }
}
For details of the AR feature, see Video chat with Unity3D and ARFoundation.