Implementation idea:
Use the UDP protocol to send the rendered image data to another device.
Key points:
① Scale the image down before sending (the data for a full-resolution frame is far too large).
② Split each frame into multiple UDP packets (a single UDP datagram can carry at most about 64 KB, so the data has to be sent in smaller chunks); a minimal framing sketch follows below.
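To make point ② concrete, here is a minimal sketch of the per-datagram framing used throughout this post, assuming each datagram carries up to 1023 payload bytes plus one trailing flag byte (1 = last packet of the frame, 0 = more packets follow). The FrameSplitter class and SplitFrame helper are illustrative names only; the actual sender below inlines this logic.
using System;
using System.Collections.Generic;
using UnityEngine;
public static class FrameSplitter
{
    // Split one encoded frame into datagrams of (payloadSize + 1) bytes.
    // The last byte of each datagram is a flag: 1 = end of frame, 0 = more packets follow.
    public static List<byte[]> SplitFrame(byte[] frame, int payloadSize = 1023)
    {
        var packets = new List<byte[]>();
        int count = Mathf.CeilToInt(frame.Length / (float)payloadSize);
        for (int i = 0; i < count; i++)
        {
            bool last = (i == count - 1);
            int len = last ? frame.Length - i * payloadSize : payloadSize;
            byte[] packet = new byte[len + 1];
            Array.Copy(frame, i * payloadSize, packet, 0, len);
            packet[len] = (byte)(last ? 1 : 0);
            packets.Add(packet);
        }
        return packets;
    }
}
For example, a 50 KB frame (51200 bytes) splits into Mathf.CeilToInt(51200 / 1023f) = 51 such datagrams.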
Implementation code:
Screen-mirroring sender:
Attach this component to a Camera.
using System;
using System.Net;
using System.Net.Sockets;
using UnityEngine;
public class Sender : MonoBehaviour
{
    public string tarIP = "127.0.0.1"; // target host IP
    public int tarPort = 8888;         // target host port
    private IPEndPoint tarEP;
    private UdpClient udpClient;

    [Header("Frames sent per second (higher = smoother)")]
    [Range(10, 60)]
    public int sendFPS = 25;
    private float delTime = 0f;

    [Header("Image quality / render scale (higher = sharper)")]
    [Range(0.1f, 0.75f)]
    public float huaZhi = 0.25f;

    private int perByteCount = 1024; // bytes per UDP packet
    private RenderTexture renderTexture;

    void Start()
    {
        // Render this camera into an off-screen RenderTexture
        renderTexture = new RenderTexture(Screen.width, Screen.height, 24);
        GetComponent<Camera>().targetTexture = renderTexture;
        tarEP = new IPEndPoint(IPAddress.Parse(tarIP), tarPort);
        udpClient = new UdpClient();
        perByteCount--; // reserve 1 byte per packet for the end-of-frame flag
    }

    private void Update()
    {
        if ((delTime += Time.deltaTime) >= 1f / sendFPS) // send once enough time has accumulated
        {
            delTime = 0f;
            // Convert the render texture into a scaled-down JPG byte array
            byte[] bytes = ScaleTexture(renderTexture, huaZhi);
            int sendCount = Mathf.CeilToInt(bytes.Length / (float)perByteCount); // total number of packets for this frame
            int lastCount = bytes.Length - (sendCount - 1) * perByteCount;       // payload size of the last packet
            for (int i = 0; i < sendCount; i++)
            {
                int c = (i == sendCount - 1) ? lastCount : perByteCount;
                byte[] bytes1 = new byte[c + 1];
                Array.Copy(bytes, i * perByteCount, bytes1, 0, c);
                // Last byte of every packet: 1 = end of frame, 0 = more packets follow
                bytes1[bytes1.Length - 1] = (byte)(i == sendCount - 1 ? 1 : 0);
                udpClient.Send(bytes1, bytes1.Length, tarEP);
            }
        }
    }

    // Encode a RenderTexture into a JPG byte array, scaled down by the given factor
    private static byte[] ScaleTexture(RenderTexture source, float scale)
    {
        // Compute the scaled width and height
        int width = (int)(source.width * scale);
        int height = (int)(source.height * scale);
        // Get a temporary RenderTexture to hold the scaled result
        RenderTexture scaled = RenderTexture.GetTemporary(width, height);
        // Make it the active RenderTexture
        RenderTexture.active = scaled;
        // Blit the source into it, which performs the scaling
        Graphics.Blit(source, scaled);
        // Read the pixels back into a Texture2D on the CPU
        Texture2D result = new Texture2D(width, height);
        result.ReadPixels(new Rect(0, 0, width, height), 0, 0);
        // result.Apply() is not needed here: EncodeToJPG reads the CPU-side pixel data
        RenderTexture.active = null;
        // Release the temporary RenderTexture
        RenderTexture.ReleaseTemporary(scaled);
        // Encode to JPG and destroy the temporary Texture2D to avoid leaking one per sent frame
        byte[] jpg = result.EncodeToJPG();
        Destroy(result);
        return jpg;
    }

    private void OnDestroy()
    {
        udpClient.Close();
    }
}
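A note on the 1024-byte packet size: although a single UDP datagram can in principle carry up to about 64 KB, anything larger than the network MTU (typically around 1500 bytes on Ethernet or Wi-Fi) is fragmented at the IP layer, and the loss of any one fragment discards the whole datagram. Keeping each datagram at 1023 payload bytes plus 1 flag byte stays safely below the MTU. As a rough, scene-dependent estimate: if a quarter-scale JPG frame comes out at about 40 KB, the sender emits Mathf.CeilToInt(40960 / 1023f) = 41 datagrams per frame, i.e. roughly 1,000 datagrams and about 1 MB of data per second at sendFPS = 25.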
Screen-mirroring receiver:
Attach this component to a RawImage.
using System;
using System.Collections.Generic;
using System.Net.Sockets;
using UnityEngine;
using UnityEngine.UI;
public class Receiver : MonoBehaviour
{
    private UdpClient udpClient;
    private RawImage rawImage;
    private Texture2D t2d;
    // Bytes of the frame currently being reassembled
    private List<byte> bytes = new List<byte>();
    // Packets received but not yet processed
    private Queue<byte[]> videoImaDatas = new Queue<byte[]>();

    void Start()
    {
        rawImage = GetComponent<RawImage>();
        // LoadImage resizes this texture to the received frame size automatically
        t2d = new Texture2D(2, 2);
        rawImage.texture = t2d;
        // Listen on the port the sender targets
        udpClient = new UdpClient(8888);
        UdpRece();
    }

    void Update()
    {
        // Drain every packet that arrived since the last frame,
        // otherwise the queue grows faster than it is consumed
        while (videoImaDatas.TryDequeue(out byte[] d))
        {
            // Append the payload (everything except the trailing flag byte)
            bytes.AddRange(new ArraySegment<byte>(d, 0, d.Length - 1));
            // A flag byte of 1 marks the last packet of a frame: decode and display it
            if (d[d.Length - 1] == 1)
            {
                t2d.LoadImage(bytes.ToArray());
                bytes.Clear();
            }
        }
    }

    // Receive datagrams in a loop and queue them for Update to process
    public async void UdpRece()
    {
        try
        {
            while (true)
            {
                byte[] buffer = (await udpClient.ReceiveAsync()).Buffer;
                videoImaDatas.Enqueue(buffer);
            }
        }
        catch (ObjectDisposedException) { /* socket closed in OnDestroy */ }
        catch (SocketException) { /* receive aborted, e.g. socket closed */ }
    }

    private void OnDestroy()
    {
        udpClient.Close();
    }
}
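One caveat on threading: UdpRece is an async loop started from Start on the main thread, and Unity's default synchronization context resumes await continuations on the main thread, so the plain Queue<byte[]> and List<byte> above are only ever touched from one thread. If you move the receive loop to a background thread (e.g. Task.Run or a dedicated Thread), swap the queue for System.Collections.Concurrent.ConcurrentQueue<byte[]>, whose TryDequeue has the same signature, so Update needs no other change. Also keep in mind that UDP offers no delivery or ordering guarantees: if any datagram of a frame is lost or arrives out of order, that frame's JPG data is corrupt and LoadImage fails for it; the display recovers once a later frame arrives intact.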