iOS 15.1/15.2 Safari WebRTC 推流崩溃(crash)解决方案

在 iOS 15.1/15.2 上使用 Safari 或 Chrome 进行 WebRTC 推流时,浏览器页面会 crash(崩溃)。

此处使用 canvas 采集的方案规避了该问题,该规避方案性能开销相对会更大些。出于性能的考虑,建议在 iOS 15.1 采集 Profile 不高于 720p, 15fps。需要注意的是,该方案需要本地视频流处于播放状态.

下面写一个简单的单页面 demo,演示如何基于本地流新建 canvas 流;推流时改用 canvas 流,即可规避上述 crash 问题。

  1. 兼容性处理

// Cross-browser shims: normalize window.URL and expose a Promise-based
// navigator.mediaDevices.getUserMedia on browsers that only ship the old
// prefixed, callback-style APIs.
window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
if (navigator.mediaDevices === undefined) {
  navigator.mediaDevices = {};
}
if (navigator.mediaDevices.getUserMedia === undefined) {
  navigator.mediaDevices.getUserMedia = function (constraints) {
    // Fall back to whichever vendor-prefixed implementation exists.
    var legacyGetUserMedia =
      navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
    if (!legacyGetUserMedia) {
      return Promise.reject(new Error("getUserMedia is not implemented in this browser"));
    }
    // Adapt the callback API to a Promise.
    return new Promise(function (resolve, reject) {
      legacyGetUserMedia.call(navigator, constraints, resolve, reject);
    });
  };
}

2.采集本地流+播放+停止

// Raw camera stream, shared with the close/canvas helpers below.
// (The "Satream" typo is kept because the other functions reference this name.)
var mediaSatream;

/**
 * Opens the camera and hands the stream to successFunc for playback.
 * Bug fix: the original constraints object declared the `video` key twice
 * ({ video: true, video: {...} }); in a JS object literal the later key
 * silently wins, so `video: true` was dead code. Only one entry is kept.
 * Note: facingMode "environment" requests the REAR camera (the original
 * comment wrongly called it the front camera).
 */
function openMedia() {
  navigator.mediaDevices
    .getUserMedia({
      audio: false,
      video: { facingMode: "environment" }, // rear camera; { exact: "environment" } would force it
      // video: { width: 1280, height: 720 }, // fixed capture size; percentages are not allowed
    })
    .then(successFunc)
    .catch(errorFunc);
}
    // Stores the captured stream and plays it in the page's <video> element.
    function successFunc(stream) {
      mediaSatream = stream;
      const playbackEl = document.querySelector("video");
      if ("srcObject" in playbackEl) {
        playbackEl.srcObject = stream;
      } else {
        // Legacy fallback: older browsers take an object URL (or the stream itself).
        playbackEl.src = (window.URL && window.URL.createObjectURL(stream)) || stream;
      }
      playbackEl.play();
    }
    /** getUserMedia failure handler: logs the full error, shows its name to the user. */
    function errorFunc(error) {
      console.warn(error);
      alert(error.name);
    }
    function closeMedia() {
      mediaSatream.getVideoTracks().forEach(function (track) {
        track.stop();
      });
      let video = document.querySelector("video");
      if ("srcObject" in video) {
        video.srcObject = null;
      } else {
        video.src = null;
      }
    }

3.新建canvas流+播放+停止

// Off-screen canvas, the stream captured from it, and the draw timer.
var canvas;
var canvasStream;
var canvasInterval;

/**
 * Builds a video track by repeatedly painting the playing <video id="video">
 * element onto an off-screen canvas and capturing that canvas as a stream.
 * Publishing this track (instead of the raw camera track) works around the
 * iOS 15.1/15.2 Safari WebRTC publishing crash.
 *
 * Fixes over the original:
 *  - videoSettings.frameRate may be undefined on some browsers; the original
 *    then computed setInterval(fn, NaN), which degenerates to a 0 ms interval
 *    and burns CPU. A 15 fps default is used instead.
 *  - the source element is looked up once, not on every timer tick.
 *
 * @param {MediaStreamTrack} videoTrack camera video track (used only for its settings)
 * @returns {MediaStreamTrack} the canvas-backed video track
 */
function generateCanvasTrack(videoTrack) {
  var videoSettings = videoTrack.getSettings();
  canvas = document.createElement("canvas");
  var canvasCtx = canvas.getContext("2d");
  canvas.height = videoSettings.height;
  canvas.width = videoSettings.width;

  // Only one draw loop at a time.
  if (canvasInterval) {
    clearInterval(canvasInterval);
  }

  // Cap at ~15 fps (>= 66 ms per frame) to limit the workaround's CPU cost.
  var frameRate = videoSettings.frameRate || 15;
  var sourceVideo = document.getElementById("video");
  canvasInterval = setInterval(function () {
    if (sourceVideo) {
      canvasCtx.drawImage(sourceVideo, 0, 0, canvas.width, canvas.height);
    }
  }, Math.max(66, Math.floor(1000 / frameRate)));

  return canvas.captureStream().getVideoTracks()[0];
}

/**
 * Builds the canvas-based MediaStream (canvas video track + original audio
 * track) and plays it in <video id="canvas">. This is the stream that should
 * be published over WebRTC to avoid the iOS 15.1/15.2 Safari crash.
 * Bug fix: the original only alerted when no source stream existed and then
 * fell through, throwing on mediaSatream.getVideoTracks(); it now returns.
 */
function canvasMedia() {
  if (!mediaSatream) {
    alert("没有原始流");
    return; // bug fix: original fell through and threw on the next line
  }
  canvasStream = new MediaStream();
  if (mediaSatream.getVideoTracks().length > 0) {
    var canvasTrack = generateCanvasTrack(mediaSatream.getVideoTracks()[0]);
    canvasStream.addTrack(canvasTrack);
  }
  if (mediaSatream.getAudioTracks().length > 0) {
    // Audio needs no workaround: reuse the original audio track directly.
    canvasStream.addTrack(mediaSatream.getAudioTracks()[0]);
  }
  let video = document.getElementById("canvas");
  if ("srcObject" in video) {
    video.srcObject = canvasStream;
  } else {
    video.src = (window.URL && window.URL.createObjectURL(canvasStream)) || canvasStream;
  }
  video.play();
}

/**
 * Stops the canvas stream and detaches it from <video id="canvas">.
 * Bug fix: the original never cleared canvasInterval, so the drawImage timer
 * kept firing forever after close; it is cleared here. Also guards against
 * being called before canvasMedia(). Only video tracks are stopped on
 * purpose: the audio track is shared with the original camera stream.
 */
function closeCanvas() {
  if (canvasInterval) {
    clearInterval(canvasInterval);
    canvasInterval = null;
  }
  if (!canvasStream) {
    return; // canvas stream was never created
  }
  canvasStream.getVideoTracks().forEach(function (track) {
    track.stop();
  });
  let video = document.getElementById("canvas");
  if ("srcObject" in video) {
    video.srcObject = null;
  } else {
    video.src = null;
  }
}

4.完整demo

<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <meta name="format-detection" content="telephone=yes" />
  </head>
  <script>
    // Shared state: raw camera stream, off-screen canvas, canvas stream, draw timer.
    var mediaSatream;
    var canvas;
    var canvasStream;
    var canvasInterval;
    // Cross-browser shims: normalize window.URL and expose a Promise-based
    // getUserMedia on browsers that only ship prefixed, callback-style APIs.
    window.URL = window.URL || window.webkitURL || window.mozURL || window.msURL;
    if (navigator.mediaDevices === undefined) {
      navigator.mediaDevices = {};
    }
    if (navigator.mediaDevices.getUserMedia === undefined) {
      navigator.mediaDevices.getUserMedia = function (constraints) {
        // Fall back to whichever vendor-prefixed implementation exists.
        var legacyGetUserMedia =
          navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia;
        if (!legacyGetUserMedia) {
          return Promise.reject(new Error("getUserMedia is not implemented in this browser"));
        }
        // Adapt the callback API to a Promise.
        return new Promise(function (resolve, reject) {
          legacyGetUserMedia.call(navigator, constraints, resolve, reject);
        });
      };
    }
    /**
     * Opens the camera and hands the stream to successFunc for playback.
     * Bug fix: the original constraints object declared the `video` key twice
     * ({ video: true, video: {...} }); the later key silently wins, so
     * `video: true` was dead code. Only one entry is kept. Note that
     * facingMode "environment" requests the REAR camera (the original
     * comment wrongly called it the front camera).
     */
    function openMedia() {
      navigator.mediaDevices
        .getUserMedia({
          audio: false,
          video: { facingMode: "environment" }, // rear camera; { exact: "environment" } would force it
          // video: { width: 1280, height: 720 }, // fixed capture size; percentages are not allowed
        })
        .then(successFunc)
        .catch(errorFunc);
    }
    // Stores the captured stream and plays it in the page's <video> element.
    function successFunc(stream) {
      mediaSatream = stream;
      const playbackEl = document.querySelector("video");
      if ("srcObject" in playbackEl) {
        playbackEl.srcObject = stream;
      } else {
        // Legacy fallback: older browsers take an object URL (or the stream itself).
        playbackEl.src = (window.URL && window.URL.createObjectURL(stream)) || stream;
      }
      playbackEl.play();
    }
    /** getUserMedia failure handler: logs the full error, shows its name to the user. */
    function errorFunc(error) {
      console.warn(error);
      alert(error.name);
    }
    /**
     * Stops the raw camera stream and detaches it from the <video> element.
     * Robustness fix: the original threw a TypeError when invoked before
     * openMedia() had succeeded (mediaSatream undefined); a guard is added.
     */
    function closeMedia() {
      if (!mediaSatream) {
        return; // nothing was opened yet
      }
      mediaSatream.getVideoTracks().forEach(function (track) {
        track.stop();
      });
      let video = document.querySelector("video");
      if ("srcObject" in video) {
        video.srcObject = null;
      } else {
        video.src = null;
      }
    }
    /**
     * Builds a video track by repeatedly painting the playing <video id="video">
     * element onto an off-screen canvas and capturing that canvas as a stream.
     * Publishing this track (instead of the raw camera track) works around the
     * iOS 15.1/15.2 Safari WebRTC publishing crash.
     *
     * Fixes over the original:
     *  - videoSettings.frameRate may be undefined on some browsers; the
     *    original then computed setInterval(fn, NaN), which degenerates to a
     *    0 ms interval and burns CPU. A 15 fps default is used instead.
     *  - the source element is looked up once, not on every timer tick.
     *
     * @param {MediaStreamTrack} videoTrack camera video track (used only for its settings)
     * @returns {MediaStreamTrack} the canvas-backed video track
     */
    function generateCanvasTrack(videoTrack) {
      var videoSettings = videoTrack.getSettings();
      canvas = document.createElement("canvas");
      var canvasCtx = canvas.getContext("2d");
      canvas.height = videoSettings.height;
      canvas.width = videoSettings.width;

      // Only one draw loop at a time.
      if (canvasInterval) {
        clearInterval(canvasInterval);
      }

      // Cap at ~15 fps (>= 66 ms per frame) to limit the workaround's CPU cost.
      var frameRate = videoSettings.frameRate || 15;
      var sourceVideo = document.getElementById("video");
      canvasInterval = setInterval(function () {
        if (sourceVideo) {
          canvasCtx.drawImage(sourceVideo, 0, 0, canvas.width, canvas.height);
        }
      }, Math.max(66, Math.floor(1000 / frameRate)));

      return canvas.captureStream().getVideoTracks()[0];
    }

    /**
     * Builds the canvas-based MediaStream (canvas video track + original
     * audio track) and plays it in <video id="canvas">. This is the stream
     * to publish over WebRTC to avoid the iOS 15.1/15.2 Safari crash.
     * Bug fix: the original only alerted when no source stream existed and
     * then fell through, throwing on mediaSatream.getVideoTracks().
     */
    function canvasMedia() {
      if (!mediaSatream) {
        alert("没有原始流");
        return; // bug fix: original fell through and threw on the next line
      }
      canvasStream = new MediaStream();
      if (mediaSatream.getVideoTracks().length > 0) {
        var canvasTrack = generateCanvasTrack(mediaSatream.getVideoTracks()[0]);
        canvasStream.addTrack(canvasTrack);
      }
      if (mediaSatream.getAudioTracks().length > 0) {
        // Audio needs no workaround: reuse the original audio track directly.
        canvasStream.addTrack(mediaSatream.getAudioTracks()[0]);
      }
      let video = document.getElementById("canvas");
      if ("srcObject" in video) {
        video.srcObject = canvasStream;
      } else {
        video.src = (window.URL && window.URL.createObjectURL(canvasStream)) || canvasStream;
      }
      video.play();
    }

    /**
     * Stops the canvas stream and detaches it from <video id="canvas">.
     * Bug fix: the original never cleared canvasInterval, so the drawImage
     * timer kept firing forever after close; it is cleared here. Also guards
     * against being called before canvasMedia(). Only video tracks are
     * stopped on purpose: the audio track is shared with the camera stream.
     */
    function closeCanvas() {
      if (canvasInterval) {
        clearInterval(canvasInterval);
        canvasInterval = null;
      }
      if (!canvasStream) {
        return; // canvas stream was never created
      }
      canvasStream.getVideoTracks().forEach(function (track) {
        track.stop();
      });
      let video = document.getElementById("canvas");
      if ("srcObject" in video) {
        video.srcObject = null;
      } else {
        video.src = null;
      }
    }
  </script>
  <body>
    <button onclick="openMedia()">打开</button>
    <button onclick="closeMedia()">关闭</button>
    <button onclick="canvasMedia()">canvas</button>
    <button onclick="closeCanvas()">关canvas</button>
    <br />
    <video id="video"></video>
    <video id="canvas"></video>
  </body>
</html>

猜你喜欢

转载自blog.csdn.net/shinjie1210/article/details/129423729