如何使用收发器(Transceiver)API 初始化 WebRTC 通话,但在信令完成后才启用音频/视频?

发布于 2025-01-30 03:32:14 字数 3436 浏览 1 评论 0 原文

我正在尝试首先连接两个WEBRTC同行。建立连接后,我想为双方的用户提供启用/禁用视频和音频的选项。这应该发生在不再次触发信号过程的情况下。

不过,我确实遇到了一个问题:如果我调用 replaceTrack(audioTrack),远程对等方将不会播放音频,直到我同样调用 replaceTrack(videoTrack)。

我不确定为什么会发生这种情况,在文档中也找不到任何线索。一旦我在 10 秒后附加了视频轨道,音频就能正常播放。没有视频轨道,就没有音频播放。为什么?

/**
 * Creates a 320px-wide <video> element with controls and autoplay enabled,
 * appends it to the document body, and returns it.
 * @returns {HTMLVideoElement} the newly attached video element
 */
function createVideoElement() {
  const vid = document.createElement("video");
  vid.width = 320;
  vid.controls = true;
  vid.autoplay = true;
  // Removed unused `root` alias of document.body.
  document.body.appendChild(vid);
  return vid;
}

/**
 * Demonstrates the reported bug: negotiate audio+video transceivers up front
 * with no tracks attached, then attach tracks later via replaceTrack().
 * Audio attached at t+1s does not play until video is attached at t+10s.
 */
async function RunTestInit() {

  console.log("get media access");
  // Acquire camera + microphone once per simulated peer (both live in this page).
  const p1_stream_out = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  });
  const p2_stream_out = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  });


  console.log("stream setup");
  // Empty MediaStreams that will collect the remote tracks for each peer.
  const p1_stream_in = new MediaStream();
  const p2_stream_in = new MediaStream();

  const p1_video_in = createVideoElement();
  const p2_video_in = createVideoElement();

  console.log("peer setup");
  const p1 = new RTCPeerConnection();
  const p2 = new RTCPeerConnection();
  // Pre-allocate one audio and one video transceiver so both m-lines are
  // negotiated now; actual tracks are attached later with replaceTrack(),
  // which does not require renegotiation.
  const p1_tca = p1.addTransceiver("audio", {
    direction: "sendrecv"
  });
  const p1_tcv = p1.addTransceiver("video", {
    direction: "sendrecv"
  });


  // Loopback ICE: forward each candidate straight to the other in-page peer.
  p1.onicecandidate = (ev) => {
    p2.addIceCandidate(ev.candidate);
  }
  p2.onicecandidate = (ev) => {
    p1.addIceCandidate(ev.candidate);
  }

  p1.onconnectionstatechange = (ev) => {
    console.log("p1 state: ", p1.connectionState);
  }
  p2.onconnectionstatechange = async (ev) => {
    console.log("p2 state: ", p2.connectionState);
  }

  p1.onnegotiationneeded = () => {
    //triggers once
    console.warn("p1.onnegotiationneeded");
  }

  p2.onnegotiationneeded = () => {
    //should never trigger
    console.warn("p2.onnegotiationneeded");
  }

  // NOTE(review): srcObject is (re)assigned here only after a track arrives;
  // per the accepted answer further down, this ordering is what stalls audio
  // playback until a video track is also attached.
  p1.ontrack = (ev) => {
    console.log("p1.ontrack", ev);
    p1_stream_in.addTrack(ev.track);
    p1_video_in.srcObject = p1_stream_in;
  }
  p2.ontrack = (ev) => {
    console.log("p2.ontrack", ev);
    p2_stream_in.addTrack(ev.track);
    p2_video_in.srcObject = p2_stream_in;
  }
  console.log("signaling");
  // Single offer/answer exchange; no further signaling happens afterwards.
  const offer = await p1.createOffer();
  await p1.setLocalDescription(offer);
  await p2.setRemoteDescription(offer);
  // p2's transceivers are created implicitly by setRemoteDescription, in the
  // same order as p1's m-lines: [0] = audio, [1] = video.
  const p2_tca = p2.getTransceivers()[0];
  const p2_tcv = p2.getTransceivers()[1];

  p2_tca.direction = "sendrecv"
  p2_tcv.direction = "sendrecv"

  const answer = await p2.createAnswer();
  await p2.setLocalDescription(answer);
  await p1.setRemoteDescription(answer);
  console.log("signaling done");

  //send audio from p2 to p1 (direction doesn't matter)
  //after this runs nothing will happen and no audio plays
  setTimeout(async () => {
    await p2_tca.sender.replaceTrack(p2_stream_out.getAudioTracks()[0]);
    console.warn("audio playback should start now but nothing happens");
  }, 1000);

  //audio starts playing once this runs
  setTimeout(async () => {
    //uncomment this and it works just fine
    await p2_tcv.sender.replaceTrack(p2_stream_out.getVideoTracks()[0]);
    console.warn("now audio playback starts");
  }, 10000);
}

/**
 * Entry point: defers test initialisation to a later macrotask tick.
 */
function start() {
  const launch = async () => {
    console.log("Init test case");
    await RunTestInit();
  };
  setTimeout(launch, 1);
}

JS Fiddle(需要摄像头和麦克风访问权限)中的相同示例: https://jsfiddle.net/vnztcx5p/5/

一旦audio起作用,这将引起echo。

I am trying to first connect two WebRTC peers. Once the connection is established I want to give the users on both sides the option to enable/disable video and audio. This should happen without triggering the signaling process again.

I do run into an issue though: If I call replaceTrack(audioTack) the remote peer will not playback audio until I also call replaceTrack(video).

I am unsure why this happen and can not find any clue in the documentation. It does play fine after 10 seconds once I also attach the video track. Without video track there is no audio playback. Why?

/**
 * Creates a 320px-wide <video> element with controls and autoplay enabled,
 * appends it to the document body, and returns it.
 * @returns {HTMLVideoElement} the newly attached video element
 */
function createVideoElement() {
  const vid = document.createElement("video");
  vid.width = 320;
  vid.controls = true;
  vid.autoplay = true;
  // Removed unused `root` alias of document.body.
  document.body.appendChild(vid);
  return vid;
}

/**
 * Demonstrates the reported bug: negotiate audio+video transceivers up front
 * with no tracks attached, then attach tracks later via replaceTrack().
 * Audio attached at t+1s does not play until video is attached at t+10s.
 */
async function RunTestInit() {

  console.log("get media access");
  // Acquire camera + microphone once per simulated peer (both live in this page).
  const p1_stream_out = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  });
  const p2_stream_out = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  });


  console.log("stream setup");
  // Empty MediaStreams that will collect the remote tracks for each peer.
  const p1_stream_in = new MediaStream();
  const p2_stream_in = new MediaStream();

  const p1_video_in = createVideoElement();
  const p2_video_in = createVideoElement();

  console.log("peer setup");
  const p1 = new RTCPeerConnection();
  const p2 = new RTCPeerConnection();
  // Pre-allocate one audio and one video transceiver so both m-lines are
  // negotiated now; actual tracks are attached later with replaceTrack(),
  // which does not require renegotiation.
  const p1_tca = p1.addTransceiver("audio", {
    direction: "sendrecv"
  });
  const p1_tcv = p1.addTransceiver("video", {
    direction: "sendrecv"
  });


  // Loopback ICE: forward each candidate straight to the other in-page peer.
  p1.onicecandidate = (ev) => {
    p2.addIceCandidate(ev.candidate);
  }
  p2.onicecandidate = (ev) => {
    p1.addIceCandidate(ev.candidate);
  }

  p1.onconnectionstatechange = (ev) => {
    console.log("p1 state: ", p1.connectionState);
  }
  p2.onconnectionstatechange = async (ev) => {
    console.log("p2 state: ", p2.connectionState);
  }

  p1.onnegotiationneeded = () => {
    //triggers once
    console.warn("p1.onnegotiationneeded");
  }

  p2.onnegotiationneeded = () => {
    //should never trigger
    console.warn("p2.onnegotiationneeded");
  }

  // NOTE(review): srcObject is (re)assigned here only after a track arrives;
  // per the accepted answer further down, this ordering is what stalls audio
  // playback until a video track is also attached.
  p1.ontrack = (ev) => {
    console.log("p1.ontrack", ev);
    p1_stream_in.addTrack(ev.track);
    p1_video_in.srcObject = p1_stream_in;
  }
  p2.ontrack = (ev) => {
    console.log("p2.ontrack", ev);
    p2_stream_in.addTrack(ev.track);
    p2_video_in.srcObject = p2_stream_in;
  }
  console.log("signaling");
  // Single offer/answer exchange; no further signaling happens afterwards.
  const offer = await p1.createOffer();
  await p1.setLocalDescription(offer);
  await p2.setRemoteDescription(offer);
  // p2's transceivers are created implicitly by setRemoteDescription, in the
  // same order as p1's m-lines: [0] = audio, [1] = video.
  const p2_tca = p2.getTransceivers()[0];
  const p2_tcv = p2.getTransceivers()[1];

  p2_tca.direction = "sendrecv"
  p2_tcv.direction = "sendrecv"

  const answer = await p2.createAnswer();
  await p2.setLocalDescription(answer);
  await p1.setRemoteDescription(answer);
  console.log("signaling done");

  //send audio from p2 to p1 (direction doesn't matter)
  //after this runs nothing will happen and no audio plays
  setTimeout(async () => {
    await p2_tca.sender.replaceTrack(p2_stream_out.getAudioTracks()[0]);
    console.warn("audio playback should start now but nothing happens");
  }, 1000);

  //audio starts playing once this runs
  setTimeout(async () => {
    //uncomment this and it works just fine
    await p2_tcv.sender.replaceTrack(p2_stream_out.getVideoTracks()[0]);
    console.warn("now audio playback starts");
  }, 10000);
}

/**
 * Entry point: defers test initialisation to a later macrotask tick.
 */
function start() {
  const launch = async () => {
    console.log("Init test case");
    await RunTestInit();
  };
  setTimeout(launch, 1);
}

Same example in the js fiddle (needs camera and microphone access):
https://jsfiddle.net/vnztcx5p/5/

Once audio works this will cause an echo.

如果你对这篇内容有疑问,欢迎到本站社区发帖提问 参与讨论,获取更多帮助,或者扫码二维码加入 Web 技术交流群。

扫码二维码加入Web技术交流群

发布评论

需要 登录 才能够评论, 你可以免费 注册 一个本站的账号。

评论(3)

陈甜 2025-02-06 03:32:14

这是一个已知的问题。 有一些背景信息。

简而言之,视频元素期望您同时发送音频和视频数据,并且二者需要同步。但您没有发送任何视频数据,而按照规范,该元素需要先触发 loadedmetadata 和 resize 事件。因此它会无限期地阻塞音频。

that is a known issue. https://bugs.chromium.org/p/chromium/issues/detail?id=813243 and https://bugs.chromium.org/p/chromium/issues/detail?id=403710 have some background information.

In a nutshell the video element expect you to send audio and video data and these need to be synchronized. But you don't send any video data and the element needs to fire a loadedmetadata and resize event because that is what the specification says. Hence it will block audio indefinitely

鲜血染红嫁衣 2025-02-06 03:32:14

您可以启用/禁用音频和视频轨道,因此无需重新协商。请注意,这些轨道必须在协商开始之前添加。您可以这样实现:

mediaStream.getAudioTracks()[0].enabled = false; // or true to enable it.

或者如果要禁用视频:

mediaStream.getVideoTracks()[0].enabled = false; // or true to enable it.

以下是文档

getaudiotracks()

You can enable/disable audio and video tracks, so you dont have to renegotiate. Note that this tracks have to be added before negotiation starts. You can achieve it with:

mediaStream.getAudioTracks()[0].enabled = false; // or true to enable it.

Or if you want to disable video:

mediaStream.getVideoTracks()[0].enabled = false; // or true to enable it.

Here is the documentation

getAudioTracks()

getVideoTracks()

短叹 2025-02-06 03:32:14

我已经让它正常工作了。这看起来更像是 HTMLVideoElement 工作方式的问题,而不是 WebRTC 的问题。

如果我

p1_video_in.srcObject = p1_stream_in;
p2_video_in.srcObject = p2_stream_in;

在将轨道添加到流中之前设置,则可以工作。

完整的示例看起来像:

/**
 * Creates a 320px-wide <video> element with controls and autoplay enabled,
 * appends it to the document body, and returns it.
 * @returns {HTMLVideoElement} the newly attached video element
 */
function createVideoElement() {
  const vid = document.createElement("video");
  vid.width = 320;
  vid.controls = true;
  vid.autoplay = true;
  // Removed unused `root` alias of document.body.
  document.body.appendChild(vid);
  return vid;
}

/**
 * Working version of the test case. Identical to the question's code except
 * that each video element's srcObject is assigned BEFORE any track is added
 * to the incoming stream, which avoids the audio stall described above.
 */
async function RunTestInit() {

  console.log("get media access");
  // Acquire camera + microphone once per simulated peer (both live in this page).
  const p1_stream_out = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  });
  const p2_stream_out = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  });


  console.log("stream setup");
  // Empty MediaStreams that will collect the remote tracks for each peer.
  const p1_stream_in = new MediaStream();
  const p2_stream_in = new MediaStream();

  const p1_video_in = createVideoElement();
  const p2_video_in = createVideoElement();
  // The fix: attach the (still empty) incoming streams to the video elements
  // up front, before any track exists, instead of inside ontrack.
  p1_video_in.srcObject = p1_stream_in;
  p2_video_in.srcObject = p2_stream_in;

  console.log("peer setup");
  const p1 = new RTCPeerConnection();
  const p2 = new RTCPeerConnection();
  // Pre-allocate one audio and one video transceiver so both m-lines are
  // negotiated now; actual tracks are attached later with replaceTrack(),
  // which does not require renegotiation.
  const p1_tca = p1.addTransceiver("audio", {
    direction: "sendrecv"
  });
  const p1_tcv = p1.addTransceiver("video", {
    direction: "sendrecv"
  });


  // Loopback ICE: forward each candidate straight to the other in-page peer.
  p1.onicecandidate = (ev) => {
    p2.addIceCandidate(ev.candidate);
  }
  p2.onicecandidate = (ev) => {
    p1.addIceCandidate(ev.candidate);
  }

  p1.onconnectionstatechange = (ev) => {
    console.log("p1 state: ", p1.connectionState);
  }
  p2.onconnectionstatechange = async (ev) => {
    console.log("p2 state: ", p2.connectionState);
  }

  p1.onnegotiationneeded = () => {
    //triggers once
    console.warn("p1.onnegotiationneeded");
  }

  p2.onnegotiationneeded = () => {
    //should never trigger
    console.warn("p2.onnegotiationneeded");
  }

  // Remote tracks are only added to the already-attached streams here;
  // srcObject is no longer touched inside ontrack.
  p1.ontrack = (ev) => {
    console.log("p1.ontrack", ev);
    p1_stream_in.addTrack(ev.track);
  }
  p2.ontrack = (ev) => {
    console.log("p2.ontrack", ev);
    p2_stream_in.addTrack(ev.track);
  }
  console.log("signaling");
  // Single offer/answer exchange; no further signaling happens afterwards.
  const offer = await p1.createOffer();
  await p1.setLocalDescription(offer);
  await p2.setRemoteDescription(offer);
  // p2's transceivers are created implicitly by setRemoteDescription, in the
  // same order as p1's m-lines: [0] = audio, [1] = video.
  const p2_tca = p2.getTransceivers()[0];
  const p2_tcv = p2.getTransceivers()[1];

  p2_tca.direction = "sendrecv"
  p2_tcv.direction = "sendrecv"

  const answer = await p2.createAnswer();
  await p2.setLocalDescription(answer);
  await p1.setRemoteDescription(answer);
  console.log("signaling done");

  //send audio from p2 to p1 (direction doesn't matter)
  //after this runs nothing will happen and no audio plays
  setTimeout(async () => {
    await p2_tca.sender.replaceTrack(p2_stream_out.getAudioTracks()[0]);
    console.warn("audio playback should start now but nothing happens");
  }, 1000);

  //audio starts playing once this runs
  setTimeout(async () => {
    //uncomment this and it works just fine
    await p2_tcv.sender.replaceTrack(p2_stream_out.getVideoTracks()[0]);
    console.warn("now audio playback starts");
  }, 10000);
}

/**
 * Entry point: defers test initialisation to a later macrotask tick.
 */
function start() {
  const launch = async () => {
    console.log("Init test case");
    await RunTestInit();
  };
  setTimeout(launch, 1);
}

I got this working. It looks like more a problem with how HTMLVideoElement works rather than WebRTC.

If I set

p1_video_in.srcObject = p1_stream_in;
p2_video_in.srcObject = p2_stream_in;

before I add the tracks to the stream it works.

Complete example looks like this:

/**
 * Creates a 320px-wide <video> element with controls and autoplay enabled,
 * appends it to the document body, and returns it.
 * @returns {HTMLVideoElement} the newly attached video element
 */
function createVideoElement() {
  const vid = document.createElement("video");
  vid.width = 320;
  vid.controls = true;
  vid.autoplay = true;
  // Removed unused `root` alias of document.body.
  document.body.appendChild(vid);
  return vid;
}

/**
 * Working version of the test case. Identical to the question's code except
 * that each video element's srcObject is assigned BEFORE any track is added
 * to the incoming stream, which avoids the audio stall described above.
 */
async function RunTestInit() {

  console.log("get media access");
  // Acquire camera + microphone once per simulated peer (both live in this page).
  const p1_stream_out = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  });
  const p2_stream_out = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
  });


  console.log("stream setup");
  // Empty MediaStreams that will collect the remote tracks for each peer.
  const p1_stream_in = new MediaStream();
  const p2_stream_in = new MediaStream();

  const p1_video_in = createVideoElement();
  const p2_video_in = createVideoElement();
  // The fix: attach the (still empty) incoming streams to the video elements
  // up front, before any track exists, instead of inside ontrack.
  p1_video_in.srcObject = p1_stream_in;
  p2_video_in.srcObject = p2_stream_in;

  console.log("peer setup");
  const p1 = new RTCPeerConnection();
  const p2 = new RTCPeerConnection();
  // Pre-allocate one audio and one video transceiver so both m-lines are
  // negotiated now; actual tracks are attached later with replaceTrack(),
  // which does not require renegotiation.
  const p1_tca = p1.addTransceiver("audio", {
    direction: "sendrecv"
  });
  const p1_tcv = p1.addTransceiver("video", {
    direction: "sendrecv"
  });


  // Loopback ICE: forward each candidate straight to the other in-page peer.
  p1.onicecandidate = (ev) => {
    p2.addIceCandidate(ev.candidate);
  }
  p2.onicecandidate = (ev) => {
    p1.addIceCandidate(ev.candidate);
  }

  p1.onconnectionstatechange = (ev) => {
    console.log("p1 state: ", p1.connectionState);
  }
  p2.onconnectionstatechange = async (ev) => {
    console.log("p2 state: ", p2.connectionState);
  }

  p1.onnegotiationneeded = () => {
    //triggers once
    console.warn("p1.onnegotiationneeded");
  }

  p2.onnegotiationneeded = () => {
    //should never trigger
    console.warn("p2.onnegotiationneeded");
  }

  // Remote tracks are only added to the already-attached streams here;
  // srcObject is no longer touched inside ontrack.
  p1.ontrack = (ev) => {
    console.log("p1.ontrack", ev);
    p1_stream_in.addTrack(ev.track);
  }
  p2.ontrack = (ev) => {
    console.log("p2.ontrack", ev);
    p2_stream_in.addTrack(ev.track);
  }
  console.log("signaling");
  // Single offer/answer exchange; no further signaling happens afterwards.
  const offer = await p1.createOffer();
  await p1.setLocalDescription(offer);
  await p2.setRemoteDescription(offer);
  // p2's transceivers are created implicitly by setRemoteDescription, in the
  // same order as p1's m-lines: [0] = audio, [1] = video.
  const p2_tca = p2.getTransceivers()[0];
  const p2_tcv = p2.getTransceivers()[1];

  p2_tca.direction = "sendrecv"
  p2_tcv.direction = "sendrecv"

  const answer = await p2.createAnswer();
  await p2.setLocalDescription(answer);
  await p1.setRemoteDescription(answer);
  console.log("signaling done");

  //send audio from p2 to p1 (direction doesn't matter)
  //after this runs nothing will happen and no audio plays
  setTimeout(async () => {
    await p2_tca.sender.replaceTrack(p2_stream_out.getAudioTracks()[0]);
    console.warn("audio playback should start now but nothing happens");
  }, 1000);

  //audio starts playing once this runs
  setTimeout(async () => {
    //uncomment this and it works just fine
    await p2_tcv.sender.replaceTrack(p2_stream_out.getVideoTracks()[0]);
    console.warn("now audio playback starts");
  }, 10000);
}

/**
 * Entry point: defers test initialisation to a later macrotask tick.
 */
function start() {
  const launch = async () => {
    console.log("Init test case");
    await RunTestInit();
  };
  setTimeout(launch, 1);
}
~没有更多了~
我们使用 Cookies 和其他技术来定制您的体验包括您的登录状态等。通过阅读我们的 隐私政策 了解更多相关信息。 单击 接受 或继续使用网站,即表示您同意使用 Cookies 和您的相关数据。
原文