如何播放 blob url 中的视频?
我是前端新手,我正在尝试使用 Blazor 制作一个 Zoom 克隆。现在我可以打开摄像头、获取视频流并通过 SignalR 发送,但我找不到在客户端播放视频的方法。我对 JS 不太了解,所以我的代码取自本站的下面这些问题:
从 navigator.mediaDevices.getUserMedia() 获取字节流?
如何接收连续的视频块作为blob 数组并在 Websocket 中动态设置为视频标签
JS 代码
// Live MediaStream obtained from getUserMedia (sender side).
let stream = null;
// MediaRecorder wrapping `stream`; emits a data chunk every 100 ms.
let recorder = null;
// Most recent chunk delivered by the recorder's dataavailable event.
let videoData = null;
// DotNetObjectReference to the Blazor component (set via dotNetHelper).
let videoTeste = null;
// Chunks accumulated since the last successful sendData() flush.
let chunks = [];
// NOTE(review): never read or written in this file — apparently unused.
let wholeVideo = [];
// NOTE(review): apparently unused; `recorder` is used instead.
let mediaRecorder;
// Opens the user's camera, previews the live stream in the <video> element
// named by options.videoID, and records it with MediaRecorder in 100 ms
// chunks. Every chunk is queued in `chunks` and forwarded via sendData().
async function onStart(options) {
    const video = document.getElementById(options.videoID);
    if (!navigator.mediaDevices.getUserMedia) {
        return; // getUserMedia unsupported in this browser
    }
    try {
        stream = await navigator.mediaDevices.getUserMedia({ video: true });
        video.srcObject = stream;
        video.play();
        recorder = new MediaRecorder(stream);
        recorder.ondataavailable = (event) => {
            videoData = event.data;
            chunks.push(videoData);
            sendData();
        };
        // Request a dataavailable event every 100 ms.
        recorder.start(100);
    } catch (err) {
        console.log("An error occurred: " + err);
    }
}
// Packages the queued recorder chunks into a single Blob, converts it to a
// base64 data URL, and pushes it to .NET (VideoTesteBase.SendVideoData).
async function sendData() {
    // NOTE(review): MediaRecorder commonly produces video/webm by default;
    // declaring 'video/mp4' here may not match the actual payload — confirm
    // against recorder.mimeType.
    const superBuffer = new Blob(chunks, {
        type: 'video/mp4'
    });
    let base64data = await blobToBase64(superBuffer);
    if (videoTeste) {
        chunks = [];
        videoTeste.invokeMethodAsync("SendVideoData", base64data);
        // Fixed: URL.revokeObjectURL(base64data) was called here, but
        // base64data is a data: URL from FileReader, not an object URL from
        // URL.createObjectURL(), so the call was meaningless and is removed.
    }
}
// Converts a Blob to a base64 data URL ("data:<mime>;base64,...").
// Resolves with the data URL string; rejects if the read fails.
async function blobToBase64(blob) {
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onloadend = () => resolve(reader.result);
        // Fixed: read errors were previously ignored, leaving the promise
        // pending forever; also removed a useless `return reader;` — the
        // return value of a Promise executor is discarded.
        reader.onerror = () => reject(reader.error);
        reader.readAsDataURL(blob);
    });
}
// Plays a received stream payload in the #videoplayer element. `source` is
// the base64 data URL produced by sendData() on the sending client.
async function playVideo(source) {
    try {
        let video = document.getElementById("videoplayer");
        video.srcObject = null;
        let currentTime = video.currentTime;
        let file = await fetch(source).then(r => r.blob());
        // Fixed: the Blob object was assigned directly to video.src, which
        // stringifies it to the useless "[object Blob]". A media element
        // needs a URL, so wrap the Blob with URL.createObjectURL().
        video.src = URL.createObjectURL(file);
        video.currentTime = currentTime;
        video.play();
    }
    catch (err) {
        console.log("An error occurred: " + err);
    }
}
// Public JS-interop surface invoked from the Blazor component.
window.OnClassWebCam = {
    // Begins camera capture/recording for the element in options.videoID.
    start: async (options) => onStart(options),
    // Renders an incoming stream payload in the #videoplayer element.
    videoPlayer: async (source) => playVideo(source),
    // Stores the DotNetObjectReference used for [JSInvokable] callbacks.
    dotNetHelper: async (dotNetHelper) => {
        videoTeste = dotNetHelper;
    }
};
C# 前端代码:
using Microsoft.AspNetCore.Components;
using Microsoft.AspNetCore.SignalR.Client;
using Microsoft.JSInterop;
using System.Text.Json;
namespace OnClassBlazor.Pages
{
// Code-behind for the /videochat page: captures webcam data through JS
// interop and relays it to every client over the VideoHub SignalR hub.
public class VideoTesteBase : ComponentBase, IAsyncDisposable
{
    [Inject]
    protected IJSRuntime JSRuntime { get; set; }

    private HubConnection? hubConnection;

    // Fixed: previously a local in OnInitializedAsync that was never
    // disposed, leaking the managed reference for the component's lifetime.
    private DotNetObjectReference<VideoTesteBase>? dotNetReference;

    protected string DataAtual = DateTime.Now.ToString();
    protected string SourceVideo = string.Empty;

    // Options handed to the JS side; the IDs must match the page markup.
    protected WebCamOptions options = new WebCamOptions()
    {
        CanvasID = "canvas",
        VideoID = "video"
    };

    public bool IsConnected =>
        hubConnection?.State == HubConnectionState.Connected;

    /// <summary>Starts camera capture on the JS side (OnClassWebCam.start).</summary>
    public async Task Start()
    {
        await JSRuntime.InvokeVoidAsync("OnClassWebCam.start", options);
    }

    protected override async Task OnInitializedAsync()
    {
        // NOTE(review): JS interop inside OnInitializedAsync throws during
        // Blazor Server prerendering; if prerendering is enabled, move this
        // to OnAfterRenderAsync(firstRender).
        dotNetReference = DotNetObjectReference.Create(this);
        await JSRuntime.InvokeVoidAsync("OnClassWebCam.dotNetHelper", dotNetReference);

        hubConnection = new HubConnectionBuilder()
            .WithUrl(@"http://localhost:5000/videohub")
            .ConfigureLogging(o =>
            {
                o.SetMinimumLevel(LogLevel.Trace);
            })
            .Build();

        // Fixed: the handler previously fired InvokeVoidAsync without
        // awaiting it, so interop failures were silently dropped; an async
        // handler lets SignalR observe the returned Task.
        hubConnection.On<string>("ReceiveStream", async (source) =>
        {
            await JSRuntime.InvokeVoidAsync("OnClassWebCam.videoPlayer", source);
        });

        await hubConnection.StartAsync();
    }

    /// <summary>
    /// Invoked from JS with a base64 data URL of the latest recorded chunk;
    /// relays it to the hub when the connection is up.
    /// </summary>
    [JSInvokable]
    public async Task SendVideoData(string stream)
    {
        Console.WriteLine($"stream size {stream.Length}");
        if (IsConnected)
        {
            await hubConnection.SendAsync("UploadStreamBytes", stream);
        }
    }

    // Fixed: the class never declared IAsyncDisposable, so the framework
    // never called this method and the hub connection leaked.
    public async ValueTask DisposeAsync()
    {
        dotNetReference?.Dispose();
        if (hubConnection is not null)
        {
            await hubConnection.DisposeAsync();
        }
    }
}
/// <summary>
/// Options passed to the JS webcam interop (serialized camelCase by the
/// default Blazor JS-interop serializer, hence options.videoID on the JS side).
/// </summary>
public class WebCamOptions
{
    // Capture width in pixels.
    public int Width { get; set; } = 960;
    // Capture height in pixels.
    public int Height { get; set; } = 540;
    // DOM id of the <video> element used for the local preview.
    public string VideoID { get; set; }
    // DOM id of the companion <canvas> element (not used by the JS shown here).
    public string CanvasID { get; set; }
    // Optional CSS filter for the feed; null for none — TODO confirm usage.
    public string Filter { get; set; } = null;
}
}
C# Hub 代码:
using Microsoft.AspNetCore.SignalR;
using System.Text.Json;
using System.Threading.Channels;
namespace OnClass.API.Hubs
{
/// <summary>SignalR hub that fans video payloads out to every client.</summary>
public class VideoHub : Hub
{
    /// <summary>Broadcasts an arbitrary payload on "ReceiveMessage".</summary>
    public Task SendStream(object stream) =>
        Clients.All.SendAsync("ReceiveMessage", stream);

    /// <summary>Logs the chunk size and rebroadcasts it on "ReceiveStream".</summary>
    public async Task UploadStreamBytes(string stream)
    {
        Console.WriteLine($"UploadStreamBytes size: {stream.Length}");
        await Clients.All.SendAsync("ReceiveStream", stream);
    }
}
}
组件代码:
@page "/videochat"
@inherits VideoTesteBase
<h3>VideoTeste</h3>
@* Local camera preview; the id must match WebCamOptions.VideoID ("video"). *@
<div id="container">
<video id="@options.VideoID"
autoplay="true" muted="muted"
width="@options.Width"
height="@options.Height">
</video>
@* Start is enabled only once the SignalR connection is established. *@
<button id="start" @onclick="Start" disabled="@(!IsConnected)">Start Video</button>
</div>
@* Remote playback target; OnClassWebCam.videoPlayer writes to #videoplayer. *@
<div id="videodastream">
<video id="videoplayer"
autoplay="true" muted="muted"
width="100"
height="100">
</video>
<button id="aqui" >Video</button>
</div>
I'm new to front-end development and I am trying to make a Zoom clone using Blazor. Right now I can open the camera, get the stream and send it with SignalR, but I can't find a way to play the video on the clients. I don't know much JS, so I got the code from these questions on this very site:
Get a stream of bytes from navigator.mediaDevices.getUserMedia()?
The JS code
// Live MediaStream obtained from getUserMedia (sender side).
let stream = null;
// MediaRecorder wrapping `stream`; emits a data chunk every 100 ms.
let recorder = null;
// Most recent chunk delivered by the recorder's dataavailable event.
let videoData = null;
// DotNetObjectReference to the Blazor component (set via dotNetHelper).
let videoTeste = null;
// Chunks accumulated since the last successful sendData() flush.
let chunks = [];
// NOTE(review): never read or written in this file — apparently unused.
let wholeVideo = [];
// NOTE(review): apparently unused; `recorder` is used instead.
let mediaRecorder;
// Opens the user's camera, previews the live stream in the <video> element
// named by options.videoID, and records it with MediaRecorder in 100 ms
// chunks. Every chunk is queued in `chunks` and forwarded via sendData().
async function onStart(options) {
    const video = document.getElementById(options.videoID);
    if (!navigator.mediaDevices.getUserMedia) {
        return; // getUserMedia unsupported in this browser
    }
    try {
        stream = await navigator.mediaDevices.getUserMedia({ video: true });
        video.srcObject = stream;
        video.play();
        recorder = new MediaRecorder(stream);
        recorder.ondataavailable = (event) => {
            videoData = event.data;
            chunks.push(videoData);
            sendData();
        };
        // Request a dataavailable event every 100 ms.
        recorder.start(100);
    } catch (err) {
        console.log("An error occurred: " + err);
    }
}
// Packages the queued recorder chunks into a single Blob, converts it to a
// base64 data URL, and pushes it to .NET (VideoTesteBase.SendVideoData).
async function sendData() {
    // NOTE(review): MediaRecorder commonly produces video/webm by default;
    // declaring 'video/mp4' here may not match the actual payload — confirm
    // against recorder.mimeType.
    const superBuffer = new Blob(chunks, {
        type: 'video/mp4'
    });
    let base64data = await blobToBase64(superBuffer);
    if (videoTeste) {
        chunks = [];
        videoTeste.invokeMethodAsync("SendVideoData", base64data);
        // Fixed: URL.revokeObjectURL(base64data) was called here, but
        // base64data is a data: URL from FileReader, not an object URL from
        // URL.createObjectURL(), so the call was meaningless and is removed.
    }
}
// Converts a Blob to a base64 data URL ("data:<mime>;base64,...").
// Resolves with the data URL string; rejects if the read fails.
async function blobToBase64(blob) {
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onloadend = () => resolve(reader.result);
        // Fixed: read errors were previously ignored, leaving the promise
        // pending forever; also removed a useless `return reader;` — the
        // return value of a Promise executor is discarded.
        reader.onerror = () => reject(reader.error);
        reader.readAsDataURL(blob);
    });
}
// Plays a received stream payload in the #videoplayer element. `source` is
// the base64 data URL produced by sendData() on the sending client.
async function playVideo(source) {
    try {
        let video = document.getElementById("videoplayer");
        video.srcObject = null;
        let currentTime = video.currentTime;
        let file = await fetch(source).then(r => r.blob());
        // Fixed: the Blob object was assigned directly to video.src, which
        // stringifies it to the useless "[object Blob]". A media element
        // needs a URL, so wrap the Blob with URL.createObjectURL().
        video.src = URL.createObjectURL(file);
        video.currentTime = currentTime;
        video.play();
    }
    catch (err) {
        console.log("An error occurred: " + err);
    }
}
// Public JS-interop surface invoked from the Blazor component.
window.OnClassWebCam = {
    // Begins camera capture/recording for the element in options.videoID.
    start: async (options) => onStart(options),
    // Renders an incoming stream payload in the #videoplayer element.
    videoPlayer: async (source) => playVideo(source),
    // Stores the DotNetObjectReference used for [JSInvokable] callbacks.
    dotNetHelper: async (dotNetHelper) => {
        videoTeste = dotNetHelper;
    }
};
The C# Front Code:
using Microsoft.AspNetCore.Components;
using Microsoft.AspNetCore.SignalR.Client;
using Microsoft.JSInterop;
using System.Text.Json;
namespace OnClassBlazor.Pages
{
// Code-behind for the /videochat page: captures webcam data through JS
// interop and relays it to every client over the VideoHub SignalR hub.
public class VideoTesteBase : ComponentBase, IAsyncDisposable
{
    [Inject]
    protected IJSRuntime JSRuntime { get; set; }

    private HubConnection? hubConnection;

    // Fixed: previously a local in OnInitializedAsync that was never
    // disposed, leaking the managed reference for the component's lifetime.
    private DotNetObjectReference<VideoTesteBase>? dotNetReference;

    protected string DataAtual = DateTime.Now.ToString();
    protected string SourceVideo = string.Empty;

    // Options handed to the JS side; the IDs must match the page markup.
    protected WebCamOptions options = new WebCamOptions()
    {
        CanvasID = "canvas",
        VideoID = "video"
    };

    public bool IsConnected =>
        hubConnection?.State == HubConnectionState.Connected;

    /// <summary>Starts camera capture on the JS side (OnClassWebCam.start).</summary>
    public async Task Start()
    {
        await JSRuntime.InvokeVoidAsync("OnClassWebCam.start", options);
    }

    protected override async Task OnInitializedAsync()
    {
        // NOTE(review): JS interop inside OnInitializedAsync throws during
        // Blazor Server prerendering; if prerendering is enabled, move this
        // to OnAfterRenderAsync(firstRender).
        dotNetReference = DotNetObjectReference.Create(this);
        await JSRuntime.InvokeVoidAsync("OnClassWebCam.dotNetHelper", dotNetReference);

        hubConnection = new HubConnectionBuilder()
            .WithUrl(@"http://localhost:5000/videohub")
            .ConfigureLogging(o =>
            {
                o.SetMinimumLevel(LogLevel.Trace);
            })
            .Build();

        // Fixed: the handler previously fired InvokeVoidAsync without
        // awaiting it, so interop failures were silently dropped; an async
        // handler lets SignalR observe the returned Task.
        hubConnection.On<string>("ReceiveStream", async (source) =>
        {
            await JSRuntime.InvokeVoidAsync("OnClassWebCam.videoPlayer", source);
        });

        await hubConnection.StartAsync();
    }

    /// <summary>
    /// Invoked from JS with a base64 data URL of the latest recorded chunk;
    /// relays it to the hub when the connection is up.
    /// </summary>
    [JSInvokable]
    public async Task SendVideoData(string stream)
    {
        // Fixed: the interpolated string was garbled ("quot;stream size...")
        // — restored the $"..." interpolation so the line compiles.
        Console.WriteLine($"stream size {stream.Length}");
        if (IsConnected)
        {
            await hubConnection.SendAsync("UploadStreamBytes", stream);
        }
    }

    // Fixed: the class never declared IAsyncDisposable, so the framework
    // never called this method and the hub connection leaked.
    public async ValueTask DisposeAsync()
    {
        dotNetReference?.Dispose();
        if (hubConnection is not null)
        {
            await hubConnection.DisposeAsync();
        }
    }
}
/// <summary>
/// Options passed to the JS webcam interop (serialized camelCase by the
/// default Blazor JS-interop serializer, hence options.videoID on the JS side).
/// </summary>
public class WebCamOptions
{
    // Capture width in pixels.
    public int Width { get; set; } = 960;
    // Capture height in pixels.
    public int Height { get; set; } = 540;
    // DOM id of the <video> element used for the local preview.
    public string VideoID { get; set; }
    // DOM id of the companion <canvas> element (not used by the JS shown here).
    public string CanvasID { get; set; }
    // Optional CSS filter for the feed; null for none — TODO confirm usage.
    public string Filter { get; set; } = null;
}
}
The C# Hub code:
using Microsoft.AspNetCore.SignalR;
using System.Text.Json;
using System.Threading.Channels;
namespace OnClass.API.Hubs
{
/// <summary>SignalR hub that fans video payloads out to every client.</summary>
public class VideoHub : Hub
{
    /// <summary>Broadcasts an arbitrary payload on "ReceiveMessage".</summary>
    public async Task SendStream(object stream)
    {
        await Clients.All.SendAsync("ReceiveMessage", stream);
    }

    /// <summary>Logs the chunk size and rebroadcasts it on "ReceiveStream".</summary>
    public async Task UploadStreamBytes(string stream)
    {
        // Fixed: the interpolated string was garbled ("quot;UploadStream...")
        // — restored the $"..." interpolation so the line compiles.
        Console.WriteLine($"UploadStreamBytes size: {stream.Length}");
        await Clients.All.SendAsync("ReceiveStream", stream);
    }
}
}
The component code:
@page "/videochat"
@inherits VideoTesteBase
<h3>VideoTeste</h3>
@* Local camera preview; the id must match WebCamOptions.VideoID ("video"). *@
<div id="container">
<video id="@options.VideoID"
autoplay="true" muted="muted"
width="@options.Width"
height="@options.Height">
</video>
@* Start is enabled only once the SignalR connection is established. *@
<button id="start" @onclick="Start" disabled="@(!IsConnected)">Start Video</button>
</div>
@* Remote playback target; OnClassWebCam.videoPlayer writes to #videoplayer. *@
<div id="videodastream">
<video id="videoplayer"
autoplay="true" muted="muted"
width="100"
height="100">
</video>
<button id="aqui" >Video</button>
</div>
如果你对这篇内容有疑问,欢迎到本站社区发帖提问 参与讨论,获取更多帮助,或者扫码二维码加入 Web 技术交流群。

绑定邮箱获取回复消息
由于您还没有绑定你的真实邮箱,如果其他用户或者作者回复了您的评论,将不能在第一时间通知您!
发布评论