How to Play WebRTC Streams on Android with uniapp: A Detailed Guide
1. Playing an RTSP-protocol stream
If the WebRTC stream is returned over the RTSP protocol, with a stream URL such as rtsp://127.0.0.1:5115/session.mpg, uniapp's <video> component can play it directly when compiled to Android, but there is usually a delay of 2-3 seconds.
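For reference, a minimal sketch of what this looks like in a page (the id and styling are arbitrary; the stream URL is the example address above):

<template>
  <video
    id="rtspPlayer"
    src="rtsp://127.0.0.1:5115/session.mpg"
    autoplay
    :controls="false"
    style="width: 100%; height: 100%;"
  ></video>
</template>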
2. Playing a WebRTC-protocol stream
If the WebRTC stream is returned over the WebRTC protocol, with a stream URL such as webrtc://127.0.0.1:1988/live/livestream, we need to negotiate the SDP, connect to the streaming server, and set up an audio/video channel to play the stream. The latency is usually around 500 ms.
Wrapping a WebrtcVideo component
<template>
<video id="rtc_media_player" width="100%" height="100%" autoplay playsinline></video>
</template>
<!-- 因?yàn)槲覀兪褂玫?js 庫(kù),所以需要使用 uniapp 的 renderjs -->
<script module="webrtcVideo" lang="renderjs">
import $ from "./jquery-1.10.2.min.js";
import {prepareUrl} from "./utils.js";
export default {
data() {
return {
// RTCPeerConnection instance
peerConnection: null,
// WebRTC stream URL to play
playUrl: 'webrtc://127.0.0.1:1988/live/livestream'
}
},
methods: {
createPeerConnection() {
const that = this
// Create the WebRTC connection
that.peerConnection = new RTCPeerConnection(null);
// Add receive-only audio and video transceivers
that.peerConnection.addTransceiver("audio", { direction: "recvonly" });
that.peerConnection.addTransceiver("video", { direction: "recvonly" });
// When the remote stream arrives, attach it to the player
that.peerConnection.ontrack = (event) => {
const remoteVideo = document.getElementById("rtc_media_player");
if (remoteVideo.srcObject !== event.streams[0]) {
remoteVideo.srcObject = event.streams[0];
}
};
},
async makeCall() {
const that = this
const url = this.playUrl
this.createPeerConnection()
// Build the server API URL, e.g. http://192.168.0.1:1988/rtc/v1/play/
const conf = prepareUrl(url);
// Generate the offer SDP
const offer = await this.peerConnection.createOffer();
await this.peerConnection.setLocalDescription(offer);
var session = await new Promise(function (resolve, reject) {
$.ajax({
type: "POST",
url: conf.apiUrl,
data: offer.sdp,
contentType: "text/plain",
dataType: "json",
crossDomain: true,
})
.done(function (data) {
// The server returns the answer SDP (a non-zero code means an error)
if (data.code) {
reject(data);
return;
}
resolve(data);
})
.fail(function (reason) {
reject(reason);
});
});
// Set the remote description; if SDP negotiation succeeds, the channel is established
await this.peerConnection.setRemoteDescription(
new RTCSessionDescription({ type: "answer", sdp: session.sdp })
);
session.simulator = conf.schema + '//' + conf.urlObject.server + ':' + conf.port + '/rtc/v1/nack/'
return session;
}
},
mounted() {
  this.makeCall().then((res) => {
    // WebRTC channel established successfully
  }).catch((error) => {
    // WebRTC channel setup failed (a try/catch around .then() would not catch an async rejection)
    console.log(error)
  })
}
}
</script>
utils.js
const defaultPath = "/rtc/v1/play/";
export const prepareUrl = webrtcUrl => {
var urlObject = parseUrl(webrtcUrl);
var schema = "http:";
var port = urlObject.port || 1985;
if (schema === "https:") {
port = urlObject.port || 443;
}
// @see https://github.com/rtcdn/rtcdn-draft
var api = urlObject.user_query.play || defaultPath;
if (api.lastIndexOf("/") !== api.length - 1) {
api += "/";
}
var apiUrl = schema + "//" + urlObject.server + ":" + port + api;
for (var key in urlObject.user_query) {
if (key !== "api" && key !== "play") {
apiUrl += "&" + key + "=" + urlObject.user_query[key];
}
}
// Replace /rtc/v1/play/&k=v to /rtc/v1/play/?k=v
apiUrl = apiUrl.replace(api + "&", api + "?");
var streamUrl = urlObject.url;
return {
apiUrl: apiUrl,
streamUrl: streamUrl,
schema: schema,
urlObject: urlObject,
port: port,
tid: Number(parseInt(new Date().getTime() * Math.random() * 100))
.toString(16)
.substr(0, 7)
};
};
export const parseUrl = url => {
// @see: http://stackoverflow.com/questions/10469575/how-to-use-location-object-to-parse-url-without-redirecting-the-page-in-javascri
var a = document.createElement("a");
a.href = url
.replace("rtmp://", "http://")
.replace("webrtc://", "http://")
.replace("rtc://", "http://");
var vhost = a.hostname;
var app = a.pathname.substr(1, a.pathname.lastIndexOf("/") - 1);
var stream = a.pathname.substr(a.pathname.lastIndexOf("/") + 1);
// parse the vhost in the params of app, that srs supports.
app = app.replace("...vhost...", "?vhost=");
if (app.indexOf("?") >= 0) {
var params = app.substr(app.indexOf("?"));
app = app.substr(0, app.indexOf("?"));
if (params.indexOf("vhost=") > 0) {
vhost = params.substr(params.indexOf("vhost=") + "vhost=".length);
if (vhost.indexOf("&") > 0) {
vhost = vhost.substr(0, vhost.indexOf("&"));
}
}
}
// when vhost equals to server, and server is ip,
// the vhost is __defaultVhost__
if (a.hostname === vhost) {
var re = /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/;
if (re.test(a.hostname)) {
vhost = "__defaultVhost__";
}
}
// parse the schema
var schema = "rtmp";
if (url.indexOf("://") > 0) {
schema = url.substr(0, url.indexOf("://"));
}
var port = a.port;
if (!port) {
if (schema === "http") {
port = 80;
} else if (schema === "https") {
port = 443;
} else if (schema === "rtmp") {
port = 1935;
}
}
var ret = {
url: url,
schema: schema,
server: a.hostname,
port: port,
vhost: vhost,
app: app,
stream: stream
};
fill_query(a.search, ret);
// For webrtc API, we use 443 if page is https, or schema specified it.
if (!ret.port) {
if (schema === "webrtc" || schema === "rtc") {
if (ret.user_query.schema === "https") {
ret.port = 443;
} else if (window.location.href.indexOf("https://") === 0) {
ret.port = 443;
} else {
// For WebRTC, SRS use 1985 as default API port.
ret.port = 1985;
}
}
}
return ret;
};
export const fill_query = (query_string, obj) => {
// pure user query object.
obj.user_query = {};
if (query_string.length === 0) {
return;
}
// split again for angularjs.
if (query_string.indexOf("?") >= 0) {
query_string = query_string.split("?")[1];
}
var queries = query_string.split("&");
for (var i = 0; i < queries.length; i++) {
var elem = queries[i];
var query = elem.split("=");
obj[query[0]] = query[1];
obj.user_query[query[0]] = query[1];
}
// alias domain for vhost.
if (obj.domain) {
obj.vhost = obj.domain;
}
};
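For reference, a sketch of what prepareUrl returns for the example address above (the values were traced from the code; the tid field is a random hex string and is omitted here):

import { prepareUrl } from "./utils.js";

const conf = prepareUrl("webrtc://127.0.0.1:1988/live/livestream");
// conf.apiUrl    -> "http://127.0.0.1:1988/rtc/v1/play/"   (the play API the component POSTs the offer SDP to)
// conf.streamUrl -> "webrtc://127.0.0.1:1988/live/livestream"
// conf.schema    -> "http:"
// conf.port      -> "1988"   (falls back to 1985 when the URL has no explicit port)
// conf.urlObject -> { server: "127.0.0.1", app: "live", stream: "livestream", vhost: "__defaultVhost__", ... }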
Using it in a page
<template>
  <VideoWebrtc />
</template>

<script setup>
import VideoWebrtc from "@/components/videoWebrtc";
</script>
Things to note:
1. One of the key markers in SDP negotiation is the media description line: m=xxx <type> <code>; an illustrative excerpt is shown after this paragraph.
A complete media description runs from one m=xxx <type> <code> line up to the next m=xxx <type> <code> line. Taking video as an example, the media description lists the video codecs the current device is able to play, commonly VP8/VP9/H264, etc.
Comparing the codes that follow m=video, you will find they cover all of the codes that appear after a=rtpmap. The string after each a=rtpmap code identifies the stream format, but the mapping between code and format is not fixed: on device A, a=rtpmap:106 H264/90000 may stand for H264, while on device B it is a=rtpmap:100 H264/90000 that stands for H264.
Therefore, to determine which stream formats a device can play, we have to look at the string after the a=rtpmap code.
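For illustration, a hypothetical video media description might look like the following (the payload numbers 96/98/102 and the codec list are invented for this example; real values vary between devices and browsers):

m=video 9 UDP/TLS/RTP/SAVPF 96 98 102
a=rtpmap:96 VP8/90000
a=rtpmap:98 VP9/90000
a=rtpmap:102 H264/90000
a=fmtp:102 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f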
Some of the criteria for the negotiation to succeed:
- the number of m=xxx sections in the offer SDP must match the number of m=xxx sections in the answer SDP;
- the order of the m=xxx sections in the offer SDP must match the order in the answer SDP; for example, both put m=audio first and m=video second, or both the other way around;
- the <code> values that follow m=audio in the answer SDP must be contained in the <code> values that follow m=audio in the offer SDP.

The m=xxx sections of the offer SDP are created by addTransceiver: when the first argument is "audio" it produces m=audio, when it is "video" it produces m=video, and the order of the calls determines the order of the m=xxx sections:

that.peerConnection.addTransceiver("audio", { direction: "recvonly" });
that.peerConnection.addTransceiver("video", { direction: "recvonly" });

- The SDP also contains an a=mid:xxxxxx attribute. In browsers its value may be audio/video, while on Android devices it is 0/1; the server side must make sure its answer matches the offer SDP.
- About the audio/video transceiver API: the code above uses addTransceiver, but some Android devices report that this API does not exist. We can replace it with getUserMedia + addTrack:
data() {
return {
......
localStream: null,
......
}
},
methods: {
createPeerConnection() {
const that = this
// Create the WebRTC connection
that.peerConnection = new RTCPeerConnection(null);
that.localStream.getTracks().forEach((track) => {
that.peerConnection.addTrack(track, that.localStream);
});
// When the remote stream arrives, attach it to the player
that.peerConnection.ontrack = (event) => {
......
};
},
async makeCall() {
const that = this
that.localStream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true,
});
const url = this.playUrl
......
......
}
}
Note that navigator.mediaDevices.getUserMedia captures the device's camera and microphone streams, so the device must have a camera and microphone and the corresponding permissions must be granted; otherwise the API call will fail.
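A minimal sketch of guarding the call (the helper name getLocalStreamSafely and the fallback behaviour are assumptions; adapt them to your app):

async function getLocalStreamSafely() {
  // getUserMedia is unavailable on insecure origins and in some older WebViews
  if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
    throw new Error("getUserMedia is not supported in this WebView");
  }
  try {
    return await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
  } catch (err) {
    // NotAllowedError: permission denied; NotFoundError: no camera/microphone present
    console.error("getUserMedia failed:", err.name, err.message);
    throw err;
  }
}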
3. Real-time audio/video communication
In this kind of p2p streaming scenario, we usually need a WebSocket connection to a signaling server, and then play the local and remote streams at the same time.
<template>
<div>Local Video</div>
<video id="localVideo" autoplay playsinline></video>
<div>Remote Video</div>
<video id="remoteVideo" autoplay playsinline></video>
</template>
<script module="webrtcVideo" lang="renderjs">
import $ from "./jquery-1.10.2.min.js";
export default {
data() {
return {
signalingServerUrl: "ws://127.0.0.1:8085",
iceServersUrl: 'stun:stun.l.google.com:19302',
localStream: null,
peerConnection: null,
ws: null
}
},
methods: {
async startLocalStream(){
try {
this.localStream = await navigator.mediaDevices.getUserMedia({
video: true,
audio: true,
});
document.getElementById("localVideo").srcObject = this.localStream;
}catch (err) {
console.error("Error accessing media devices.", err);
}
},
createPeerConnection() {
const configuration = { iceServers: [{
urls: this.iceServersUrl
}]};
this.peerConnection = new RTCPeerConnection(configuration);
this.localStream.getTracks().forEach((track) => {
this.peerConnection.addTrack(track, this.localStream);
});
this.peerConnection.onicecandidate = (event) => {
if (event.candidate) {
this.ws.send(
JSON.stringify({
type: "candidate",
candidate: event.candidate,
})
);
}
};
this.peerConnection.ontrack = (event) => {
const remoteVideo = document.getElementById("remoteVideo");
if (remoteVideo.srcObject !== event.streams[0]) {
remoteVideo.srcObject = event.streams[0];
}
};
},
async makeCall() {
this.createPeerConnection();
const offer = await this.peerConnection.createOffer();
await this.peerConnection.setLocalDescription(offer);
this.ws.send(JSON.stringify(offer));
}
},
mounted() {
  // Open the signaling connection first; capture the local stream and send the offer once it is open
  this.ws = new WebSocket(this.signalingServerUrl);
  this.ws.onopen = async () => {
    console.log("Connected to the signaling server");
    await this.startLocalStream();
    this.makeCall();
  };
  this.ws.onmessage = async (message) => {
    const data = JSON.parse(message.data);
    if (data.type === "offer") {
      if (!this.peerConnection) this.createPeerConnection();
      await this.peerConnection.setRemoteDescription(
        new RTCSessionDescription(data)
      );
      const answer = await this.peerConnection.createAnswer();
      await this.peerConnection.setLocalDescription(answer);
      this.ws.send(JSON.stringify(this.peerConnection.localDescription));
    } else if (data.type === "answer") {
      if (!this.peerConnection) this.createPeerConnection();
      await this.peerConnection.setRemoteDescription(
        new RTCSessionDescription(data)
      );
    } else if (data.type === "candidate") {
      if (this.peerConnection) {
        try {
          await this.peerConnection.addIceCandidate(
            new RTCIceCandidate(data.candidate)
          );
        } catch (e) {
          console.error("Error adding received ICE candidate", e);
        }
      }
    }
  }
}
}
</script>
Compared with playing a WebRTC-protocol stream, the p2p version uses WebSocket instead of ajax to send and receive the SDP and adds playback of the local stream; the rest of the code is the same as the protocol-stream version.
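The signaling server itself is not shown above. For reference, a minimal sketch of a relay server built on the Node.js ws package (port 8085 matches signalingServerUrl above; broadcasting every message to all other clients is an assumption that only suits a two-peer demo):

// signaling-server.js  (npm install ws, then: node signaling-server.js)
const WebSocket = require("ws");

const wss = new WebSocket.Server({ port: 8085 });

wss.on("connection", (socket) => {
  console.log("client connected, total:", wss.clients.size);

  socket.on("message", (message) => {
    // Relay every offer/answer/candidate message to the other connected peers
    for (const client of wss.clients) {
      if (client !== socket && client.readyState === WebSocket.OPEN) {
        client.send(message.toString());
      }
    }
  });

  socket.on("close", () => console.log("client disconnected"));
});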
Summary
This concludes this article on playing WebRTC streams on Android with uniapp. For more related content, please search 腳本之家's previous articles, and we hope you will continue to support 腳本之家!