/**
 * VideoRTC v1.6.0 - Video player for go2rtc streaming application.
 *
 * Uses modern web technologies that are supported in almost any browser except Apple Safari.
 *
 * Supports:
 * - ECMAScript 2017 (ES8) = ES6 + async
 * - RTCPeerConnection for Safari iOS 11.0+
 * - IntersectionObserver for Safari iOS 12.2+
 * - ManagedMediaSource for Safari 17+
 *
 * Doesn't support:
 * - MediaSource for Safari iOS
 * - Customized built-in elements (extends HTMLVideoElement), because Safari doesn't support them
 * - Autoplay for WebRTC in Safari
 */

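/**
 * Example usage - a minimal sketch, not part of the original file. The tag name and the
 * stream URL below are assumptions; register the element under any name you like:
 *
 *     import { VideoRTC } from './video-rtc.js';
 *     customElements.define('video-rtc', VideoRTC);
 *
 *     const player = document.createElement('video-rtc');
 *     player.mode = 'webrtc,mse';            // optional, see the [config] fields below
 *     player.src = '/api/ws?src=camera1';    // relative paths are resolved against location.origin
 *     document.body.appendChild(player);     // connectedCallback() starts the connection
 */
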
export class VideoRTC extends HTMLElement {
    constructor() {
        super();

        this.DISCONNECT_TIMEOUT = 5000;
        this.RECONNECT_TIMEOUT = 30000;

        this.CODECS = [
            'avc1.640029',      // H.264 high 4.1 (Chromecast 1st and 2nd Gen)
            'avc1.64002A',      // H.264 high 4.2 (Chromecast 3rd Gen)
            'avc1.640033',      // H.264 high 5.1 (Chromecast with Google TV)
            'hvc1.1.6.L153.B0', // H.265 main 5.1 (Chromecast Ultra)
            'mp4a.40.2',        // AAC LC
            'mp4a.40.5',        // AAC HE
            'flac',             // FLAC (PCM compatible)
            'opus',             // OPUS Chrome, Firefox
        ];

        /**
         * [config] Supported modes (webrtc, webrtc/tcp, mse, hls, mp4, mjpeg).
         * @type {string}
         */
        this.mode = 'webrtc,mse,hls,mjpeg';

        /**
         * [config] Requested medias (video, audio, microphone).
         * @type {string}
         */
        this.media = 'video,audio';

        /**
         * [config] Run the stream even when the player is not displayed on the screen. Default `false`.
         * @type {boolean}
         */
        this.background = false;

        /**
         * [config] Run the stream only when the player is in the viewport. Stop it when the user
         * scrolls the player out of view.
         * Value is the percentage of visibility, from `0` (not visible) to `1` (fully visible).
         * Default `0` - disabled.
         * @type {number}
         */
        this.visibilityThreshold = 0;

        /**
         * [config] Run the stream only when the browser page is on the screen. Stop it when the user
         * switches browser tabs or minimises the browser window.
         * @type {boolean}
         */
        this.visibilityCheck = true;

        /**
         * [config] WebRTC configuration
         * @type {RTCConfiguration}
         */
        this.pcConfig = {
            iceServers: [{urls: 'stun:stun.l.google.com:19302'}],
            sdpSemantics: 'unified-plan', // important for Chromecast 1
        };

        /**
         * [info] WebSocket connection state. Values: CONNECTING, OPEN, CLOSED
         * @type {number}
         */
        this.wsState = WebSocket.CLOSED;

        /**
         * [info] WebRTC connection state.
         * @type {number}
         */
        this.pcState = WebSocket.CLOSED;

        /**
         * @type {HTMLVideoElement}
         */
        this.video = null;

        /**
         * @type {WebSocket}
         */
        this.ws = null;

        /**
         * @type {string|URL}
         */
        this.wsURL = '';

        /**
         * @type {RTCPeerConnection}
         */
        this.pc = null;

        /**
         * @type {number}
         */
        this.connectTS = 0;

        /**
         * @type {string}
         */
        this.mseCodecs = '';

        /**
         * [internal] Disconnect TimeoutID.
         * @type {number}
         */
        this.disconnectTID = 0;

        /**
         * [internal] Reconnect TimeoutID.
         * @type {number}
         */
        this.reconnectTID = 0;

        /**
         * [internal] Handler for receiving Binary from WebSocket.
         * @type {Function}
         */
        this.ondata = null;

        /**
         * [internal] Handlers list for receiving JSON from WebSocket.
         * @type {Object.<string,Function>}
         */
        this.onmessage = null;
    }

    /**
     * Set video source (WebSocket URL). Supports relative paths.
     * @param {string|URL} value
     */
    set src(value) {
        if (typeof value !== 'string') value = value.toString();
        if (value.startsWith('http')) {
            value = 'ws' + value.substring(4);
        } else if (value.startsWith('/')) {
            value = 'ws' + location.origin.substring(4) + value;
        }

        this.wsURL = value;

        this.onconnect();
    }

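    // Example conversions performed by the `src` setter (the sample URLs are illustrative only):
    //   'http://192.168.1.10:1984/api/ws?src=cam'  -> 'ws://192.168.1.10:1984/api/ws?src=cam'
    //   'https://example.com/api/ws?src=cam'       -> 'wss://example.com/api/ws?src=cam'
    //   '/api/ws?src=cam'                          -> 'ws' + location.origin.substring(4) + '/api/ws?src=cam'
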
    /**
     * Play video. Supports auto-muting when autoplay is blocked.
     * https://developer.chrome.com/blog/autoplay/
     */
    play() {
        this.video.play().catch(() => {
            if (!this.video.muted) {
                this.video.muted = true;
                this.video.play().catch(er => {
                    console.warn(er);
                });
            }
        });
    }

    /**
     * Send message to server via WebSocket
     * @param {Object} value
     */
    send(value) {
        if (this.ws) this.ws.send(JSON.stringify(value));
    }

    /** @param {Function} isSupported */
    codecs(isSupported) {
        return this.CODECS
            .filter(codec => this.media.indexOf(codec.indexOf('vc1') > 0 ? 'video' : 'audio') >= 0)
            .filter(codec => isSupported(`video/mp4; codecs="${codec}"`)).join();
    }

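    // Example (a sketch; the exact result depends on the browser): with media = 'video,audio',
    // this.codecs(MediaSource.isTypeSupported) might return
    // 'avc1.640029,avc1.64002A,avc1.640033,mp4a.40.2,mp4a.40.5,flac,opus' in Chrome,
    // while this.codecs(type => this.video.canPlayType(type)) is used for the HLS and MP4 modes.
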
    /**
     * `CustomElement`. Invoked each time the custom element is appended into a
     * document-connected element.
     */
    connectedCallback() {
        if (this.disconnectTID) {
            clearTimeout(this.disconnectTID);
            this.disconnectTID = 0;
        }

        // the video auto-pauses when it is disconnected from the DOM, so jump back to the live edge
        if (this.video) {
            const seek = this.video.seekable;
            if (seek.length > 0) {
                this.video.currentTime = seek.end(seek.length - 1);
            }
            this.play();
        } else {
            this.oninit();
        }

        this.onconnect();
    }

    /**
     * `CustomElement`. Invoked each time the custom element is disconnected from the
     * document's DOM.
     */
    disconnectedCallback() {
        if (this.background || this.disconnectTID) return;
        if (this.wsState === WebSocket.CLOSED && this.pcState === WebSocket.CLOSED) return;

        this.disconnectTID = setTimeout(() => {
            if (this.reconnectTID) {
                clearTimeout(this.reconnectTID);
                this.reconnectTID = 0;
            }

            this.disconnectTID = 0;

            this.ondisconnect();
        }, this.DISCONNECT_TIMEOUT);
    }

    /**
     * Creates child DOM elements. Called automatically once on `connectedCallback`.
     */
    oninit() {
        this.video = document.createElement('video');
        this.video.controls = true;
        this.video.playsInline = true;
        this.video.preload = 'auto';

        this.video.style.display = 'block'; // fix bottom margin 4px
        this.video.style.width = '100%';
        this.video.style.height = '100%';

        this.appendChild(this.video);

        // all Safari versions lie about supported audio codecs
        const m = window.navigator.userAgent.match(/Version\/(\d+).+Safari/);
        if (m) {
            // AAC from v13, FLAC from v14, OPUS - unsupported
            const skip = m[1] < '13' ? 'mp4a.40.2' : m[1] < '14' ? 'flac' : 'opus';
            this.CODECS.splice(this.CODECS.indexOf(skip));
        }

        if (this.background) return;

        if ('hidden' in document && this.visibilityCheck) {
            document.addEventListener('visibilitychange', () => {
                if (document.hidden) {
                    this.disconnectedCallback();
                } else if (this.isConnected) {
                    this.connectedCallback();
                }
            });
        }

        if ('IntersectionObserver' in window && this.visibilityThreshold) {
            const observer = new IntersectionObserver(entries => {
                entries.forEach(entry => {
                    if (!entry.isIntersecting) {
                        this.disconnectedCallback();
                    } else if (this.isConnected) {
                        this.connectedCallback();
                    }
                });
            }, {threshold: this.visibilityThreshold});
            observer.observe(this);
        }
    }

    /**
     * Connect to WebSocket. Called automatically on `connectedCallback`.
     * @return {boolean} true if the connection has started.
     */
    onconnect() {
        if (!this.isConnected || !this.wsURL || this.ws || this.pc) return false;

        // CLOSED or CONNECTING => CONNECTING
        this.wsState = WebSocket.CONNECTING;

        this.connectTS = Date.now();

        this.ws = new WebSocket(this.wsURL);
        this.ws.binaryType = 'arraybuffer';
        this.ws.addEventListener('open', () => this.onopen());
        this.ws.addEventListener('close', () => this.onclose());

        return true;
    }

    ondisconnect() {
        this.wsState = WebSocket.CLOSED;
        if (this.ws) {
            this.ws.close();
            this.ws = null;
        }

        this.pcState = WebSocket.CLOSED;
        if (this.pc) {
            this.pc.getSenders().forEach(sender => {
                if (sender.track) sender.track.stop();
            });
            this.pc.close();
            this.pc = null;
        }

        this.video.src = '';
        this.video.srcObject = null;
    }

    /**
     * @returns {Array.<string>} of modes (mse, webrtc, etc.)
     */
    onopen() {
        // CONNECTING => OPEN
        this.wsState = WebSocket.OPEN;

        this.ws.addEventListener('message', ev => {
            if (typeof ev.data === 'string') {
                const msg = JSON.parse(ev.data);
                for (const mode in this.onmessage) {
                    this.onmessage[mode](msg);
                }
            } else {
                this.ondata(ev.data);
            }
        });

        this.ondata = null;
        this.onmessage = {};

        const modes = [];

        if (this.mode.indexOf('mse') >= 0 && ('MediaSource' in window || 'ManagedMediaSource' in window)) {
            modes.push('mse');
            this.onmse();
        } else if (this.mode.indexOf('hls') >= 0 && this.video.canPlayType('application/vnd.apple.mpegurl')) {
            modes.push('hls');
            this.onhls();
        } else if (this.mode.indexOf('mp4') >= 0) {
            modes.push('mp4');
            this.onmp4();
        }

        if (this.mode.indexOf('webrtc') >= 0 && 'RTCPeerConnection' in window) {
            modes.push('webrtc');
            this.onwebrtc();
        }

        if (this.mode.indexOf('mjpeg') >= 0) {
            if (modes.length) {
                this.onmessage['mjpeg'] = msg => {
                    if (msg.type !== 'error' || msg.value.indexOf(modes[0]) !== 0) return;
                    this.onmjpeg();
                };
            } else {
                modes.push('mjpeg');
                this.onmjpeg();
            }
        }

        return modes;
    }

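    // A rough sketch of the WebSocket exchange driven by onopen() (message names are those used by
    // the handlers in this class; exact server behaviour belongs to the go2rtc backend):
    //   client -> server  JSON   {type: 'mse', value: '<codecs>'}          request an fMP4 stream
    //   server -> client  JSON   {type: 'mse', value: '<codec string>'}    confirms the SourceBuffer codecs
    //   server -> client  binary fMP4 segments                             handled by this.ondata
    //   client -> server  JSON   {type: 'webrtc/offer', value: '<sdp>'}    starts WebRTC in parallel
    //   server -> client  JSON   {type: 'webrtc/answer' | 'webrtc/candidate' | 'error', value: ...}
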
    /**
     * @return {boolean} true if reconnection has started.
     */
    onclose() {
        if (this.wsState === WebSocket.CLOSED) return false;

        // CONNECTING, OPEN => CONNECTING
        this.wsState = WebSocket.CONNECTING;
        this.ws = null;

        // reconnect no more than once every X seconds
        const delay = Math.max(this.RECONNECT_TIMEOUT - (Date.now() - this.connectTS), 0);

        this.reconnectTID = setTimeout(() => {
            this.reconnectTID = 0;
            this.onconnect();
        }, delay);

        return true;
    }

    onmse() {
        /** @type {MediaSource} */
        let ms;

        if ('ManagedMediaSource' in window) {
            const MediaSource = window.ManagedMediaSource;

            ms = new MediaSource();
            ms.addEventListener('sourceopen', () => {
                this.send({type: 'mse', value: this.codecs(MediaSource.isTypeSupported)});
            }, {once: true});

            this.video.disableRemotePlayback = true;
            this.video.srcObject = ms;
        } else {
            ms = new MediaSource();
            ms.addEventListener('sourceopen', () => {
                URL.revokeObjectURL(this.video.src);
                this.send({type: 'mse', value: this.codecs(MediaSource.isTypeSupported)});
            }, {once: true});

            this.video.src = URL.createObjectURL(ms);
            this.video.srcObject = null;
        }

        this.play();

        this.mseCodecs = '';

        this.onmessage['mse'] = msg => {
            if (msg.type !== 'mse') return;

            this.mseCodecs = msg.value;

            const sb = ms.addSourceBuffer(msg.value);
            sb.mode = 'segments'; // segments or sequence
            sb.addEventListener('updateend', () => {
                if (sb.updating) return;

                try {
                    if (bufLen > 0) {
                        // flush data that arrived while the SourceBuffer was busy
                        const data = buf.slice(0, bufLen);
                        bufLen = 0;
                        sb.appendBuffer(data);
                    } else if (sb.buffered && sb.buffered.length) {
                        // trim everything older than ~15 seconds behind the live edge
                        const end = sb.buffered.end(sb.buffered.length - 1) - 15;
                        const start = sb.buffered.start(0);
                        if (end > start) {
                            sb.remove(start, end);
                            ms.setLiveSeekableRange(end, end + 15);
                        }
                        // console.debug("VideoRTC.buffered", start, end);
                    }
                } catch (e) {
                    // console.debug(e);
                }
            });

            // 2 MiB holding buffer for segments that arrive while the SourceBuffer is updating
            const buf = new Uint8Array(2 * 1024 * 1024);
            let bufLen = 0;

            this.ondata = data => {
                if (sb.updating || bufLen > 0) {
                    const b = new Uint8Array(data);
                    buf.set(b, bufLen);
                    bufLen += b.byteLength;
                    // console.debug("VideoRTC.buffer", b.byteLength, bufLen);
                } else {
                    try {
                        sb.appendBuffer(data);
                    } catch (e) {
                        // console.debug(e);
                    }
                }
            };
        };
    }

    onwebrtc() {
        const pc = new RTCPeerConnection(this.pcConfig);

        pc.addEventListener('icecandidate', ev => {
            if (ev.candidate && this.mode.indexOf('webrtc/tcp') >= 0 && ev.candidate.protocol === 'udp') return;

            const candidate = ev.candidate ? ev.candidate.toJSON().candidate : '';
            this.send({type: 'webrtc/candidate', value: candidate});
        });

        pc.addEventListener('connectionstatechange', () => {
            if (pc.connectionState === 'connected') {
                const tracks = pc.getReceivers().map(receiver => receiver.track);
                /** @type {HTMLVideoElement} */
                const video2 = document.createElement('video');
                video2.addEventListener('loadeddata', () => this.onpcvideo(video2), {once: true});
                video2.srcObject = new MediaStream(tracks);
            } else if (pc.connectionState === 'failed' || pc.connectionState === 'disconnected') {
                pc.close(); // stop next events

                this.pcState = WebSocket.CLOSED;
                this.pc = null;

                this.onconnect();
            }
        });

        this.onmessage['webrtc'] = msg => {
            switch (msg.type) {
                case 'webrtc/candidate':
                    if (this.mode.indexOf('webrtc/tcp') >= 0 && msg.value.indexOf(' udp ') > 0) return;

                    pc.addIceCandidate({candidate: msg.value, sdpMid: '0'}).catch(er => {
                        console.warn(er);
                    });
                    break;
                case 'webrtc/answer':
                    pc.setRemoteDescription({type: 'answer', sdp: msg.value}).catch(er => {
                        console.warn(er);
                    });
                    break;
                case 'error':
                    if (msg.value.indexOf('webrtc/offer') < 0) return;
                    pc.close();
            }
        };

        this.createOffer(pc).then(offer => {
            this.send({type: 'webrtc/offer', value: offer.sdp});
        });

        this.pcState = WebSocket.CONNECTING;
        this.pc = pc;
    }

    /**
     * @param pc {RTCPeerConnection}
     * @return {Promise<RTCSessionDescriptionInit>}
     */
    async createOffer(pc) {
        try {
            if (this.media.indexOf('microphone') >= 0) {
                const media = await navigator.mediaDevices.getUserMedia({audio: true});
                media.getTracks().forEach(track => {
                    pc.addTransceiver(track, {direction: 'sendonly'});
                });
            }
        } catch (e) {
            console.warn(e);
        }

        for (const kind of ['video', 'audio']) {
            if (this.media.indexOf(kind) >= 0) {
                pc.addTransceiver(kind, {direction: 'recvonly'});
            }
        }

        const offer = await pc.createOffer();
        await pc.setLocalDescription(offer);
        return offer;
    }

    /**
     * @param video2 {HTMLVideoElement}
     */
    onpcvideo(video2) {
        if (this.pc) {
            // Video+Audio > Video, H265 > H264, Video > Audio, WebRTC > MSE
            // (the hex weights below roughly encode these preferences)
            let rtcPriority = 0, msePriority = 0;

            /** @type {MediaStream} */
            const stream = video2.srcObject;
            if (stream.getVideoTracks().length > 0) rtcPriority += 0x220;
            if (stream.getAudioTracks().length > 0) rtcPriority += 0x102;

            if (this.mseCodecs.indexOf('hvc1.') >= 0) msePriority += 0x230;
            if (this.mseCodecs.indexOf('avc1.') >= 0) msePriority += 0x210;
            if (this.mseCodecs.indexOf('mp4a.') >= 0) msePriority += 0x101;

            if (rtcPriority >= msePriority) {
                this.video.srcObject = stream;
                this.play();

                this.pcState = WebSocket.OPEN;

                this.wsState = WebSocket.CLOSED;
                if (this.ws) {
                    this.ws.close();
                    this.ws = null;
                }
            } else {
                this.pcState = WebSocket.CLOSED;
                if (this.pc) {
                    this.pc.close();
                    this.pc = null;
                }
            }
        }

        video2.srcObject = null;
    }

    onmjpeg() {
        this.ondata = data => {
            this.video.controls = false;
            this.video.poster = 'data:image/jpeg;base64,' + VideoRTC.btoa(data);
        };

        this.send({type: 'mjpeg'});
    }

    onhls() {
        this.onmessage['hls'] = msg => {
            if (msg.type !== 'hls') return;

            const url = 'http' + this.wsURL.substring(2, this.wsURL.indexOf('/ws')) + '/hls/';
            const playlist = msg.value.replace('hls/', url);
            this.video.src = 'data:application/vnd.apple.mpegurl;base64,' + btoa(playlist);
            this.play();
        };

        this.send({type: 'hls', value: this.codecs(type => this.video.canPlayType(type))});
    }

    onmp4() {
        /** @type {HTMLCanvasElement} **/
        const canvas = document.createElement('canvas');
        /** @type {CanvasRenderingContext2D} */
        let context;

        /** @type {HTMLVideoElement} */
        const video2 = document.createElement('video');
        video2.autoplay = true;
        video2.playsInline = true;
        video2.muted = true;

        video2.addEventListener('loadeddata', () => {
            if (!context) {
                canvas.width = video2.videoWidth;
                canvas.height = video2.videoHeight;
                context = canvas.getContext('2d');
            }

            context.drawImage(video2, 0, 0, canvas.width, canvas.height);

            this.video.controls = false;
            this.video.poster = canvas.toDataURL('image/jpeg');
        });

        this.ondata = data => {
            video2.src = 'data:video/mp4;base64,' + VideoRTC.btoa(data);
        };

        // wrap canPlayType in an arrow function (as in onhls) so it keeps its `this` binding
        this.send({type: 'mp4', value: this.codecs(type => this.video.canPlayType(type))});
    }

    /** @param {number} new_volume Volume from 0.0 (muted) to 1.0 (loudest). */
    volume(new_volume) {
        console.log(this, "Setting volume to ", new_volume);
        this.video.volume = new_volume;
    }

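    // Minimal sketch of the base64 helper below (the values are illustrative):
    //   VideoRTC.btoa(new Uint8Array([0x68, 0x69]).buffer) === 'aGk='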
    static btoa(buffer) {
        const bytes = new Uint8Array(buffer);
        const len = bytes.byteLength;
        let binary = '';
        for (let i = 0; i < len; i++) {
            binary += String.fromCharCode(bytes[i]);
        }
        return window.btoa(binary);
    }
}