First release

Owen Quinlan 2021-07-02 19:29:34 +10:00
commit fa6c85266e
2339 changed files with 761050 additions and 0 deletions

node_modules/@videojs/http-streaming/src/ad-cue-tags.js generated vendored Normal file

@@ -0,0 +1,101 @@
/**
* @file ad-cue-tags.js
*/
import window from 'global/window';
/**
* Searches for an ad cue that overlaps with the given mediaTime
*
* @param {Object} track
* the track to find the cue for
*
* @param {number} mediaTime
* the time to find the cue at
*
* @return {Object|null}
* the found cue or null
*/
export const findAdCue = function(track, mediaTime) {
const cues = track.cues;
for (let i = 0; i < cues.length; i++) {
const cue = cues[i];
if (mediaTime >= cue.adStartTime && mediaTime <= cue.adEndTime) {
return cue;
}
}
return null;
};
export const updateAdCues = function(media, track, offset = 0) {
if (!media.segments) {
return;
}
let mediaTime = offset;
let cue;
for (let i = 0; i < media.segments.length; i++) {
const segment = media.segments[i];
if (!cue) {
// Since the cues will span for at least the segment duration, adding a fudge
// factor of half segment duration will prevent duplicate cues from being
// created when timing info is not exact (e.g. cue start time initialized
// at 10.006677, but next call mediaTime is 10.003332 )
cue = findAdCue(track, mediaTime + (segment.duration / 2));
}
if (cue) {
if ('cueIn' in segment) {
// Found a CUE-IN so end the cue
cue.endTime = mediaTime;
cue.adEndTime = mediaTime;
mediaTime += segment.duration;
cue = null;
continue;
}
if (mediaTime < cue.endTime) {
// Already processed this mediaTime for this cue
mediaTime += segment.duration;
continue;
}
// otherwise extend cue until a CUE-IN is found
cue.endTime += segment.duration;
} else {
if ('cueOut' in segment) {
cue = new window.VTTCue(
mediaTime,
mediaTime + segment.duration,
segment.cueOut
);
cue.adStartTime = mediaTime;
// Assumes tag format to be
// #EXT-X-CUE-OUT:30
cue.adEndTime = mediaTime + parseFloat(segment.cueOut);
track.addCue(cue);
}
if ('cueOutCont' in segment) {
// Entered into the middle of an ad cue
// Assumes the tag format to be
// #EXT-X-CUE-OUT-CONT:10/30
const [adOffset, adTotal] = segment.cueOutCont.split('/').map(parseFloat);
cue = new window.VTTCue(
mediaTime,
mediaTime + segment.duration,
''
);
cue.adStartTime = mediaTime - adOffset;
cue.adEndTime = cue.adStartTime + adTotal;
track.addCue(cue);
}
}
mediaTime += segment.duration;
}
};
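
As a usage sketch (not part of the file above): updateAdCues can be driven with a mock cue track that exposes the same cues/addCue surface as a native TextTrack. The media object below is hypothetical, and window.VTTCue must exist (i.e. a browser context).

import { updateAdCues } from './ad-cue-tags';

// minimal stand-in for a TextTrack
const track = {
  cues: [],
  addCue(cue) { this.cues.push(cue); }
};

// a playlist whose second segment opens a 30 second ad break
const media = {
  segments: [
    { duration: 10 },
    { duration: 10, cueOut: '30' },
    { duration: 10 }
  ]
};

updateAdCues(media, track);
// one cue covering the break: startTime 10, adStartTime 10, adEndTime 40,
// and endTime extended to 30 because no CUE-IN was seen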

node_modules/@videojs/http-streaming/src/bin-utils.js generated vendored Normal file

@@ -0,0 +1,129 @@
/**
* @file bin-utils.js
*/
/**
* convert a TimeRange to text
*
* @param {TimeRange} range the timerange to use for conversion
* @param {number} i the iterator on the range to convert
* @return {string} the range in string format
*/
const textRange = function(range, i) {
return range.start(i) + '-' + range.end(i);
};
/**
* format a number as hex string
*
* @param {number} e The number
* @param {number} i the iterator
* @return {string} the hex formatted number as a string
*/
const formatHexString = function(e, i) {
const value = e.toString(16);
return '00'.substring(0, 2 - value.length) + value + (i % 2 ? ' ' : '');
};
const formatAsciiString = function(e) {
if (e >= 0x20 && e < 0x7e) {
return String.fromCharCode(e);
}
return '.';
};
/**
* Creates an object for sending to a web worker, modifying properties that are TypedArrays
* into a new object with separated properties for the buffer, byteOffset, and byteLength.
*
* @param {Object} message
* Object of properties and values to send to the web worker
* @return {Object}
* Modified message with TypedArray values expanded
* @function createTransferableMessage
*/
export const createTransferableMessage = function(message) {
const transferable = {};
Object.keys(message).forEach((key) => {
const value = message[key];
if (ArrayBuffer.isView(value)) {
transferable[key] = {
bytes: value.buffer,
byteOffset: value.byteOffset,
byteLength: value.byteLength
};
} else {
transferable[key] = value;
}
});
return transferable;
};
/**
* Returns a unique string identifier for a media initialization
* segment.
*
* @param {Object} initSegment
* the init segment object.
*
* @return {string} the generated init segment id
*/
export const initSegmentId = function(initSegment) {
const byterange = initSegment.byterange || {
length: Infinity,
offset: 0
};
return [
byterange.length, byterange.offset, initSegment.resolvedUri
].join(',');
};
/**
* Returns a unique string identifier for a media segment key.
*
* @param {Object} key the encryption key
* @return {string} the unique id for the media segment key.
*/
export const segmentKeyId = function(key) {
return key.resolvedUri;
};
/**
* utils to help dump binary data to the console
*
* @param {Array|TypedArray} data
* data to dump to a string
*
* @return {string} the data as a hex string.
*/
export const hexDump = (data) => {
const bytes = Array.prototype.slice.call(data);
const step = 16;
let result = '';
let hex;
let ascii;
for (let j = 0; j < bytes.length / step; j++) {
hex = bytes.slice(j * step, j * step + step).map(formatHexString).join('');
ascii = bytes.slice(j * step, j * step + step).map(formatAsciiString).join('');
result += hex + ' ' + ascii + '\n';
}
return result;
};
export const tagDump = ({ bytes }) => hexDump(bytes);
export const textRanges = (ranges) => {
let result = '';
let i;
for (i = 0; i < ranges.length; i++) {
result += textRange(ranges, i) + ' ';
}
return result;
};
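
A usage sketch for createTransferableMessage, pairing it with a postMessage transfer list; the worker variable is assumed to exist and the field names are illustrative:

import { createTransferableMessage } from './bin-utils';

const payload = createTransferableMessage({
  source: 'segment-0',             // plain values pass through unchanged
  encrypted: new Uint8Array(16)    // TypedArrays become {bytes, byteOffset, byteLength}
});

// hand the underlying ArrayBuffer to the worker instead of copying it
worker.postMessage(payload, [payload.encrypted.bytes]);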

node_modules/@videojs/http-streaming/src/config.js generated vendored Normal file

@@ -0,0 +1,21 @@
export default {
GOAL_BUFFER_LENGTH: 30,
MAX_GOAL_BUFFER_LENGTH: 60,
BACK_BUFFER_LENGTH: 30,
GOAL_BUFFER_LENGTH_RATE: 1,
// 0.5 MB/s, expressed in bits per second
INITIAL_BANDWIDTH: 4194304,
// A fudge factor to apply to advertised playlist bitrates to account for
// temporary fluctuations in client bandwidth
BANDWIDTH_VARIANCE: 1.2,
// How much of the buffer must be filled before we consider upswitching
BUFFER_LOW_WATER_LINE: 0,
MAX_BUFFER_LOW_WATER_LINE: 30,
// TODO: Remove this when experimentalBufferBasedABR is removed
EXPERIMENTAL_MAX_BUFFER_LOW_WATER_LINE: 16,
BUFFER_LOW_WATER_LINE_RATE: 1,
// If the buffer is greater than the high water line, we won't switch down
BUFFER_HIGH_WATER_LINE: 30
};
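
To illustrate how these values interact, here is a paraphrase (not a verbatim excerpt) of how VHS derives its effective buffer goal: the goal grows with playback time at GOAL_BUFFER_LENGTH_RATE and is capped at MAX_GOAL_BUFFER_LENGTH.

import Config from './config';

const goalBufferLength = (currentTime) => Math.min(
  Config.GOAL_BUFFER_LENGTH + currentTime * Config.GOAL_BUFFER_LENGTH_RATE,
  Config.MAX_GOAL_BUFFER_LENGTH
);

goalBufferLength(0);  // 30 seconds of forward buffer at the start of playback
goalBufferLength(60); // capped at 60 seconds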


@@ -0,0 +1,855 @@
import videojs from 'video.js';
import {
parse as parseMpd,
addSidxSegmentsToPlaylist,
generateSidxKey,
parseUTCTiming
} from 'mpd-parser';
import {
refreshDelay,
updateMaster as updatePlaylist,
isPlaylistUnchanged
} from './playlist-loader';
import { resolveUrl, resolveManifestRedirect } from './resolve-url';
import parseSidx from 'mux.js/lib/tools/parse-sidx';
import { segmentXhrHeaders } from './xhr';
import window from 'global/window';
import {
forEachMediaGroup,
addPropertiesToMaster
} from './manifest';
import containerRequest from './util/container-request.js';
import {toUint8} from '@videojs/vhs-utils/es/byte-helpers';
import logger from './util/logger';
const { EventTarget, mergeOptions } = videojs;
const dashPlaylistUnchanged = function(a, b) {
if (!isPlaylistUnchanged(a, b)) {
return false;
}
// For DASH, the check above will often return true even when the playlist
// has actually changed, because mediaSequence isn't a DASH concept and we
// often set it to 1, so playlists with the same number of segments look
// unchanged. For DASH we therefore have to verify that the underlying
// segments themselves are unchanged.
// if sidx changed then the playlists are different.
if (a.sidx && b.sidx && (a.sidx.offset !== b.sidx.offset || a.sidx.length !== b.sidx.length)) {
return false;
} else if ((!a.sidx && b.sidx) || (a.sidx && !b.sidx)) {
return false;
}
// one or the other does not have segments
// there was a change.
if (a.segments && !b.segments || !a.segments && b.segments) {
return false;
}
// neither has segments, so nothing changed
if (!a.segments && !b.segments) {
return true;
}
// check segments themselves
for (let i = 0; i < a.segments.length; i++) {
const aSegment = a.segments[i];
const bSegment = b.segments[i];
// if uris are different between segments there was a change
if (aSegment.uri !== bSegment.uri) {
return false;
}
// neither segment has a byterange, there will be no byterange change.
if (!aSegment.byterange && !bSegment.byterange) {
continue;
}
const aByterange = aSegment.byterange;
const bByterange = bSegment.byterange;
// if byterange only exists on one of the segments, there was a change.
if ((aByterange && !bByterange) || (!aByterange && bByterange)) {
return false;
}
// if both segments have byterange with different offsets, there was a change.
if (aByterange.offset !== bByterange.offset || aByterange.length !== bByterange.length) {
return false;
}
}
// if everything was the same with segments, this is the same playlist.
return true;
};
/**
* Parses the master XML string and updates playlist URI references.
*
* @param {Object} config
* Object of arguments
* @param {string} config.masterXml
* The mpd XML
* @param {string} config.srcUrl
* The mpd URL
* @param {Date} config.clientOffset
* A time difference between server and client
* @param {Object} config.sidxMapping
* SIDX mappings for moof/mdat URIs and byte ranges
* @return {Object}
* The parsed mpd manifest object
*/
export const parseMasterXml = ({ masterXml, srcUrl, clientOffset, sidxMapping }) => {
const master = parseMpd(masterXml, {
manifestUri: srcUrl,
clientOffset,
sidxMapping
});
addPropertiesToMaster(master, srcUrl);
return master;
};
/**
* Returns a new master manifest that is the result of merging an updated master manifest
* into the original version.
*
* @param {Object} oldMaster
* The old parsed mpd object
* @param {Object} newMaster
* The updated parsed mpd object
* @return {Object}
* A new object representing the original master manifest with the updated media
* playlists merged in
*/
export const updateMaster = (oldMaster, newMaster, sidxMapping) => {
let noChanges = true;
let update = mergeOptions(oldMaster, {
// These are top level properties that can be updated
duration: newMaster.duration,
minimumUpdatePeriod: newMaster.minimumUpdatePeriod
});
// First update the playlists in playlist list
for (let i = 0; i < newMaster.playlists.length; i++) {
const playlist = newMaster.playlists[i];
if (playlist.sidx) {
const sidxKey = generateSidxKey(playlist.sidx);
// add sidx segments to the playlist if we have all the sidx info already
if (sidxMapping && sidxMapping[sidxKey] && sidxMapping[sidxKey].sidx) {
addSidxSegmentsToPlaylist(playlist, sidxMapping[sidxKey].sidx, playlist.sidx.resolvedUri);
}
}
const playlistUpdate = updatePlaylist(update, playlist, dashPlaylistUnchanged);
if (playlistUpdate) {
update = playlistUpdate;
noChanges = false;
}
}
// Then update media group playlists
forEachMediaGroup(newMaster, (properties, type, group, label) => {
if (properties.playlists && properties.playlists.length) {
const id = properties.playlists[0].id;
const playlistUpdate = updatePlaylist(update, properties.playlists[0], dashPlaylistUnchanged);
if (playlistUpdate) {
update = playlistUpdate;
// update the playlist reference within media groups
update.mediaGroups[type][group][label].playlists[0] = update.playlists[id];
noChanges = false;
}
}
});
if (newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod) {
noChanges = false;
}
if (noChanges) {
return null;
}
return update;
};
// SIDXs should be equivalent if their URIs and byteranges match.
// If the SIDXs have maps, the two maps should also match;
// both `a` and `b` lacking a map is considered matching.
// If only one of `a` or `b` has a map, they aren't matching.
const equivalentSidx = (a, b) => {
const neitherMap = Boolean(!a.map && !b.map);
const equivalentMap = neitherMap || Boolean(a.map && b.map &&
a.map.byterange.offset === b.map.byterange.offset &&
a.map.byterange.length === b.map.byterange.length);
return equivalentMap &&
a.uri === b.uri &&
a.byterange.offset === b.byterange.offset &&
a.byterange.length === b.byterange.length;
};
// exported for testing
export const compareSidxEntry = (playlists, oldSidxMapping) => {
const newSidxMapping = {};
for (const id in playlists) {
const playlist = playlists[id];
const currentSidxInfo = playlist.sidx;
if (currentSidxInfo) {
const key = generateSidxKey(currentSidxInfo);
if (!oldSidxMapping[key]) {
break;
}
const savedSidxInfo = oldSidxMapping[key].sidxInfo;
if (equivalentSidx(savedSidxInfo, currentSidxInfo)) {
newSidxMapping[key] = oldSidxMapping[key];
}
}
}
return newSidxMapping;
};
/**
* A function that filters out changed items as they need to be requested separately.
*
* The method is exported for testing
*
* @param {Object} master the parsed mpd XML returned via mpd-parser
* @param {Object} oldSidxMapping the SIDX to compare against
*/
export const filterChangedSidxMappings = (master, oldSidxMapping) => {
const videoSidx = compareSidxEntry(master.playlists, oldSidxMapping);
let mediaGroupSidx = videoSidx;
forEachMediaGroup(master, (properties, mediaType, groupKey, labelKey) => {
if (properties.playlists && properties.playlists.length) {
const playlists = properties.playlists;
mediaGroupSidx = mergeOptions(
mediaGroupSidx,
compareSidxEntry(playlists, oldSidxMapping)
);
}
});
return mediaGroupSidx;
};
export default class DashPlaylistLoader extends EventTarget {
// DashPlaylistLoader must accept either a src url or a playlist because subsequent
// playlist loader setups from media groups will expect to be able to pass a playlist
// (since there aren't external URLs to media playlists with DASH)
constructor(srcUrlOrPlaylist, vhs, options = { }, masterPlaylistLoader) {
super();
this.masterPlaylistLoader_ = masterPlaylistLoader || this;
if (!masterPlaylistLoader) {
this.isMaster_ = true;
}
const { withCredentials = false, handleManifestRedirects = false } = options;
this.vhs_ = vhs;
this.withCredentials = withCredentials;
this.handleManifestRedirects = handleManifestRedirects;
if (!srcUrlOrPlaylist) {
throw new Error('A non-empty playlist URL or object is required');
}
// event naming?
this.on('minimumUpdatePeriod', () => {
this.refreshXml_();
});
// live playlist staleness timeout
this.on('mediaupdatetimeout', () => {
this.refreshMedia_(this.media().id);
});
this.state = 'HAVE_NOTHING';
this.loadedPlaylists_ = {};
this.logger_ = logger('DashPlaylistLoader');
// initialize the loader state
// The masterPlaylistLoader will be created with a string
if (this.isMaster_) {
this.masterPlaylistLoader_.srcUrl = srcUrlOrPlaylist;
// TODO: reset sidxMapping between period changes
// once multi-period is refactored
this.masterPlaylistLoader_.sidxMapping_ = {};
} else {
this.childPlaylist_ = srcUrlOrPlaylist;
}
}
requestErrored_(err, request, startingState) {
// disposed
if (!this.request) {
return true;
}
// pending request is cleared
this.request = null;
if (err) {
// use the provided error object or create one
// based on the request/response
this.error = typeof err === 'object' && !(err instanceof Error) ? err : {
status: request.status,
message: 'DASH request error at URL: ' + request.uri,
response: request.response,
// MEDIA_ERR_NETWORK
code: 2
};
if (startingState) {
this.state = startingState;
}
this.trigger('error');
return true;
}
}
/**
* Verify that the container of the sidx segment can be parsed
* and if it can, get and parse that segment.
*/
addSidxSegments_(playlist, startingState, cb) {
const sidxKey = playlist.sidx && generateSidxKey(playlist.sidx);
// playlist lacks sidx or sidx segments were added to this playlist already.
if (!playlist.sidx || !sidxKey || this.masterPlaylistLoader_.sidxMapping_[sidxKey]) {
// keep this function async
this.mediaRequest_ = window.setTimeout(() => cb(false), 0);
return;
}
// resolve the segment URL relative to the playlist
const uri = resolveManifestRedirect(this.handleManifestRedirects, playlist.sidx.resolvedUri);
const fin = (err, request) => {
if (this.requestErrored_(err, request, startingState)) {
return;
}
const sidxMapping = this.masterPlaylistLoader_.sidxMapping_;
let sidx;
try {
sidx = parseSidx(toUint8(request.response).subarray(8));
} catch (e) {
// sidx parsing failed.
this.requestErrored_(e, request, startingState);
return;
}
sidxMapping[sidxKey] = {
sidxInfo: playlist.sidx,
sidx
};
addSidxSegmentsToPlaylist(playlist, sidx, playlist.sidx.resolvedUri);
return cb(true);
};
this.request = containerRequest(uri, this.vhs_.xhr, (err, request, container, bytes) => {
if (err) {
return fin(err, request);
}
if (!container || container !== 'mp4') {
return fin({
status: request.status,
message: `Unsupported ${container || 'unknown'} container type for sidx segment at URL: ${uri}`,
// response is just bytes in this case
// but we really don't want to return that.
response: '',
playlist,
internal: true,
blacklistDuration: Infinity,
// MEDIA_ERR_NETWORK
code: 2
}, request);
}
// if we already downloaded the sidx bytes in the container request, use them
const {offset, length} = playlist.sidx.byterange;
if (bytes.length >= (length + offset)) {
return fin(err, {
response: bytes.subarray(offset, offset + length),
status: request.status,
uri: request.uri
});
}
// otherwise request sidx bytes
this.request = this.vhs_.xhr({
uri,
responseType: 'arraybuffer',
headers: segmentXhrHeaders({byterange: playlist.sidx.byterange})
}, fin);
});
}
dispose() {
this.trigger('dispose');
this.stopRequest();
this.loadedPlaylists_ = {};
window.clearTimeout(this.minimumUpdatePeriodTimeout_);
window.clearTimeout(this.mediaRequest_);
window.clearTimeout(this.mediaUpdateTimeout);
this.mediaUpdateTimeout = null;
this.mediaRequest_ = null;
this.minimumUpdatePeriodTimeout_ = null;
if (this.masterPlaylistLoader_.createMupOnMedia_) {
this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
this.masterPlaylistLoader_.createMupOnMedia_ = null;
}
this.off();
}
hasPendingRequest() {
return this.request || this.mediaRequest_;
}
stopRequest() {
if (this.request) {
const oldRequest = this.request;
this.request = null;
oldRequest.onreadystatechange = null;
oldRequest.abort();
}
}
media(playlist) {
// getter
if (!playlist) {
return this.media_;
}
// setter
if (this.state === 'HAVE_NOTHING') {
throw new Error('Cannot switch media playlist from ' + this.state);
}
const startingState = this.state;
// find the playlist object if the target playlist has been specified by URI
if (typeof playlist === 'string') {
if (!this.masterPlaylistLoader_.master.playlists[playlist]) {
throw new Error('Unknown playlist URI: ' + playlist);
}
playlist = this.masterPlaylistLoader_.master.playlists[playlist];
}
const mediaChange = !this.media_ || playlist.id !== this.media_.id;
// switch to previously loaded playlists immediately
if (mediaChange &&
this.loadedPlaylists_[playlist.id] &&
this.loadedPlaylists_[playlist.id].endList) {
this.state = 'HAVE_METADATA';
this.media_ = playlist;
// trigger media change if the active media has been updated
if (mediaChange) {
this.trigger('mediachanging');
this.trigger('mediachange');
}
return;
}
// switching to the active playlist is a no-op
if (!mediaChange) {
return;
}
// switching from an already loaded playlist
if (this.media_) {
this.trigger('mediachanging');
}
this.addSidxSegments_(playlist, startingState, (sidxChanged) => {
// everything is ready just continue to haveMetadata
this.haveMetadata({startingState, playlist});
});
}
haveMetadata({startingState, playlist}) {
this.state = 'HAVE_METADATA';
this.loadedPlaylists_[playlist.id] = playlist;
this.mediaRequest_ = null;
// This will trigger loadedplaylist
this.refreshMedia_(playlist.id);
// fire loadedmetadata the first time a media playlist is loaded
// to resolve setup of media groups
if (startingState === 'HAVE_MASTER') {
this.trigger('loadedmetadata');
} else {
// trigger media change if the active media has been updated
this.trigger('mediachange');
}
}
pause() {
if (this.masterPlaylistLoader_.createMupOnMedia_) {
this.off('loadedmetadata', this.masterPlaylistLoader_.createMupOnMedia_);
this.masterPlaylistLoader_.createMupOnMedia_ = null;
}
this.stopRequest();
window.clearTimeout(this.mediaUpdateTimeout);
this.mediaUpdateTimeout = null;
if (this.isMaster_) {
window.clearTimeout(this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_);
this.masterPlaylistLoader_.minimumUpdatePeriodTimeout_ = null;
}
if (this.state === 'HAVE_NOTHING') {
// If we pause the loader before any data has been retrieved, it's as if we
// never started, so reset to an unstarted state.
this.started = false;
}
}
load(isFinalRendition) {
window.clearTimeout(this.mediaUpdateTimeout);
this.mediaUpdateTimeout = null;
const media = this.media();
if (isFinalRendition) {
const delay = media ? (media.targetDuration / 2) * 1000 : 5 * 1000;
this.mediaUpdateTimeout = window.setTimeout(() => this.load(), delay);
return;
}
// because the playlists are internal to the manifest, load should either load the
// main manifest, or do nothing but trigger an event
if (!this.started) {
this.start();
return;
}
if (media && !media.endList) {
// Check to see if this is the master loader and the MUP was cleared (this happens
// when the loader was paused). `media` should be set at this point since one is always
// set during `start()`.
if (this.isMaster_ && !this.minimumUpdatePeriodTimeout_) {
// Trigger minimumUpdatePeriod to refresh the master manifest
this.trigger('minimumUpdatePeriod');
// Since there was no prior minimumUpdatePeriodTimeout it should be recreated
this.updateMinimumUpdatePeriodTimeout_();
}
this.trigger('mediaupdatetimeout');
} else {
this.trigger('loadedplaylist');
}
}
start() {
this.started = true;
// We don't need to request the master manifest again
// Call this asynchronously to match the xhr request behavior below
if (!this.isMaster_) {
this.mediaRequest_ = window.setTimeout(() => this.haveMaster_(), 0);
return;
}
this.requestMaster_((req, masterChanged) => {
this.haveMaster_();
if (!this.hasPendingRequest() && !this.media_) {
this.media(this.masterPlaylistLoader_.master.playlists[0]);
}
});
}
requestMaster_(cb) {
this.request = this.vhs_.xhr({
uri: this.masterPlaylistLoader_.srcUrl,
withCredentials: this.withCredentials
}, (error, req) => {
if (this.requestErrored_(error, req)) {
if (this.state === 'HAVE_NOTHING') {
this.started = false;
}
return;
}
const masterChanged = req.responseText !== this.masterPlaylistLoader_.masterXml_;
this.masterPlaylistLoader_.masterXml_ = req.responseText;
if (req.responseHeaders && req.responseHeaders.date) {
this.masterLoaded_ = Date.parse(req.responseHeaders.date);
} else {
this.masterLoaded_ = Date.now();
}
this.masterPlaylistLoader_.srcUrl = resolveManifestRedirect(this.handleManifestRedirects, this.masterPlaylistLoader_.srcUrl, req);
if (masterChanged) {
this.handleMaster_();
this.syncClientServerClock_(() => {
return cb(req, masterChanged);
});
return;
}
return cb(req, masterChanged);
});
}
/**
* Parses the master xml for UTCTiming node to sync the client clock to the server
* clock. If the UTCTiming node requires a HEAD or GET request, that request is made.
*
* @param {Function} done
* Function to call when clock sync has completed
*/
syncClientServerClock_(done) {
const utcTiming = parseUTCTiming(this.masterPlaylistLoader_.masterXml_);
// No UTCTiming element found in the mpd. Use Date header from mpd request as the
// server clock
if (utcTiming === null) {
this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
return done();
}
if (utcTiming.method === 'DIRECT') {
this.masterPlaylistLoader_.clientOffset_ = utcTiming.value - Date.now();
return done();
}
this.request = this.vhs_.xhr({
uri: resolveUrl(this.masterPlaylistLoader_.srcUrl, utcTiming.value),
method: utcTiming.method,
withCredentials: this.withCredentials
}, (error, req) => {
// disposed
if (!this.request) {
return;
}
if (error) {
// sync request failed, fall back to using date header from mpd
// TODO: log warning
this.masterPlaylistLoader_.clientOffset_ = this.masterLoaded_ - Date.now();
return done();
}
let serverTime;
if (utcTiming.method === 'HEAD') {
if (!req.responseHeaders || !req.responseHeaders.date) {
// expected date header not present, fall back to using date header from mpd
// TODO: log warning
serverTime = this.masterLoaded_;
} else {
serverTime = Date.parse(req.responseHeaders.date);
}
} else {
serverTime = Date.parse(req.responseText);
}
this.masterPlaylistLoader_.clientOffset_ = serverTime - Date.now();
done();
});
}
haveMaster_() {
this.state = 'HAVE_MASTER';
if (this.isMaster_) {
// We have the master playlist at this point, so
// trigger this to allow MasterPlaylistController
// to make an initial playlist selection
this.trigger('loadedplaylist');
} else if (!this.media_) {
// no media playlist was specifically selected so select
// the one the child playlist loader was created with
this.media(this.childPlaylist_);
}
}
handleMaster_() {
// clear media request
this.mediaRequest_ = null;
let newMaster = parseMasterXml({
masterXml: this.masterPlaylistLoader_.masterXml_,
srcUrl: this.masterPlaylistLoader_.srcUrl,
clientOffset: this.masterPlaylistLoader_.clientOffset_,
sidxMapping: this.masterPlaylistLoader_.sidxMapping_
});
const oldMaster = this.masterPlaylistLoader_.master;
// if we have an old master to compare the new master against
if (oldMaster) {
newMaster = updateMaster(oldMaster, newMaster, this.masterPlaylistLoader_.sidxMapping_);
}
// only update master if we have a new master
this.masterPlaylistLoader_.master = newMaster ? newMaster : oldMaster;
const location = this.masterPlaylistLoader_.master.locations && this.masterPlaylistLoader_.master.locations[0];
if (location && location !== this.masterPlaylistLoader_.srcUrl) {
this.masterPlaylistLoader_.srcUrl = location;
}
if (!oldMaster || (newMaster && newMaster.minimumUpdatePeriod !== oldMaster.minimumUpdatePeriod)) {
this.updateMinimumUpdatePeriodTimeout_();
}
return Boolean(newMaster);
}
updateMinimumUpdatePeriodTimeout_() {
const mpl = this.masterPlaylistLoader_;
// cancel any pending creation of mup on media
// a new one will be added if needed.
if (mpl.createMupOnMedia_) {
mpl.off('loadedmetadata', mpl.createMupOnMedia_);
mpl.createMupOnMedia_ = null;
}
// clear any pending timeouts
if (mpl.minimumUpdatePeriodTimeout_) {
window.clearTimeout(mpl.minimumUpdatePeriodTimeout_);
mpl.minimumUpdatePeriodTimeout_ = null;
}
let mup = mpl.master && mpl.master.minimumUpdatePeriod;
// If the minimumUpdatePeriod has a value of 0, that indicates that the current
// MPD has no future validity, so a new one will need to be acquired when new
// media segments are to be made available. Thus, we use the target duration
// in this case
if (mup === 0) {
if (mpl.media()) {
mup = mpl.media().targetDuration * 1000;
} else {
mpl.createMupOnMedia_ = mpl.updateMinimumUpdatePeriodTimeout_;
mpl.one('loadedmetadata', mpl.createMupOnMedia_);
}
}
// If minimumUpdatePeriod is invalid or <= zero, which can happen
// when a live video becomes VOD, skip timeout creation.
if (typeof mup !== 'number' || mup <= 0) {
if (mup < 0) {
this.logger_(`found invalid minimumUpdatePeriod of ${mup}, not setting a timeout`);
}
return;
}
this.createMUPTimeout_(mup);
}
createMUPTimeout_(mup) {
const mpl = this.masterPlaylistLoader_;
mpl.minimumUpdatePeriodTimeout_ = window.setTimeout(() => {
mpl.minimumUpdatePeriodTimeout_ = null;
mpl.trigger('minimumUpdatePeriod');
mpl.createMUPTimeout_(mup);
}, mup);
}
/**
* Sends a request to refresh the master xml and updates the parsed master manifest
*/
refreshXml_() {
this.requestMaster_((req, masterChanged) => {
if (!masterChanged) {
return;
}
if (this.media_) {
this.media_ = this.masterPlaylistLoader_.master.playlists[this.media_.id];
}
// This will filter out updated sidx info from the mapping
this.masterPlaylistLoader_.sidxMapping_ = filterChangedSidxMappings(
this.masterPlaylistLoader_.master,
this.masterPlaylistLoader_.sidxMapping_
);
this.addSidxSegments_(this.media(), this.state, (sidxChanged) => {
// TODO: do we need to reload the current playlist?
this.refreshMedia_(this.media().id);
});
});
}
/**
* Refreshes the media playlist by re-parsing the master xml and updating playlist
* references. If this is an alternate loader, the updated parsed manifest is retrieved
* from the master loader.
*/
refreshMedia_(mediaID) {
if (!mediaID) {
throw new Error('refreshMedia_ must take a media id');
}
// for the master loader we have to reparse the master xml
// to re-create segments based on current timing values,
// which may change the media. We only skip updating the master
// when this.media_ is being set for the first time, since the
// master was just parsed in that case.
if (this.media_ && this.isMaster_) {
this.handleMaster_();
}
const playlists = this.masterPlaylistLoader_.master.playlists;
const mediaChanged = !this.media_ || this.media_ !== playlists[mediaID];
if (mediaChanged) {
this.media_ = playlists[mediaID];
} else {
this.trigger('playlistunchanged');
}
if (!this.mediaUpdateTimeout) {
const createMediaUpdateTimeout = () => {
if (this.media().endList) {
return;
}
this.mediaUpdateTimeout = window.setTimeout(() => {
this.trigger('mediaupdatetimeout');
createMediaUpdateTimeout();
}, refreshDelay(this.media(), Boolean(mediaChanged)));
};
createMediaUpdateTimeout();
}
this.trigger('loadedplaylist');
}
}
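
A minimal sketch of driving this loader directly; vhs stands in for a tech-provided object with an xhr method, and real players go through the master playlist controller rather than wiring this up by hand:

const loader = new DashPlaylistLoader('https://example.com/dash.mpd', vhs, {
  withCredentials: false,
  handleManifestRedirects: true
});

loader.on('loadedplaylist', () => {
  // for the master loader, the parsed and merged mpd lives on loader.master
  const master = loader.master;
});
loader.on('error', () => videojs.log.warn(loader.error.message));
loader.start();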


@@ -0,0 +1,41 @@
/* global self */
import { Decrypter } from 'aes-decrypter';
import { createTransferableMessage } from './bin-utils';
/**
* Our web worker interface so that things can talk to aes-decrypter
* that will be running in a web worker. The scope is passed to this by
* webworkify.
*/
self.onmessage = function(event) {
const data = event.data;
const encrypted = new Uint8Array(
data.encrypted.bytes,
data.encrypted.byteOffset,
data.encrypted.byteLength
);
const key = new Uint32Array(
data.key.bytes,
data.key.byteOffset,
data.key.byteLength / 4
);
const iv = new Uint32Array(
data.iv.bytes,
data.iv.byteOffset,
data.iv.byteLength / 4
);
/* eslint-disable no-new, handle-callback-err */
new Decrypter(
encrypted,
key,
iv,
function(err, bytes) {
self.postMessage(createTransferableMessage({
source: data.source,
decrypted: bytes
}), [bytes.buffer]);
}
);
/* eslint-enable */
};
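
The main-thread half of this contract, sketched with the same field names the handler above destructures; worker construction and the requestId/byte variables are placeholders:

const message = createTransferableMessage({
  source: requestId,          // echoed back so replies can be matched up
  encrypted: encryptedBytes,  // Uint8Array of the encrypted segment
  key: keyBytes,              // Uint8Array, 16 bytes (read as 4 x 32-bit words)
  iv: ivBytes                 // Uint8Array, 16 bytes
});
worker.postMessage(message, [message.encrypted.bytes]);

worker.onmessage = ({ data }) => {
  if (data.source === requestId) {
    const decrypted = new Uint8Array(
      data.decrypted.bytes,
      data.decrypted.byteOffset,
      data.decrypted.byteLength
    );
  }
};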


@@ -0,0 +1,654 @@
/*! @name @videojs/http-streaming @version 2.5.0 @license Apache-2.0 */
var decrypterWorker = (function () {
'use strict';
function _defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
function _createClass(Constructor, protoProps, staticProps) {
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
if (staticProps) _defineProperties(Constructor, staticProps);
return Constructor;
}
var createClass = _createClass;
function _inheritsLoose(subClass, superClass) {
subClass.prototype = Object.create(superClass.prototype);
subClass.prototype.constructor = subClass;
subClass.__proto__ = superClass;
}
var inheritsLoose = _inheritsLoose;
/**
* @file stream.js
*/
/**
* A lightweight readable stream implementation that handles event dispatching.
*
* @class Stream
*/
var Stream = /*#__PURE__*/function () {
function Stream() {
this.listeners = {};
}
/**
* Add a listener for a specified event type.
*
* @param {string} type the event name
* @param {Function} listener the callback to be invoked when an event of
* the specified type occurs
*/
var _proto = Stream.prototype;
_proto.on = function on(type, listener) {
if (!this.listeners[type]) {
this.listeners[type] = [];
}
this.listeners[type].push(listener);
}
/**
* Remove a listener for a specified event type.
*
* @param {string} type the event name
* @param {Function} listener a function previously registered for this
* type of event through `on`
* @return {boolean} if we could turn it off or not
*/
;
_proto.off = function off(type, listener) {
if (!this.listeners[type]) {
return false;
}
var index = this.listeners[type].indexOf(listener); // TODO: which is better?
// In Video.js we slice listener functions
// on trigger so that it does not mess up the order
// while we loop through.
//
// Here we slice on off so that the loop in trigger
// can continue using its old reference to loop without
// messing up the order.
this.listeners[type] = this.listeners[type].slice(0);
this.listeners[type].splice(index, 1);
return index > -1;
}
/**
* Trigger an event of the specified type on this stream. Any additional
* arguments to this function are passed as parameters to event listeners.
*
* @param {string} type the event name
*/
;
_proto.trigger = function trigger(type) {
var callbacks = this.listeners[type];
if (!callbacks) {
return;
} // Slicing the arguments on every invocation of this method
// can add a significant amount of overhead. Avoid the
// intermediate object creation for the common case of a
// single callback argument
if (arguments.length === 2) {
var length = callbacks.length;
for (var i = 0; i < length; ++i) {
callbacks[i].call(this, arguments[1]);
}
} else {
var args = Array.prototype.slice.call(arguments, 1);
var _length = callbacks.length;
for (var _i = 0; _i < _length; ++_i) {
callbacks[_i].apply(this, args);
}
}
}
/**
* Destroys the stream and cleans up.
*/
;
_proto.dispose = function dispose() {
this.listeners = {};
}
/**
* Forwards all `data` events on this stream to the destination stream. The
* destination stream should provide a method `push` to receive the data
* events as they arrive.
*
* @param {Stream} destination the stream that will receive all `data` events
* @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
*/
;
_proto.pipe = function pipe(destination) {
this.on('data', function (data) {
destination.push(data);
});
};
return Stream;
}();
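// Usage recap for the Stream above (illustrative):
//   var s = new Stream();
//   s.on('data', function (chunk) { /* consume chunk */ });
//   s.trigger('data', chunk); // hits the single-argument fast path in trigger()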
/*! @name pkcs7 @version 1.0.4 @license Apache-2.0 */
/**
* Returns the subarray of a Uint8Array without PKCS#7 padding.
*
* @param {Uint8Array} padded unencrypted bytes that have been padded
* @return {Uint8Array} the unpadded bytes
* @see http://tools.ietf.org/html/rfc5652
*/
function unpad(padded) {
return padded.subarray(0, padded.byteLength - padded[padded.byteLength - 1]);
}
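// Worked example: a 13-byte plaintext fills a 16-byte block with three 0x03
// padding bytes, so padded[padded.byteLength - 1] === 3 and unpad() returns
// the leading 13 bytes.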
/*! @name aes-decrypter @version 3.1.2 @license Apache-2.0 */
/**
* @file aes.js
*
* This file contains an adaptation of the AES decryption algorithm
* from the Stanford Javascript Cryptography Library. That work is
* covered by the following copyright and permissions notice:
*
* Copyright 2009-2010 Emily Stark, Mike Hamburg, Dan Boneh.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the authors.
*/
/**
* Expand the S-box tables.
*
* @private
*/
var precompute = function precompute() {
var tables = [[[], [], [], [], []], [[], [], [], [], []]];
var encTable = tables[0];
var decTable = tables[1];
var sbox = encTable[4];
var sboxInv = decTable[4];
var i;
var x;
var xInv;
var d = [];
var th = [];
var x2;
var x4;
var x8;
var s;
var tEnc;
var tDec; // Compute double and third tables
for (i = 0; i < 256; i++) {
th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
}
for (x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
// Compute sbox
s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
s = s >> 8 ^ s & 255 ^ 99;
sbox[x] = s;
sboxInv[s] = x; // Compute MixColumns
x8 = d[x4 = d[x2 = d[x]]];
tDec = x8 * 0x1010101 ^ x4 * 0x10001 ^ x2 * 0x101 ^ x * 0x1010100;
tEnc = d[s] * 0x101 ^ s * 0x1010100;
for (i = 0; i < 4; i++) {
encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
}
} // Compactify. Considerable speedup on Firefox.
for (i = 0; i < 5; i++) {
encTable[i] = encTable[i].slice(0);
decTable[i] = decTable[i].slice(0);
}
return tables;
};
var aesTables = null;
/**
* Schedule out an AES key for both encryption and decryption. This
* is a low-level class. Use a cipher mode to do bulk encryption.
*
* @class AES
* @param {Array} key The key as an array of 4, 6 or 8 words.
*/
var AES = /*#__PURE__*/function () {
function AES(key) {
/**
* The expanded S-box and inverse S-box tables. These will be computed
* on the client so that we don't have to send them down the wire.
*
* There are two tables, _tables[0] is for encryption and
* _tables[1] is for decryption.
*
* The first 4 sub-tables are the expanded S-box with MixColumns. The
* last (_tables[01][4]) is the S-box itself.
*
* @private
*/
// if we have yet to precompute the S-box tables
// do so now
if (!aesTables) {
aesTables = precompute();
} // then make a copy of that object for use
this._tables = [[aesTables[0][0].slice(), aesTables[0][1].slice(), aesTables[0][2].slice(), aesTables[0][3].slice(), aesTables[0][4].slice()], [aesTables[1][0].slice(), aesTables[1][1].slice(), aesTables[1][2].slice(), aesTables[1][3].slice(), aesTables[1][4].slice()]];
var i;
var j;
var tmp;
var sbox = this._tables[0][4];
var decTable = this._tables[1];
var keyLen = key.length;
var rcon = 1;
if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
throw new Error('Invalid aes key size');
}
var encKey = key.slice(0);
var decKey = [];
this._key = [encKey, decKey]; // schedule encryption keys
for (i = keyLen; i < 4 * keyLen + 28; i++) {
tmp = encKey[i - 1]; // apply sbox
if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255]; // shift rows and add rcon
if (i % keyLen === 0) {
tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
rcon = rcon << 1 ^ (rcon >> 7) * 283;
}
}
encKey[i] = encKey[i - keyLen] ^ tmp;
} // schedule decryption keys
for (j = 0; i; j++, i--) {
tmp = encKey[j & 3 ? i : i - 4];
if (i <= 4 || j < 4) {
decKey[j] = tmp;
} else {
decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
}
}
}
/**
* Decrypt 16 bytes, specified as four 32-bit words.
*
* @param {number} encrypted0 the first word to decrypt
* @param {number} encrypted1 the second word to decrypt
* @param {number} encrypted2 the third word to decrypt
* @param {number} encrypted3 the fourth word to decrypt
* @param {Int32Array} out the array to write the decrypted words
* into
* @param {number} offset the offset into the output array to start
* writing results
* @return {Array} The plaintext.
*/
var _proto = AES.prototype;
_proto.decrypt = function decrypt(encrypted0, encrypted1, encrypted2, encrypted3, out, offset) {
var key = this._key[1]; // state variables a,b,c,d are loaded with pre-whitened data
var a = encrypted0 ^ key[0];
var b = encrypted3 ^ key[1];
var c = encrypted2 ^ key[2];
var d = encrypted1 ^ key[3];
var a2;
var b2;
var c2; // key.length === 2 ?
var nInnerRounds = key.length / 4 - 2;
var i;
var kIndex = 4;
var table = this._tables[1]; // load up the tables
var table0 = table[0];
var table1 = table[1];
var table2 = table[2];
var table3 = table[3];
var sbox = table[4]; // Inner rounds. Cribbed from OpenSSL.
for (i = 0; i < nInnerRounds; i++) {
a2 = table0[a >>> 24] ^ table1[b >> 16 & 255] ^ table2[c >> 8 & 255] ^ table3[d & 255] ^ key[kIndex];
b2 = table0[b >>> 24] ^ table1[c >> 16 & 255] ^ table2[d >> 8 & 255] ^ table3[a & 255] ^ key[kIndex + 1];
c2 = table0[c >>> 24] ^ table1[d >> 16 & 255] ^ table2[a >> 8 & 255] ^ table3[b & 255] ^ key[kIndex + 2];
d = table0[d >>> 24] ^ table1[a >> 16 & 255] ^ table2[b >> 8 & 255] ^ table3[c & 255] ^ key[kIndex + 3];
kIndex += 4;
a = a2;
b = b2;
c = c2;
} // Last round.
for (i = 0; i < 4; i++) {
out[(3 & -i) + offset] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
a2 = a;
a = b;
b = c;
c = d;
d = a2;
}
};
return AES;
}();
/**
* A wrapper around the Stream class to use setTimeout
* and run stream "jobs" Asynchronously
*
* @class AsyncStream
* @extends Stream
*/
var AsyncStream = /*#__PURE__*/function (_Stream) {
inheritsLoose(AsyncStream, _Stream);
function AsyncStream() {
var _this;
_this = _Stream.call(this, Stream) || this;
_this.jobs = [];
_this.delay = 1;
_this.timeout_ = null;
return _this;
}
/**
* process an async job
*
* @private
*/
var _proto = AsyncStream.prototype;
_proto.processJob_ = function processJob_() {
this.jobs.shift()();
if (this.jobs.length) {
this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
} else {
this.timeout_ = null;
}
}
/**
* push a job into the stream
*
* @param {Function} job the job to push into the stream
*/
;
_proto.push = function push(job) {
this.jobs.push(job);
if (!this.timeout_) {
this.timeout_ = setTimeout(this.processJob_.bind(this), this.delay);
}
};
return AsyncStream;
}(Stream);
/**
* Convert network-order (big-endian) bytes into their little-endian
* representation.
*/
var ntoh = function ntoh(word) {
return word << 24 | (word & 0xff00) << 8 | (word & 0xff0000) >> 8 | word >>> 24;
};
/**
* Decrypt bytes using AES-128 with CBC and PKCS#7 padding.
*
* @param {Uint8Array} encrypted the encrypted bytes
* @param {Uint32Array} key the bytes of the decryption key
* @param {Uint32Array} initVector the initialization vector (IV) to
* use for the first round of CBC.
* @return {Uint8Array} the decrypted bytes
*
* @see http://en.wikipedia.org/wiki/Advanced_Encryption_Standard
* @see http://en.wikipedia.org/wiki/Block_cipher_mode_of_operation#Cipher_Block_Chaining_.28CBC.29
* @see https://tools.ietf.org/html/rfc2315
*/
var decrypt = function decrypt(encrypted, key, initVector) {
// word-level access to the encrypted bytes
var encrypted32 = new Int32Array(encrypted.buffer, encrypted.byteOffset, encrypted.byteLength >> 2);
var decipher = new AES(Array.prototype.slice.call(key)); // byte and word-level access for the decrypted output
var decrypted = new Uint8Array(encrypted.byteLength);
var decrypted32 = new Int32Array(decrypted.buffer); // temporary variables for working with the IV, encrypted, and
// decrypted data
var init0;
var init1;
var init2;
var init3;
var encrypted0;
var encrypted1;
var encrypted2;
var encrypted3; // iteration variable
var wordIx; // pull out the words of the IV to ensure we don't modify the
// passed-in reference and easier access
init0 = initVector[0];
init1 = initVector[1];
init2 = initVector[2];
init3 = initVector[3]; // decrypt four word sequences, applying cipher-block chaining (CBC)
// to each decrypted block
for (wordIx = 0; wordIx < encrypted32.length; wordIx += 4) {
// convert big-endian (network order) words into little-endian
// (javascript order)
encrypted0 = ntoh(encrypted32[wordIx]);
encrypted1 = ntoh(encrypted32[wordIx + 1]);
encrypted2 = ntoh(encrypted32[wordIx + 2]);
encrypted3 = ntoh(encrypted32[wordIx + 3]); // decrypt the block
decipher.decrypt(encrypted0, encrypted1, encrypted2, encrypted3, decrypted32, wordIx); // XOR with the IV, and restore network byte-order to obtain the
// plaintext
decrypted32[wordIx] = ntoh(decrypted32[wordIx] ^ init0);
decrypted32[wordIx + 1] = ntoh(decrypted32[wordIx + 1] ^ init1);
decrypted32[wordIx + 2] = ntoh(decrypted32[wordIx + 2] ^ init2);
decrypted32[wordIx + 3] = ntoh(decrypted32[wordIx + 3] ^ init3); // setup the IV for the next round
init0 = encrypted0;
init1 = encrypted1;
init2 = encrypted2;
init3 = encrypted3;
}
return decrypted;
};
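// CBC recap for the loop above: with C_0 = IV, each plaintext block is
// P_i = AES_decrypt(C_i) XOR C_(i-1), which is why the still-encrypted words
// are copied into init0..init3 to serve as the IV for the next block.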
/**
* The `Decrypter` class that manages decryption of AES
* data through `AsyncStream` objects and the `decrypt`
* function
*
* @param {Uint8Array} encrypted the encrypted bytes
* @param {Uint32Array} key the bytes of the decryption key
* @param {Uint32Array} initVector the initialization vector (IV) to
* use for the first round of CBC.
* @param {Function} done the function to run when done
* @class Decrypter
*/
var Decrypter = /*#__PURE__*/function () {
function Decrypter(encrypted, key, initVector, done) {
var step = Decrypter.STEP;
var encrypted32 = new Int32Array(encrypted.buffer);
var decrypted = new Uint8Array(encrypted.byteLength);
var i = 0;
this.asyncStream_ = new AsyncStream(); // split up the encryption job and do the individual chunks asynchronously
this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
for (i = step; i < encrypted32.length; i += step) {
initVector = new Uint32Array([ntoh(encrypted32[i - 4]), ntoh(encrypted32[i - 3]), ntoh(encrypted32[i - 2]), ntoh(encrypted32[i - 1])]);
this.asyncStream_.push(this.decryptChunk_(encrypted32.subarray(i, i + step), key, initVector, decrypted));
} // invoke the done() callback when everything is finished
this.asyncStream_.push(function () {
// remove pkcs#7 padding from the decrypted bytes
done(null, unpad(decrypted));
});
}
/**
* A getter for STEP, the maximum number of 32-bit words to process at one time.
*
* @return {number} the value of STEP (32000)
*/
var _proto = Decrypter.prototype;
/**
* @private
*/
_proto.decryptChunk_ = function decryptChunk_(encrypted, key, initVector, decrypted) {
return function () {
var bytes = decrypt(encrypted, key, initVector);
decrypted.set(bytes, encrypted.byteOffset);
};
};
createClass(Decrypter, null, [{
key: "STEP",
get: function get() {
// 4 * 8000;
return 32000;
}
}]);
return Decrypter;
}();
/**
* @file bin-utils.js
*/
/**
* Creates an object for sending to a web worker, modifying properties that are TypedArrays
* into a new object with separated properties for the buffer, byteOffset, and byteLength.
*
* @param {Object} message
* Object of properties and values to send to the web worker
* @return {Object}
* Modified message with TypedArray values expanded
* @function createTransferableMessage
*/
var createTransferableMessage = function createTransferableMessage(message) {
var transferable = {};
Object.keys(message).forEach(function (key) {
var value = message[key];
if (ArrayBuffer.isView(value)) {
transferable[key] = {
bytes: value.buffer,
byteOffset: value.byteOffset,
byteLength: value.byteLength
};
} else {
transferable[key] = value;
}
});
return transferable;
};
/* global self */
/**
* Our web worker interface so that things can talk to aes-decrypter
* that will be running in a web worker. The scope is passed to this by
* webworkify.
*
* @param {Object} self
* the scope for the web worker
*/
var DecrypterWorker = function DecrypterWorker(self) {
self.onmessage = function (event) {
var data = event.data;
var encrypted = new Uint8Array(data.encrypted.bytes, data.encrypted.byteOffset, data.encrypted.byteLength);
var key = new Uint32Array(data.key.bytes, data.key.byteOffset, data.key.byteLength / 4);
var iv = new Uint32Array(data.iv.bytes, data.iv.byteOffset, data.iv.byteLength / 4);
/* eslint-disable no-new, handle-callback-err */
new Decrypter(encrypted, key, iv, function (err, bytes) {
self.postMessage(createTransferableMessage({
source: data.source,
decrypted: bytes
}), [bytes.buffer]);
});
/* eslint-enable */
};
};
var decrypterWorker = new DecrypterWorker(self);
return decrypterWorker;
}());


@@ -0,0 +1,2 @@
// https://www.w3.org/TR/WebIDL-1/#quotaexceedederror
export const QUOTA_EXCEEDED_ERR = 22;
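
A sketch of the kind of check this constant supports when appending to a SourceBuffer; sourceBuffer and segmentBytes are placeholders:

try {
  sourceBuffer.appendBuffer(segmentBytes);
} catch (error) {
  if (error.code === QUOTA_EXCEEDED_ERR) {
    // the buffer is full: trim the back buffer and retry the append later
  } else {
    throw error;
  }
}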

node_modules/@videojs/http-streaming/src/manifest.js generated vendored Normal file

@@ -0,0 +1,322 @@
import videojs from 'video.js';
import window from 'global/window';
import { Parser as M3u8Parser } from 'm3u8-parser';
import { resolveUrl } from './resolve-url';
import { getLastParts } from './playlist.js';
const { log } = videojs;
export const createPlaylistID = (index, uri) => {
return `${index}-${uri}`;
};
/**
* Parses a given m3u8 playlist
*
* @param {Function} [onwarn]
* a function to call when the parser triggers a warning event.
* @param {Function} [oninfo]
* a function to call when the parser triggers an info event.
* @param {string} manifestString
* The downloaded manifest string
* @param {Object[]} [customTagParsers]
* An array of custom tag parsers for the m3u8-parser instance
* @param {Object[]} [customTagMappers]
* An array of custom tag mappers for the m3u8-parser instance
* @param {boolean} [experimentalLLHLS=false]
* Whether to keep ll-hls features in the manifest after parsing.
* @return {Object}
* The manifest object
*/
export const parseManifest = ({
onwarn,
oninfo,
manifestString,
customTagParsers = [],
customTagMappers = [],
experimentalLLHLS
}) => {
const parser = new M3u8Parser();
if (onwarn) {
parser.on('warn', onwarn);
}
if (oninfo) {
parser.on('info', oninfo);
}
customTagParsers.forEach(customParser => parser.addParser(customParser));
customTagMappers.forEach(mapper => parser.addTagMapper(mapper));
parser.push(manifestString);
parser.end();
const manifest = parser.manifest;
// remove llhls features from the parsed manifest
// if we don't want llhls support.
if (!experimentalLLHLS) {
[
'preloadSegment',
'skip',
'serverControl',
'renditionReports',
'partInf',
'partTargetDuration'
].forEach(function(k) {
if (manifest.hasOwnProperty(k)) {
delete manifest[k];
}
});
if (manifest.segments) {
manifest.segments.forEach(function(segment) {
['parts', 'preloadHints'].forEach(function(k) {
if (segment.hasOwnProperty(k)) {
delete segment[k];
}
});
});
}
}
if (!manifest.targetDuration) {
let targetDuration = 10;
if (manifest.segments && manifest.segments.length) {
targetDuration = manifest
.segments.reduce((acc, s) => Math.max(acc, s.duration), 0);
}
if (onwarn) {
onwarn(`manifest has no targetDuration defaulting to ${targetDuration}`);
}
manifest.targetDuration = targetDuration;
}
const parts = getLastParts(manifest);
if (parts.length && !manifest.partTargetDuration) {
const partTargetDuration = parts.reduce((acc, p) => Math.max(acc, p.duration), 0);
if (onwarn) {
onwarn(`manifest has no partTargetDuration defaulting to ${partTargetDuration}`);
log.error('LL-HLS manifest has parts but lacks required #EXT-X-PART-INF:PART-TARGET value. See https://datatracker.ietf.org/doc/html/draft-pantos-hls-rfc8216bis-09#section-4.4.3.7. Playback is not guaranteed.');
}
manifest.partTargetDuration = partTargetDuration;
}
return manifest;
};
/**
* Loops through all supported media groups in master and calls the provided
* callback for each group
*
* @param {Object} master
* The parsed master manifest object
* @param {Function} callback
* Callback to call for each media group
*/
export const forEachMediaGroup = (master, callback) => {
if (!master.mediaGroups) {
return;
}
['AUDIO', 'SUBTITLES'].forEach((mediaType) => {
if (!master.mediaGroups[mediaType]) {
return;
}
for (const groupKey in master.mediaGroups[mediaType]) {
for (const labelKey in master.mediaGroups[mediaType][groupKey]) {
const mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
callback(mediaProperties, mediaType, groupKey, labelKey);
}
}
});
};
/**
* Adds properties and attributes to the playlist to keep consistent functionality for
* playlists throughout VHS.
*
* @param {Object} config
* Arguments object
* @param {Object} config.playlist
* The media playlist
* @param {string} [config.uri]
* The uri to the media playlist (if media playlist is not from within a master
* playlist)
* @param {string} config.id
* ID to use for the playlist
*/
export const setupMediaPlaylist = ({ playlist, uri, id }) => {
playlist.id = id;
playlist.playlistErrors_ = 0;
if (uri) {
// For media playlists, m3u8-parser does not have access to a URI, as HLS media
// playlists do not contain their own source URI, but one is needed for consistency in
// VHS.
playlist.uri = uri;
}
// For HLS master playlists, even though certain attributes MUST be defined, the
// stream may still be played without them.
// For HLS media playlists, m3u8-parser does not attach an attributes object to the
// manifest.
//
// To avoid undefined reference errors through the project, and make the code easier
// to write/read, add an empty attributes object for these cases.
playlist.attributes = playlist.attributes || {};
};
/**
* Adds ID, resolvedUri, and attributes properties to each playlist of the master, where
* necessary. In addition, creates playlist IDs for each playlist and adds playlist ID to
* playlist references to the playlists array.
*
* @param {Object} master
* The master playlist
*/
export const setupMediaPlaylists = (master) => {
let i = master.playlists.length;
while (i--) {
const playlist = master.playlists[i];
setupMediaPlaylist({
playlist,
id: createPlaylistID(i, playlist.uri)
});
playlist.resolvedUri = resolveUrl(master.uri, playlist.uri);
master.playlists[playlist.id] = playlist;
// URI reference added for backwards compatibility
master.playlists[playlist.uri] = playlist;
// Although the spec states an #EXT-X-STREAM-INF tag MUST have a BANDWIDTH attribute,
// the stream can be played without it. Although an attributes property may have been
// added to the playlist to prevent undefined references, issue a warning to fix the
// manifest.
if (!playlist.attributes.BANDWIDTH) {
log.warn('Invalid playlist STREAM-INF detected. Missing BANDWIDTH attribute.');
}
}
};
/**
* Adds resolvedUri properties to each media group.
*
* @param {Object} master
* The master playlist
*/
export const resolveMediaGroupUris = (master) => {
forEachMediaGroup(master, (properties) => {
if (properties.uri) {
properties.resolvedUri = resolveUrl(master.uri, properties.uri);
}
});
};
/**
* Creates a master playlist wrapper to insert a sole media playlist into.
*
* @param {Object} media
* Media playlist
* @param {string} uri
* The media URI
*
* @return {Object}
* Master playlist
*/
export const masterForMedia = (media, uri) => {
const id = createPlaylistID(0, uri);
const master = {
mediaGroups: {
'AUDIO': {},
'VIDEO': {},
'CLOSED-CAPTIONS': {},
'SUBTITLES': {}
},
uri: window.location.href,
resolvedUri: window.location.href,
playlists: [{
uri,
id,
resolvedUri: uri,
// m3u8-parser does not attach an attributes property to media playlists so make
// sure that the property is attached to avoid undefined reference errors
attributes: {}
}]
};
// set up ID reference
master.playlists[id] = master.playlists[0];
// URI reference added for backwards compatibility
master.playlists[uri] = master.playlists[0];
return master;
};
/**
* Does an in-place update of the master manifest to add updated playlist URI references
* as well as other properties needed by VHS that aren't included by the parser.
*
* @param {Object} master
* Master manifest object
* @param {string} uri
* The source URI
*/
export const addPropertiesToMaster = (master, uri) => {
master.uri = uri;
for (let i = 0; i < master.playlists.length; i++) {
if (!master.playlists[i].uri) {
// Set up phony URIs for the playlists since playlists are referenced by their URIs
// throughout VHS, but some formats (e.g., DASH) don't have external URIs
// TODO: consider adding dummy URIs in mpd-parser
const phonyUri = `placeholder-uri-${i}`;
master.playlists[i].uri = phonyUri;
}
}
forEachMediaGroup(master, (properties, mediaType, groupKey, labelKey) => {
const groupId = `placeholder-uri-${mediaType}-${groupKey}-${labelKey}`;
if (!properties.playlists || !properties.playlists.length) {
properties.playlists = [Object.assign({}, properties)];
}
properties.playlists.forEach(function(p, i) {
const id = createPlaylistID(i, groupId);
if (p.uri) {
p.resolvedUri = p.resolvedUri || resolveUrl(master.uri, p.uri);
} else {
// DEPRECATED, this has been added to prevent a breaking change.
// previously we only ever had a single media group playlist, so
// we mark the first playlist uri without prepending the index as we used to
// ideally we would do all of the playlists the same way.
p.uri = i === 0 ? groupId : id;
// don't resolve a placeholder uri to an absolute url, just use
// the placeholder again
p.resolvedUri = p.uri;
}
p.id = p.id || id;
// add an empty attributes object, all playlists are
// expected to have this.
p.attributes = p.attributes || {};
// setup ID and URI references (URI for backwards compatibility)
master.playlists[p.id] = p;
master.playlists[p.uri] = p;
});
});
setupMediaPlaylists(master);
resolveMediaGroupUris(master);
};


@@ -0,0 +1,964 @@
import videojs from 'video.js';
import PlaylistLoader from './playlist-loader';
import DashPlaylistLoader from './dash-playlist-loader';
import noop from './util/noop';
import {isAudioOnly, playlistMatch} from './playlist.js';
import logger from './util/logger';
/**
* Convert the properties of an HLS track into an audioTrackKind.
*
* @private
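*
* @example
* // A sketch of the mapping (hypothetical property objects):
* // audioTrackKind_({ default: true }); // => 'main'
* // audioTrackKind_({ default: false }); // => 'alternative'
* // audioTrackKind_({
* //   default: true,
* //   characteristics: 'public.accessibility.describes-video'
* // }); // => 'main-desc'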
*/
const audioTrackKind_ = (properties) => {
let kind = properties.default ? 'main' : 'alternative';
if (properties.characteristics &&
properties.characteristics.indexOf('public.accessibility.describes-video') >= 0) {
kind = 'main-desc';
}
return kind;
};
/**
* Pause provided segment loader and playlist loader if active
*
* @param {SegmentLoader} segmentLoader
* SegmentLoader to pause
* @param {Object} mediaType
* Active media type
* @function stopLoaders
*/
export const stopLoaders = (segmentLoader, mediaType) => {
segmentLoader.abort();
segmentLoader.pause();
if (mediaType && mediaType.activePlaylistLoader) {
mediaType.activePlaylistLoader.pause();
mediaType.activePlaylistLoader = null;
}
};
/**
* Start loading provided segment loader and playlist loader
*
* @param {PlaylistLoader} playlistLoader
* PlaylistLoader to start loading
* @param {Object} mediaType
* Active media type
* @function startLoaders
*/
export const startLoaders = (playlistLoader, mediaType) => {
// Segment loader will be started after `loadedmetadata` or `loadedplaylist` from the
// playlist loader
mediaType.activePlaylistLoader = playlistLoader;
playlistLoader.load();
};
/**
* Returns a function to be called when the media group changes. It performs a
* non-destructive (preserve the buffer) resync of the SegmentLoader. This is because a
* change of group is merely a rendition switch of the same content at another encoding,
* rather than a change of content, such as switching audio from English to Spanish.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Handler for a non-destructive resync of SegmentLoader when the active media
* group changes.
* @function onGroupChanged
*/
export const onGroupChanged = (type, settings) => () => {
const {
segmentLoaders: {
[type]: segmentLoader,
main: mainSegmentLoader
},
mediaTypes: { [type]: mediaType }
} = settings;
const activeTrack = mediaType.activeTrack();
const activeGroup = mediaType.getActiveGroup();
const previousActiveLoader = mediaType.activePlaylistLoader;
const lastGroup = mediaType.lastGroup_;
// the group did not change, do nothing
if (activeGroup && lastGroup && activeGroup.id === lastGroup.id) {
return;
}
mediaType.lastGroup_ = activeGroup;
mediaType.lastTrack_ = activeTrack;
stopLoaders(segmentLoader, mediaType);
if (!activeGroup || activeGroup.isMasterPlaylist) {
// there is no active group, or the active group is a main playlist and won't change
return;
}
if (!activeGroup.playlistLoader) {
if (previousActiveLoader) {
// The previous group had a playlist loader but the new active group does not
// this means we are switching from demuxed to muxed audio. In this case we want to
// do a destructive reset of the main segment loader and not restart the audio
// loaders.
mainSegmentLoader.resetEverything();
}
return;
}
// Non-destructive resync
segmentLoader.resyncLoader();
startLoaders(activeGroup.playlistLoader, mediaType);
};
export const onGroupChanging = (type, settings) => () => {
const {
segmentLoaders: {
[type]: segmentLoader
},
mediaTypes: { [type]: mediaType }
} = settings;
mediaType.lastGroup_ = null;
segmentLoader.abort();
segmentLoader.pause();
};
/**
* Returns a function to be called when the media track changes. It performs a
* destructive reset of the SegmentLoader to ensure we start loading as close to
* currentTime as possible.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Handler for a destructive reset of SegmentLoader when the active media
* track changes.
* @function onTrackChanged
*/
export const onTrackChanged = (type, settings) => () => {
const {
masterPlaylistLoader,
segmentLoaders: {
[type]: segmentLoader,
main: mainSegmentLoader
},
mediaTypes: { [type]: mediaType }
} = settings;
const activeTrack = mediaType.activeTrack();
const activeGroup = mediaType.getActiveGroup();
const previousActiveLoader = mediaType.activePlaylistLoader;
const lastTrack = mediaType.lastTrack_;
// track did not change, do nothing
if (lastTrack && activeTrack && lastTrack.id === activeTrack.id) {
return;
}
mediaType.lastGroup_ = activeGroup;
mediaType.lastTrack_ = activeTrack;
stopLoaders(segmentLoader, mediaType);
if (!activeGroup) {
// there is no group active so we do not want to restart loaders
return;
}
if (activeGroup.isMasterPlaylist) {
// track did not change, do nothing
if (!activeTrack || !lastTrack || activeTrack.id === lastTrack.id) {
return;
}
const mpc = settings.vhs.masterPlaylistController_;
const newPlaylist = mpc.selectPlaylist();
// media will not change, do nothing
if (mpc.media() === newPlaylist) {
return;
}
mediaType.logger_(`track change. Switching master audio from ${lastTrack.id} to ${activeTrack.id}`);
masterPlaylistLoader.pause();
mainSegmentLoader.resetEverything();
mpc.fastQualityChange_(newPlaylist);
return;
}
if (type === 'AUDIO') {
if (!activeGroup.playlistLoader) {
// when switching from demuxed audio/video to muxed audio/video (noted by no
// playlist loader for the audio group), we want to do a destructive reset of the
// main segment loader and not restart the audio loaders
mainSegmentLoader.setAudio(true);
// don't have to worry about disabling the audio of the audio segment loader since
// it should be stopped
mainSegmentLoader.resetEverything();
return;
}
// although the segment loader is an audio segment loader, call the setAudio
// function to ensure it is prepared to re-append the init segment (or handle other
// config changes)
segmentLoader.setAudio(true);
mainSegmentLoader.setAudio(false);
}
if (previousActiveLoader === activeGroup.playlistLoader) {
// Nothing has actually changed. This can happen because track change events can fire
// multiple times for a "single" change. One for enabling the new active track, and
// one for disabling the track that was active
startLoaders(activeGroup.playlistLoader, mediaType);
return;
}
if (segmentLoader.track) {
// For WebVTT, set the new text track in the segmentloader
segmentLoader.track(activeTrack);
}
// destructive reset
segmentLoader.resetEverything();
startLoaders(activeGroup.playlistLoader, mediaType);
};
export const onError = {
/**
* Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
* an error.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Error handler. Logs warning (or error if the playlist is blacklisted) to
* console and switches back to default audio track.
* @function onError.AUDIO
*/
AUDIO: (type, settings) => () => {
const {
segmentLoaders: { [type]: segmentLoader},
mediaTypes: { [type]: mediaType },
blacklistCurrentPlaylist
} = settings;
stopLoaders(segmentLoader, mediaType);
// switch back to default audio track
const activeTrack = mediaType.activeTrack();
const activeGroup = mediaType.activeGroup();
const id = (activeGroup.filter(group => group.default)[0] || activeGroup[0]).id;
const defaultTrack = mediaType.tracks[id];
if (activeTrack === defaultTrack) {
// Default track encountered an error. All we can do now is blacklist the current
// rendition and hope another will switch audio groups
blacklistCurrentPlaylist({
message: 'Problem encountered loading the default audio track.'
});
return;
}
videojs.log.warn('Problem encountered loading the alternate audio track. ' +
'Switching back to default.');
for (const trackId in mediaType.tracks) {
mediaType.tracks[trackId].enabled = mediaType.tracks[trackId] === defaultTrack;
}
mediaType.onTrackChanged();
},
/**
* Returns a function to be called when a SegmentLoader or PlaylistLoader encounters
* an error.
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Error handler. Logs warning to console and disables the active subtitle track
* @function onError.SUBTITLES
*/
SUBTITLES: (type, settings) => () => {
const {
segmentLoaders: { [type]: segmentLoader},
mediaTypes: { [type]: mediaType }
} = settings;
videojs.log.warn('Problem encountered loading the subtitle track. ' +
'Disabling subtitle track.');
stopLoaders(segmentLoader, mediaType);
const track = mediaType.activeTrack();
if (track) {
track.mode = 'disabled';
}
mediaType.onTrackChanged();
}
};
export const setupListeners = {
/**
* Setup event listeners for audio playlist loader
*
* @param {string} type
* MediaGroup type
* @param {PlaylistLoader|null} playlistLoader
* PlaylistLoader to register listeners on
* @param {Object} settings
* Object containing required information for media groups
* @function setupListeners.AUDIO
*/
AUDIO: (type, playlistLoader, settings) => {
if (!playlistLoader) {
// no playlist loader means audio will be muxed with the video
return;
}
const {
tech,
requestOptions,
segmentLoaders: { [type]: segmentLoader }
} = settings;
playlistLoader.on('loadedmetadata', () => {
const media = playlistLoader.media();
segmentLoader.playlist(media, requestOptions);
// if the video is already playing, or if this isn't a live video and preload
// permits, start downloading segments
if (!tech.paused() || (media.endList && tech.preload() !== 'none')) {
segmentLoader.load();
}
});
playlistLoader.on('loadedplaylist', () => {
segmentLoader.playlist(playlistLoader.media(), requestOptions);
// If the player isn't paused, ensure that the segment loader is running
if (!tech.paused()) {
segmentLoader.load();
}
});
playlistLoader.on('error', onError[type](type, settings));
},
/**
* Setup event listeners for subtitle playlist loader
*
* @param {string} type
* MediaGroup type
* @param {PlaylistLoader|null} playlistLoader
* PlaylistLoader to register listeners on
* @param {Object} settings
* Object containing required information for media groups
* @function setupListeners.SUBTITLES
*/
SUBTITLES: (type, playlistLoader, settings) => {
const {
tech,
requestOptions,
segmentLoaders: { [type]: segmentLoader },
mediaTypes: { [type]: mediaType }
} = settings;
playlistLoader.on('loadedmetadata', () => {
const media = playlistLoader.media();
segmentLoader.playlist(media, requestOptions);
segmentLoader.track(mediaType.activeTrack());
// if the video is already playing, or if this isn't a live video and preload
// permits, start downloading segments
if (!tech.paused() || (media.endList && tech.preload() !== 'none')) {
segmentLoader.load();
}
});
playlistLoader.on('loadedplaylist', () => {
segmentLoader.playlist(playlistLoader.media(), requestOptions);
// If the player isn't paused, ensure that the segment loader is running
if (!tech.paused()) {
segmentLoader.load();
}
});
playlistLoader.on('error', onError[type](type, settings));
}
};
export const initialize = {
/**
* Setup PlaylistLoaders and AudioTracks for the audio groups
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @function initialize.AUDIO
*/
'AUDIO': (type, settings) => {
const {
vhs,
sourceType,
segmentLoaders: { [type]: segmentLoader },
requestOptions,
master: {mediaGroups},
mediaTypes: {
[type]: {
groups,
tracks,
logger_
}
},
masterPlaylistLoader
} = settings;
const audioOnlyMaster = isAudioOnly(masterPlaylistLoader.master);
// force a default if we have none
if (!mediaGroups[type] ||
Object.keys(mediaGroups[type]).length === 0) {
mediaGroups[type] = { main: { default: { default: true } } };
}
for (const groupId in mediaGroups[type]) {
if (!groups[groupId]) {
groups[groupId] = [];
}
for (const variantLabel in mediaGroups[type][groupId]) {
let properties = mediaGroups[type][groupId][variantLabel];
let playlistLoader;
if (audioOnlyMaster) {
logger_(`AUDIO group '${groupId}' label '${variantLabel}' is a master playlist`);
properties.isMasterPlaylist = true;
playlistLoader = null;
// if vhs-json was provided as the source, and the media playlist was resolved,
// use the resolved media playlist object
} else if (sourceType === 'vhs-json' && properties.playlists) {
playlistLoader = new PlaylistLoader(
properties.playlists[0],
vhs,
requestOptions
);
} else if (properties.resolvedUri) {
playlistLoader = new PlaylistLoader(
properties.resolvedUri,
vhs,
requestOptions
);
// TODO: DASH isn't the only source type with properties.playlists;
// should properties.playlists even be part of this check?
} else if (properties.playlists && sourceType === 'dash') {
playlistLoader = new DashPlaylistLoader(
properties.playlists[0],
vhs,
requestOptions,
masterPlaylistLoader
);
} else {
// no resolvedUri means the audio is muxed with the video when using this
// audio track
playlistLoader = null;
}
properties = videojs.mergeOptions(
{ id: variantLabel, playlistLoader },
properties
);
setupListeners[type](type, properties.playlistLoader, settings);
groups[groupId].push(properties);
if (typeof tracks[variantLabel] === 'undefined') {
const track = new videojs.AudioTrack({
id: variantLabel,
kind: audioTrackKind_(properties),
enabled: false,
language: properties.language,
default: properties.default,
label: variantLabel
});
tracks[variantLabel] = track;
}
}
}
// setup single error event handler for the segment loader
segmentLoader.on('error', onError[type](type, settings));
},
/**
* Setup PlaylistLoaders and TextTracks for the subtitle groups
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @function initialize.SUBTITLES
*/
'SUBTITLES': (type, settings) => {
const {
tech,
vhs,
sourceType,
segmentLoaders: { [type]: segmentLoader },
requestOptions,
master: { mediaGroups },
mediaTypes: {
[type]: {
groups,
tracks
}
},
masterPlaylistLoader
} = settings;
for (const groupId in mediaGroups[type]) {
if (!groups[groupId]) {
groups[groupId] = [];
}
for (const variantLabel in mediaGroups[type][groupId]) {
if (mediaGroups[type][groupId][variantLabel].forced) {
// Subtitle playlists with the forced attribute are not selectable in Safari.
// According to Apple's HLS Authoring Specification:
// If content has forced subtitles and regular subtitles in a given language,
// the regular subtitles track in that language MUST contain both the forced
// subtitles and the regular subtitles for that language.
// Because of this requirement, and because Safari does not add forced
// subtitles, forced subtitles are skipped here to maintain a consistent
// experience across all platforms
continue;
}
let properties = mediaGroups[type][groupId][variantLabel];
let playlistLoader;
if (sourceType === 'hls') {
playlistLoader =
new PlaylistLoader(properties.resolvedUri, vhs, requestOptions);
} else if (sourceType === 'dash') {
const playlists = properties.playlists.filter((p) => p.excludeUntil !== Infinity);
if (!playlists.length) {
return;
}
playlistLoader = new DashPlaylistLoader(
properties.playlists[0],
vhs,
requestOptions,
masterPlaylistLoader
);
} else if (sourceType === 'vhs-json') {
playlistLoader = new PlaylistLoader(
// if the vhs-json object included the media playlist, use the media playlist
// as provided, otherwise use the resolved URI to load the playlist
properties.playlists ? properties.playlists[0] : properties.resolvedUri,
vhs,
requestOptions
);
}
properties = videojs.mergeOptions({
id: variantLabel,
playlistLoader
}, properties);
setupListeners[type](type, properties.playlistLoader, settings);
groups[groupId].push(properties);
if (typeof tracks[variantLabel] === 'undefined') {
const track = tech.addRemoteTextTrack({
id: variantLabel,
kind: 'subtitles',
default: properties.default && properties.autoselect,
language: properties.language,
label: variantLabel
}, false).track;
tracks[variantLabel] = track;
}
}
}
// setup single error event handler for the segment loader
segmentLoader.on('error', onError[type](type, settings));
},
/**
* Setup TextTracks for the closed-caption groups
*
* @param {String} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @function initialize['CLOSED-CAPTIONS']
*/
'CLOSED-CAPTIONS': (type, settings) => {
const {
tech,
master: { mediaGroups },
mediaTypes: {
[type]: {
groups,
tracks
}
}
} = settings;
for (const groupId in mediaGroups[type]) {
if (!groups[groupId]) {
groups[groupId] = [];
}
for (const variantLabel in mediaGroups[type][groupId]) {
const properties = mediaGroups[type][groupId][variantLabel];
// Look for either 608 (CCn) or 708 (SERVICEn) caption services
if (!/^(?:CC|SERVICE)/.test(properties.instreamId)) {
continue;
}
const captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
let newProps = {
label: variantLabel,
language: properties.language,
instreamId: properties.instreamId,
default: properties.default && properties.autoselect
};
if (captionServices[newProps.instreamId]) {
newProps = videojs.mergeOptions(newProps, captionServices[newProps.instreamId]);
}
if (newProps.default === undefined) {
delete newProps.default;
}
// No PlaylistLoader is required for Closed-Captions because the captions are
// embedded within the video stream
groups[groupId].push(videojs.mergeOptions({ id: variantLabel }, properties));
if (typeof tracks[variantLabel] === 'undefined') {
const track = tech.addRemoteTextTrack({
id: newProps.instreamId,
kind: 'captions',
default: newProps.default,
language: newProps.language,
label: newProps.label
}, false).track;
tracks[variantLabel] = track;
}
}
}
}
};
const groupMatch = (list, media) => {
for (let i = 0; i < list.length; i++) {
if (playlistMatch(media, list[i])) {
return true;
}
if (list[i].playlists && groupMatch(list[i].playlists, media)) {
return true;
}
}
return false;
};
/**
* Returns a function used to get the active group of the provided type
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Function that returns the active media group for the provided type. Takes an
* optional parameter {TextTrack} track. If no track is provided, a list of all
* variants in the group, otherwise the variant corresponding to the provided
* track is returned.
* @function activeGroup
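*
* @example
* // A minimal sketch of usage (hypothetical settings and track objects):
* // const getAudioGroup = activeGroup('AUDIO', settings);
* // getAudioGroup();           // => list of variants in the active group
* // getAudioGroup(audioTrack); // => the variant matching audioTrack.id, or null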
*/
export const activeGroup = (type, settings) => (track) => {
const {
masterPlaylistLoader,
mediaTypes: { [type]: { groups } }
} = settings;
const media = masterPlaylistLoader.media();
if (!media) {
return null;
}
let variants = null;
// set variants to the active group of the main media
if (media.attributes[type]) {
variants = groups[media.attributes[type]];
}
const groupKeys = Object.keys(groups);
if (!variants) {
// find the masterPlaylistLoader media
// that is in a media group if we are dealing
// with audio only
if (type === 'AUDIO' && groupKeys.length > 1 && isAudioOnly(settings.master)) {
for (let i = 0; i < groupKeys.length; i++) {
const groupPropertyList = groups[groupKeys[i]];
if (groupMatch(groupPropertyList, media)) {
variants = groupPropertyList;
break;
}
}
// use the main group if it exists
} else if (groups.main) {
variants = groups.main;
// only one group, use that one
} else if (groupKeys.length === 1) {
variants = groups[groupKeys[0]];
}
}
if (typeof track === 'undefined') {
return variants;
}
if (track === null || !variants) {
// An active track was specified so a corresponding group is expected. track === null
// means no track is currently active so there is no corresponding group
return null;
}
return variants.filter((props) => props.id === track.id)[0] || null;
};
export const activeTrack = {
/**
* Returns a function used to get the active track of type provided
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Function that returns the active media track for the provided type. Returns
* null if no track is active
* @function activeTrack.AUDIO
*/
AUDIO: (type, settings) => () => {
const { mediaTypes: { [type]: { tracks } } } = settings;
for (const id in tracks) {
if (tracks[id].enabled) {
return tracks[id];
}
}
return null;
},
/**
* Returns a function used to get the active track of type provided
*
* @param {string} type
* MediaGroup type
* @param {Object} settings
* Object containing required information for media groups
* @return {Function}
* Function that returns the active media track for the provided type. Returns
* null if no track is active
* @function activeTrack.SUBTITLES
*/
SUBTITLES: (type, settings) => () => {
const { mediaTypes: { [type]: { tracks } } } = settings;
for (const id in tracks) {
if (tracks[id].mode === 'showing' || tracks[id].mode === 'hidden') {
return tracks[id];
}
}
return null;
}
};
export const getActiveGroup = (type, {mediaTypes}) => () => {
const activeTrack_ = mediaTypes[type].activeTrack();
if (!activeTrack_) {
return null;
}
return mediaTypes[type].activeGroup(activeTrack_);
};
/**
* Setup PlaylistLoaders and Tracks for media groups (Audio, Subtitles,
* Closed-Captions) specified in the master manifest.
*
* @param {Object} settings
* Object containing required information for setting up the media groups
* @param {Tech} settings.tech
* The tech of the player
* @param {Object} settings.requestOptions
* XHR request options used by the segment loaders
* @param {PlaylistLoader} settings.masterPlaylistLoader
* PlaylistLoader for the master source
* @param {VhsHandler} settings.vhs
* VHS SourceHandler
* @param {Object} settings.master
* The parsed master manifest
* @param {Object} settings.mediaTypes
* Object to store the loaders, tracks, and utility methods for each media type
* @param {Function} settings.blacklistCurrentPlaylist
* Blacklists the current rendition and forces a rendition switch.
* @function setupMediaGroups
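*
* @example
* // A minimal sketch (hypothetical settings assembled by the master playlist
* // controller; see the parameter list above for the required shape):
* // setupMediaGroups({
* //   tech, requestOptions, masterPlaylistLoader, vhs, master, sourceType,
* //   segmentLoaders, mediaTypes: createMediaTypes(), blacklistCurrentPlaylist
* // });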
*/
export const setupMediaGroups = (settings) => {
['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach((type) => {
initialize[type](type, settings);
});
const {
mediaTypes,
masterPlaylistLoader,
tech,
vhs,
segmentLoaders: {
['AUDIO']: audioSegmentLoader,
main: mainSegmentLoader
}
} = settings;
// setup active group and track getters and change event handlers
['AUDIO', 'SUBTITLES'].forEach((type) => {
mediaTypes[type].activeGroup = activeGroup(type, settings);
mediaTypes[type].activeTrack = activeTrack[type](type, settings);
mediaTypes[type].onGroupChanged = onGroupChanged(type, settings);
mediaTypes[type].onGroupChanging = onGroupChanging(type, settings);
mediaTypes[type].onTrackChanged = onTrackChanged(type, settings);
mediaTypes[type].getActiveGroup = getActiveGroup(type, settings);
});
// DO NOT enable the default subtitle or caption track.
// DO enable the default audio track
const audioGroup = mediaTypes.AUDIO.activeGroup();
if (audioGroup) {
const groupId = (audioGroup.filter(group => group.default)[0] || audioGroup[0]).id;
mediaTypes.AUDIO.tracks[groupId].enabled = true;
mediaTypes.AUDIO.onGroupChanged();
mediaTypes.AUDIO.onTrackChanged();
const activeAudioGroup = mediaTypes.AUDIO.getActiveGroup();
// a similar check for handling setAudio on each loader is run again each time the
// track is changed, but needs to be handled here since the track may not be considered
// changed on the first call to onTrackChanged
if (!activeAudioGroup.playlistLoader) {
// either audio is muxed with video or the stream is audio only
mainSegmentLoader.setAudio(true);
} else {
// audio is demuxed
mainSegmentLoader.setAudio(false);
audioSegmentLoader.setAudio(true);
}
}
masterPlaylistLoader.on('mediachange', () => {
['AUDIO', 'SUBTITLES'].forEach(type => mediaTypes[type].onGroupChanged());
});
masterPlaylistLoader.on('mediachanging', () => {
['AUDIO', 'SUBTITLES'].forEach(type => mediaTypes[type].onGroupChanging());
});
// custom audio track change event handler for usage event
const onAudioTrackChanged = () => {
mediaTypes.AUDIO.onTrackChanged();
tech.trigger({ type: 'usage', name: 'vhs-audio-change' });
tech.trigger({ type: 'usage', name: 'hls-audio-change' });
};
tech.audioTracks().addEventListener('change', onAudioTrackChanged);
tech.remoteTextTracks().addEventListener(
'change',
mediaTypes.SUBTITLES.onTrackChanged
);
vhs.on('dispose', () => {
tech.audioTracks().removeEventListener('change', onAudioTrackChanged);
tech.remoteTextTracks().removeEventListener(
'change',
mediaTypes.SUBTITLES.onTrackChanged
);
});
// clear existing audio tracks and add the ones we just created
tech.clearTracks('audio');
for (const id in mediaTypes.AUDIO.tracks) {
tech.audioTracks().addTrack(mediaTypes.AUDIO.tracks[id]);
}
};
/**
* Creates skeleton object used to store the loaders, tracks, and utility methods for each
* media type
*
* @return {Object}
* Object to store the loaders, tracks, and utility methods for each media type
* @function createMediaTypes
*/
export const createMediaTypes = () => {
const mediaTypes = {};
['AUDIO', 'SUBTITLES', 'CLOSED-CAPTIONS'].forEach((type) => {
mediaTypes[type] = {
groups: {},
tracks: {},
activePlaylistLoader: null,
activeGroup: noop,
activeTrack: noop,
getActiveGroup: noop,
onGroupChanged: noop,
onTrackChanged: noop,
lastTrack_: null,
logger_: logger(`MediaGroups[${type}]`)
};
});
return mediaTypes;
};


@@ -0,0 +1,641 @@
/**
* @file playback-watcher.js
*
* Playback starts, and now my watch begins. It shall not end until my death. I shall
* take no wait, hold no uncleared timeouts, father no bad seeks. I shall wear no crowns
* and win no glory. I shall live and die at my post. I am the corrector of the underflow.
* I am the watcher of gaps. I am the shield that guards the realms of seekable. I pledge
* my life and honor to the Playback Watch, for this Player and all the Players to come.
*/
import window from 'global/window';
import * as Ranges from './ranges';
import logger from './util/logger';
// Set of events that reset the playback-watcher time check logic and clear the timeout
const timerCancelEvents = [
'seeking',
'seeked',
'pause',
'playing',
'error'
];
/**
* Returns whether or not the current time should be considered close to buffered content,
* taking into consideration whether there's enough buffered content for proper playback.
*
* @param {Object} options
* Options object
* @param {TimeRange} options.buffered
* Current buffer
* @param {number} options.targetDuration
* The active playlist's target duration
* @param {number} options.currentTime
* The current time of the player
* @return {boolean}
* Whether the current time should be considered close to the buffer
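*
* @example
* // A minimal sketch (hypothetical numbers, using video.js time ranges):
* // closeToBufferedContent({
* //   buffered: videojs.createTimeRanges([[13, 28]]),
* //   targetDuration: 6,
* //   currentTime: 10
* // }); // => true: 15s is buffered, starting less than one target duration ahead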
*/
export const closeToBufferedContent = ({ buffered, targetDuration, currentTime }) => {
if (!buffered.length) {
return false;
}
// At least two to three segments worth of content should be buffered before there's a
// full enough buffer to consider taking any actions.
if (buffered.end(0) - buffered.start(0) < targetDuration * 2) {
return false;
}
// It's possible that, on seek, a remove hasn't completed and the buffered range is
// somewhere past the current time. In that event, don't consider the buffered content
// close.
if (currentTime > buffered.start(0)) {
return false;
}
// Since target duration generally represents the max (or close to max) duration of a
// segment, if the buffer is within a segment of the current time, the gap probably
// won't be closed, and current time should be considered close to buffered content.
return buffered.start(0) - currentTime < targetDuration;
};
/**
* @class PlaybackWatcher
*/
export default class PlaybackWatcher {
/**
* Represents a PlaybackWatcher object.
*
* @class
* @param {Object} options an object that includes the tech and settings
*/
constructor(options) {
this.masterPlaylistController_ = options.masterPlaylistController;
this.tech_ = options.tech;
this.seekable = options.seekable;
this.allowSeeksWithinUnsafeLiveWindow = options.allowSeeksWithinUnsafeLiveWindow;
this.liveRangeSafeTimeDelta = options.liveRangeSafeTimeDelta;
this.media = options.media;
this.consecutiveUpdates = 0;
this.lastRecordedTime = null;
this.timer_ = null;
this.checkCurrentTimeTimeout_ = null;
this.logger_ = logger('PlaybackWatcher');
this.logger_('initialize');
const playHandler = () => this.monitorCurrentTime_();
const canPlayHandler = () => this.monitorCurrentTime_();
const waitingHandler = () => this.techWaiting_();
const cancelTimerHandler = () => this.cancelTimer_();
const fixesBadSeeksHandler = () => this.fixesBadSeeks_();
const mpc = this.masterPlaylistController_;
const loaderTypes = ['main', 'subtitle', 'audio'];
const loaderChecks = {};
loaderTypes.forEach((type) => {
loaderChecks[type] = {
reset: () => this.resetSegmentDownloads_(type),
updateend: () => this.checkSegmentDownloads_(type)
};
mpc[`${type}SegmentLoader_`].on('appendsdone', loaderChecks[type].updateend);
// If a rendition switch happens during a playback stall where the buffer
// isn't changing, we want to reset. We cannot assume that the new rendition
// will also be stalled until we see new appends.
mpc[`${type}SegmentLoader_`].on('playlistupdate', loaderChecks[type].reset);
// Playback stalls should not be detected right after seeking.
// This prevents one-segment playlists (single vtt or single segment content)
// from being detected as stalling, since the buffer will not change in those
// cases (it already spans the entire video duration).
this.tech_.on(['seeked', 'seeking'], loaderChecks[type].reset);
});
this.tech_.on('seekablechanged', fixesBadSeeksHandler);
this.tech_.on('waiting', waitingHandler);
this.tech_.on(timerCancelEvents, cancelTimerHandler);
this.tech_.on('canplay', canPlayHandler);
/*
An edge case exists that results in gaps not being skipped when they exist at the beginning of a stream. This case
is surfaced in one of two ways:
1) The `waiting` event is fired before the player has buffered content, making it impossible
to find or skip the gap. The `waiting` event is followed by a `play` event. On first play
we can check if playback is stalled due to a gap, and skip the gap if necessary.
2) A source with a gap at the beginning of the stream is loaded programmatically while the player
is in a playing state. To catch this case, it's important that our one-time play listener is set up
even if the player is in a playing state
*/
this.tech_.one('play', playHandler);
// Define the dispose function to clean up our events
this.dispose = () => {
this.logger_('dispose');
this.tech_.off('seekablechanged', fixesBadSeeksHandler);
this.tech_.off('waiting', waitingHandler);
this.tech_.off(timerCancelEvents, cancelTimerHandler);
this.tech_.off('canplay', canPlayHandler);
this.tech_.off('play', playHandler);
loaderTypes.forEach((type) => {
mpc[`${type}SegmentLoader_`].off('appendsdone', loaderChecks[type].updateend);
mpc[`${type}SegmentLoader_`].off('playlistupdate', loaderChecks[type].reset);
this.tech_.off(['seeked', 'seeking'], loaderChecks[type].reset);
});
if (this.checkCurrentTimeTimeout_) {
window.clearTimeout(this.checkCurrentTimeTimeout_);
}
this.cancelTimer_();
};
}
/**
* Periodically check current time to see if playback stopped
*
* @private
*/
monitorCurrentTime_() {
this.checkCurrentTime_();
if (this.checkCurrentTimeTimeout_) {
window.clearTimeout(this.checkCurrentTimeTimeout_);
}
// poll every 250ms, the interval WebKit uses (FF uses 15ms; 42ms would be 24fps)
this.checkCurrentTimeTimeout_ =
window.setTimeout(this.monitorCurrentTime_.bind(this), 250);
}
/**
* Reset stalled download stats for a specific type of loader
*
* @param {string} type
* The segment loader type to check.
*
* @listens SegmentLoader#playlistupdate
* @listens Tech#seeking
* @listens Tech#seeked
*/
resetSegmentDownloads_(type) {
const loader = this.masterPlaylistController_[`${type}SegmentLoader_`];
if (this[`${type}StalledDownloads_`] > 0) {
this.logger_(`resetting possible stalled download count for ${type} loader`);
}
this[`${type}StalledDownloads_`] = 0;
this[`${type}Buffered_`] = loader.buffered_();
}
/**
* Checks on every segment `appendsdone` event to see
* if segment appends are making progress. If they are not,
* and we are still downloading bytes, we blacklist the playlist.
*
* @param {string} type
* The segment loader type to check.
*
* @listens SegmentLoader#appendsdone
*/
checkSegmentDownloads_(type) {
const mpc = this.masterPlaylistController_;
const loader = mpc[`${type}SegmentLoader_`];
const buffered = loader.buffered_();
const isBufferedDifferent = Ranges.isRangeDifferent(this[`${type}Buffered_`], buffered);
this[`${type}Buffered_`] = buffered;
// if another watcher is going to fix the issue or
// the buffered value for this loader changed,
// then appends are working
if (isBufferedDifferent) {
this.resetSegmentDownloads_(type);
return;
}
this[`${type}StalledDownloads_`]++;
this.logger_(`found #${this[`${type}StalledDownloads_`]} ${type} appends that did not increase buffer (possible stalled download)`, {
playlistId: loader.playlist_ && loader.playlist_.id,
buffered: Ranges.timeRangesToArray(buffered)
});
// after 10 possibly stalled appends with no reset, exclude
if (this[`${type}StalledDownloads_`] < 10) {
return;
}
this.logger_(`${type} loader stalled download exclusion`);
this.resetSegmentDownloads_(type);
this.tech_.trigger({type: 'usage', name: `vhs-${type}-download-exclusion`});
if (type === 'subtitle') {
return;
}
// TODO: should we exclude audio tracks rather than main tracks
// when type is audio?
mpc.blacklistCurrentPlaylist({
message: `Excessive ${type} segment downloading detected.`
}, Infinity);
}
/**
* The purpose of this function is to emulate the "waiting" event on
* browsers that do not emit it when they are waiting for more
* data to continue playback
*
* @private
*/
checkCurrentTime_() {
if (this.tech_.seeking() && this.fixesBadSeeks_()) {
this.consecutiveUpdates = 0;
this.lastRecordedTime = this.tech_.currentTime();
return;
}
if (this.tech_.paused() || this.tech_.seeking()) {
return;
}
const currentTime = this.tech_.currentTime();
const buffered = this.tech_.buffered();
if (this.lastRecordedTime === currentTime &&
(!buffered.length ||
currentTime + Ranges.SAFE_TIME_DELTA >= buffered.end(buffered.length - 1))) {
// If current time is at the end of the final buffered region, then any playback
// stall is most likely caused by buffering in a low bandwidth environment. The tech
// should fire a `waiting` event in this scenario, but due to browser and tech
// inconsistencies it may not. Calling `techWaiting_` here allows us to simulate
// responding to a native `waiting` event when the tech fails to emit one.
return this.techWaiting_();
}
if (this.consecutiveUpdates >= 5 &&
currentTime === this.lastRecordedTime) {
this.consecutiveUpdates++;
this.waiting_();
} else if (currentTime === this.lastRecordedTime) {
this.consecutiveUpdates++;
} else {
this.consecutiveUpdates = 0;
this.lastRecordedTime = currentTime;
}
}
/**
* Cancels any pending timers and resets the 'timeupdate' mechanism
* designed to detect that we are stalled
*
* @private
*/
cancelTimer_() {
this.consecutiveUpdates = 0;
if (this.timer_) {
this.logger_('cancelTimer_');
clearTimeout(this.timer_);
}
this.timer_ = null;
}
/**
* Fixes situations where there's a bad seek
*
* @return {boolean} whether an action was taken to fix the seek
* @private
*/
fixesBadSeeks_() {
const seeking = this.tech_.seeking();
if (!seeking) {
return false;
}
const seekable = this.seekable();
const currentTime = this.tech_.currentTime();
const isAfterSeekableRange = this.afterSeekableWindow_(
seekable,
currentTime,
this.media(),
this.allowSeeksWithinUnsafeLiveWindow
);
let seekTo;
if (isAfterSeekableRange) {
const seekableEnd = seekable.end(seekable.length - 1);
// sync to live point (if VOD, our seekable was updated and we're simply adjusting)
seekTo = seekableEnd;
}
if (this.beforeSeekableWindow_(seekable, currentTime)) {
const seekableStart = seekable.start(0);
// sync to the beginning of the live window
// provide a buffer of .1 seconds to handle rounding/imprecise numbers
seekTo = seekableStart +
// if the playlist is too short and the seekable range is an exact time (can
// happen in live with a 3 segment playlist), then don't use a time delta
(seekableStart === seekable.end(0) ? 0 : Ranges.SAFE_TIME_DELTA);
}
if (typeof seekTo !== 'undefined') {
this.logger_(`Trying to seek outside of seekable at time ${currentTime} with ` +
`seekable range ${Ranges.printableRange(seekable)}. Seeking to ` +
`${seekTo}.`);
this.tech_.setCurrentTime(seekTo);
return true;
}
const buffered = this.tech_.buffered();
if (
closeToBufferedContent({
buffered,
targetDuration: this.media().targetDuration,
currentTime
})
) {
seekTo = buffered.start(0) + Ranges.SAFE_TIME_DELTA;
this.logger_(`Buffered region starts (${buffered.start(0)}) ` +
`just beyond seek point (${currentTime}). Seeking to ${seekTo}.`);
this.tech_.setCurrentTime(seekTo);
return true;
}
return false;
}
/**
* Handler for situations when we determine the player is waiting.
*
* @private
*/
waiting_() {
if (this.techWaiting_()) {
return;
}
// All tech waiting checks failed. Use last resort correction
const currentTime = this.tech_.currentTime();
const buffered = this.tech_.buffered();
const currentRange = Ranges.findRange(buffered, currentTime);
// Sometimes the player can stall for unknown reasons within a contiguous buffered
// region with no indication that anything is amiss (seen in Firefox). Seeking to
// currentTime is usually enough to kickstart the player. This checks that the player
// is currently within a buffered region before attempting a corrective seek.
// Chrome does not appear to continue `timeupdate` events after a `waiting` event
// until there is ~ 3 seconds of forward buffer available. PlaybackWatcher should also
// make sure there is ~3 seconds of forward buffer before taking any corrective action
// to avoid triggering an `unknownwaiting` event when the network is slow.
if (currentRange.length && currentTime + 3 <= currentRange.end(0)) {
this.cancelTimer_();
this.tech_.setCurrentTime(currentTime);
this.logger_(`Stopped at ${currentTime} while inside a buffered region ` +
`[${currentRange.start(0)} -> ${currentRange.end(0)}]. Attempting to resume ` +
'playback by seeking to the current time.');
// unknown waiting corrections may be useful for monitoring QoS
this.tech_.trigger({type: 'usage', name: 'vhs-unknown-waiting'});
this.tech_.trigger({type: 'usage', name: 'hls-unknown-waiting'});
return;
}
}
/**
* Handler for situations when the tech fires a `waiting` event
*
* @return {boolean}
* True if an action was taken (or determined to be unnecessary) to correct the
* waiting. False if no checks passed
* @private
*/
techWaiting_() {
const seekable = this.seekable();
const currentTime = this.tech_.currentTime();
if (this.tech_.seeking() && this.fixesBadSeeks_()) {
// Tech is seeking or bad seek fixed, no action needed
return true;
}
if (this.tech_.seeking() || this.timer_ !== null) {
// Tech is seeking or already waiting on another action, no action needed
return true;
}
if (this.beforeSeekableWindow_(seekable, currentTime)) {
const livePoint = seekable.end(seekable.length - 1);
this.logger_(`Fell out of live window at time ${currentTime}. Seeking to ` +
`live point (seekable end) ${livePoint}`);
this.cancelTimer_();
this.tech_.setCurrentTime(livePoint);
// live window resyncs may be useful for monitoring QoS
this.tech_.trigger({type: 'usage', name: 'vhs-live-resync'});
this.tech_.trigger({type: 'usage', name: 'hls-live-resync'});
return true;
}
const sourceUpdater = this.tech_.vhs.masterPlaylistController_.sourceUpdater_;
const buffered = this.tech_.buffered();
const videoUnderflow = this.videoUnderflow_({
audioBuffered: sourceUpdater.audioBuffered(),
videoBuffered: sourceUpdater.videoBuffered(),
currentTime
});
if (videoUnderflow) {
// Even though the video underflowed and was stuck in a gap, the audio overplayed
// the gap, leading currentTime into a buffered range. Seeking to currentTime
// allows the video to catch up to the audio position without losing any audio
// (only suffering ~3 seconds of frozen video and a pause in audio playback).
this.cancelTimer_();
this.tech_.setCurrentTime(currentTime);
// video underflow may be useful for monitoring QoS
this.tech_.trigger({type: 'usage', name: 'vhs-video-underflow'});
this.tech_.trigger({type: 'usage', name: 'hls-video-underflow'});
return true;
}
const nextRange = Ranges.findNextRange(buffered, currentTime);
// check for gap
if (nextRange.length > 0) {
const difference = nextRange.start(0) - currentTime;
this.logger_(`Stopped at ${currentTime}, setting timer for ${difference}, seeking ` +
`to ${nextRange.start(0)}`);
this.cancelTimer_();
this.timer_ = setTimeout(
this.skipTheGap_.bind(this),
difference * 1000,
currentTime
);
return true;
}
// All checks failed. Returning false to indicate failure to correct waiting
return false;
}
afterSeekableWindow_(seekable, currentTime, playlist, allowSeeksWithinUnsafeLiveWindow = false) {
if (!seekable.length) {
// we can't make a solid case if there's no seekable, default to false
return false;
}
let allowedEnd = seekable.end(seekable.length - 1) + Ranges.SAFE_TIME_DELTA;
const isLive = !playlist.endList;
if (isLive && allowSeeksWithinUnsafeLiveWindow) {
allowedEnd = seekable.end(seekable.length - 1) + (playlist.targetDuration * 3);
}
if (currentTime > allowedEnd) {
return true;
}
return false;
}
beforeSeekableWindow_(seekable, currentTime) {
if (seekable.length &&
// can't fall before 0, and a seekable start of 0 identifies a VOD stream
seekable.start(0) > 0 &&
currentTime < seekable.start(0) - this.liveRangeSafeTimeDelta) {
return true;
}
return false;
}
videoUnderflow_({videoBuffered, audioBuffered, currentTime}) {
// audio only content will not have video underflow :)
if (!videoBuffered) {
return;
}
let gap;
// find a gap in demuxed content.
if (videoBuffered.length && audioBuffered.length) {
// in Chrome audio will continue to play for ~3s when we run out of video
// so we have to check that the video buffer did have some buffer in the
// past.
const lastVideoRange = Ranges.findRange(videoBuffered, currentTime - 3);
const videoRange = Ranges.findRange(videoBuffered, currentTime);
const audioRange = Ranges.findRange(audioBuffered, currentTime);
if (audioRange.length && !videoRange.length && lastVideoRange.length) {
gap = {start: lastVideoRange.end(0), end: audioRange.end(0)};
}
// find a gap in muxed content.
} else {
const nextRange = Ranges.findNextRange(videoBuffered, currentTime);
// Even if there is no available next range, there is still a possibility we are
// stuck in a gap due to video underflow.
if (!nextRange.length) {
gap = this.gapFromVideoUnderflow_(videoBuffered, currentTime);
}
}
if (gap) {
this.logger_(`Encountered a gap in video from ${gap.start} to ${gap.end}. ` +
`Seeking to current time ${currentTime}`);
return true;
}
return false;
}
/**
* Timer callback. If playback still has not proceeded, then we seek
* to the start of the next buffered region.
*
* @private
*/
skipTheGap_(scheduledCurrentTime) {
const buffered = this.tech_.buffered();
const currentTime = this.tech_.currentTime();
const nextRange = Ranges.findNextRange(buffered, currentTime);
this.cancelTimer_();
if (nextRange.length === 0 ||
currentTime !== scheduledCurrentTime) {
return;
}
this.logger_(
'skipTheGap_:',
'currentTime:', currentTime,
'scheduled currentTime:', scheduledCurrentTime,
'nextRange start:', nextRange.start(0)
);
// only seek if we still have not played
this.tech_.setCurrentTime(nextRange.start(0) + Ranges.TIME_FUDGE_FACTOR);
this.tech_.trigger({type: 'usage', name: 'vhs-gap-skip'});
this.tech_.trigger({type: 'usage', name: 'hls-gap-skip'});
}
gapFromVideoUnderflow_(buffered, currentTime) {
// At least in Chrome, if there is a gap in the video buffer, the audio will continue
// playing for ~3 seconds after the video gap starts. This is done to account for
// video buffer underflow/underrun (note that this is not done when there is audio
// buffer underflow/underrun -- in that case the video will stop as soon as it
// encounters the gap, as audio stalls are more noticeable/jarring to a user than
// video stalls). The player's time will reflect the playthrough of audio, so the
// time will appear as if we are in a buffered region, even if we are stuck in a
// "gap."
//
// Example:
// video buffer: 0 => 10.1, 10.2 => 20
// audio buffer: 0 => 20
// overall buffer: 0 => 10.1, 10.2 => 20
// current time: 13
//
// Chrome's video froze at 10 seconds, where the video buffer encountered the gap,
// however, the audio continued playing until it reached ~3 seconds past the gap
// (13 seconds), at which point it stops as well. Since current time is past the
// gap, findNextRange will return no ranges.
//
// To check for this issue, we see if there is a gap that starts somewhere within
// a 3 second range (3 seconds +/- 1 second) back from our current time.
const gaps = Ranges.findGaps(buffered);
for (let i = 0; i < gaps.length; i++) {
const start = gaps.start(i);
const end = gaps.end(i);
// the gap starts between 2 and 4 seconds back from the current time
if (currentTime - start < 4 && currentTime - start > 2) {
return {
start,
end
};
}
}
return null;
}
}


@@ -0,0 +1,862 @@
/**
* @file playlist-loader.js
*
* A state machine that manages the loading, caching, and updating of
* M3U8 playlists.
*
*/
import { resolveUrl, resolveManifestRedirect } from './resolve-url';
import videojs from 'video.js';
import window from 'global/window';
import logger from './util/logger';
import {
parseManifest,
addPropertiesToMaster,
masterForMedia,
setupMediaPlaylist,
forEachMediaGroup
} from './manifest';
import {getKnownPartCount} from './playlist.js';
const { mergeOptions, EventTarget } = videojs;
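// Appends low-latency HLS (LL-HLS) blocking-reload query directives to a
// playlist refresh URI. A rough sketch of the outcome (hypothetical values):
// for a live playlist whose next media sequence number works out to 102 and
// whose preload segment already has one known part, the refresh request
// becomes something like:
//   https://example.com/media.m3u8?_HLS_msn=102&_HLS_part=1
// and, when the server advertises CAN-SKIP-UNTIL, _HLS_skip=YES (or v2) is
// prepended ahead of the other directives.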
const addLLHLSQueryDirectives = (uri, media) => {
if (media.endList) {
return uri;
}
const query = [];
if (media.serverControl && media.serverControl.canBlockReload) {
const {preloadSegment} = media;
// next msn is a zero based value, length is not.
let nextMSN = media.mediaSequence + media.segments.length;
// If preload segment has parts then it is likely
// that we are going to request a part of that preload segment.
// the logic below is used to determine that.
if (preloadSegment) {
const parts = preloadSegment.parts || [];
// _HLS_part is a zero based index
const nextPart = getKnownPartCount(media) - 1;
// if nextPart is > -1 and not equal to just the
// length of parts, then we know we had part preload hints
// and we need to add the _HLS_part= query
if (nextPart > -1 && nextPart !== (parts.length - 1)) {
// add existing parts to our preload hints
query.push(`_HLS_part=${nextPart}`);
}
// this if statement makes sure that we request the msn
// of the preload segment if:
// 1. the preload segment had parts (and was not yet a full segment)
// but was added to our segments array
// 2. the preload segment had preload hints for parts that are not in
// the manifest yet.
// in all other cases we want the segment after the preload segment
// which will be given by using media.segments.length because it is 1 based
// rather than 0 based.
if (nextPart > -1 || parts.length) {
nextMSN--;
}
}
// add _HLS_msn= in front of any _HLS_part query
query.unshift(`_HLS_msn=${nextMSN}`);
}
if (media.serverControl && media.serverControl.canSkipUntil) {
// add _HLS_skip= in front of all other queries.
query.unshift('_HLS_skip=' + (media.serverControl.canSkipDateranges ? 'v2' : 'YES'));
}
query.forEach(function(str, i) {
const symbol = i === 0 ? '?' : '&';
uri += `${symbol}${str}`;
});
return uri;
};
/**
* Returns a new segment object with properties and
* the parts array merged.
*
* @param {Object} a the old segment
* @param {Object} b the new segment
*
* @return {Object} the merged segment
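*
* @example
* // A minimal sketch (hypothetical segments): values from the new segment
* // win, but old part properties are merged in index-by-index.
* // updateSegment(
* //   { duration: 6, parts: [{ duration: 2, uri: 'part0.mp4' }] },
* //   { duration: 6, parts: [{ duration: 2 }] }
* // ); // => { duration: 6, parts: [{ duration: 2, uri: 'part0.mp4' }] }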
*/
export const updateSegment = (a, b) => {
if (!a) {
return b;
}
const result = mergeOptions(a, b);
// if only the old segment has preload hints
// and the new one does not, remove preload hints.
if (a.preloadHints && !b.preloadHints) {
delete result.preloadHints;
}
// if only the old segment has parts
// then the parts are no longer valid
if (a.parts && !b.parts) {
delete result.parts;
// if both segments have parts
// copy part properties from the old segment
// to the new one.
} else if (a.parts && b.parts) {
for (let i = 0; i < b.parts.length; i++) {
if (a.parts && a.parts[i]) {
result.parts[i] = mergeOptions(a.parts[i], b.parts[i]);
}
}
}
// set skipped to false for segments that have
// had information merged from the old segment.
if (!a.skipped && b.skipped) {
result.skipped = false;
}
// set preload to false for segments that have
// had information added in the new segment.
if (a.preload && !b.preload) {
result.preload = false;
}
return result;
};
/**
* Returns a new array of segments that is the result of merging
* properties from an older list of segments onto an updated
* list. No properties on the updated playlist will be overwritten.
*
* @param {Array} original the outdated list of segments
* @param {Array} update the updated list of segments
* @param {number=} offset the index of the first update
* segment in the original segment list. For non-live playlists,
* this should always be zero and does not need to be
* specified. For live playlists, it should be the difference
* between the media sequence numbers in the original and updated
* playlists.
* @return {Array} a list of merged segment objects
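*
* @example
* // A minimal sketch (hypothetical live refresh, 'custom' is a made-up
* // property): the media sequence advanced by one, so old segment 1 lines up
* // with new segment 0 and keeps its extra properties.
* // updateSegments(
* //   [{ uri: 's0.ts' }, { uri: 's1.ts', custom: true }],
* //   [{ uri: 's1.ts' }, { uri: 's2.ts' }],
* //   1
* // ); // => [{ uri: 's1.ts', custom: true }, { uri: 's2.ts' }]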
*/
export const updateSegments = (original, update, offset) => {
const oldSegments = original.slice();
const newSegments = update.slice();
offset = offset || 0;
const result = [];
let currentMap;
for (let newIndex = 0; newIndex < newSegments.length; newIndex++) {
const oldSegment = oldSegments[newIndex + offset];
const newSegment = newSegments[newIndex];
if (oldSegment) {
currentMap = oldSegment.map || currentMap;
result.push(updateSegment(oldSegment, newSegment));
} else {
// carry over map to new segment if it is missing
if (currentMap && !newSegment.map) {
newSegment.map = currentMap;
}
result.push(newSegment);
}
}
return result;
};
export const resolveSegmentUris = (segment, baseUri) => {
// preloadSegment will not have a uri at all
// as the segment isn't actually in the manifest yet, only parts
if (!segment.resolvedUri && segment.uri) {
segment.resolvedUri = resolveUrl(baseUri, segment.uri);
}
if (segment.key && !segment.key.resolvedUri) {
segment.key.resolvedUri = resolveUrl(baseUri, segment.key.uri);
}
if (segment.map && !segment.map.resolvedUri) {
segment.map.resolvedUri = resolveUrl(baseUri, segment.map.uri);
}
if (segment.map && segment.map.key && !segment.map.key.resolvedUri) {
segment.map.key.resolvedUri = resolveUrl(baseUri, segment.map.key.uri);
}
if (segment.parts && segment.parts.length) {
segment.parts.forEach((p) => {
if (p.resolvedUri) {
return;
}
p.resolvedUri = resolveUrl(baseUri, p.uri);
});
}
if (segment.preloadHints && segment.preloadHints.length) {
segment.preloadHints.forEach((p) => {
if (p.resolvedUri) {
return;
}
p.resolvedUri = resolveUrl(baseUri, p.uri);
});
}
};
const getAllSegments = function(media) {
const segments = media.segments || [];
const preloadSegment = media.preloadSegment;
// a preloadSegment with only preloadHints is not currently
// a usable segment; only include a preloadSegment that has
// parts.
if (preloadSegment && preloadSegment.parts && preloadSegment.parts.length) {
// if preloadHints has a MAP that means that the
// init segment is going to change. We cannot use any of the parts
// from this preload segment.
if (preloadSegment.preloadHints) {
for (let i = 0; i < preloadSegment.preloadHints.length; i++) {
if (preloadSegment.preloadHints[i].type === 'MAP') {
return segments;
}
}
}
// set the duration for our preload segment to target duration.
preloadSegment.duration = media.targetDuration;
preloadSegment.preload = true;
segments.push(preloadSegment);
}
return segments;
};
// consider the playlist unchanged if the playlist object is the same or
// the number of segments is equal, the media sequence number is unchanged,
// and this playlist hasn't become the end of the playlist
export const isPlaylistUnchanged = (a, b) => a === b ||
(a.segments && b.segments && a.segments.length === b.segments.length &&
a.endList === b.endList &&
a.mediaSequence === b.mediaSequence);
/**
* Returns a new master playlist that is the result of merging an
* updated media playlist into the original version. If the
* updated media playlist does not match any of the playlist
* entries in the original master playlist, null is returned.
*
* @param {Object} master a parsed master M3U8 object
* @param {Object} newMedia a parsed media M3U8 object
* @param {Function} [unchangedCheck] check used to decide whether the playlist is unchanged
* @return {Object} a new object that represents the original
* master playlist with the updated media playlist merged in, or
* null if the merge produced no change.
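*
* @example
* // A minimal sketch (hypothetical refresh loop):
* // const updated = updateMaster(master, refreshedMediaPlaylist);
* // if (updated) {
* //   master = updated; // the refresh changed something
* // }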
*/
export const updateMaster = (master, newMedia, unchangedCheck = isPlaylistUnchanged) => {
const result = mergeOptions(master, {});
const oldMedia = result.playlists[newMedia.id];
if (!oldMedia) {
return null;
}
if (unchangedCheck(oldMedia, newMedia)) {
return null;
}
newMedia.segments = getAllSegments(newMedia);
const mergedPlaylist = mergeOptions(oldMedia, newMedia);
// always use the new media's preload segment
if (mergedPlaylist.preloadSegment && !newMedia.preloadSegment) {
delete mergedPlaylist.preloadSegment;
}
// if the update could overlap existing segment information, merge the two segment lists
if (oldMedia.segments) {
if (newMedia.skip) {
newMedia.segments = newMedia.segments || [];
// add back in objects for skipped segments, so that we merge
// old properties into the new segments
for (let i = 0; i < newMedia.skip.skippedSegments; i++) {
newMedia.segments.unshift({skipped: true});
}
}
mergedPlaylist.segments = updateSegments(
oldMedia.segments,
newMedia.segments,
newMedia.mediaSequence - oldMedia.mediaSequence
);
}
// resolve any segment URIs to prevent us from having to do it later
mergedPlaylist.segments.forEach((segment) => {
resolveSegmentUris(segment, mergedPlaylist.resolvedUri);
});
// TODO Right now in the playlists array there are two references to each playlist, one
// that is referenced by index, and one by URI. The index reference may no longer be
// necessary.
for (let i = 0; i < result.playlists.length; i++) {
if (result.playlists[i].id === newMedia.id) {
result.playlists[i] = mergedPlaylist;
}
}
result.playlists[newMedia.id] = mergedPlaylist;
// URI reference added for backwards compatibility
result.playlists[newMedia.uri] = mergedPlaylist;
// update media group playlist references.
forEachMediaGroup(master, (properties, mediaType, groupKey, labelKey) => {
if (!properties.playlists) {
return;
}
for (let i = 0; i < properties.playlists.length; i++) {
if (newMedia.id === properties.playlists[i].id) {
properties.playlists[i] = newMedia;
}
}
});
return result;
};
/**
* Calculates the time to wait before refreshing a live playlist
*
* @param {Object} media
* The current media
* @param {boolean} update
* True if there were any updates from the last refresh, false otherwise
* @return {number}
* The time in ms to wait before refreshing the live playlist
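*
* @example
* // A minimal sketch (hypothetical playlists):
* // refreshDelay({ segments: [{ duration: 6 }], targetDuration: 6 }, true);  // => 6000
* // refreshDelay({ segments: [{ duration: 6 }], targetDuration: 6 }, false); // => 3000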
*/
export const refreshDelay = (media, update) => {
const lastSegment = media.segments[media.segments.length - 1];
const lastPart = lastSegment && lastSegment.parts && lastSegment.parts[lastSegment.parts.length - 1];
const lastDuration = lastPart && lastPart.duration || lastSegment && lastSegment.duration;
if (update && lastDuration) {
return lastDuration * 1000;
}
// if the playlist is unchanged since the last reload or last segment duration
// cannot be determined, try again after half the target duration
return (media.partTargetDuration || media.targetDuration || 10) * 500;
};
/**
* Load a playlist from a remote location
*
* @class PlaylistLoader
* @extends Stream
* @param {string|Object} src url or object of the manifest
* @param {VhsHandler} vhs the VHS SourceHandler instance
* @param {Object} [options] options, including the withCredentials and
* handleManifestRedirects xhr settings
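*
* @example
* // A minimal sketch of usage (hypothetical source URL and vhs handler):
* // const loader = new PlaylistLoader('http://example.com/master.m3u8', vhs, {
* //   withCredentials: true
* // });
* // loader.on('loadedmetadata', () => console.log(loader.media().uri));
* // loader.load();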
*/
export default class PlaylistLoader extends EventTarget {
constructor(src, vhs, options = { }) {
super();
if (!src) {
throw new Error('A non-empty playlist URL or object is required');
}
this.logger_ = logger('PlaylistLoader');
const { withCredentials = false, handleManifestRedirects = false } = options;
this.src = src;
this.vhs_ = vhs;
this.withCredentials = withCredentials;
this.handleManifestRedirects = handleManifestRedirects;
const vhsOptions = vhs.options_;
this.customTagParsers = (vhsOptions && vhsOptions.customTagParsers) || [];
this.customTagMappers = (vhsOptions && vhsOptions.customTagMappers) || [];
this.experimentalLLHLS = (vhsOptions && vhsOptions.experimentalLLHLS) || false;
// initialize the loader state
this.state = 'HAVE_NOTHING';
// live playlist staleness timeout
this.handleMediaupdatetimeout_ = this.handleMediaupdatetimeout_.bind(this);
this.on('mediaupdatetimeout', this.handleMediaupdatetimeout_);
}
handleMediaupdatetimeout_() {
if (this.state !== 'HAVE_METADATA') {
// only refresh the media playlist if no other activity is going on
return;
}
const media = this.media();
let uri = resolveUrl(this.master.uri, media.uri);
if (this.experimentalLLHLS) {
uri = addLLHLSQueryDirectives(uri, media);
}
this.state = 'HAVE_CURRENT_METADATA';
this.request = this.vhs_.xhr({
uri,
withCredentials: this.withCredentials
}, (error, req) => {
// disposed
if (!this.request) {
return;
}
if (error) {
return this.playlistRequestError(this.request, this.media(), 'HAVE_METADATA');
}
this.haveMetadata({
playlistString: this.request.responseText,
url: this.media().uri,
id: this.media().id
});
});
}
playlistRequestError(xhr, playlist, startingState) {
const {
uri,
id
} = playlist;
// any in-flight request is now finished
this.request = null;
if (startingState) {
this.state = startingState;
}
this.error = {
playlist: this.master.playlists[id],
status: xhr.status,
message: `HLS playlist request error at URL: ${uri}.`,
responseText: xhr.responseText,
code: (xhr.status >= 500) ? 4 : 2
};
this.trigger('error');
}
parseManifest_({url, manifestString}) {
return parseManifest({
onwarn: ({message}) => this.logger_(`m3u8-parser warn for ${url}: ${message}`),
oninfo: ({message}) => this.logger_(`m3u8-parser info for ${url}: ${message}`),
manifestString,
customTagParsers: this.customTagParsers,
customTagMappers: this.customTagMappers,
experimentalLLHLS: this.experimentalLLHLS
});
}
/**
* Update the playlist loader's state in response to a new or updated playlist.
*
* @param {string} [playlistString]
* Playlist string (if playlistObject is not provided)
* @param {Object} [playlistObject]
* Playlist object (if playlistString is not provided)
* @param {string} url
* URL of playlist
* @param {string} id
* ID to use for playlist
*/
haveMetadata({ playlistString, playlistObject, url, id }) {
// any in-flight request is now finished
this.request = null;
this.state = 'HAVE_METADATA';
const playlist = playlistObject || this.parseManifest_({
url,
manifestString: playlistString
});
playlist.lastRequest = Date.now();
setupMediaPlaylist({
playlist,
uri: url,
id
});
// merge this playlist into the master
const update = updateMaster(this.master, playlist);
this.targetDuration = playlist.partTargetDuration || playlist.targetDuration;
if (update) {
this.master = update;
this.media_ = this.master.playlists[id];
} else {
this.trigger('playlistunchanged');
}
// refresh live playlists after a target duration passes
if (!this.media().endList) {
window.clearTimeout(this.mediaUpdateTimeout);
this.mediaUpdateTimeout = window.setTimeout(() => {
this.trigger('mediaupdatetimeout');
}, refreshDelay(this.media(), !!update));
}
this.trigger('loadedplaylist');
}
/**
* Abort any outstanding work and clean up.
*/
dispose() {
this.trigger('dispose');
this.stopRequest();
window.clearTimeout(this.mediaUpdateTimeout);
window.clearTimeout(this.finalRenditionTimeout);
this.off();
}
stopRequest() {
if (this.request) {
const oldRequest = this.request;
this.request = null;
oldRequest.onreadystatechange = null;
oldRequest.abort();
}
}
/**
* When called without any arguments, returns the currently
* active media playlist. When called with a single argument,
* triggers the playlist loader to asynchronously switch to the
* specified media playlist. Calling this method while the
 * loader is in the HAVE_NOTHING state causes an error to be emitted
* but otherwise has no effect.
*
* @param {Object=} playlist the parsed media playlist
* object to switch to
* @param {boolean=} shouldDelay whether we should delay the request by half target duration
*
* @return {Playlist} the current loaded media
*/
media(playlist, shouldDelay) {
// getter
if (!playlist) {
return this.media_;
}
// setter
if (this.state === 'HAVE_NOTHING') {
throw new Error('Cannot switch media playlist from ' + this.state);
}
// find the playlist object if the target playlist has been
// specified by URI
if (typeof playlist === 'string') {
if (!this.master.playlists[playlist]) {
throw new Error('Unknown playlist URI: ' + playlist);
}
playlist = this.master.playlists[playlist];
}
window.clearTimeout(this.finalRenditionTimeout);
if (shouldDelay) {
const delay = ((playlist.partTargetDuration || playlist.targetDuration) / 2) * 1000 || 5 * 1000;
this.finalRenditionTimeout =
window.setTimeout(this.media.bind(this, playlist, false), delay);
return;
}
const startingState = this.state;
const mediaChange = !this.media_ || playlist.id !== this.media_.id;
const masterPlaylistRef = this.master.playlists[playlist.id];
// switch to fully loaded playlists immediately
if (masterPlaylistRef && masterPlaylistRef.endList ||
// handle the case of a playlist object (e.g., if using vhs-json with a resolved
// media playlist or, for the case of demuxed audio, a resolved audio media group)
(playlist.endList && playlist.segments.length)) {
// abort outstanding playlist requests
if (this.request) {
this.request.onreadystatechange = null;
this.request.abort();
this.request = null;
}
this.state = 'HAVE_METADATA';
this.media_ = playlist;
// trigger media change if the active media has been updated
if (mediaChange) {
this.trigger('mediachanging');
if (startingState === 'HAVE_MASTER') {
// The initial playlist was a master manifest, and the first media selected was
// also provided (in the form of a resolved playlist object) as part of the
// source object (rather than just a URL). Therefore, since the media playlist
// doesn't need to be requested, loadedmetadata won't trigger as part of the
// normal flow, and needs an explicit trigger here.
this.trigger('loadedmetadata');
} else {
this.trigger('mediachange');
}
}
return;
}
// switching to the active playlist is a no-op
if (!mediaChange) {
return;
}
this.state = 'SWITCHING_MEDIA';
// there is already an outstanding playlist request
if (this.request) {
if (playlist.resolvedUri === this.request.url) {
// requesting to switch to the same playlist multiple times
// has no effect after the first
return;
}
this.request.onreadystatechange = null;
this.request.abort();
this.request = null;
}
// request the new playlist
if (this.media_) {
this.trigger('mediachanging');
}
this.request = this.vhs_.xhr({
uri: playlist.resolvedUri,
withCredentials: this.withCredentials
}, (error, req) => {
// disposed
if (!this.request) {
return;
}
playlist.lastRequest = Date.now();
playlist.resolvedUri = resolveManifestRedirect(this.handleManifestRedirects, playlist.resolvedUri, req);
if (error) {
return this.playlistRequestError(this.request, playlist, startingState);
}
this.haveMetadata({
playlistString: req.responseText,
url: playlist.uri,
id: playlist.id
});
// fire loadedmetadata the first time a media playlist is loaded
if (startingState === 'HAVE_MASTER') {
this.trigger('loadedmetadata');
} else {
this.trigger('mediachange');
}
});
}
/**
* pause loading of the playlist
*/
pause() {
this.stopRequest();
window.clearTimeout(this.mediaUpdateTimeout);
if (this.state === 'HAVE_NOTHING') {
      // If we pause the loader before any data has been retrieved, it's as if we never
// started, so reset to an unstarted state.
this.started = false;
}
// Need to restore state now that no activity is happening
if (this.state === 'SWITCHING_MEDIA') {
// if the loader was in the process of switching media, it should either return to
      // HAVE_MASTER or HAVE_METADATA depending on whether the loader has loaded a media
// playlist yet. This is determined by the existence of loader.media_
if (this.media_) {
this.state = 'HAVE_METADATA';
} else {
this.state = 'HAVE_MASTER';
}
} else if (this.state === 'HAVE_CURRENT_METADATA') {
this.state = 'HAVE_METADATA';
}
}
/**
* start loading of the playlist
*/
load(shouldDelay) {
window.clearTimeout(this.mediaUpdateTimeout);
const media = this.media();
if (shouldDelay) {
const delay = media ? ((media.partTargetDuration || media.targetDuration) / 2) * 1000 : 5 * 1000;
this.mediaUpdateTimeout = window.setTimeout(() => this.load(), delay);
return;
}
if (!this.started) {
this.start();
return;
}
if (media && !media.endList) {
this.trigger('mediaupdatetimeout');
} else {
this.trigger('loadedplaylist');
}
}
/**
* start loading of the playlist
*/
start() {
this.started = true;
if (typeof this.src === 'object') {
// in the case of an entirely constructed manifest object (meaning there's no actual
// manifest on a server), default the uri to the page's href
if (!this.src.uri) {
this.src.uri = window.location.href;
}
// resolvedUri is added on internally after the initial request. Since there's no
// request for pre-resolved manifests, add on resolvedUri here.
this.src.resolvedUri = this.src.uri;
// Since a manifest object was passed in as the source (instead of a URL), the first
// request can be skipped (since the top level of the manifest, at a minimum, is
// already available as a parsed manifest object). However, if the manifest object
// represents a master playlist, some media playlists may need to be resolved before
// the starting segment list is available. Therefore, go directly to setup of the
// initial playlist, and let the normal flow continue from there.
//
// Note that the call to setup is asynchronous, as other sections of VHS may assume
// that the first request is asynchronous.
setTimeout(() => {
this.setupInitialPlaylist(this.src);
}, 0);
return;
}
// request the specified URL
this.request = this.vhs_.xhr({
uri: this.src,
withCredentials: this.withCredentials
}, (error, req) => {
// disposed
if (!this.request) {
return;
}
// clear the loader's request reference
this.request = null;
if (error) {
this.error = {
status: req.status,
message: `HLS playlist request error at URL: ${this.src}.`,
responseText: req.responseText,
// MEDIA_ERR_NETWORK
code: 2
};
if (this.state === 'HAVE_NOTHING') {
this.started = false;
}
return this.trigger('error');
}
this.src = resolveManifestRedirect(this.handleManifestRedirects, this.src, req);
const manifest = this.parseManifest_({
manifestString: req.responseText,
url: this.src
});
this.setupInitialPlaylist(manifest);
});
}
srcUri() {
return typeof this.src === 'string' ? this.src : this.src.uri;
}
/**
* Given a manifest object that's either a master or media playlist, trigger the proper
* events and set the state of the playlist loader.
*
* If the manifest object represents a master playlist, `loadedplaylist` will be
* triggered to allow listeners to select a playlist. If none is selected, the loader
* will default to the first one in the playlists array.
*
* If the manifest object represents a media playlist, `loadedplaylist` will be
* triggered followed by `loadedmetadata`, as the only available playlist is loaded.
*
* In the case of a media playlist, a master playlist object wrapper with one playlist
* will be created so that all logic can handle playlists in the same fashion (as an
* assumed manifest object schema).
*
* @param {Object} manifest
* The parsed manifest object
*/
setupInitialPlaylist(manifest) {
this.state = 'HAVE_MASTER';
if (manifest.playlists) {
this.master = manifest;
addPropertiesToMaster(this.master, this.srcUri());
      // If the initial master playlist has playlists with segments already resolved,
      // then resolve URIs in advance, as that is usually done after a playlist request,
      // which may not happen if the playlist is already resolved.
manifest.playlists.forEach((playlist) => {
playlist.segments = getAllSegments(playlist);
playlist.segments.forEach((segment) => {
resolveSegmentUris(segment, playlist.resolvedUri);
});
});
this.trigger('loadedplaylist');
if (!this.request) {
// no media playlist was specifically selected so start
// from the first listed one
this.media(this.master.playlists[0]);
}
return;
}
// In order to support media playlists passed in as vhs-json, the case where the uri
// is not provided as part of the manifest should be considered, and an appropriate
// default used.
const uri = this.srcUri() || window.location.href;
this.master = masterForMedia(manifest, uri);
this.haveMetadata({
playlistObject: manifest,
url: uri,
id: this.master.playlists[0].id
});
this.trigger('loadedmetadata');
}
}

531
node_modules/@videojs/http-streaming/src/playlist-selectors.js generated vendored Normal file
View file

@ -0,0 +1,531 @@
import window from 'global/window';
import Config from './config';
import Playlist from './playlist';
import { codecsForPlaylist } from './util/codecs.js';
import logger from './util/logger';
const logFn = logger('PlaylistSelector');
const representationToString = function(representation) {
if (!representation || !representation.playlist) {
return;
}
const playlist = representation.playlist;
return JSON.stringify({
id: playlist.id,
bandwidth: representation.bandwidth,
width: representation.width,
height: representation.height,
codecs: playlist.attributes && playlist.attributes.CODECS || ''
});
};
// Utilities
/**
* Returns the CSS value for the specified property on an element
* using `getComputedStyle`. Firefox has a long-standing issue where
* getComputedStyle() may return null when running in an iframe with
* `display: none`.
*
* @see https://bugzilla.mozilla.org/show_bug.cgi?id=548397
* @param {HTMLElement} el the htmlelement to work on
 * @param {string} property the property to get the style for
*/
const safeGetComputedStyle = function(el, property) {
if (!el) {
return '';
}
const result = window.getComputedStyle(el);
if (!result) {
return '';
}
return result[property];
};
/**
 * Reusable stable sort function
*
* @param {Playlists} array
* @param {Function} sortFn Different comparators
* @function stableSort
*/
const stableSort = function(array, sortFn) {
const newArray = array.slice();
array.sort(function(left, right) {
const cmp = sortFn(left, right);
if (cmp === 0) {
return newArray.indexOf(left) - newArray.indexOf(right);
}
return cmp;
});
};
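// Behavior sketch (hypothetical data): the sort happens in place and ties are
// broken by each element's index in the pre-sort copy, guaranteeing stability
// even on engines whose native sort is not stable:
//
//   const reps = [{ id: 'a', bw: 2 }, { id: 'b', bw: 1 }, { id: 'c', bw: 1 }];
//
//   stableSort(reps, (left, right) => left.bw - right.bw);
//   // reps is now b, c, a -- 'b' stays ahead of 'c' because they compare equal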
/**
* A comparator function to sort two playlist object by bandwidth.
*
* @param {Object} left a media playlist object
* @param {Object} right a media playlist object
* @return {number} Greater than zero if the bandwidth attribute of
* left is greater than the corresponding attribute of right. Less
* than zero if the bandwidth of right is greater than left and
* exactly zero if the two are equal.
*/
export const comparePlaylistBandwidth = function(left, right) {
let leftBandwidth;
let rightBandwidth;
if (left.attributes.BANDWIDTH) {
leftBandwidth = left.attributes.BANDWIDTH;
}
leftBandwidth = leftBandwidth || window.Number.MAX_VALUE;
if (right.attributes.BANDWIDTH) {
rightBandwidth = right.attributes.BANDWIDTH;
}
rightBandwidth = rightBandwidth || window.Number.MAX_VALUE;
return leftBandwidth - rightBandwidth;
};
/**
* A comparator function to sort two playlist object by resolution (width).
*
* @param {Object} left a media playlist object
* @param {Object} right a media playlist object
* @return {number} Greater than zero if the resolution.width attribute of
* left is greater than the corresponding attribute of right. Less
* than zero if the resolution.width of right is greater than left and
* exactly zero if the two are equal.
*/
export const comparePlaylistResolution = function(left, right) {
let leftWidth;
let rightWidth;
if (left.attributes.RESOLUTION &&
left.attributes.RESOLUTION.width) {
leftWidth = left.attributes.RESOLUTION.width;
}
leftWidth = leftWidth || window.Number.MAX_VALUE;
if (right.attributes.RESOLUTION &&
right.attributes.RESOLUTION.width) {
rightWidth = right.attributes.RESOLUTION.width;
}
rightWidth = rightWidth || window.Number.MAX_VALUE;
// NOTE - Fallback to bandwidth sort as appropriate in cases where multiple renditions
  // have the same media dimensions/resolution
if (leftWidth === rightWidth &&
left.attributes.BANDWIDTH &&
right.attributes.BANDWIDTH) {
return left.attributes.BANDWIDTH - right.attributes.BANDWIDTH;
}
return leftWidth - rightWidth;
};
/**
* Chooses the appropriate media playlist based on bandwidth and player size
*
* @param {Object} master
* Object representation of the master manifest
* @param {number} playerBandwidth
* Current calculated bandwidth of the player
* @param {number} playerWidth
* Current width of the player element (should account for the device pixel ratio)
* @param {number} playerHeight
* Current height of the player element (should account for the device pixel ratio)
* @param {boolean} limitRenditionByPlayerDimensions
* True if the player width and height should be used during the selection, false otherwise
* @param {Object} masterPlaylistController
* the current masterPlaylistController object
* @return {Playlist} the highest bitrate playlist less than the
* currently detected bandwidth, accounting for some amount of
* bandwidth variance
*/
export let simpleSelector = function(
master,
playerBandwidth,
playerWidth,
playerHeight,
limitRenditionByPlayerDimensions,
masterPlaylistController
) {
// If we end up getting called before `master` is available, exit early
if (!master) {
return;
}
const options = {
bandwidth: playerBandwidth,
width: playerWidth,
height: playerHeight,
limitRenditionByPlayerDimensions
};
let playlists = master.playlists;
// if playlist is audio only, select between currently active audio group playlists.
if (Playlist.isAudioOnly(master)) {
playlists = masterPlaylistController.getAudioTrackPlaylists_();
// add audioOnly to options so that we log audioOnly: true
    // at the bottom of this function for debugging.
options.audioOnly = true;
}
// convert the playlists to an intermediary representation to make comparisons easier
let sortedPlaylistReps = playlists.map((playlist) => {
let bandwidth;
const width = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.width;
const height = playlist.attributes && playlist.attributes.RESOLUTION && playlist.attributes.RESOLUTION.height;
bandwidth = playlist.attributes && playlist.attributes.BANDWIDTH;
bandwidth = bandwidth || window.Number.MAX_VALUE;
return {
bandwidth,
width,
height,
playlist
};
});
stableSort(sortedPlaylistReps, (left, right) => left.bandwidth - right.bandwidth);
// filter out any playlists that have been excluded due to
// incompatible configurations
sortedPlaylistReps = sortedPlaylistReps.filter((rep) => !Playlist.isIncompatible(rep.playlist));
// filter out any playlists that have been disabled manually through the representations
// api or blacklisted temporarily due to playback errors.
let enabledPlaylistReps = sortedPlaylistReps.filter((rep) => Playlist.isEnabled(rep.playlist));
if (!enabledPlaylistReps.length) {
// if there are no enabled playlists, then they have all been blacklisted or disabled
// by the user through the representations api. In this case, ignore blacklisting and
// fallback to what the user wants by using playlists the user has not disabled.
enabledPlaylistReps = sortedPlaylistReps.filter((rep) => !Playlist.isDisabled(rep.playlist));
}
// filter out any variant that has greater effective bitrate
// than the current estimated bandwidth
const bandwidthPlaylistReps = enabledPlaylistReps.filter((rep) => rep.bandwidth * Config.BANDWIDTH_VARIANCE < playerBandwidth);
let highestRemainingBandwidthRep =
bandwidthPlaylistReps[bandwidthPlaylistReps.length - 1];
// get all of the renditions with the same (highest) bandwidth
  // and then take the very first element
const bandwidthBestRep = bandwidthPlaylistReps.filter((rep) => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];
// if we're not going to limit renditions by player size, make an early decision.
if (limitRenditionByPlayerDimensions === false) {
const chosenRep = (
bandwidthBestRep ||
enabledPlaylistReps[0] ||
sortedPlaylistReps[0]
);
if (chosenRep && chosenRep.playlist) {
let type = 'sortedPlaylistReps';
if (bandwidthBestRep) {
type = 'bandwidthBestRep';
}
if (enabledPlaylistReps[0]) {
type = 'enabledPlaylistReps';
}
logFn(`choosing ${representationToString(chosenRep)} using ${type} with options`, options);
return chosenRep.playlist;
}
logFn('could not choose a playlist with options', options);
return null;
}
// filter out playlists without resolution information
const haveResolution = bandwidthPlaylistReps.filter((rep) => rep.width && rep.height);
// sort variants by resolution
stableSort(haveResolution, (left, right) => left.width - right.width);
  // if we have the exact resolution as the player, use it
const resolutionBestRepList = haveResolution.filter((rep) => rep.width === playerWidth && rep.height === playerHeight);
highestRemainingBandwidthRep = resolutionBestRepList[resolutionBestRepList.length - 1];
  // ensure that we pick the highest bandwidth variant that has the exact resolution
const resolutionBestRep = resolutionBestRepList.filter((rep) => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];
let resolutionPlusOneList;
let resolutionPlusOneSmallest;
let resolutionPlusOneRep;
// find the smallest variant that is larger than the player
// if there is no match of exact resolution
if (!resolutionBestRep) {
resolutionPlusOneList = haveResolution.filter((rep) => rep.width > playerWidth || rep.height > playerHeight);
    // find all the variants that have the same smallest resolution
resolutionPlusOneSmallest = resolutionPlusOneList.filter((rep) => rep.width === resolutionPlusOneList[0].width &&
rep.height === resolutionPlusOneList[0].height);
// ensure that we also pick the highest bandwidth variant that
// is just-larger-than the video player
highestRemainingBandwidthRep =
resolutionPlusOneSmallest[resolutionPlusOneSmallest.length - 1];
resolutionPlusOneRep = resolutionPlusOneSmallest.filter((rep) => rep.bandwidth === highestRemainingBandwidthRep.bandwidth)[0];
}
// fallback chain of variants
const chosenRep = (
resolutionPlusOneRep ||
resolutionBestRep ||
bandwidthBestRep ||
enabledPlaylistReps[0] ||
sortedPlaylistReps[0]
);
if (chosenRep && chosenRep.playlist) {
let type = 'sortedPlaylistReps';
if (resolutionPlusOneRep) {
type = 'resolutionPlusOneRep';
} else if (resolutionBestRep) {
type = 'resolutionBestRep';
} else if (bandwidthBestRep) {
type = 'bandwidthBestRep';
} else if (enabledPlaylistReps[0]) {
type = 'enabledPlaylistReps';
}
logFn(`choosing ${representationToString(chosenRep)} using ${type} with options`, options);
return chosenRep.playlist;
}
logFn('could not choose a playlist with options', options);
return null;
};
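// Worked example (hypothetical renditions; assumes the default
// Config.BANDWIDTH_VARIANCE of 1.2). With 300k/600k/1200k variants and a
// measured playerBandwidth of 800k, the effective bitrates are
// 360k/720k/1440k, so the 600k rendition is the highest one that fits. The
// masterPlaylistController argument is only consulted for audio-only masters:
//
//   const master = { playlists: [
//     { id: '0-300k', attributes: { BANDWIDTH: 300000 } },
//     { id: '1-600k', attributes: { BANDWIDTH: 600000 } },
//     { id: '2-1200k', attributes: { BANDWIDTH: 1200000 } }
//   ] };
//
//   simpleSelector(master, 800000, 640, 360, false, null); // => the 600k playlist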
export const TEST_ONLY_SIMPLE_SELECTOR = (newSimpleSelector) => {
const oldSimpleSelector = simpleSelector;
simpleSelector = newSimpleSelector;
return function resetSimpleSelector() {
simpleSelector = oldSimpleSelector;
};
};
// Playlist Selectors
/**
* Chooses the appropriate media playlist based on the most recent
* bandwidth estimate and the player size.
*
* Expects to be called within the context of an instance of VhsHandler
*
* @return {Playlist} the highest bitrate playlist less than the
* currently detected bandwidth, accounting for some amount of
* bandwidth variance
*/
export const lastBandwidthSelector = function() {
const pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
return simpleSelector(
this.playlists.master,
this.systemBandwidth,
parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio,
parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio,
this.limitRenditionByPlayerDimensions,
this.masterPlaylistController_
);
};
/**
* Chooses the appropriate media playlist based on an
* exponential-weighted moving average of the bandwidth after
* filtering for player size.
*
* Expects to be called within the context of an instance of VhsHandler
*
* @param {number} decay - a number between 0 and 1. Higher values of
* this parameter will cause previous bandwidth estimates to lose
* significance more quickly.
* @return {Function} a function which can be invoked to create a new
* playlist selector function.
* @see https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
*/
export const movingAverageBandwidthSelector = function(decay) {
let average = -1;
let lastSystemBandwidth = -1;
if (decay < 0 || decay > 1) {
throw new Error('Moving average bandwidth decay must be between 0 and 1.');
}
return function() {
const pixelRatio = this.useDevicePixelRatio ? window.devicePixelRatio || 1 : 1;
if (average < 0) {
average = this.systemBandwidth;
lastSystemBandwidth = this.systemBandwidth;
}
    // stop the average value from decaying every 250ms
    // when the systemBandwidth is constant
    // and
    // stop the average from being set to a very low value when the
    // systemBandwidth becomes 0 in case of chunk cancellation
if (this.systemBandwidth > 0 && this.systemBandwidth !== lastSystemBandwidth) {
average = decay * this.systemBandwidth + (1 - decay) * average;
lastSystemBandwidth = this.systemBandwidth;
}
return simpleSelector(
this.playlists.master,
average,
parseInt(safeGetComputedStyle(this.tech_.el(), 'width'), 10) * pixelRatio,
parseInt(safeGetComputedStyle(this.tech_.el(), 'height'), 10) * pixelRatio,
this.limitRenditionByPlayerDimensions,
this.masterPlaylistController_
);
};
};
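// Arithmetic sketch (hypothetical numbers): with decay = 0.5, the average
// seeds itself with the first systemBandwidth sample and then moves halfway
// toward each new, different sample:
//
//   average = 4e6                    // first sample: 4 Mbps
//   average = 0.5 * 2e6 + 0.5 * 4e6  // next sample 2 Mbps => 3e6
//
// The returned function is typically installed as a VHS handler's playlist
// selector, with `this` bound to that handler when invoked.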
/**
* Chooses the appropriate media playlist based on the potential to rebuffer
*
* @param {Object} settings
* Object of information required to use this selector
* @param {Object} settings.master
* Object representation of the master manifest
* @param {number} settings.currentTime
* The current time of the player
* @param {number} settings.bandwidth
* Current measured bandwidth
* @param {number} settings.duration
* Duration of the media
* @param {number} settings.segmentDuration
* Segment duration to be used in round trip time calculations
* @param {number} settings.timeUntilRebuffer
* Time left in seconds until the player has to rebuffer
* @param {number} settings.currentTimeline
* The current timeline segments are being loaded from
* @param {SyncController} settings.syncController
* SyncController for determining if we have a sync point for a given playlist
* @return {Object|null}
* {Object} return.playlist
* The highest bandwidth playlist with the least amount of rebuffering
* {Number} return.rebufferingImpact
* The amount of time in seconds switching to this playlist will rebuffer. A
* negative value means that switching will cause zero rebuffering.
*/
export const minRebufferMaxBandwidthSelector = function(settings) {
const {
master,
currentTime,
bandwidth,
duration,
segmentDuration,
timeUntilRebuffer,
currentTimeline,
syncController
} = settings;
// filter out any playlists that have been excluded due to
// incompatible configurations
const compatiblePlaylists = master.playlists.filter(playlist => !Playlist.isIncompatible(playlist));
// filter out any playlists that have been disabled manually through the representations
// api or blacklisted temporarily due to playback errors.
let enabledPlaylists = compatiblePlaylists.filter(Playlist.isEnabled);
if (!enabledPlaylists.length) {
// if there are no enabled playlists, then they have all been blacklisted or disabled
// by the user through the representations api. In this case, ignore blacklisting and
// fallback to what the user wants by using playlists the user has not disabled.
enabledPlaylists = compatiblePlaylists.filter(playlist => !Playlist.isDisabled(playlist));
}
const bandwidthPlaylists =
enabledPlaylists.filter(Playlist.hasAttribute.bind(null, 'BANDWIDTH'));
const rebufferingEstimates = bandwidthPlaylists.map((playlist) => {
const syncPoint = syncController.getSyncPoint(
playlist,
duration,
currentTimeline,
currentTime
);
// If there is no sync point for this playlist, switching to it will require a
// sync request first. This will double the request time
const numRequests = syncPoint ? 1 : 2;
const requestTimeEstimate = Playlist.estimateSegmentRequestTime(
segmentDuration,
bandwidth,
playlist
);
const rebufferingImpact = (requestTimeEstimate * numRequests) - timeUntilRebuffer;
return {
playlist,
rebufferingImpact
};
});
const noRebufferingPlaylists = rebufferingEstimates.filter((estimate) => estimate.rebufferingImpact <= 0);
// Sort by bandwidth DESC
stableSort(
noRebufferingPlaylists,
(a, b) => comparePlaylistBandwidth(b.playlist, a.playlist)
);
if (noRebufferingPlaylists.length) {
return noRebufferingPlaylists[0];
}
stableSort(rebufferingEstimates, (a, b) => a.rebufferingImpact - b.rebufferingImpact);
return rebufferingEstimates[0] || null;
};
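// Numeric sketch (hypothetical values): a playlist with no sync point needs a
// sync request first, doubling the estimated request time, so with a 2s
// per-request estimate and 3s left until rebuffering:
//
//   rebufferingImpact = (2 * 2) - 3; // => 1, switching would rebuffer ~1s
//
// Playlists with impact <= 0 are preferred, sorted by bandwidth descending;
// otherwise the playlist with the smallest impact wins.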
/**
* Chooses the appropriate media playlist, which in this case is the lowest bitrate
* one with video. If no renditions with video exist, return the lowest audio rendition.
*
* Expects to be called within the context of an instance of VhsHandler
*
* @return {Object|null}
* {Object} return.playlist
* The lowest bitrate playlist that contains a video codec. If no such rendition
* exists pick the lowest audio rendition.
*/
export const lowestBitrateCompatibleVariantSelector = function() {
// filter out any playlists that have been excluded due to
// incompatible configurations or playback errors
const playlists = this.playlists.master.playlists.filter(Playlist.isEnabled);
// Sort ascending by bitrate
stableSort(
playlists,
(a, b) => comparePlaylistBandwidth(a, b)
);
// Parse and assume that playlists with no video codec have no video
// (this is not necessarily true, although it is generally true).
//
// If an entire manifest has no valid videos everything will get filtered
// out.
const playlistsWithVideo = playlists.filter(playlist => !!codecsForPlaylist(this.playlists.master, playlist).video);
return playlistsWithVideo[0] || null;
};

730
node_modules/@videojs/http-streaming/src/playlist.js generated vendored Normal file
View file

@ -0,0 +1,730 @@
/**
* @file playlist.js
*
* Playlist related utilities.
*/
import videojs from 'video.js';
import window from 'global/window';
import {isAudioCodec} from '@videojs/vhs-utils/es/codecs.js';
import {TIME_FUDGE_FACTOR} from './ranges.js';
const {createTimeRange} = videojs;
/**
* A function to get a combined list of parts and segments with durations
* and indexes.
*
* @param {Playlist} playlist the playlist to get the list for.
*
* @return {Array} The part/segment list.
*/
export const getPartsAndSegments = (playlist) => (playlist.segments || []).reduce((acc, segment, si) => {
if (segment.parts) {
segment.parts.forEach(function(part, pi) {
acc.push({duration: part.duration, segmentIndex: si, partIndex: pi, part, segment});
});
} else {
acc.push({duration: segment.duration, segmentIndex: si, partIndex: null, segment, part: null});
}
return acc;
}, []);
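// Shape sketch (hypothetical playlist): a whole segment yields one entry with
// partIndex null, while an LL-HLS segment contributes one entry per part:
//
//   getPartsAndSegments({ segments: [
//     { duration: 6 },
//     { duration: 6, parts: [{ duration: 2 }, { duration: 2 }, { duration: 2 }] }
//   ] });
//   // => [ { duration: 6, segmentIndex: 0, partIndex: null, ... },
//   //      { duration: 2, segmentIndex: 1, partIndex: 0, ... },
//   //      { duration: 2, segmentIndex: 1, partIndex: 1, ... },
//   //      { duration: 2, segmentIndex: 1, partIndex: 2, ... } ]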
export const getLastParts = (media) => {
const lastSegment = media.segments && media.segments.length && media.segments[media.segments.length - 1];
return lastSegment && lastSegment.parts || [];
};
export const getKnownPartCount = ({preloadSegment}) => {
if (!preloadSegment) {
return;
}
const {parts, preloadHints} = preloadSegment;
let partCount = (preloadHints || [])
.reduce((count, hint) => count + (hint.type === 'PART' ? 1 : 0), 0);
partCount += (parts && parts.length) ? parts.length : 0;
return partCount;
};
/**
* Get the number of seconds to delay from the end of a
* live playlist.
*
* @param {Playlist} master the master playlist
* @param {Playlist} media the media playlist
* @return {number} the hold back in seconds.
*/
export const liveEdgeDelay = (master, media) => {
if (media.endList) {
return 0;
}
// dash suggestedPresentationDelay trumps everything
if (master && master.suggestedPresentationDelay) {
return master.suggestedPresentationDelay;
}
const hasParts = getLastParts(media).length > 0;
// look for "part" delays from ll-hls first
if (hasParts && media.serverControl && media.serverControl.partHoldBack) {
return media.serverControl.partHoldBack;
} else if (hasParts && media.partTargetDuration) {
return media.partTargetDuration * 3;
// finally look for full segment delays
} else if (media.serverControl && media.serverControl.holdBack) {
return media.serverControl.holdBack;
} else if (media.targetDuration) {
return media.targetDuration * 3;
}
return 0;
};
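// Precedence sketch (illustrative values): DASH suggestedPresentationDelay
// wins, then LL-HLS part hold-backs, then full-segment hold-backs:
//
//   liveEdgeDelay({ suggestedPresentationDelay: 10 }, {});    // => 10
//   liveEdgeDelay(null, { segments: [{ parts: [{ duration: 1 }] }],
//                         partTargetDuration: 1 });           // => 3 (1 * 3)
//   liveEdgeDelay(null, { segments: [], targetDuration: 6 }); // => 18 (6 * 3)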
/**
* walk backward until we find a duration we can use
* or return a failure
*
* @param {Playlist} playlist the playlist to walk through
 * @param {number} endSequence the mediaSequence to stop walking on
*/
const backwardDuration = function(playlist, endSequence) {
let result = 0;
let i = endSequence - playlist.mediaSequence;
  // if a start time is available for the segment immediately following
// the interval, use it
let segment = playlist.segments[i];
// Walk backward until we find the latest segment with timeline
// information that is earlier than endSequence
if (segment) {
if (typeof segment.start !== 'undefined') {
return { result: segment.start, precise: true };
}
if (typeof segment.end !== 'undefined') {
return {
result: segment.end - segment.duration,
precise: true
};
}
}
while (i--) {
segment = playlist.segments[i];
if (typeof segment.end !== 'undefined') {
return { result: result + segment.end, precise: true };
}
result += segment.duration;
if (typeof segment.start !== 'undefined') {
return { result: result + segment.start, precise: true };
}
}
return { result, precise: false };
};
/**
* walk forward until we find a duration we can use
* or return a failure
*
* @param {Playlist} playlist the playlist to walk through
* @param {number} endSequence the mediaSequence to stop walking on
*/
const forwardDuration = function(playlist, endSequence) {
let result = 0;
let segment;
let i = endSequence - playlist.mediaSequence;
// Walk forward until we find the earliest segment with timeline
// information
for (; i < playlist.segments.length; i++) {
segment = playlist.segments[i];
if (typeof segment.start !== 'undefined') {
return {
result: segment.start - result,
precise: true
};
}
result += segment.duration;
if (typeof segment.end !== 'undefined') {
return {
result: segment.end - result,
precise: true
};
}
}
// indicate we didn't find a useful duration estimate
return { result: -1, precise: false };
};
/**
* Calculate the media duration from the segments associated with a
* playlist. The duration of a subinterval of the available segments
* may be calculated by specifying an end index.
*
* @param {Object} playlist a media playlist object
* @param {number=} endSequence an exclusive upper boundary
* for the playlist. Defaults to playlist length.
* @param {number} expired the amount of time that has dropped
* off the front of the playlist in a live scenario
* @return {number} the duration between the first available segment
* and end index.
*/
const intervalDuration = function(playlist, endSequence, expired) {
if (typeof endSequence === 'undefined') {
endSequence = playlist.mediaSequence + playlist.segments.length;
}
if (endSequence < playlist.mediaSequence) {
return 0;
}
// do a backward walk to estimate the duration
const backward = backwardDuration(playlist, endSequence);
if (backward.precise) {
// if we were able to base our duration estimate on timing
// information provided directly from the Media Source, return
// it
return backward.result;
}
// walk forward to see if a precise duration estimate can be made
// that way
const forward = forwardDuration(playlist, endSequence);
if (forward.precise) {
    // we found a segment that has been buffered and so its
    // position is known precisely
return forward.result;
}
// return the less-precise, playlist-based duration estimate
return backward.result + expired;
};
/**
* Calculates the duration of a playlist. If a start and end index
* are specified, the duration will be for the subset of the media
* timeline between those two indices. The total duration for live
* playlists is always Infinity.
*
* @param {Object} playlist a media playlist object
* @param {number=} endSequence an exclusive upper
* boundary for the playlist. Defaults to the playlist media
* sequence number plus its length.
* @param {number=} expired the amount of time that has
* dropped off the front of the playlist in a live scenario
* @return {number} the duration between the start index and end
* index.
*/
export const duration = function(playlist, endSequence, expired) {
if (!playlist) {
return 0;
}
if (typeof expired !== 'number') {
expired = 0;
}
// if a slice of the total duration is not requested, use
// playlist-level duration indicators when they're present
if (typeof endSequence === 'undefined') {
// if present, use the duration specified in the playlist
if (playlist.totalDuration) {
return playlist.totalDuration;
}
// duration should be Infinity for live playlists
if (!playlist.endList) {
return window.Infinity;
}
}
// calculate the total duration based on the segment durations
return intervalDuration(
playlist,
endSequence,
expired
);
};
/**
 * Calculate the time between two indexes in the current playlist.
 * Neither the start nor the end index needs to be within the current
 * playlist; in that case, the defaultDuration is used to approximate
 * the durations of the segments outside of it.
*
* @param {Array} options.durationList list to iterate over for durations.
* @param {number} options.defaultDuration duration to use for elements before or after the durationList
* @param {number} options.startIndex partsAndSegments index to start
* @param {number} options.endIndex partsAndSegments index to end.
* @return {number} the number of seconds between startIndex and endIndex
*/
export const sumDurations = function({defaultDuration, durationList, startIndex, endIndex}) {
let durations = 0;
if (startIndex > endIndex) {
[startIndex, endIndex] = [endIndex, startIndex];
}
if (startIndex < 0) {
for (let i = startIndex; i < Math.min(0, endIndex); i++) {
durations += defaultDuration;
}
startIndex = 0;
}
for (let i = startIndex; i < endIndex; i++) {
durations += durationList[i].duration;
}
return durations;
};
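// Worked example (hypothetical list): indexes below zero fall back to
// defaultDuration, so with three 2s entries and a 6s default:
//
//   sumDurations({
//     defaultDuration: 6,
//     durationList: [{ duration: 2 }, { duration: 2 }, { duration: 2 }],
//     startIndex: -1,
//     endIndex: 2
//   }); // => 10 (one 6s default + the 2s durations at indexes 0 and 1)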
/**
* Calculates the playlist end time
*
* @param {Object} playlist a media playlist object
* @param {number=} expired the amount of time that has
* dropped off the front of the playlist in a live scenario
 * @param {boolean=} useSafeLiveEnd a boolean value indicating whether or not the
* playlist end calculation should consider the safe live end
* (truncate the playlist end by three segments). This is normally
* used for calculating the end of the playlist's seekable range.
* This takes into account the value of liveEdgePadding.
* Setting liveEdgePadding to 0 is equivalent to setting this to false.
* @param {number} liveEdgePadding a number indicating how far from the end of the playlist we should be in seconds.
* If this is provided, it is used in the safe live end calculation.
* Setting useSafeLiveEnd=false or liveEdgePadding=0 are equivalent.
* Corresponds to suggestedPresentationDelay in DASH manifests.
* @return {number} the end time of playlist
* @function playlistEnd
*/
export const playlistEnd = function(playlist, expired, useSafeLiveEnd, liveEdgePadding) {
if (!playlist || !playlist.segments) {
return null;
}
if (playlist.endList) {
return duration(playlist);
}
if (expired === null) {
return null;
}
expired = expired || 0;
let lastSegmentTime = intervalDuration(
playlist,
playlist.mediaSequence + playlist.segments.length,
expired
);
if (useSafeLiveEnd) {
liveEdgePadding = typeof liveEdgePadding === 'number' ? liveEdgePadding : liveEdgeDelay(null, playlist);
lastSegmentTime -= liveEdgePadding;
}
// don't return a time less than zero
return Math.max(0, lastSegmentTime);
};
/**
* Calculates the interval of time that is currently seekable in a
* playlist. The returned time ranges are relative to the earliest
* moment in the specified playlist that is still available. A full
* seekable implementation for live streams would need to offset
* these values by the duration of content that has expired from the
* stream.
*
* @param {Object} playlist a media playlist object
* @param {number=} expired the amount of time that has
* dropped off the front of the playlist in a live scenario
* @param {number} liveEdgePadding how far from the end of the playlist we should be in seconds.
* Corresponds to suggestedPresentationDelay in DASH manifests.
* @return {TimeRanges} the periods of time that are valid targets
* for seeking
*/
export const seekable = function(playlist, expired, liveEdgePadding) {
const useSafeLiveEnd = true;
const seekableStart = expired || 0;
const seekableEnd = playlistEnd(playlist, expired, useSafeLiveEnd, liveEdgePadding);
if (seekableEnd === null) {
return createTimeRange();
}
return createTimeRange(seekableStart, seekableEnd);
};
/**
* Determine the index and estimated starting time of the segment that
* contains a specified playback position in a media playlist.
*
* @param {Object} options.playlist the media playlist to query
* @param {number} options.currentTime The number of seconds since the earliest
* possible position to determine the containing segment for
* @param {number} options.startTime the time when the segment/part starts
* @param {number} options.startingSegmentIndex the segment index to start looking at.
* @param {number?} [options.startingPartIndex] the part index to look at within the segment.
*
* @return {Object} an object with partIndex, segmentIndex, and startTime.
*/
export const getMediaInfoForTime = function({
playlist,
currentTime,
startingSegmentIndex,
startingPartIndex,
startTime
}) {
let time = currentTime - startTime;
const partsAndSegments = getPartsAndSegments(playlist);
let startIndex = 0;
for (let i = 0; i < partsAndSegments.length; i++) {
const partAndSegment = partsAndSegments[i];
if (startingSegmentIndex !== partAndSegment.segmentIndex) {
continue;
}
// skip this if part index does not match.
if (typeof startingPartIndex === 'number' && typeof partAndSegment.partIndex === 'number' && startingPartIndex !== partAndSegment.partIndex) {
continue;
}
startIndex = i;
break;
}
if (time < 0) {
// Walk backward from startIndex in the playlist, adding durations
// until we find a segment that contains `time` and return it
if (startIndex > 0) {
for (let i = startIndex - 1; i >= 0; i--) {
const partAndSegment = partsAndSegments[i];
time += partAndSegment.duration;
// TODO: consider not using TIME_FUDGE_FACTOR at all here
if ((time + TIME_FUDGE_FACTOR) > 0) {
return {
partIndex: partAndSegment.partIndex,
segmentIndex: partAndSegment.segmentIndex,
startTime: startTime - sumDurations({
defaultDuration: playlist.targetDuration,
durationList: partsAndSegments,
startIndex,
endIndex: i
})
};
}
}
}
// We were unable to find a good segment within the playlist
// so select the first segment
return {
partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
startTime: currentTime
};
}
  // When startIndex is negative, we first walk forward to the first segment,
// adding target durations. If we "run out of time" before getting to
// the first segment, return the first segment
if (startIndex < 0) {
for (let i = startIndex; i < 0; i++) {
time -= playlist.targetDuration;
if (time < 0) {
return {
partIndex: partsAndSegments[0] && partsAndSegments[0].partIndex || null,
segmentIndex: partsAndSegments[0] && partsAndSegments[0].segmentIndex || 0,
startTime: currentTime
};
}
}
startIndex = 0;
}
// Walk forward from startIndex in the playlist, subtracting durations
// until we find a segment that contains `time` and return it
for (let i = startIndex; i < partsAndSegments.length; i++) {
const partAndSegment = partsAndSegments[i];
time -= partAndSegment.duration;
// TODO: consider not using TIME_FUDGE_FACTOR at all here
if ((time - TIME_FUDGE_FACTOR) < 0) {
return {
partIndex: partAndSegment.partIndex,
segmentIndex: partAndSegment.segmentIndex,
startTime: startTime + sumDurations({
defaultDuration: playlist.targetDuration,
durationList: partsAndSegments,
startIndex,
endIndex: i
})
};
}
}
// We are out of possible candidates so load the last one...
return {
segmentIndex: partsAndSegments[partsAndSegments.length - 1].segmentIndex,
partIndex: partsAndSegments[partsAndSegments.length - 1].partIndex,
startTime: currentTime
};
};
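// Worked example (hypothetical playlist of three 6s segments, no parts):
// starting from segment 0 at startTime 0 and asking for currentTime 7,
// the forward walk lands in the second segment, which starts at 6:
//
//   getMediaInfoForTime({
//     playlist: { segments: [{ duration: 6 }, { duration: 6 }, { duration: 6 }] },
//     currentTime: 7,
//     startingSegmentIndex: 0,
//     startTime: 0
//   }); // => { partIndex: null, segmentIndex: 1, startTime: 6 }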
/**
* Check whether the playlist is blacklisted or not.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is blacklisted or not
* @function isBlacklisted
*/
export const isBlacklisted = function(playlist) {
return playlist.excludeUntil && playlist.excludeUntil > Date.now();
};
/**
* Check whether the playlist is compatible with current playback configuration or has
* been blacklisted permanently for being incompatible.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is incompatible or not
* @function isIncompatible
*/
export const isIncompatible = function(playlist) {
return playlist.excludeUntil && playlist.excludeUntil === Infinity;
};
/**
* Check whether the playlist is enabled or not.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is enabled or not
* @function isEnabled
*/
export const isEnabled = function(playlist) {
const blacklisted = isBlacklisted(playlist);
return (!playlist.disabled && !blacklisted);
};
/**
* Check whether the playlist has been manually disabled through the representations api.
*
* @param {Object} playlist the media playlist object
* @return {boolean} whether the playlist is disabled manually or not
* @function isDisabled
*/
export const isDisabled = function(playlist) {
return playlist.disabled;
};
/**
* Returns whether the current playlist is an AES encrypted HLS stream
*
 * @param {Object} media the media playlist object
 * @return {boolean} true if it's an AES encrypted HLS stream
*/
export const isAes = function(media) {
for (let i = 0; i < media.segments.length; i++) {
if (media.segments[i].key) {
return true;
}
}
return false;
};
/**
* Checks if the playlist has a value for the specified attribute
*
* @param {string} attr
* Attribute to check for
* @param {Object} playlist
* The media playlist object
* @return {boolean}
* Whether the playlist contains a value for the attribute or not
* @function hasAttribute
*/
export const hasAttribute = function(attr, playlist) {
return playlist.attributes && playlist.attributes[attr];
};
/**
* Estimates the time required to complete a segment download from the specified playlist
*
* @param {number} segmentDuration
* Duration of requested segment
* @param {number} bandwidth
* Current measured bandwidth of the player
* @param {Object} playlist
* The media playlist object
* @param {number=} bytesReceived
* Number of bytes already received for the request. Defaults to 0
* @return {number|NaN}
* The estimated time to request the segment. NaN if bandwidth information for
* the given playlist is unavailable
* @function estimateSegmentRequestTime
*/
export const estimateSegmentRequestTime = function(
segmentDuration,
bandwidth,
playlist,
bytesReceived = 0
) {
if (!hasAttribute('BANDWIDTH', playlist)) {
return NaN;
}
const size = segmentDuration * playlist.attributes.BANDWIDTH;
return (size - (bytesReceived * 8)) / bandwidth;
};
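// Arithmetic sketch (hypothetical numbers): a 6s segment from a 1 Mbps
// rendition is ~6,000,000 bits, so over a 2 Mbps measured connection:
//
//   estimateSegmentRequestTime(6, 2e6, { attributes: { BANDWIDTH: 1e6 } });
//   // => 3 (seconds); with 250,000 bytes already received:
//   estimateSegmentRequestTime(6, 2e6, { attributes: { BANDWIDTH: 1e6 } }, 250000);
//   // => 2 ((6e6 - 2e6 bits) / 2e6)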
/**
 * Returns whether the current playlist is the lowest rendition
 *
 * @param {Object} master the master playlist object
 * @param {Object} media the current media playlist object
 * @return {boolean} true if on lowest rendition
*/
export const isLowestEnabledRendition = (master, media) => {
if (master.playlists.length === 1) {
return true;
}
const currentBandwidth = media.attributes.BANDWIDTH || Number.MAX_VALUE;
return (master.playlists.filter((playlist) => {
if (!isEnabled(playlist)) {
return false;
}
return (playlist.attributes.BANDWIDTH || 0) < currentBandwidth;
}).length === 0);
};
export const playlistMatch = (a, b) => {
  // both playlists are null
// or only one playlist is non-null
// no match
if (!a && !b || (!a && b) || (a && !b)) {
return false;
}
// playlist objects are the same, match
if (a === b) {
return true;
}
// first try to use id as it should be the most
// accurate
if (a.id && b.id && a.id === b.id) {
return true;
}
  // next try to use resolvedUri as it should be the
// second most accurate.
if (a.resolvedUri && b.resolvedUri && a.resolvedUri === b.resolvedUri) {
return true;
}
// finally try to use uri as it should be accurate
// but might miss a few cases for relative uris
if (a.uri && b.uri && a.uri === b.uri) {
return true;
}
return false;
};
const someAudioVariant = function(master, callback) {
const AUDIO = master && master.mediaGroups && master.mediaGroups.AUDIO || {};
let found = false;
for (const groupName in AUDIO) {
for (const label in AUDIO[groupName]) {
found = callback(AUDIO[groupName][label]);
if (found) {
break;
}
}
if (found) {
break;
}
}
return !!found;
};
export const isAudioOnly = (master) => {
// we are audio only if we have no main playlists but do
// have media group playlists.
if (!master || !master.playlists || !master.playlists.length) {
// without audio variants or playlists this
// is not an audio only master.
const found = someAudioVariant(master, (variant) =>
(variant.playlists && variant.playlists.length) || variant.uri);
return found;
}
// if every playlist has only an audio codec it is audio only
for (let i = 0; i < master.playlists.length; i++) {
const playlist = master.playlists[i];
const CODECS = playlist.attributes && playlist.attributes.CODECS;
// all codecs are audio, this is an audio playlist.
if (CODECS && CODECS.split(',').every((c) => isAudioCodec(c))) {
continue;
}
    // if the playlist is in an audio group, it is audio only
const found = someAudioVariant(master, (variant) => playlistMatch(playlist, variant));
if (found) {
continue;
}
// if we make it here this playlist isn't audio and we
// are not audio only
return false;
}
// if we make it past every playlist without returning, then
// this is an audio only playlist.
return true;
};
// exports
export default {
liveEdgeDelay,
duration,
seekable,
getMediaInfoForTime,
isEnabled,
isDisabled,
isBlacklisted,
isIncompatible,
playlistEnd,
isAes,
hasAttribute,
estimateSegmentRequestTime,
isLowestEnabledRendition,
isAudioOnly,
playlistMatch
};

447
node_modules/@videojs/http-streaming/src/ranges.js generated vendored Normal file
View file

@ -0,0 +1,447 @@
/**
* ranges
*
* Utilities for working with TimeRanges.
*
*/
import videojs from 'video.js';
// Fudge factor to account for TimeRanges rounding
export const TIME_FUDGE_FACTOR = 1 / 30;
// Comparisons between time values such as current time and the end of the buffered range
// can be misleading because of precision differences or when the current media has poorly
// aligned audio and video, which can cause values to be slightly off from what you would
// expect. This value is what we consider to be safe to use in such comparisons to account
// for these scenarios.
export const SAFE_TIME_DELTA = TIME_FUDGE_FACTOR * 3;
/**
* Clamps a value to within a range
*
* @param {number} num - the value to clamp
 * @param {number[]} range - the inclusive [start, end] range to clamp within
* @return {number}
*/
const clamp = function(num, [start, end]) {
return Math.min(Math.max(start, num), end);
};
const filterRanges = function(timeRanges, predicate) {
const results = [];
let i;
if (timeRanges && timeRanges.length) {
// Search for ranges that match the predicate
for (i = 0; i < timeRanges.length; i++) {
if (predicate(timeRanges.start(i), timeRanges.end(i))) {
results.push([timeRanges.start(i), timeRanges.end(i)]);
}
}
}
return videojs.createTimeRanges(results);
};
/**
* Attempts to find the buffered TimeRange that contains the specified
* time.
*
* @param {TimeRanges} buffered - the TimeRanges object to query
* @param {number} time - the time to filter on.
* @return {TimeRanges} a new TimeRanges object
*/
export const findRange = function(buffered, time) {
return filterRanges(buffered, function(start, end) {
return start - SAFE_TIME_DELTA <= time &&
end + SAFE_TIME_DELTA >= time;
});
};
/**
* Returns the TimeRanges that begin later than the specified time.
*
* @param {TimeRanges} timeRanges - the TimeRanges object to query
* @param {number} time - the time to filter on.
* @return {TimeRanges} a new TimeRanges object.
*/
export const findNextRange = function(timeRanges, time) {
return filterRanges(timeRanges, function(start) {
return start - TIME_FUDGE_FACTOR >= time;
});
};
/**
* Returns gaps within a list of TimeRanges
*
* @param {TimeRanges} buffered - the TimeRanges object
* @return {TimeRanges} a TimeRanges object of gaps
*/
export const findGaps = function(buffered) {
if (buffered.length < 2) {
return videojs.createTimeRanges();
}
const ranges = [];
for (let i = 1; i < buffered.length; i++) {
const start = buffered.end(i - 1);
const end = buffered.start(i);
ranges.push([start, end]);
}
return videojs.createTimeRanges(ranges);
};
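// Example (hypothetical buffer): two buffered ranges produce one gap between
// the end of the first and the start of the second:
//
//   findGaps(videojs.createTimeRanges([[0, 10], [10.5, 20]]));
//   // => a TimeRanges equivalent to [[10, 10.5]]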
/**
 * Search for a likely end time for the segment that was just appended
 * based on the state of the `buffered` property before and after the
 * append. If we find only one such uncommon end-point, return it.
*
* @param {TimeRanges} original - the buffered time ranges before the update
* @param {TimeRanges} update - the buffered time ranges after the update
* @return {number|null} the end time added between `original` and `update`,
* or null if one cannot be unambiguously determined.
*/
export const findSoleUncommonTimeRangesEnd = function(original, update) {
let i;
let start;
let end;
const result = [];
const edges = [];
// In order to qualify as a possible candidate, the end point must:
// 1) Not have already existed in the `original` ranges
// 2) Not result from the shrinking of a range that already existed
// in the `original` ranges
// 3) Not be contained inside of a range that existed in `original`
const overlapsCurrentEnd = function(span) {
return (span[0] <= end && span[1] >= end);
};
if (original) {
// Save all the edges in the `original` TimeRanges object
for (i = 0; i < original.length; i++) {
start = original.start(i);
end = original.end(i);
edges.push([start, end]);
}
}
if (update) {
// Save any end-points in `update` that are not in the `original`
// TimeRanges object
for (i = 0; i < update.length; i++) {
start = update.start(i);
end = update.end(i);
if (edges.some(overlapsCurrentEnd)) {
continue;
}
// at this point it must be a unique non-shrinking end edge
result.push(end);
}
}
  // we err on the side of caution and return null if we didn't find
// exactly *one* differing end edge in the search above
if (result.length !== 1) {
return null;
}
return result[0];
};
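// Example (hypothetical append): if the buffer grows from [[0, 10]] to
// [[0, 20]], the end point 20 is the only edge not present in (or inside)
// the original ranges, so it is returned:
//
//   findSoleUncommonTimeRangesEnd(
//     videojs.createTimeRanges([[0, 10]]),
//     videojs.createTimeRanges([[0, 20]])
//   ); // => 20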
/**
* Calculate the intersection of two TimeRanges
*
* @param {TimeRanges} bufferA
* @param {TimeRanges} bufferB
 * @return {TimeRanges} The intersection of `bufferA` with `bufferB`
*/
export const bufferIntersection = function(bufferA, bufferB) {
let start = null;
let end = null;
let arity = 0;
const extents = [];
const ranges = [];
if (!bufferA || !bufferA.length || !bufferB || !bufferB.length) {
return videojs.createTimeRange();
}
// Handle the case where we have both buffers and create an
// intersection of the two
let count = bufferA.length;
// A) Gather up all start and end times
while (count--) {
extents.push({time: bufferA.start(count), type: 'start'});
extents.push({time: bufferA.end(count), type: 'end'});
}
count = bufferB.length;
while (count--) {
extents.push({time: bufferB.start(count), type: 'start'});
extents.push({time: bufferB.end(count), type: 'end'});
}
// B) Sort them by time
extents.sort(function(a, b) {
return a.time - b.time;
});
// C) Go along one by one incrementing arity for start and decrementing
// arity for ends
for (count = 0; count < extents.length; count++) {
if (extents[count].type === 'start') {
arity++;
// D) If arity is ever incremented to 2 we are entering an
// overlapping range
if (arity === 2) {
start = extents[count].time;
}
} else if (extents[count].type === 'end') {
arity--;
      // E) If arity is ever decremented to 1 we are leaving an
// overlapping range
if (arity === 1) {
end = extents[count].time;
}
}
// F) Record overlapping ranges
if (start !== null && end !== null) {
ranges.push([start, end]);
start = null;
end = null;
}
}
return videojs.createTimeRanges(ranges);
};
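// Example (hypothetical buffers): overlapping portions of the two range sets
// survive, everything else is dropped:
//
//   bufferIntersection(
//     videojs.createTimeRanges([[0, 10], [20, 30]]),
//     videojs.createTimeRanges([[5, 25]])
//   ); // => a TimeRanges equivalent to [[5, 10], [20, 25]]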
/**
* Calculates the percentage of `segmentRange` that overlaps the
* `buffered` time ranges.
*
 * @param {TimeRanges} adjustedRange - the time range that the segment
 * covers adjusted according to currentTime
 * @param {TimeRanges} referenceRange - the original time range that the
 * segment covers
* @param {number} currentTime - time in seconds where the current playback
* is at
* @param {TimeRanges} buffered - the currently buffered time ranges
* @return {number} percent of the segment currently buffered
*/
const calculateBufferedPercent = function(
adjustedRange,
referenceRange,
currentTime,
buffered
) {
const referenceDuration = referenceRange.end(0) - referenceRange.start(0);
const adjustedDuration = adjustedRange.end(0) - adjustedRange.start(0);
const bufferMissingFromAdjusted = referenceDuration - adjustedDuration;
const adjustedIntersection = bufferIntersection(adjustedRange, buffered);
const referenceIntersection = bufferIntersection(referenceRange, buffered);
let adjustedOverlap = 0;
let referenceOverlap = 0;
let count = adjustedIntersection.length;
while (count--) {
adjustedOverlap += adjustedIntersection.end(count) -
adjustedIntersection.start(count);
// If the current overlap segment starts at currentTime, then increase the
// overlap duration so that it actually starts at the beginning of referenceRange
// by including the difference between the two Range's durations
// This is a workaround for the way Flash has no buffer before currentTime
// TODO: see if this is still necessary since Flash isn't included
if (adjustedIntersection.start(count) === currentTime) {
adjustedOverlap += bufferMissingFromAdjusted;
}
}
count = referenceIntersection.length;
while (count--) {
referenceOverlap += referenceIntersection.end(count) -
referenceIntersection.start(count);
}
// Use whichever value is larger for the percentage-buffered since that value
// is likely more accurate because the only way adjustedOverlap can exceed
// referenceOverlap is through the back-buffer compensation applied above
return Math.max(adjustedOverlap, referenceOverlap) / referenceDuration * 100;
};
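/*
 * Worked example (hypothetical numbers): with referenceRange [0, 10],
 * adjustedRange [4, 10], currentTime 4 and buffered [[0, 6]]:
 *
 *   referenceDuration         = 10 - 0 = 10
 *   bufferMissingFromAdjusted = 10 - 6 = 4
 *   adjustedOverlap           = (6 - 4) + 4 = 6  (the overlap starts at
 *                               currentTime, so the back-buffer is added)
 *   referenceOverlap          = 6 - 0 = 6
 *   percent                   = Math.max(6, 6) / 10 * 100 = 60
 */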
/**
* Return the amount of a range specified by the startOfSegment and segmentDuration
* overlaps the current buffered content.
*
* @param {number} startOfSegment - the time where the segment begins
* @param {number} segmentDuration - the duration of the segment in seconds
* @param {number} currentTime - time in seconds where the current playback
* is at
* @param {TimeRanges} buffered - the state of the buffer
* @return {number} percentage of the segment's time range that is
* already in `buffered`
*/
export const getSegmentBufferedPercent = function(
startOfSegment,
segmentDuration,
currentTime,
buffered
) {
const endOfSegment = startOfSegment + segmentDuration;
// The entire time range of the segment
const originalSegmentRange = videojs.createTimeRanges([[
startOfSegment,
endOfSegment
]]);
// The adjusted segment time range that is set up such that it starts
// no earlier than currentTime
// Flash has no notion of a back-buffer so adjustedSegmentRange adjusts
// for that and the function will still return 100% if only half of a
// segment is actually in the buffer as long as the currentTime is also
// half-way through the segment
const adjustedSegmentRange = videojs.createTimeRanges([[
clamp(startOfSegment, [currentTime, endOfSegment]),
endOfSegment
]]);
// This condition happens when the currentTime is beyond the segment's
// end time
if (adjustedSegmentRange.start(0) === adjustedSegmentRange.end(0)) {
return 0;
}
const percent = calculateBufferedPercent(
adjustedSegmentRange,
originalSegmentRange,
currentTime,
buffered
);
// If the segment is reported as having a zero duration, return 0%
// since it is likely that we will need to fetch the segment
if (isNaN(percent) || percent === Infinity || percent === -Infinity) {
return 0;
}
return percent;
};
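/*
 * Example (hypothetical values): a 10 second segment starting at 0, with
 * playback at 4 and [[0, 6]] buffered, is reported as 60% buffered:
 *
 *   getSegmentBufferedPercent(0, 10, 4, videojs.createTimeRanges([[0, 6]]));
 *   // => 60
 */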
/**
* Gets a human readable string for a TimeRange
*
* @param {TimeRange} range
* @return {string} a human readable string
*/
export const printableRange = (range) => {
const strArr = [];
if (!range || !range.length) {
return '';
}
for (let i = 0; i < range.length; i++) {
strArr.push(range.start(i) + ' => ' + range.end(i));
}
return strArr.join(', ');
};
/**
* Calculates the amount of time left in seconds until the player hits the end of the
* buffer and causes a rebuffer
*
* @param {TimeRange} buffered
* The state of the buffer
 * @param {number} currentTime
* The current time of the player
* @param {number} playbackRate
* The current playback rate of the player. Defaults to 1.
* @return {number}
* Time until the player has to start rebuffering in seconds.
* @function timeUntilRebuffer
*/
export const timeUntilRebuffer = function(buffered, currentTime, playbackRate = 1) {
const bufferedEnd = buffered.length ? buffered.end(buffered.length - 1) : 0;
return (bufferedEnd - currentTime) / playbackRate;
};
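/*
 * Example: with [[0, 30]] buffered, playback at 21 and a playback rate of
 * 1.5, the player rebuffers in (30 - 21) / 1.5 = 6 seconds:
 *
 *   timeUntilRebuffer(videojs.createTimeRanges([[0, 30]]), 21, 1.5); // => 6
 */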
/**
* Converts a TimeRanges object into an array representation
*
* @param {TimeRanges} timeRanges
* @return {Array}
*/
export const timeRangesToArray = (timeRanges) => {
const timeRangesList = [];
for (let i = 0; i < timeRanges.length; i++) {
timeRangesList.push({
start: timeRanges.start(i),
end: timeRanges.end(i)
});
}
return timeRangesList;
};
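/*
 * Example:
 *
 *   timeRangesToArray(videojs.createTimeRanges([[0, 5], [6, 8]]));
 *   // => [{ start: 0, end: 5 }, { start: 6, end: 8 }]
 */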
/**
* Determines if two time range objects are different.
*
* @param {TimeRange} a
* the first time range object to check
*
* @param {TimeRange} b
* the second time range object to check
*
* @return {Boolean}
* Whether the time range objects differ
*/
export const isRangeDifferent = function(a, b) {
// same object
if (a === b) {
return false;
}
// one or the other is undefined
if ((!a && b) || (!b && a)) {
return true;
}
// length is different
if (a.length !== b.length) {
return true;
}
// see if any start/end pair is different
for (let i = 0; i < a.length; i++) {
if (a.start(i) !== b.start(i) || a.end(i) !== b.end(i)) {
return true;
}
}
// if the length and every pair is the same
// this is the same time range
return false;
};
export const lastBufferedEnd = function(a) {
if (!a || !a.length || !a.end) {
return;
}
return a.end(a.length - 1);
};

View file

@ -0,0 +1,127 @@
import videojs from 'video.js';
const defaultOptions = {
errorInterval: 30,
getSource(next) {
const tech = this.tech({ IWillNotUseThisInPlugins: true });
const sourceObj = tech.currentSource_ || this.currentSource();
return next(sourceObj);
}
};
/**
* Main entry point for the plugin
*
* @param {Player} player a reference to a videojs Player instance
* @param {Object} [options] an object with plugin options
* @private
*/
const initPlugin = function(player, options) {
let lastCalled = 0;
let seekTo = 0;
const localOptions = videojs.mergeOptions(defaultOptions, options);
player.ready(() => {
player.trigger({type: 'usage', name: 'vhs-error-reload-initialized'});
player.trigger({type: 'usage', name: 'hls-error-reload-initialized'});
});
/**
* Player modifications to perform that must wait until `loadedmetadata`
* has been triggered
*
* @private
*/
const loadedMetadataHandler = function() {
if (seekTo) {
player.currentTime(seekTo);
}
};
/**
* Set the source on the player element, play, and seek if necessary
*
* @param {Object} sourceObj An object specifying the source url and mime-type to play
* @private
*/
const setSource = function(sourceObj) {
if (sourceObj === null || sourceObj === undefined) {
return;
}
seekTo = (player.duration() !== Infinity && player.currentTime()) || 0;
player.one('loadedmetadata', loadedMetadataHandler);
player.src(sourceObj);
player.trigger({type: 'usage', name: 'vhs-error-reload'});
player.trigger({type: 'usage', name: 'hls-error-reload'});
player.play();
};
/**
* Attempt to get a source from either the built-in getSource function
* or a custom function provided via the options
*
* @private
*/
const errorHandler = function() {
// Do not attempt to reload the source if a source-reload occurred before
// 'errorInterval' time has elapsed since the last source-reload
if (Date.now() - lastCalled < localOptions.errorInterval * 1000) {
player.trigger({type: 'usage', name: 'vhs-error-reload-canceled'});
player.trigger({type: 'usage', name: 'hls-error-reload-canceled'});
return;
}
if (!localOptions.getSource ||
typeof localOptions.getSource !== 'function') {
videojs.log.error('ERROR: reloadSourceOnError - The option getSource must be a function!');
return;
}
lastCalled = Date.now();
return localOptions.getSource.call(player, setSource);
};
/**
* Unbind any event handlers that were bound by the plugin
*
* @private
*/
const cleanupEvents = function() {
player.off('loadedmetadata', loadedMetadataHandler);
player.off('error', errorHandler);
player.off('dispose', cleanupEvents);
};
/**
* Cleanup before re-initializing the plugin
*
* @param {Object} [newOptions] an object with plugin options
* @private
*/
const reinitPlugin = function(newOptions) {
cleanupEvents();
initPlugin(player, newOptions);
};
player.on('error', errorHandler);
player.on('dispose', cleanupEvents);
// Overwrite the plugin function so that we can correctly cleanup before
// initializing the plugin
player.reloadSourceOnError = reinitPlugin;
};
/**
* Reload the source when an error is detected as long as there
* wasn't an error previously within the last 30 seconds
*
* @param {Object} [options] an object with plugin options
*/
const reloadSourceOnError = function(options) {
initPlugin(this, options);
};
export default reloadSourceOnError;
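/*
 * Usage sketch (assumes the function has been registered as a video.js
 * plugin elsewhere, e.g. videojs.registerPlugin('reloadSourceOnError', ...);
 * the player id is hypothetical):
 *
 *   const player = videojs('example-player');
 *   // retry at most once every 10 seconds instead of the default 30
 *   player.reloadSourceOnError({ errorInterval: 10 });
 */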

View file

@ -0,0 +1,113 @@
import { isIncompatible, isEnabled, isAudioOnly } from './playlist.js';
import { codecsForPlaylist } from './util/codecs.js';
/**
* Returns a function that acts as the Enable/disable playlist function.
*
* @param {PlaylistLoader} loader - The master playlist loader
* @param {string} playlistID - id of the playlist
* @param {Function} changePlaylistFn - A function to be called after a
* playlist's enabled-state has been changed. Will NOT be called if a
* playlist's enabled-state is unchanged
* @param {boolean=} enable - Value to set the playlist enabled-state to
* or if undefined returns the current enabled-state for the playlist
* @return {Function} Function for setting/getting enabled
*/
const enableFunction = (loader, playlistID, changePlaylistFn) => (enable) => {
const playlist = loader.master.playlists[playlistID];
const incompatible = isIncompatible(playlist);
const currentlyEnabled = isEnabled(playlist);
if (typeof enable === 'undefined') {
return currentlyEnabled;
}
if (enable) {
delete playlist.disabled;
} else {
playlist.disabled = true;
}
if (enable !== currentlyEnabled && !incompatible) {
// Ensure the outside world knows about our changes
changePlaylistFn();
if (enable) {
loader.trigger('renditionenabled');
} else {
loader.trigger('renditiondisabled');
}
}
return enable;
};
/**
* The representation object encapsulates the publicly visible information
* in a media playlist along with a setter/getter-type function (enabled)
* for changing the enabled-state of a particular playlist entry
*
* @class Representation
*/
class Representation {
constructor(vhsHandler, playlist, id) {
const {
masterPlaylistController_: mpc,
options_: { smoothQualityChange }
} = vhsHandler;
// Get a reference to a bound version of the quality change function
const changeType = smoothQualityChange ? 'smooth' : 'fast';
const qualityChangeFunction = mpc[`${changeType}QualityChange_`].bind(mpc);
// some playlist attributes are optional
if (playlist.attributes) {
const resolution = playlist.attributes.RESOLUTION;
this.width = resolution && resolution.width;
this.height = resolution && resolution.height;
this.bandwidth = playlist.attributes.BANDWIDTH;
}
this.codecs = codecsForPlaylist(mpc.master(), playlist);
this.playlist = playlist;
// The id is simply the ordinality of the media playlist
// within the master playlist
this.id = id;
// Partially-apply the enableFunction to create a playlist-
// specific variant
this.enabled = enableFunction(
vhsHandler.playlists,
playlist.id,
qualityChangeFunction
);
}
}
/**
* A mixin function that adds the `representations` api to an instance
* of the VhsHandler class
*
* @param {VhsHandler} vhsHandler - An instance of VhsHandler to add the
* representation API into
*/
const renditionSelectionMixin = function(vhsHandler) {
// Add a single API-specific function to the VhsHandler instance
vhsHandler.representations = () => {
const master = vhsHandler.masterPlaylistController_.master();
const playlists = isAudioOnly(master) ?
vhsHandler.masterPlaylistController_.getAudioTrackPlaylists_() :
master.playlists;
if (!playlists) {
return [];
}
return playlists
.filter((media) => !isIncompatible(media))
.map((e) => new Representation(vhsHandler, e, e.id));
};
};
export default renditionSelectionMixin;
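/*
 * Usage sketch (assumes `vhsHandler` is a VhsHandler instance like the one
 * expected by the constructor above):
 *
 *   renditionSelectionMixin(vhsHandler);
 *   vhsHandler.representations().forEach((rendition) => {
 *     // enable only renditions at or below 720p (height may be undefined
 *     // for audio-only playlists, which this sketch ignores)
 *     rendition.enabled(!!rendition.height && rendition.height <= 720);
 *   });
 */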

View file

@ -0,0 +1,36 @@
/**
* @file resolve-url.js - Handling how URLs are resolved and manipulated
*/
import _resolveUrl from '@videojs/vhs-utils/es/resolve-url.js';
export const resolveUrl = _resolveUrl;
/**
* Checks whether xhr request was redirected and returns correct url depending
* on `handleManifestRedirects` option
*
* @api private
*
 * @param {boolean} handleManifestRedirect - whether the redirected URL should be used
 * @param {string} url - the URL being requested
 * @param {XMLHttpRequest} req - xhr request result
*
* @return {string}
*/
export const resolveManifestRedirect = (handleManifestRedirect, url, req) => {
// To understand how the responseURL below is set and generated:
// - https://fetch.spec.whatwg.org/#concept-response-url
// - https://fetch.spec.whatwg.org/#atomic-http-redirect-handling
if (
handleManifestRedirect &&
req &&
req.responseURL &&
url !== req.responseURL
) {
return req.responseURL;
}
return url;
};
export default resolveUrl;
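/*
 * Example (hypothetical URLs): when redirect handling is enabled and the
 * request was redirected, the final URL wins:
 *
 *   const req = { responseURL: 'https://cdn.example.com/master.m3u8' };
 *
 *   resolveManifestRedirect(true, 'https://origin.example.com/master.m3u8', req);
 *   // => 'https://cdn.example.com/master.m3u8'
 *   resolveManifestRedirect(false, 'https://origin.example.com/master.m3u8', req);
 *   // => 'https://origin.example.com/master.m3u8'
 */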

File diff suppressed because it is too large

View file

@ -0,0 +1,271 @@
import TransmuxWorker from 'worker!./transmuxer-worker.js';
export const handleData_ = (event, transmuxedData, callback) => {
const {
type,
initSegment,
captions,
captionStreams,
metadata,
videoFrameDtsTime,
videoFramePtsTime
} = event.data.segment;
transmuxedData.buffer.push({
captions,
captionStreams,
metadata
});
const boxes = event.data.segment.boxes || {
data: event.data.segment.data
};
const result = {
type,
// cast ArrayBuffer to TypedArray
data: new Uint8Array(
boxes.data,
boxes.data.byteOffset,
boxes.data.byteLength
),
initSegment: new Uint8Array(
initSegment.data,
initSegment.byteOffset,
initSegment.byteLength
)
};
if (typeof videoFrameDtsTime !== 'undefined') {
result.videoFrameDtsTime = videoFrameDtsTime;
}
if (typeof videoFramePtsTime !== 'undefined') {
result.videoFramePtsTime = videoFramePtsTime;
}
callback(result);
};
export const handleDone_ = ({
transmuxedData,
callback
}) => {
// Previously we only returned data on data events,
// not on done events. Clear out the buffer to keep that consistent.
transmuxedData.buffer = [];
// all buffers should have been flushed from the muxer, so start processing anything we
// have received
callback(transmuxedData);
};
export const handleGopInfo_ = (event, transmuxedData) => {
transmuxedData.gopInfo = event.data.gopInfo;
};
export const processTransmux = (options) => {
const {
transmuxer,
bytes,
audioAppendStart,
gopsToAlignWith,
remux,
onData,
onTrackInfo,
onAudioTimingInfo,
onVideoTimingInfo,
onVideoSegmentTimingInfo,
onAudioSegmentTimingInfo,
onId3,
onCaptions,
onDone,
onEndedTimeline,
isEndOfTimeline
} = options;
const transmuxedData = {
buffer: []
};
let waitForEndedTimelineEvent = isEndOfTimeline;
const handleMessage = (event) => {
if (transmuxer.currentTransmux !== options) {
// disposed
return;
}
if (event.data.action === 'data') {
handleData_(event, transmuxedData, onData);
}
if (event.data.action === 'trackinfo') {
onTrackInfo(event.data.trackInfo);
}
if (event.data.action === 'gopInfo') {
handleGopInfo_(event, transmuxedData);
}
if (event.data.action === 'audioTimingInfo') {
onAudioTimingInfo(event.data.audioTimingInfo);
}
if (event.data.action === 'videoTimingInfo') {
onVideoTimingInfo(event.data.videoTimingInfo);
}
if (event.data.action === 'videoSegmentTimingInfo') {
onVideoSegmentTimingInfo(event.data.videoSegmentTimingInfo);
}
if (event.data.action === 'audioSegmentTimingInfo') {
onAudioSegmentTimingInfo(event.data.audioSegmentTimingInfo);
}
if (event.data.action === 'id3Frame') {
onId3([event.data.id3Frame], event.data.id3Frame.dispatchType);
}
if (event.data.action === 'caption') {
onCaptions(event.data.caption);
}
if (event.data.action === 'endedtimeline') {
waitForEndedTimelineEvent = false;
onEndedTimeline();
}
// wait for the transmuxed event since we may have audio and video
if (event.data.type !== 'transmuxed') {
return;
}
// If the "endedtimeline" event has not yet fired, and this segment represents the end
// of a timeline, that means there may still be data events before the segment
// processing can be considered complete. In that case, the final event should be
// an "endedtimeline" event with the type "transmuxed."
if (waitForEndedTimelineEvent) {
return;
}
transmuxer.onmessage = null;
handleDone_({
transmuxedData,
callback: onDone
});
/* eslint-disable no-use-before-define */
dequeue(transmuxer);
/* eslint-enable */
};
transmuxer.onmessage = handleMessage;
if (audioAppendStart) {
transmuxer.postMessage({
action: 'setAudioAppendStart',
appendStart: audioAppendStart
});
}
// allow empty arrays to be passed to clear out GOPs
if (Array.isArray(gopsToAlignWith)) {
transmuxer.postMessage({
action: 'alignGopsWith',
gopsToAlignWith
});
}
if (typeof remux !== 'undefined') {
transmuxer.postMessage({
action: 'setRemux',
remux
});
}
if (bytes.byteLength) {
const buffer = bytes instanceof ArrayBuffer ? bytes : bytes.buffer;
const byteOffset = bytes instanceof ArrayBuffer ? 0 : bytes.byteOffset;
transmuxer.postMessage(
{
action: 'push',
// Send the typed-array of data as an ArrayBuffer so that
// it can be sent as a "Transferable" and avoid the costly
// memory copy
data: buffer,
// To recreate the original typed-array, we need information
// about what portion of the ArrayBuffer it was a view into
byteOffset,
byteLength: bytes.byteLength
},
[ buffer ]
);
}
if (isEndOfTimeline) {
transmuxer.postMessage({ action: 'endTimeline' });
}
// even if we didn't push any bytes, we have to make sure we flush in case we reached
// the end of the segment
transmuxer.postMessage({ action: 'flush' });
};
export const dequeue = (transmuxer) => {
transmuxer.currentTransmux = null;
if (transmuxer.transmuxQueue.length) {
transmuxer.currentTransmux = transmuxer.transmuxQueue.shift();
if (typeof transmuxer.currentTransmux === 'function') {
transmuxer.currentTransmux();
} else {
processTransmux(transmuxer.currentTransmux);
}
}
};
export const processAction = (transmuxer, action) => {
transmuxer.postMessage({ action });
dequeue(transmuxer);
};
export const enqueueAction = (action, transmuxer) => {
if (!transmuxer.currentTransmux) {
transmuxer.currentTransmux = action;
processAction(transmuxer, action);
return;
}
transmuxer.transmuxQueue.push(processAction.bind(null, transmuxer, action));
};
export const reset = (transmuxer) => {
enqueueAction('reset', transmuxer);
};
export const endTimeline = (transmuxer) => {
enqueueAction('endTimeline', transmuxer);
};
export const transmux = (options) => {
if (!options.transmuxer.currentTransmux) {
options.transmuxer.currentTransmux = options;
processTransmux(options);
return;
}
options.transmuxer.transmuxQueue.push(options);
};
export const createTransmuxer = (options) => {
const transmuxer = new TransmuxWorker();
transmuxer.currentTransmux = null;
transmuxer.transmuxQueue = [];
const term = transmuxer.terminate;
transmuxer.terminate = () => {
transmuxer.currentTransmux = null;
transmuxer.transmuxQueue.length = 0;
return term.call(transmuxer);
};
transmuxer.postMessage({action: 'init', options});
return transmuxer;
};
export default {
reset,
endTimeline,
transmux,
createTransmuxer
};
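/*
 * Usage sketch (callbacks trimmed for brevity; a real caller such as the
 * segment loader supplies the full set of handlers, and `segmentBytes` is
 * a hypothetical Uint8Array of MPEG-TS data):
 *
 *   const transmuxer = createTransmuxer({ remux: true });
 *
 *   transmux({
 *     transmuxer,
 *     bytes: segmentBytes,
 *     audioAppendStart: null,
 *     gopsToAlignWith: null,
 *     remux: true,
 *     isEndOfTimeline: false,
 *     onData: (result) => {
 *       // append result.data to a source buffer here
 *     },
 *     onDone: (data) => transmuxer.terminate()
 *   });
 */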

View file

@ -0,0 +1,867 @@
/**
* @file source-updater.js
*/
import videojs from 'video.js';
import logger from './util/logger';
import noop from './util/noop';
import { bufferIntersection } from './ranges.js';
import {getMimeForCodec} from '@videojs/vhs-utils/es/codecs.js';
import window from 'global/window';
import toTitleCase from './util/to-title-case.js';
import { QUOTA_EXCEEDED_ERR } from './error-codes';
const bufferTypes = [
'video',
'audio'
];
const updating = (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
return (sourceBuffer && sourceBuffer.updating) || sourceUpdater.queuePending[type];
};
const nextQueueIndexOfType = (type, queue) => {
for (let i = 0; i < queue.length; i++) {
const queueEntry = queue[i];
if (queueEntry.type === 'mediaSource') {
// If the next entry is a media source entry (uses multiple source buffers), block
// processing to allow it to go through first.
return null;
}
if (queueEntry.type === type) {
return i;
}
}
return null;
};
const shiftQueue = (type, sourceUpdater) => {
if (sourceUpdater.queue.length === 0) {
return;
}
let queueIndex = 0;
let queueEntry = sourceUpdater.queue[queueIndex];
if (queueEntry.type === 'mediaSource') {
if (!sourceUpdater.updating() && sourceUpdater.mediaSource.readyState !== 'closed') {
sourceUpdater.queue.shift();
queueEntry.action(sourceUpdater);
if (queueEntry.doneFn) {
queueEntry.doneFn();
}
// Only specific source buffer actions must wait for async updateend events. Media
// Source actions process synchronously. Therefore, both audio and video source
// buffers are now clear to process the next queue entries.
shiftQueue('audio', sourceUpdater);
shiftQueue('video', sourceUpdater);
}
// Media Source actions require both source buffers, so if the media source action
// couldn't process yet (because one or both source buffers are busy), block other
// queue actions until both are available and the media source action can process.
return;
}
if (type === 'mediaSource') {
// If the queue was shifted by a media source action (this happens when pushing a
// media source action onto the queue), then it wasn't from an updateend event from an
// audio or video source buffer, so there's no change from previous state, and no
// processing should be done.
return;
}
// Media source queue entries don't need to consider whether the source updater is
// started (i.e., source buffers are created) as they don't need the source buffers, but
// source buffer queue entries do.
if (
!sourceUpdater.ready() ||
sourceUpdater.mediaSource.readyState === 'closed' ||
updating(type, sourceUpdater)
) {
return;
}
if (queueEntry.type !== type) {
queueIndex = nextQueueIndexOfType(type, sourceUpdater.queue);
if (queueIndex === null) {
// Either there's no queue entry that uses this source buffer type in the queue, or
// there's a media source queue entry before the next entry of this type, in which
// case wait for that action to process first.
return;
}
queueEntry = sourceUpdater.queue[queueIndex];
}
sourceUpdater.queue.splice(queueIndex, 1);
// Keep a record that this source buffer type is in use.
//
// The queue pending operation must be set before the action is performed in the event
// that the action results in a synchronous event that is acted upon. For instance, if
// an exception is thrown that can be handled, it's possible that new actions will be
// appended to an empty queue and immediately executed, but would not have the correct
// pending information if this property was set after the action was performed.
sourceUpdater.queuePending[type] = queueEntry;
queueEntry.action(type, sourceUpdater);
if (!queueEntry.doneFn) {
// synchronous operation, process next entry
sourceUpdater.queuePending[type] = null;
shiftQueue(type, sourceUpdater);
return;
}
};
const cleanupBuffer = (type, sourceUpdater) => {
const buffer = sourceUpdater[`${type}Buffer`];
const titleType = toTitleCase(type);
if (!buffer) {
return;
}
buffer.removeEventListener('updateend', sourceUpdater[`on${titleType}UpdateEnd_`]);
buffer.removeEventListener('error', sourceUpdater[`on${titleType}Error_`]);
sourceUpdater.codecs[type] = null;
sourceUpdater[`${type}Buffer`] = null;
};
const inSourceBuffers = (mediaSource, sourceBuffer) => mediaSource && sourceBuffer &&
Array.prototype.indexOf.call(mediaSource.sourceBuffers, sourceBuffer) !== -1;
const actions = {
appendBuffer: (bytes, segmentInfo, onError) => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Appending segment ${segmentInfo.mediaIndex}'s ${bytes.length} bytes to ${type}Buffer`);
try {
sourceBuffer.appendBuffer(bytes);
} catch (e) {
sourceUpdater.logger_(`Error with code ${e.code} ` +
(e.code === QUOTA_EXCEEDED_ERR ? '(QUOTA_EXCEEDED_ERR) ' : '') +
`when appending segment ${segmentInfo.mediaIndex} to ${type}Buffer`);
sourceUpdater.queuePending[type] = null;
onError(e);
}
},
remove: (start, end) => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Removing ${start} to ${end} from ${type}Buffer`);
try {
sourceBuffer.remove(start, end);
} catch (e) {
sourceUpdater.logger_(`Remove ${start} to ${end} from ${type}Buffer failed`);
}
},
timestampOffset: (offset) => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Setting ${type} timestampOffset to ${offset}`);
sourceBuffer.timestampOffset = offset;
},
callback: (callback) => (type, sourceUpdater) => {
callback();
},
endOfStream: (error) => (sourceUpdater) => {
if (sourceUpdater.mediaSource.readyState !== 'open') {
return;
}
sourceUpdater.logger_(`Calling mediaSource endOfStream(${error || ''})`);
try {
sourceUpdater.mediaSource.endOfStream(error);
} catch (e) {
videojs.log.warn('Failed to call media source endOfStream', e);
}
},
duration: (duration) => (sourceUpdater) => {
sourceUpdater.logger_(`Setting mediaSource duration to ${duration}`);
try {
sourceUpdater.mediaSource.duration = duration;
} catch (e) {
videojs.log.warn('Failed to set media source duration', e);
}
},
abort: () => (type, sourceUpdater) => {
if (sourceUpdater.mediaSource.readyState !== 'open') {
return;
}
const sourceBuffer = sourceUpdater[`${type}Buffer`];
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`calling abort on ${type}Buffer`);
try {
sourceBuffer.abort();
} catch (e) {
videojs.log.warn(`Failed to abort on ${type}Buffer`, e);
}
},
addSourceBuffer: (type, codec) => (sourceUpdater) => {
const titleType = toTitleCase(type);
const mime = getMimeForCodec(codec);
sourceUpdater.logger_(`Adding ${type}Buffer with codec ${codec} to mediaSource`);
const sourceBuffer = sourceUpdater.mediaSource.addSourceBuffer(mime);
sourceBuffer.addEventListener('updateend', sourceUpdater[`on${titleType}UpdateEnd_`]);
sourceBuffer.addEventListener('error', sourceUpdater[`on${titleType}Error_`]);
sourceUpdater.codecs[type] = codec;
sourceUpdater[`${type}Buffer`] = sourceBuffer;
},
removeSourceBuffer: (type) => (sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
cleanupBuffer(type, sourceUpdater);
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
sourceUpdater.logger_(`Removing ${type}Buffer with codec ${sourceUpdater.codecs[type]} from mediaSource`);
try {
sourceUpdater.mediaSource.removeSourceBuffer(sourceBuffer);
} catch (e) {
videojs.log.warn(`Failed to removeSourceBuffer ${type}Buffer`, e);
}
},
changeType: (codec) => (type, sourceUpdater) => {
const sourceBuffer = sourceUpdater[`${type}Buffer`];
const mime = getMimeForCodec(codec);
// can't do anything if the media source / source buffer is null
// or the media source does not contain this source buffer.
if (!inSourceBuffers(sourceUpdater.mediaSource, sourceBuffer)) {
return;
}
// do not update codec if we don't need to.
if (sourceUpdater.codecs[type] === codec) {
return;
}
sourceUpdater.logger_(`changing ${type}Buffer codec from ${sourceUpdater.codecs[type]} to ${codec}`);
sourceBuffer.changeType(mime);
sourceUpdater.codecs[type] = codec;
}
};
const pushQueue = ({type, sourceUpdater, action, doneFn, name}) => {
sourceUpdater.queue.push({
type,
action,
doneFn,
name
});
shiftQueue(type, sourceUpdater);
};
const onUpdateend = (type, sourceUpdater) => (e) => {
// Although there should, in theory, be a pending action for any updateend received,
// there are some actions that may trigger updateend events without set definitions in
// the w3c spec. For instance, setting the duration on the media source may trigger
// updateend events on source buffers. This does not appear to be in the spec. As such,
// if we encounter an updateend without a corresponding pending action from our queue
// for that source buffer type, process the next action.
if (sourceUpdater.queuePending[type]) {
const doneFn = sourceUpdater.queuePending[type].doneFn;
sourceUpdater.queuePending[type] = null;
if (doneFn) {
// if there's an error, report it
doneFn(sourceUpdater[`${type}Error_`]);
}
}
shiftQueue(type, sourceUpdater);
};
/**
* A queue of callbacks to be serialized and applied when a
* MediaSource and its associated SourceBuffers are not in the
* updating state. It is used by the segment loader to update the
* underlying SourceBuffers when new data is loaded, for instance.
*
* @class SourceUpdater
 * @param {MediaSource} mediaSource the MediaSource to create the SourceBuffers from
*/
export default class SourceUpdater extends videojs.EventTarget {
constructor(mediaSource) {
super();
this.mediaSource = mediaSource;
this.sourceopenListener_ = () => shiftQueue('mediaSource', this);
this.mediaSource.addEventListener('sourceopen', this.sourceopenListener_);
this.logger_ = logger('SourceUpdater');
// initial timestamp offset is 0
this.audioTimestampOffset_ = 0;
this.videoTimestampOffset_ = 0;
this.queue = [];
this.queuePending = {
audio: null,
video: null
};
this.delayedAudioAppendQueue_ = [];
this.videoAppendQueued_ = false;
this.codecs = {};
this.onVideoUpdateEnd_ = onUpdateend('video', this);
this.onAudioUpdateEnd_ = onUpdateend('audio', this);
this.onVideoError_ = (e) => {
// used for debugging
this.videoError_ = e;
};
this.onAudioError_ = (e) => {
// used for debugging
this.audioError_ = e;
};
this.createdSourceBuffers_ = false;
this.initializedEme_ = false;
this.triggeredReady_ = false;
}
initializedEme() {
this.initializedEme_ = true;
this.triggerReady();
}
hasCreatedSourceBuffers() {
// if false, likely waiting on one of the segment loaders to get enough data to create
// source buffers
return this.createdSourceBuffers_;
}
hasInitializedAnyEme() {
return this.initializedEme_;
}
ready() {
return this.hasCreatedSourceBuffers() && this.hasInitializedAnyEme();
}
createSourceBuffers(codecs) {
if (this.hasCreatedSourceBuffers()) {
// already created them before
return;
}
// the initial addOrChangeSourceBuffers will always be
// two addSourceBuffer calls.
this.addOrChangeSourceBuffers(codecs);
this.createdSourceBuffers_ = true;
this.trigger('createdsourcebuffers');
this.triggerReady();
}
triggerReady() {
// only allow ready to be triggered once, this prevents the case
// where:
// 1. we trigger createdsourcebuffers
// 2. IE 11 synchronously initializes eme
// 3. the synchronous initialization causes us to trigger ready
// 4. We go back to the ready check in createSourceBuffers and ready is triggered again.
if (this.ready() && !this.triggeredReady_) {
this.triggeredReady_ = true;
this.trigger('ready');
}
}
/**
* Add a type of source buffer to the media source.
*
* @param {string} type
* The type of source buffer to add.
*
* @param {string} codec
* The codec to add the source buffer with.
*/
addSourceBuffer(type, codec) {
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.addSourceBuffer(type, codec),
name: 'addSourceBuffer'
});
}
/**
* call abort on a source buffer.
*
* @param {string} type
* The type of source buffer to call abort on.
*/
abort(type) {
pushQueue({
type,
sourceUpdater: this,
action: actions.abort(type),
name: 'abort'
});
}
/**
* Call removeSourceBuffer and remove a specific type
* of source buffer on the mediaSource.
*
* @param {string} type
* The type of source buffer to remove.
*/
removeSourceBuffer(type) {
if (!this.canRemoveSourceBuffer()) {
videojs.log.error('removeSourceBuffer is not supported!');
return;
}
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.removeSourceBuffer(type),
name: 'removeSourceBuffer'
});
}
/**
* Whether or not the removeSourceBuffer function is supported
* on the mediaSource.
*
* @return {boolean}
* if removeSourceBuffer can be called.
*/
canRemoveSourceBuffer() {
// IE reports that it supports removeSourceBuffer, but often throws
// errors when attempting to use the function. So we report that it
// does not support removeSourceBuffer. As of Firefox 83 removeSourceBuffer
// throws errors, so we report that it does not support this as well.
return !videojs.browser.IE_VERSION && !videojs.browser.IS_FIREFOX && window.MediaSource &&
window.MediaSource.prototype &&
typeof window.MediaSource.prototype.removeSourceBuffer === 'function';
}
/**
* Whether or not the changeType function is supported
* on our SourceBuffers.
*
* @return {boolean}
* if changeType can be called.
*/
static canChangeType() {
return window.SourceBuffer &&
window.SourceBuffer.prototype &&
typeof window.SourceBuffer.prototype.changeType === 'function';
}
/**
* Whether or not the changeType function is supported
* on our SourceBuffers.
*
* @return {boolean}
* if changeType can be called.
*/
canChangeType() {
return this.constructor.canChangeType();
}
/**
* Call the changeType function on a source buffer, given the code and type.
*
* @param {string} type
* The type of source buffer to call changeType on.
*
* @param {string} codec
* The codec string to change type with on the source buffer.
*/
changeType(type, codec) {
if (!this.canChangeType()) {
videojs.log.error('changeType is not supported!');
return;
}
pushQueue({
type,
sourceUpdater: this,
action: actions.changeType(codec),
name: 'changeType'
});
}
/**
 * Add source buffers with a codec or, if they are already created,
 * call changeType on the existing source buffers.
*
* @param {Object} codecs
* Codecs to switch to
*/
addOrChangeSourceBuffers(codecs) {
if (!codecs || typeof codecs !== 'object' || Object.keys(codecs).length === 0) {
throw new Error('Cannot addOrChangeSourceBuffers to undefined codecs');
}
Object.keys(codecs).forEach((type) => {
const codec = codecs[type];
if (!this.hasCreatedSourceBuffers()) {
return this.addSourceBuffer(type, codec);
}
if (this.canChangeType()) {
this.changeType(type, codec);
}
});
}
/**
* Queue an update to append an ArrayBuffer.
*
 * @param {Object} options an object containing `type`, `bytes` and optional `segmentInfo`
 * @param {Function} doneFn the function to call when done
* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-appendBuffer-void-ArrayBuffer-data
*/
appendBuffer(options, doneFn) {
const {segmentInfo, type, bytes} = options;
this.processedAppend_ = true;
if (type === 'audio' && this.videoBuffer && !this.videoAppendQueued_) {
this.delayedAudioAppendQueue_.push([options, doneFn]);
this.logger_(`delayed audio append of ${bytes.length} until video append`);
return;
}
// In the case of certain errors, for instance, QUOTA_EXCEEDED_ERR, updateend will
// not be fired. This means that the queue will be blocked until the next action
// taken by the segment-loader. Provide a mechanism for segment-loader to handle
// these errors by calling the doneFn with the specific error.
const onError = doneFn;
pushQueue({
type,
sourceUpdater: this,
action: actions.appendBuffer(bytes, segmentInfo || {mediaIndex: -1}, onError),
doneFn,
name: 'appendBuffer'
});
if (type === 'video') {
this.videoAppendQueued_ = true;
if (!this.delayedAudioAppendQueue_.length) {
return;
}
const queue = this.delayedAudioAppendQueue_.slice();
this.logger_(`queuing ${queue.length} delayed audio appendBuffers`);
this.delayedAudioAppendQueue_.length = 0;
queue.forEach((que) => {
this.appendBuffer.apply(this, que);
});
}
}
/**
* Get the audio buffer's buffered timerange.
*
* @return {TimeRange}
* The audio buffer's buffered time range
*/
audioBuffered() {
// no media source/source buffer or it isn't in the media sources
// source buffer list
if (!inSourceBuffers(this.mediaSource, this.audioBuffer)) {
return videojs.createTimeRange();
}
return this.audioBuffer.buffered ? this.audioBuffer.buffered :
videojs.createTimeRange();
}
/**
* Get the video buffer's buffered timerange.
*
* @return {TimeRange}
* The video buffer's buffered time range
*/
videoBuffered() {
// no media source/source buffer or it isn't in the media sources
// source buffer list
if (!inSourceBuffers(this.mediaSource, this.videoBuffer)) {
return videojs.createTimeRange();
}
return this.videoBuffer.buffered ? this.videoBuffer.buffered :
videojs.createTimeRange();
}
/**
* Get a combined video/audio buffer's buffered timerange.
*
* @return {TimeRange}
* the combined time range
*/
buffered() {
const video = inSourceBuffers(this.mediaSource, this.videoBuffer) ? this.videoBuffer : null;
const audio = inSourceBuffers(this.mediaSource, this.audioBuffer) ? this.audioBuffer : null;
if (audio && !video) {
return this.audioBuffered();
}
if (video && !audio) {
return this.videoBuffered();
}
return bufferIntersection(this.audioBuffered(), this.videoBuffered());
}
/**
* Add a callback to the queue that will set duration on the mediaSource.
*
* @param {number} duration
* The duration to set
*
* @param {Function} [doneFn]
* function to run after duration has been set.
*/
setDuration(duration, doneFn = noop) {
// In order to set the duration on the media source, it's necessary to wait for all
// source buffers to no longer be updating. "If the updating attribute equals true on
// any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
// abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.duration(duration),
name: 'duration',
doneFn
});
}
/**
* Add a mediaSource endOfStream call to the queue
*
* @param {Error} [error]
* Call endOfStream with an error
*
* @param {Function} [doneFn]
* A function that should be called when the
* endOfStream call has finished.
*/
endOfStream(error = null, doneFn = noop) {
if (typeof error !== 'string') {
error = undefined;
}
// In order to set the duration on the media source, it's necessary to wait for all
// source buffers to no longer be updating. "If the updating attribute equals true on
// any SourceBuffer in sourceBuffers, then throw an InvalidStateError exception and
// abort these steps." (source: https://www.w3.org/TR/media-source/#attributes).
pushQueue({
type: 'mediaSource',
sourceUpdater: this,
action: actions.endOfStream(error),
name: 'endOfStream',
doneFn
});
}
/**
* Queue an update to remove a time range from the buffer.
*
* @param {number} start where to start the removal
* @param {number} end where to end the removal
* @param {Function} [done=noop] optional callback to be executed when the remove
* operation is complete
* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
*/
removeAudio(start, end, done = noop) {
if (!this.audioBuffered().length || this.audioBuffered().end(0) === 0) {
done();
return;
}
pushQueue({
type: 'audio',
sourceUpdater: this,
action: actions.remove(start, end),
doneFn: done,
name: 'remove'
});
}
/**
* Queue an update to remove a time range from the buffer.
*
* @param {number} start where to start the removal
* @param {number} end where to end the removal
* @param {Function} [done=noop] optional callback to be executed when the remove
* operation is complete
* @see http://www.w3.org/TR/media-source/#widl-SourceBuffer-remove-void-double-start-unrestricted-double-end
*/
removeVideo(start, end, done = noop) {
if (!this.videoBuffered().length || this.videoBuffered().end(0) === 0) {
done();
return;
}
pushQueue({
type: 'video',
sourceUpdater: this,
action: actions.remove(start, end),
doneFn: done,
name: 'remove'
});
}
/**
* Whether the underlying sourceBuffer is updating or not
*
* @return {boolean} the updating status of the SourceBuffer
*/
updating() {
// the audio/video source buffer is updating
if (updating('audio', this) || updating('video', this)) {
return true;
}
return false;
}
/**
* Set/get the timestampoffset on the audio SourceBuffer
*
* @return {number} the timestamp offset
*/
audioTimestampOffset(offset) {
if (typeof offset !== 'undefined' &&
this.audioBuffer &&
// no point in updating if it's the same
this.audioTimestampOffset_ !== offset) {
pushQueue({
type: 'audio',
sourceUpdater: this,
action: actions.timestampOffset(offset),
name: 'timestampOffset'
});
this.audioTimestampOffset_ = offset;
}
return this.audioTimestampOffset_;
}
/**
* Set/get the timestampoffset on the video SourceBuffer
*
* @return {number} the timestamp offset
*/
videoTimestampOffset(offset) {
if (typeof offset !== 'undefined' &&
this.videoBuffer &&
// no point in updating if it's the same
this.videoTimestampOffset_ !== offset) {
pushQueue({
type: 'video',
sourceUpdater: this,
action: actions.timestampOffset(offset),
name: 'timestampOffset'
});
this.videoTimestampOffset_ = offset;
}
return this.videoTimestampOffset_;
}
/**
* Add a function to the queue that will be called
* when it is its turn to run in the audio queue.
*
* @param {Function} callback
* The callback to queue.
*/
audioQueueCallback(callback) {
if (!this.audioBuffer) {
return;
}
pushQueue({
type: 'audio',
sourceUpdater: this,
action: actions.callback(callback),
name: 'callback'
});
}
/**
* Add a function to the queue that will be called
* when it is its turn to run in the video queue.
*
* @param {Function} callback
* The callback to queue.
*/
videoQueueCallback(callback) {
if (!this.videoBuffer) {
return;
}
pushQueue({
type: 'video',
sourceUpdater: this,
action: actions.callback(callback),
name: 'callback'
});
}
/**
* dispose of the source updater and the underlying sourceBuffer
*/
dispose() {
this.trigger('dispose');
bufferTypes.forEach((type) => {
this.abort(type);
if (this.canRemoveSourceBuffer()) {
this.removeSourceBuffer(type);
} else {
this[`${type}QueueCallback`](() => cleanupBuffer(type, this));
}
});
this.videoAppendQueued_ = false;
this.delayedAudioAppendQueue_.length = 0;
if (this.sourceopenListener_) {
this.mediaSource.removeEventListener('sourceopen', this.sourceopenListener_);
}
this.off();
}
}
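/*
 * Usage sketch (assumes `mediaSource` is a MediaSource attached to a media
 * element and `videoBytes` is a transmuxed fmp4 segment; the codec strings
 * are hypothetical):
 *
 *   const sourceUpdater = new SourceUpdater(mediaSource);
 *
 *   sourceUpdater.on('ready', () => {
 *     sourceUpdater.appendBuffer(
 *       { type: 'video', bytes: videoBytes, segmentInfo: { mediaIndex: 0 } },
 *       (error) => {
 *         // QUOTA_EXCEEDED_ERR and other append failures surface here
 *       }
 *     );
 *   });
 *
 *   sourceUpdater.createSourceBuffers({ video: 'avc1.4d401e', audio: 'mp4a.40.2' });
 *   // no EME in this sketch, so mark it initialized to let 'ready' fire
 *   sourceUpdater.initializedEme();
 */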

View file

@ -0,0 +1,588 @@
/**
* @file sync-controller.js
*/
import {sumDurations, getPartsAndSegments} from './playlist';
import videojs from 'video.js';
import logger from './util/logger';
export const syncPointStrategies = [
// Stategy "VOD": Handle the VOD-case where the sync-point is *always*
// the equivalence display-time 0 === segment-index 0
{
name: 'VOD',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
if (duration !== Infinity) {
const syncPoint = {
time: 0,
segmentIndex: 0,
partIndex: null
};
return syncPoint;
}
return null;
}
},
// Stategy "ProgramDateTime": We have a program-date-time tag in this playlist
{
name: 'ProgramDateTime',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
if (!Object.keys(syncController.timelineToDatetimeMappings).length) {
return null;
}
let syncPoint = null;
let lastDistance = null;
const partsAndSegments = getPartsAndSegments(playlist);
currentTime = currentTime || 0;
for (let i = 0; i < partsAndSegments.length; i++) {
// start from the end and loop backwards for live
// or start from the front and loop forwards for non-live
const index = (playlist.endList || currentTime === 0) ? i : partsAndSegments.length - (i + 1);
const partAndSegment = partsAndSegments[index];
const segment = partAndSegment.segment;
const datetimeMapping =
syncController.timelineToDatetimeMappings[segment.timeline];
if (!datetimeMapping) {
continue;
}
if (segment.dateTimeObject) {
const segmentTime = segment.dateTimeObject.getTime() / 1000;
let start = segmentTime + datetimeMapping;
// take part duration into account.
if (segment.parts && typeof partAndSegment.partIndex === 'number') {
for (let z = 0; z < partAndSegment.partIndex; z++) {
start += segment.parts[z].duration;
}
}
const distance = Math.abs(currentTime - start);
// Once the distance begins to increase, or if distance is 0, we have passed
// currentTime and can stop looking for better candidates
if (lastDistance !== null && (distance === 0 || lastDistance < distance)) {
break;
}
lastDistance = distance;
syncPoint = {
time: start,
segmentIndex: partAndSegment.segmentIndex,
partIndex: partAndSegment.partIndex
};
}
}
return syncPoint;
}
},
// Stategy "Segment": We have a known time mapping for a timeline and a
// segment in the current timeline with timing data
{
name: 'Segment',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
let syncPoint = null;
let lastDistance = null;
currentTime = currentTime || 0;
const partsAndSegments = getPartsAndSegments(playlist);
for (let i = 0; i < partsAndSegments.length; i++) {
// start from the end and loop backwards for live
// or start from the front and loop forwards for non-live
const index = (playlist.endList || currentTime === 0) ? i : partsAndSegments.length - (i + 1);
const partAndSegment = partsAndSegments[index];
const segment = partAndSegment.segment;
const start = (partAndSegment.part && partAndSegment.part.start) || (segment && segment.start);
if (segment.timeline === currentTimeline && typeof start !== 'undefined') {
const distance = Math.abs(currentTime - start);
// Once the distance begins to increase, we have passed
// currentTime and can stop looking for better candidates
if (lastDistance !== null && lastDistance < distance) {
break;
}
if (!syncPoint || lastDistance === null || lastDistance >= distance) {
lastDistance = distance;
syncPoint = {
time: start,
segmentIndex: partAndSegment.segmentIndex,
partIndex: partAndSegment.partIndex
};
}
}
}
return syncPoint;
}
},
// Stategy "Discontinuity": We have a discontinuity with a known
// display-time
{
name: 'Discontinuity',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
let syncPoint = null;
currentTime = currentTime || 0;
if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
let lastDistance = null;
for (let i = 0; i < playlist.discontinuityStarts.length; i++) {
const segmentIndex = playlist.discontinuityStarts[i];
const discontinuity = playlist.discontinuitySequence + i + 1;
const discontinuitySync = syncController.discontinuities[discontinuity];
if (discontinuitySync) {
const distance = Math.abs(currentTime - discontinuitySync.time);
// Once the distance begins to increase, we have passed
// currentTime and can stop looking for better candidates
if (lastDistance !== null && lastDistance < distance) {
break;
}
if (!syncPoint || lastDistance === null || lastDistance >= distance) {
lastDistance = distance;
syncPoint = {
time: discontinuitySync.time,
segmentIndex,
partIndex: null
};
}
}
}
}
return syncPoint;
}
},
// Stategy "Playlist": We have a playlist with a known mapping of
// segment index to display time
{
name: 'Playlist',
run: (syncController, playlist, duration, currentTimeline, currentTime) => {
if (playlist.syncInfo) {
const syncPoint = {
time: playlist.syncInfo.time,
segmentIndex: playlist.syncInfo.mediaSequence - playlist.mediaSequence,
partIndex: null
};
return syncPoint;
}
return null;
}
}
];
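/*
 * Illustrative run (hypothetical arguments): for a 60 second VOD playlist
 * the "VOD" strategy always maps display-time 0 to segment-index 0:
 *
 *   syncPointStrategies[0].run(syncController, playlist, 60, 0, 0);
 *   // => { time: 0, segmentIndex: 0, partIndex: null }
 */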
export default class SyncController extends videojs.EventTarget {
constructor(options = {}) {
super();
// ...for syncing across variants
this.timelines = [];
this.discontinuities = [];
this.timelineToDatetimeMappings = {};
this.logger_ = logger('SyncController');
}
/**
* Find a sync-point for the playlist specified
*
* A sync-point is defined as a known mapping from display-time to
* a segment-index in the current playlist.
*
* @param {Playlist} playlist
* The playlist that needs a sync-point
* @param {number} duration
 * Duration of the MediaSource (Infinity if playing a live source)
 * @param {number} currentTimeline
 * The last timeline from which a segment was loaded
 * @param {number} currentTime
 * The current time of the player
 * @return {Object}
* A sync-point object
*/
getSyncPoint(playlist, duration, currentTimeline, currentTime) {
const syncPoints = this.runStrategies_(
playlist,
duration,
currentTimeline,
currentTime
);
if (!syncPoints.length) {
// Signal that we need to attempt to get a sync-point manually
// by fetching a segment in the playlist and constructing
// a sync-point from that information
return null;
}
// Now find the sync-point that is closest to the currentTime because
// that should result in the most accurate guess about which segment
// to fetch
return this.selectSyncPoint_(syncPoints, { key: 'time', value: currentTime });
}
/**
* Calculate the amount of time that has expired off the playlist during playback
*
* @param {Playlist} playlist
* Playlist object to calculate expired from
* @param {number} duration
 * Duration of the MediaSource (Infinity if playing a live source)
* @return {number|null}
* The amount of time that has expired off the playlist during playback. Null
* if no sync-points for the playlist can be found.
*/
getExpiredTime(playlist, duration) {
if (!playlist || !playlist.segments) {
return null;
}
const syncPoints = this.runStrategies_(
playlist,
duration,
playlist.discontinuitySequence,
0
);
// Without sync-points, there is not enough information to determine the expired time
if (!syncPoints.length) {
return null;
}
const syncPoint = this.selectSyncPoint_(syncPoints, {
key: 'segmentIndex',
value: 0
});
// If the sync-point is beyond the start of the playlist, we want to subtract the
// duration from index 0 to syncPoint.segmentIndex instead of adding.
if (syncPoint.segmentIndex > 0) {
syncPoint.time *= -1;
}
return Math.abs(syncPoint.time + sumDurations({
defaultDuration: playlist.targetDuration,
durationList: playlist.segments,
startIndex: syncPoint.segmentIndex,
endIndex: 0
}));
}
/**
* Runs each sync-point strategy and returns a list of sync-points returned by the
* strategies
*
* @private
* @param {Playlist} playlist
* The playlist that needs a sync-point
* @param {number} duration
* Duration of the MediaSource (Infinity if playing a live source)
* @param {number} currentTimeline
 * The last timeline from which a segment was loaded
 * @param {number} currentTime
 * The current time of the player
 * @return {Array}
* A list of sync-point objects
*/
runStrategies_(playlist, duration, currentTimeline, currentTime) {
const syncPoints = [];
// Try to find a sync-point by utilizing various strategies...
for (let i = 0; i < syncPointStrategies.length; i++) {
const strategy = syncPointStrategies[i];
const syncPoint = strategy.run(
this,
playlist,
duration,
currentTimeline,
currentTime
);
if (syncPoint) {
syncPoint.strategy = strategy.name;
syncPoints.push({
strategy: strategy.name,
syncPoint
});
}
}
return syncPoints;
}
/**
* Selects the sync-point nearest the specified target
*
* @private
* @param {Array} syncPoints
* List of sync-points to select from
* @param {Object} target
* Object specifying the property and value we are targeting
* @param {string} target.key
* Specifies the property to target. Must be either 'time' or 'segmentIndex'
* @param {number} target.value
* The value to target for the specified key.
* @return {Object}
* The sync-point nearest the target
*/
selectSyncPoint_(syncPoints, target) {
let bestSyncPoint = syncPoints[0].syncPoint;
let bestDistance = Math.abs(syncPoints[0].syncPoint[target.key] - target.value);
let bestStrategy = syncPoints[0].strategy;
for (let i = 1; i < syncPoints.length; i++) {
const newDistance = Math.abs(syncPoints[i].syncPoint[target.key] - target.value);
if (newDistance < bestDistance) {
bestDistance = newDistance;
bestSyncPoint = syncPoints[i].syncPoint;
bestStrategy = syncPoints[i].strategy;
}
}
this.logger_(`syncPoint for [${target.key}: ${target.value}] chosen with strategy` +
` [${bestStrategy}]: [time:${bestSyncPoint.time},` +
` segmentIndex:${bestSyncPoint.segmentIndex}` +
(typeof bestSyncPoint.partIndex === 'number' ? `,partIndex:${bestSyncPoint.partIndex}` : '') +
']');
return bestSyncPoint;
}
/**
* Save any meta-data present on the segments when segments leave
* the live window to the playlist to allow for synchronization at the
* playlist level later.
*
* @param {Playlist} oldPlaylist - The previous active playlist
* @param {Playlist} newPlaylist - The updated and most current playlist
*/
saveExpiredSegmentInfo(oldPlaylist, newPlaylist) {
const mediaSequenceDiff = newPlaylist.mediaSequence - oldPlaylist.mediaSequence;
// When a segment expires from the playlist and it has a start time
// save that information as a possible sync-point reference in future
for (let i = mediaSequenceDiff - 1; i >= 0; i--) {
const lastRemovedSegment = oldPlaylist.segments[i];
if (lastRemovedSegment && typeof lastRemovedSegment.start !== 'undefined') {
newPlaylist.syncInfo = {
mediaSequence: oldPlaylist.mediaSequence + i,
time: lastRemovedSegment.start
};
this.logger_(`playlist refresh sync: [time:${newPlaylist.syncInfo.time},` +
` mediaSequence: ${newPlaylist.syncInfo.mediaSequence}]`);
this.trigger('syncinfoupdate');
break;
}
}
}
/**
 * Save the mapping from the playlist's ProgramDateTime to display time. This
 * should only happen before segments start to load.
*
* @param {Playlist} playlist - The currently active playlist
*/
setDateTimeMappingForStart(playlist) {
// It's possible for the playlist to be updated before playback starts, meaning time
// zero is not yet set. If, during these playlist refreshes, a discontinuity is
// crossed, then the old time zero mapping (for the prior timeline) would be retained
// unless the mappings are cleared.
this.timelineToDatetimeMappings = {};
if (playlist.segments &&
playlist.segments.length &&
playlist.segments[0].dateTimeObject) {
const firstSegment = playlist.segments[0];
const playlistTimestamp = firstSegment.dateTimeObject.getTime() / 1000;
this.timelineToDatetimeMappings[firstSegment.timeline] = -playlistTimestamp;
}
}
/**
* Calculates and saves timeline mappings, playlist sync info, and segment timing values
* based on the latest timing information.
*
* @param {Object} options
* Options object
* @param {SegmentInfo} options.segmentInfo
* The current active request information
* @param {boolean} options.shouldSaveTimelineMapping
 * If there's a timeline change, determines if the timeline mapping and
 * program date time mappings should be saved.
*/
saveSegmentTimingInfo({ segmentInfo, shouldSaveTimelineMapping }) {
const didCalculateSegmentTimeMapping = this.calculateSegmentTimeMapping_(
segmentInfo,
segmentInfo.timingInfo,
shouldSaveTimelineMapping
);
const segment = segmentInfo.segment;
if (didCalculateSegmentTimeMapping) {
this.saveDiscontinuitySyncInfo_(segmentInfo);
// If the playlist does not have sync information yet, record that information
// now with segment timing information
if (!segmentInfo.playlist.syncInfo) {
segmentInfo.playlist.syncInfo = {
mediaSequence: segmentInfo.playlist.mediaSequence + segmentInfo.mediaIndex,
time: segment.start
};
}
}
const dateTime = segment.dateTimeObject;
if (segment.discontinuity && shouldSaveTimelineMapping && dateTime) {
this.timelineToDatetimeMappings[segment.timeline] = -(dateTime.getTime() / 1000);
}
}
timestampOffsetForTimeline(timeline) {
if (typeof this.timelines[timeline] === 'undefined') {
return null;
}
return this.timelines[timeline].time;
}
mappingForTimeline(timeline) {
if (typeof this.timelines[timeline] === 'undefined') {
return null;
}
return this.timelines[timeline].mapping;
}
/**
* Use the "media time" for a segment to generate a mapping to "display time" and
* save that display time to the segment.
*
* @private
* @param {SegmentInfo} segmentInfo
* The current active request information
* @param {Object} timingInfo
* The start and end time of the current segment in "media time"
* @param {boolean} shouldSaveTimelineMapping
* If there's a timeline change, determines if the timeline mapping should be
* saved in timelines.
* @return {boolean}
* Returns false if segment time mapping could not be calculated
*/
calculateSegmentTimeMapping_(segmentInfo, timingInfo, shouldSaveTimelineMapping) {
// TODO: remove side effects
const segment = segmentInfo.segment;
const part = segmentInfo.part;
let mappingObj = this.timelines[segmentInfo.timeline];
let start;
let end;
if (typeof segmentInfo.timestampOffset === 'number') {
mappingObj = {
time: segmentInfo.startOfSegment,
mapping: segmentInfo.startOfSegment - timingInfo.start
};
if (shouldSaveTimelineMapping) {
this.timelines[segmentInfo.timeline] = mappingObj;
this.trigger('timestampoffset');
this.logger_(`time mapping for timeline ${segmentInfo.timeline}: ` +
`[time: ${mappingObj.time}] [mapping: ${mappingObj.mapping}]`);
}
start = segmentInfo.startOfSegment;
end = timingInfo.end + mappingObj.mapping;
} else if (mappingObj) {
start = timingInfo.start + mappingObj.mapping;
end = timingInfo.end + mappingObj.mapping;
} else {
return false;
}
if (part) {
part.start = start;
part.end = end;
}
// If we don't have a segment start yet or the start value we got
// is less than our current segment.start value, save a new start value.
// We have to do this because parts will have segment timing info saved
// multiple times and we want segment start to be the earliest part start
// value for that segment.
if (!segment.start || start < segment.start) {
segment.start = start;
}
segment.end = end;
return true;
}
/**
* Each time there is a discontinuity in the playlist, attempt to calculate the location
* in display time of the start of the discontinuity and save that. We also save an
* accuracy value so that we keep the values with the most accuracy (closest to 0).
*
* @private
* @param {SegmentInfo} segmentInfo - The current active request information
*/
saveDiscontinuitySyncInfo_(segmentInfo) {
const playlist = segmentInfo.playlist;
const segment = segmentInfo.segment;
// If the current segment is a discontinuity, then we know exactly where
// the start of the range is, and its accuracy is 0 (greater accuracy values
// mean more approximation)
if (segment.discontinuity) {
this.discontinuities[segment.timeline] = {
time: segment.start,
accuracy: 0
};
} else if (playlist.discontinuityStarts && playlist.discontinuityStarts.length) {
// Search for future discontinuities that we can provide better timing
// information for and save that information for sync purposes
for (let i = 0; i < playlist.discontinuityStarts.length; i++) {
const segmentIndex = playlist.discontinuityStarts[i];
const discontinuity = playlist.discontinuitySequence + i + 1;
const mediaIndexDiff = segmentIndex - segmentInfo.mediaIndex;
const accuracy = Math.abs(mediaIndexDiff);
if (!this.discontinuities[discontinuity] ||
this.discontinuities[discontinuity].accuracy > accuracy) {
let time;
if (mediaIndexDiff < 0) {
time = segment.start - sumDurations({
defaultDuration: playlist.targetDuration,
durationList: playlist.segments,
startIndex: segmentInfo.mediaIndex,
endIndex: segmentIndex
});
} else {
time = segment.end + sumDurations({
defaultDuration: playlist.targetDuration,
durationList: playlist.segments,
startIndex: segmentInfo.mediaIndex + 1,
endIndex: segmentIndex
});
}
this.discontinuities[discontinuity] = {
time,
accuracy
};
}
}
}
}
dispose() {
this.trigger('dispose');
this.off();
}
}
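// Illustrative sketch, not part of the original source: the mapping saved by
// calculateSegmentTimeMapping_ above converts transmuxed "media time" into
// player "display time" via displayTime = mediaTime + mapping. All values
// below are hypothetical.
const exampleMapping = {
  time: 0, // startOfSegment in display time
  mapping: 0 - 10 // startOfSegment - timingInfo.start
};
// A media time of 16 reported by the transmuxer lands at display time 6:
const exampleDisplayTime = 16 + exampleMapping.mapping; // 6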

View file

@ -0,0 +1,48 @@
import videojs from 'video.js';
/**
* The TimelineChangeController acts as a source for segment loaders to listen for and
* keep track of latest and pending timeline changes. This is useful to ensure proper
* sync, as each loader may need to make a consideration for what timeline the other
* loader is on before making changes which could impact the other loader's media.
*
* @class TimelineChangeController
* @extends videojs.EventTarget
*/
export default class TimelineChangeController extends videojs.EventTarget {
constructor() {
super();
this.pendingTimelineChanges_ = {};
this.lastTimelineChanges_ = {};
}
clearPendingTimelineChange(type) {
this.pendingTimelineChanges_[type] = null;
this.trigger('pendingtimelinechange');
}
pendingTimelineChange({ type, from, to }) {
if (typeof from === 'number' && typeof to === 'number') {
this.pendingTimelineChanges_[type] = { type, from, to };
this.trigger('pendingtimelinechange');
}
return this.pendingTimelineChanges_[type];
}
lastTimelineChange({ type, from, to }) {
if (typeof from === 'number' && typeof to === 'number') {
this.lastTimelineChanges_[type] = { type, from, to };
delete this.pendingTimelineChanges_[type];
this.trigger('timelinechange');
}
return this.lastTimelineChanges_[type];
}
dispose() {
this.trigger('dispose');
this.pendingTimelineChanges_ = {};
this.lastTimelineChanges_ = {};
this.off();
}
}
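// Illustrative sketch, not part of the original source: how segment loaders
// are expected to drive the controller above. The instance name and the
// 'main' loader type below are hypothetical example values.
const exampleTimelineChangeController = new TimelineChangeController();
// The main loader announces that it is moving from timeline 0 to timeline 1.
exampleTimelineChangeController.pendingTimelineChange({ type: 'main', from: 0, to: 1 });
// Once the change lands, recording it deletes the pending entry and fires
// 'timelinechange' so other loaders can react.
exampleTimelineChangeController.lastTimelineChange({ type: 'main', from: 0, to: 1 });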

View file

@ -0,0 +1,373 @@
/* global self */
/**
* @file transmuxer-worker.js
*/
/**
* videojs-contrib-media-sources
*
* Copyright (c) 2015 Brightcove
* All rights reserved.
*
* Handles communication between the browser-world and the mux.js
* transmuxer running inside of a WebWorker by exposing a simple
* message-based interface to a Transmuxer object.
*/
import {Transmuxer} from 'mux.js/lib/mp4/transmuxer';
import CaptionParser from 'mux.js/lib/mp4/caption-parser';
import mp4probe from 'mux.js/lib/mp4/probe';
import tsInspector from 'mux.js/lib/tools/ts-inspector.js';
import {
ONE_SECOND_IN_TS,
secondsToVideoTs,
videoTsToSeconds
} from 'mux.js/lib/utils/clock';
/**
* Re-emits transmuxer events by converting them into messages to the
* world outside the worker.
*
* @param {Object} transmuxer the transmuxer to wire events on
* @private
*/
const wireTransmuxerEvents = function(self, transmuxer) {
transmuxer.on('data', function(segment) {
// transfer ownership of the underlying ArrayBuffer
// instead of doing a copy to save memory
// ArrayBuffers are transferable but generic TypedArrays are not
// @link https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Using_web_workers#Passing_data_by_transferring_ownership_(transferable_objects)
const initArray = segment.initSegment;
segment.initSegment = {
data: initArray.buffer,
byteOffset: initArray.byteOffset,
byteLength: initArray.byteLength
};
const typedArray = segment.data;
segment.data = typedArray.buffer;
self.postMessage({
action: 'data',
segment,
byteOffset: typedArray.byteOffset,
byteLength: typedArray.byteLength
}, [segment.data]);
});
transmuxer.on('done', function(data) {
self.postMessage({ action: 'done' });
});
transmuxer.on('gopInfo', function(gopInfo) {
self.postMessage({
action: 'gopInfo',
gopInfo
});
});
transmuxer.on('videoSegmentTimingInfo', function(timingInfo) {
const videoSegmentTimingInfo = {
start: {
decode: videoTsToSeconds(timingInfo.start.dts),
presentation: videoTsToSeconds(timingInfo.start.pts)
},
end: {
decode: videoTsToSeconds(timingInfo.end.dts),
presentation: videoTsToSeconds(timingInfo.end.pts)
},
baseMediaDecodeTime: videoTsToSeconds(timingInfo.baseMediaDecodeTime)
};
if (timingInfo.prependedContentDuration) {
videoSegmentTimingInfo.prependedContentDuration = videoTsToSeconds(timingInfo.prependedContentDuration);
}
self.postMessage({
action: 'videoSegmentTimingInfo',
videoSegmentTimingInfo
});
});
transmuxer.on('audioSegmentTimingInfo', function(timingInfo) {
// Note that all times for [audio/video]SegmentTimingInfo events are in video clock
const audioSegmentTimingInfo = {
start: {
decode: videoTsToSeconds(timingInfo.start.dts),
presentation: videoTsToSeconds(timingInfo.start.pts)
},
end: {
decode: videoTsToSeconds(timingInfo.end.dts),
presentation: videoTsToSeconds(timingInfo.end.pts)
},
baseMediaDecodeTime: videoTsToSeconds(timingInfo.baseMediaDecodeTime)
};
if (timingInfo.prependedContentDuration) {
audioSegmentTimingInfo.prependedContentDuration =
videoTsToSeconds(timingInfo.prependedContentDuration);
}
self.postMessage({
action: 'audioSegmentTimingInfo',
audioSegmentTimingInfo
});
});
transmuxer.on('id3Frame', function(id3Frame) {
self.postMessage({
action: 'id3Frame',
id3Frame
});
});
transmuxer.on('caption', function(caption) {
self.postMessage({
action: 'caption',
caption
});
});
transmuxer.on('trackinfo', function(trackInfo) {
self.postMessage({
action: 'trackinfo',
trackInfo
});
});
transmuxer.on('audioTimingInfo', function(audioTimingInfo) {
// convert to video TS since we prioritize video time over audio
self.postMessage({
action: 'audioTimingInfo',
audioTimingInfo: {
start: videoTsToSeconds(audioTimingInfo.start),
end: videoTsToSeconds(audioTimingInfo.end)
}
});
});
transmuxer.on('videoTimingInfo', function(videoTimingInfo) {
self.postMessage({
action: 'videoTimingInfo',
videoTimingInfo: {
start: videoTsToSeconds(videoTimingInfo.start),
end: videoTsToSeconds(videoTimingInfo.end)
}
});
});
};
/**
* All incoming messages route through this hash. If no function exists
* to handle an incoming message, then we ignore the message.
*
* @class MessageHandlers
* @param {Object} options the options to initialize with
*/
class MessageHandlers {
constructor(self, options) {
this.options = options || {};
this.self = self;
this.init();
}
/**
* initialize our web worker and wire all the events.
*/
init() {
if (this.transmuxer) {
this.transmuxer.dispose();
}
this.transmuxer = new Transmuxer(this.options);
wireTransmuxerEvents(this.self, this.transmuxer);
}
pushMp4Captions(data) {
if (!this.captionParser) {
this.captionParser = new CaptionParser();
this.captionParser.init();
}
const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
const parsed = this.captionParser.parse(
segment,
data.trackIds,
data.timescales
);
this.self.postMessage({
action: 'mp4Captions',
captions: parsed && parsed.captions || [],
data: segment.buffer
}, [segment.buffer]);
}
probeMp4StartTime({timescales, data}) {
const startTime = mp4probe.startTime(timescales, data);
this.self.postMessage({
action: 'probeMp4StartTime',
startTime,
data
}, [data.buffer]);
}
probeMp4Tracks({data}) {
const tracks = mp4probe.tracks(data);
this.self.postMessage({
action: 'probeMp4Tracks',
tracks,
data
}, [data.buffer]);
}
/**
* Probe an mpeg2-ts segment to determine the start time of the segment in its
* internal "media time," as well as whether it contains video and/or audio.
*
* @private
* @param {Uint8Array} bytes - segment bytes
* @param {number} baseStartTime
* Relative reference timestamp used when adjusting frame timestamps for rollover.
* This value should be in seconds, as it's converted to a 90khz clock within the
* function body.
* @return {Object} The start time of the current segment in "media time" as well as
* whether it contains video and/or audio
*/
probeTs({data, baseStartTime}) {
const tsStartTime = (typeof baseStartTime === 'number' && !isNaN(baseStartTime)) ?
(baseStartTime * ONE_SECOND_IN_TS) :
void 0;
const timeInfo = tsInspector.inspect(data, tsStartTime);
let result = null;
if (timeInfo) {
result = {
// each type's time info comes back as an array of 2 times, start and end
hasVideo: timeInfo.video && timeInfo.video.length === 2 || false,
hasAudio: timeInfo.audio && timeInfo.audio.length === 2 || false
};
if (result.hasVideo) {
result.videoStart = timeInfo.video[0].ptsTime;
}
if (result.hasAudio) {
result.audioStart = timeInfo.audio[0].ptsTime;
}
}
this.self.postMessage({
action: 'probeTs',
result,
data
}, [data.buffer]);
}
clearAllMp4Captions() {
if (this.captionParser) {
this.captionParser.clearAllCaptions();
}
}
clearParsedMp4Captions() {
if (this.captionParser) {
this.captionParser.clearParsedCaptions();
}
}
/**
* Adds data (a ts segment) to the start of the transmuxer pipeline for
* processing.
*
* @param {ArrayBuffer} data data to push into the muxer
*/
push(data) {
// Cast array buffer to correct type for transmuxer
const segment = new Uint8Array(data.data, data.byteOffset, data.byteLength);
this.transmuxer.push(segment);
}
/**
* Recreate the transmuxer so that the next segment added via `push`
* starts with a fresh transmuxer.
*/
reset() {
this.transmuxer.reset();
}
/**
* Set the value that will be used as the `baseMediaDecodeTime` time for the
* next segment pushed in. Subsequent segments will have their `baseMediaDecodeTime`
* set relative to the first based on the PTS values.
*
* @param {Object} data used to set the timestamp offset in the muxer
*/
setTimestampOffset(data) {
const timestampOffset = data.timestampOffset || 0;
this.transmuxer.setBaseMediaDecodeTime(Math.round(secondsToVideoTs(timestampOffset)));
}
setAudioAppendStart(data) {
this.transmuxer.setAudioAppendStart(Math.ceil(secondsToVideoTs(data.appendStart)));
}
setRemux(data) {
this.transmuxer.setRemux(data.remux);
}
/**
* Forces the pipeline to finish processing the last segment and emit its
* results.
*
* @param {Object} data event data, not really used
*/
flush(data) {
this.transmuxer.flush();
// transmuxed done action is fired after both audio/video pipelines are flushed
self.postMessage({
action: 'done',
type: 'transmuxed'
});
}
endTimeline() {
this.transmuxer.endTimeline();
// transmuxed endedtimeline action is fired after both audio/video pipelines end their
// timelines
self.postMessage({
action: 'endedtimeline',
type: 'transmuxed'
});
}
alignGopsWith(data) {
this.transmuxer.alignGopsWith(data.gopsToAlignWith.slice());
}
}
/**
* Our web worker interface so that things can talk to mux.js
* that will be running in a web worker. The scope is passed to this by
* webworkify.
*
* @param {Object} self the scope for the web worker
*/
self.onmessage = function(event) {
if (event.data.action === 'init' && event.data.options) {
this.messageHandlers = new MessageHandlers(self, event.data.options);
return;
}
if (!this.messageHandlers) {
this.messageHandlers = new MessageHandlers(self);
}
if (event.data && event.data.action && event.data.action !== 'init') {
if (this.messageHandlers[event.data.action]) {
this.messageHandlers[event.data.action](event.data);
}
}
};
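// Illustrative sketch, not part of the original source: the message protocol
// the handlers above expect from the main thread. `worker` would be a Worker
// built from this file; the option values and byte source are hypothetical.
// const tsBytes = new Uint8Array(188); // bytes of an mpeg2-ts segment
// worker.postMessage({ action: 'init', options: { remux: true } });
// worker.postMessage({
//   action: 'push',
//   data: tsBytes.buffer,
//   byteOffset: tsBytes.byteOffset,
//   byteLength: tsBytes.byteLength
// }, [tsBytes.buffer]); // transfer ownership instead of copying
// worker.postMessage({ action: 'flush' }); // 'data' messages, then 'done'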

File diff suppressed because it is too large

128
node_modules/@videojs/http-streaming/src/util/codecs.js generated vendored Normal file
View file

@ -0,0 +1,128 @@
/**
* @file - codecs.js - Handles tasks regarding codec strings, such as translating legacy
* codecs to standard codec strings, or parsing codec strings into objects that can be examined.
*/
import {
translateLegacyCodec,
parseCodecs,
codecsFromDefault
} from '@videojs/vhs-utils/es/codecs.js';
import logger from './logger.js';
const logFn = logger('CodecUtils');
/**
* Returns the set of codecs parsed from the playlist's CODECS attribute, or
* undefined if no codecs were specified in the playlist
*
* @param {Playlist} media the current media playlist
* @return {Object} an object with the video and audio codecs
*/
const getCodecs = function(media) {
// if the codecs were explicitly specified, use them instead of the
// defaults
const mediaAttributes = media.attributes || {};
if (mediaAttributes.CODECS) {
return parseCodecs(mediaAttributes.CODECS);
}
};
export const isMaat = (master, media) => {
const mediaAttributes = media.attributes || {};
return master && master.mediaGroups && master.mediaGroups.AUDIO &&
mediaAttributes.AUDIO &&
master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
};
export const isMuxed = (master, media) => {
if (!isMaat(master, media)) {
return true;
}
const mediaAttributes = media.attributes || {};
const audioGroup = master.mediaGroups.AUDIO[mediaAttributes.AUDIO];
for (const groupId in audioGroup) {
// If an audio group has a URI (the case for HLS, as HLS will use external playlists),
// or there are listed playlists (the case for DASH, as the manifest will have already
// provided all of the details necessary to generate the audio playlist, as opposed to
// HLS' externally requested playlists), then the content is demuxed.
if (!audioGroup[groupId].uri && !audioGroup[groupId].playlists) {
return true;
}
}
return false;
};
export const unwrapCodecList = function(codecList) {
const codecs = {};
codecList.forEach(({mediaType, type, details}) => {
codecs[mediaType] = codecs[mediaType] || [];
codecs[mediaType].push(translateLegacyCodec(`${type}${details}`));
});
Object.keys(codecs).forEach(function(mediaType) {
if (codecs[mediaType].length > 1) {
logFn(`multiple ${mediaType} codecs found as attributes: ${codecs[mediaType].join(', ')}. Setting playlist codecs to null so that we wait for mux.js to probe segments for real codecs.`);
codecs[mediaType] = null;
return;
}
codecs[mediaType] = codecs[mediaType][0];
});
return codecs;
};
export const codecCount = function(codecObj) {
let count = 0;
if (codecObj.audio) {
count++;
}
if (codecObj.video) {
count++;
}
return count;
};
/**
* Calculates the codec strings for a working configuration of
* SourceBuffers to play variant streams in a master playlist. If
* there is no possible working configuration, an empty object will be
* returned.
*
* @param {Object} master the m3u8 object for the master playlist
* @param {Object} media the m3u8 object for the variant playlist
* @return {Object} the codec strings.
*
* @private
*/
export const codecsForPlaylist = function(master, media) {
const mediaAttributes = media.attributes || {};
const codecInfo = unwrapCodecList(getCodecs(media) || []);
// HLS with multiple-audio tracks must always get an audio codec.
// Put another way, there is no way to have a video-only multiple-audio HLS!
if (isMaat(master, media) && !codecInfo.audio) {
if (!isMuxed(master, media)) {
// It is possible for codecs to be specified on the audio media group playlist but
// not on the rendition playlist. This is mostly the case for DASH, where audio and
// video are always separate (and separately specified).
const defaultCodecs = unwrapCodecList(codecsFromDefault(master, mediaAttributes.AUDIO) || []);
if (defaultCodecs.audio) {
codecInfo.audio = defaultCodecs.audio;
}
}
}
return codecInfo;
};
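// Illustrative sketch, not part of the original source: a minimal master and
// media playlist fed through codecsForPlaylist. The playlist shapes are
// hypothetical but use the attribute names handled above.
const exampleMaster = { mediaGroups: { AUDIO: {} } };
const exampleMedia = { attributes: { CODECS: 'avc1.4d401e, mp4a.40.2' } };
const exampleCodecs = codecsForPlaylist(exampleMaster, exampleMedia);
// Expected shape: { video: 'avc1.4d401e', audio: 'mp4a.40.2' }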

View file

@ -0,0 +1,86 @@
import {getId3Offset} from '@videojs/vhs-utils/es/id3-helpers';
import {detectContainerForBytes} from '@videojs/vhs-utils/es/containers';
import {stringToBytes, concatTypedArrays} from '@videojs/vhs-utils/es/byte-helpers';
import {callbackWrapper} from '../xhr';
// calls back if the request is readyState DONE
// which will only happen if the request is complete.
const callbackOnCompleted = (request, cb) => {
if (request.readyState === 4) {
return cb();
}
return;
};
const containerRequest = (uri, xhr, cb) => {
let bytes = [];
let id3Offset;
let finished = false;
const endRequestAndCallback = function(err, req, type, _bytes) {
req.abort();
finished = true;
return cb(err, req, type, _bytes);
};
const progressListener = function(error, request) {
if (finished) {
return;
}
if (error) {
return endRequestAndCallback(error, request, '', bytes);
}
// grab the new part of the content that was just downloaded
const newPart = request.responseText.substring(
bytes && bytes.byteLength || 0,
request.responseText.length
);
// add that onto bytes
bytes = concatTypedArrays(bytes, stringToBytes(newPart, true));
id3Offset = id3Offset || getId3Offset(bytes);
// we need at least 10 bytes to determine a type
// or we need at least two bytes after an id3Offset
if (bytes.length < 10 || (id3Offset && bytes.length < id3Offset + 2)) {
return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
}
const type = detectContainerForBytes(bytes);
// if this looks like a ts segment but we don't have enough data
// to see the second sync byte, wait until we have enough data
// before declaring it ts
if (type === 'ts' && bytes.length < 188) {
return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
}
// this may be an unsynced ts segment
// wait for 376 bytes before detecting no container
if (!type && bytes.length < 376) {
return callbackOnCompleted(request, () => endRequestAndCallback(error, request, '', bytes));
}
return endRequestAndCallback(null, request, type, bytes);
};
const options = {
uri,
beforeSend(request) {
// this forces the browser to pass the bytes to us unprocessed
request.overrideMimeType('text/plain; charset=x-user-defined');
request.addEventListener('progress', function({total, loaded}) {
return callbackWrapper(request, null, {statusCode: request.status}, progressListener);
});
}
};
const request = xhr(options, function(error, response) {
return callbackWrapper(request, error, response, progressListener);
});
return request;
};
export default containerRequest;
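// Illustrative sketch, not part of the original source: probing a segment's
// container type with the helper above. `vhsXhr` (an xhr function from this
// package's xhrFactory) and the URL are hypothetical.
// containerRequest('https://example.com/segment.ts', vhsXhr, (err, req, type, bytes) => {
//   // `type` is a container string such as 'ts' or 'mp4', or '' when it
//   // could not be determined; the request is aborted as soon as enough
//   // bytes have arrived to decide.
// });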

119
node_modules/@videojs/http-streaming/src/util/gops.js generated vendored Normal file
View file

@ -0,0 +1,119 @@
import { ONE_SECOND_IN_TS } from 'mux.js/lib/utils/clock';
/**
* Returns a list of gops in the buffer that have a pts value of 3 seconds or more in
* front of current time.
*
* @param {Array} buffer
* The current buffer of gop information
* @param {number} currentTime
* The current time
* @param {Double} mapping
* Offset to map display time to stream presentation time
* @return {Array}
* List of gops considered safe to append over
*/
export const gopsSafeToAlignWith = (buffer, currentTime, mapping) => {
if (typeof currentTime === 'undefined' || currentTime === null || !buffer.length) {
return [];
}
// pts value for current time + 3 seconds to give a bit more wiggle room
const currentTimePts = Math.ceil((currentTime - mapping + 3) * ONE_SECOND_IN_TS);
let i;
for (i = 0; i < buffer.length; i++) {
if (buffer[i].pts > currentTimePts) {
break;
}
}
return buffer.slice(i);
};
/**
* Appends gop information (timing and byteLength) received by the transmuxer for the
* gops appended in the last call to appendBuffer
*
* @param {Array} buffer
* The current buffer of gop information
* @param {Array} gops
* List of new gop information
* @param {boolean} replace
* If true, replace the buffer with the new gop information. If false, append the
* new gop information to the buffer at the right location in time.
* @return {Array}
* Updated list of gop information
*/
export const updateGopBuffer = (buffer, gops, replace) => {
if (!gops.length) {
return buffer;
}
if (replace) {
// If we are in safe append mode, then completely overwrite the gop buffer
// with the most recently appended data. This will make sure that when appending
// future segments, we only try to align with gops that are both ahead of current
// time and in the last segment appended.
return gops.slice();
}
const start = gops[0].pts;
let i = 0;
for (i; i < buffer.length; i++) {
if (buffer[i].pts >= start) {
break;
}
}
return buffer.slice(0, i).concat(gops);
};
/**
* Removes gop information in buffer that overlaps with provided start and end
*
* @param {Array} buffer
* The current buffer of gop information
* @param {Double} start
* position to start the remove at
* @param {Double} end
* position to end the remove at
* @param {Double} mapping
* Offset to map display time to stream presentation time
*/
export const removeGopBuffer = (buffer, start, end, mapping) => {
const startPts = Math.ceil((start - mapping) * ONE_SECOND_IN_TS);
const endPts = Math.ceil((end - mapping) * ONE_SECOND_IN_TS);
const updatedBuffer = buffer.slice();
let i = buffer.length;
while (i--) {
if (buffer[i].pts <= endPts) {
break;
}
}
if (i === -1) {
// no removal because end of remove range is before start of buffer
return updatedBuffer;
}
let j = i + 1;
while (j--) {
if (buffer[j].pts <= startPts) {
break;
}
}
// clamp remove range start to 0 index
j = Math.max(j, 0);
updatedBuffer.splice(j, i - j + 1);
return updatedBuffer;
};
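// Illustrative sketch, not part of the original source: the pts arithmetic
// used by gopsSafeToAlignWith, with hypothetical values (ONE_SECOND_IN_TS is
// the 90khz mpeg-ts clock, i.e. 90000).
const exampleBuffer = [
  { pts: 10 * ONE_SECOND_IN_TS }, // gop at 10s of stream time
  { pts: 14 * ONE_SECOND_IN_TS } // gop at 14s of stream time
];
// With currentTime 8 and mapping 0, the threshold is ceil((8 - 0 + 3) * 90000)
// = 990000, so only the 14s gop survives:
const exampleSafeGops = gopsSafeToAlignWith(exampleBuffer, 8, 0); // [{ pts: 1260000 }]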

View file

@ -0,0 +1,11 @@
import videojs from 'video.js';
const logger = (source) => {
if (videojs.log.debug) {
return videojs.log.debug.bind(videojs, 'VHS:', `${source} >`);
}
return function() {};
};
export default logger;

View file

@ -0,0 +1 @@
export default function noop() {}

View file

@ -0,0 +1,23 @@
/**
* Combine all segments into a single Uint8Array
*
* @param {Object} segmentObj
* @return {Uint8Array} concatenated bytes
* @private
*/
export const concatSegments = (segmentObj) => {
let offset = 0;
let tempBuffer;
if (segmentObj.bytes) {
tempBuffer = new Uint8Array(segmentObj.bytes);
// combine the individual segments into one large typed-array
segmentObj.segments.forEach((segment) => {
tempBuffer.set(segment, offset);
offset += segment.byteLength;
});
}
return tempBuffer;
};

View file

@ -0,0 +1,41 @@
const shallowEqual = function(a, b) {
// if both are undefined
// or one or the other is undefined
// they are not equal
if ((!a && !b) || (!a && b) || (a && !b)) {
return false;
}
// they are the same object and thus, equal
if (a === b) {
return true;
}
// sort keys so we can make sure they have
// all the same keys later.
const akeys = Object.keys(a).sort();
const bkeys = Object.keys(b).sort();
// different number of keys, not equal
if (akeys.length !== bkeys.length) {
return false;
}
for (let i = 0; i < akeys.length; i++) {
const key = akeys[i];
// different sorted keys, not equal
if (key !== bkeys[i]) {
return false;
}
// different values, not equal
if (a[key] !== b[key]) {
return false;
}
}
return true;
};
export default shallowEqual;
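// Illustrative sketch, not part of the original source: shallowEqual compares
// one level deep, so key order is irrelevant but nested objects are compared
// by reference.
const exampleFlatEqual = shallowEqual({ a: 1, b: 2 }, { b: 2, a: 1 }); // true
const exampleNestedEqual = shallowEqual({ a: { x: 1 } }, { a: { x: 1 } }); // false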

View file

@ -0,0 +1,9 @@
export const stringToArrayBuffer = (string) => {
const view = new Uint8Array(new ArrayBuffer(string.length));
for (let i = 0; i < string.length; i++) {
view[i] = string.charCodeAt(i);
}
return view.buffer;
};

View file

@ -0,0 +1,2 @@
export const uint8ToUtf8 = (uintArray) =>
decodeURIComponent(escape(String.fromCharCode.apply(null, uintArray)));

View file

@ -0,0 +1,311 @@
/**
* @file text-tracks.js
*/
import window from 'global/window';
import videojs from 'video.js';
/**
* Create captions text tracks on video.js if they do not exist
*
* @param {Object} inbandTextTracks a reference to current inbandTextTracks
* @param {Object} tech the video.js tech
* @param {Object} captionStream the caption stream to create
* @private
*/
export const createCaptionsTrackIfNotExists = function(inbandTextTracks, tech, captionStream) {
if (!inbandTextTracks[captionStream]) {
tech.trigger({type: 'usage', name: 'vhs-608'});
tech.trigger({type: 'usage', name: 'hls-608'});
let instreamId = captionStream;
// we need to translate SERVICEn for 708 to how mux.js currently labels them
if (/^cc708_/.test(captionStream)) {
instreamId = 'SERVICE' + captionStream.split('_')[1];
}
const track = tech.textTracks().getTrackById(instreamId);
if (track) {
// Reuse an existing track with a CC# id because this was
// very likely created by videojs-contrib-hls from information
// in the m3u8 for us to use
inbandTextTracks[captionStream] = track;
} else {
// This section gets called when we have caption services that aren't specified in the manifest.
// Manifest level caption services are handled in media-groups.js under CLOSED-CAPTIONS.
const captionServices = tech.options_.vhs && tech.options_.vhs.captionServices || {};
let label = captionStream;
let language = captionStream;
let def = false;
const captionService = captionServices[instreamId];
if (captionService) {
label = captionService.label;
language = captionService.language;
def = captionService.default;
}
// Otherwise, create a track with the default `CC#` label and
// without a language
inbandTextTracks[captionStream] = tech.addRemoteTextTrack({
kind: 'captions',
id: instreamId,
// TODO: investigate why this doesn't seem to turn the caption on by default
default: def,
label,
language
}, false).track;
}
}
};
/**
* Add caption text track data to a source handler given an array of captions
*
* @param {Object} options
* @param {Object} inbandTextTracks the inband text tracks
* @param {number} timestampOffset the timestamp offset of the source buffer
* @param {Array} captionArray an array of caption data
* @private
*/
export const addCaptionData = function({
inbandTextTracks,
captionArray,
timestampOffset
}) {
if (!captionArray) {
return;
}
const Cue = window.WebKitDataCue || window.VTTCue;
captionArray.forEach((caption) => {
const track = caption.stream;
inbandTextTracks[track].addCue(new Cue(
caption.startTime + timestampOffset,
caption.endTime + timestampOffset,
caption.text
));
});
};
/**
* Define properties on a cue for backwards compatibility,
* but warn the user that the way they are using it
* is deprecated and will be removed at a later date.
*
* @param {Cue} cue the cue to add the properties on
* @private
*/
const deprecateOldCue = function(cue) {
Object.defineProperties(cue.frame, {
id: {
get() {
videojs.log.warn('cue.frame.id is deprecated. Use cue.value.key instead.');
return cue.value.key;
}
},
value: {
get() {
videojs.log.warn('cue.frame.value is deprecated. Use cue.value.data instead.');
return cue.value.data;
}
},
privateData: {
get() {
videojs.log.warn('cue.frame.privateData is deprecated. Use cue.value.data instead.');
return cue.value.data;
}
}
});
};
/**
* Add metadata text track data to a source handler given an array of metadata
*
* @param {Object} options
* @param {Object} inbandTextTracks the inband text tracks
* @param {Array} metadataArray an array of meta data
* @param {number} timestampOffset the timestamp offset of the source buffer
* @param {number} videoDuration the duration of the video
* @private
*/
export const addMetadata = ({
inbandTextTracks,
metadataArray,
timestampOffset,
videoDuration
}) => {
if (!metadataArray) {
return;
}
const Cue = window.WebKitDataCue || window.VTTCue;
const metadataTrack = inbandTextTracks.metadataTrack_;
if (!metadataTrack) {
return;
}
metadataArray.forEach((metadata) => {
const time = metadata.cueTime + timestampOffset;
// if time isn't a finite number between 0 and Infinity, like NaN,
// ignore this bit of metadata.
// This likely occurs when you have a non-timed ID3 tag like TIT2,
// which is the "Title/Songname/Content description" frame
if (typeof time !== 'number' || window.isNaN(time) || time < 0 || !(time < Infinity)) {
return;
}
metadata.frames.forEach((frame) => {
const cue = new Cue(
time,
time,
frame.value || frame.url || frame.data || ''
);
cue.frame = frame;
cue.value = frame;
deprecateOldCue(cue);
metadataTrack.addCue(cue);
});
});
if (!metadataTrack.cues || !metadataTrack.cues.length) {
return;
}
// Update the metadata cues so that
// the endTime of each cue is the startTime of the next cue
// the endTime of last cue is the duration of the video
const cues = metadataTrack.cues;
const cuesArray = [];
// Create a copy of the TextTrackCueList...
// ...disregarding cues with a falsy value
for (let i = 0; i < cues.length; i++) {
if (cues[i]) {
cuesArray.push(cues[i]);
}
}
// Group cues by their startTime value
const cuesGroupedByStartTime = cuesArray.reduce((obj, cue) => {
const timeSlot = obj[cue.startTime] || [];
timeSlot.push(cue);
obj[cue.startTime] = timeSlot;
return obj;
}, {});
// Sort startTimes by ascending order
const sortedStartTimes = Object.keys(cuesGroupedByStartTime)
.sort((a, b) => Number(a) - Number(b));
// Map each cue group's endTime to the next group's startTime
sortedStartTimes.forEach((startTime, idx) => {
const cueGroup = cuesGroupedByStartTime[startTime];
const nextTime = Number(sortedStartTimes[idx + 1]) || videoDuration;
// Map each cue's endTime to the next group's startTime
cueGroup.forEach((cue) => {
cue.endTime = nextTime;
});
});
};
/**
* Create metadata text track on video.js if it does not exist
*
* @param {Object} inbandTextTracks a reference to current inbandTextTracks
* @param {string} dispatchType the inband metadata track dispatch type
* @param {Object} tech the video.js tech
* @private
*/
export const createMetadataTrackIfNotExists = (inbandTextTracks, dispatchType, tech) => {
if (inbandTextTracks.metadataTrack_) {
return;
}
inbandTextTracks.metadataTrack_ = tech.addRemoteTextTrack({
kind: 'metadata',
label: 'Timed Metadata'
}, false).track;
inbandTextTracks.metadataTrack_.inBandMetadataTrackDispatchType = dispatchType;
};
/**
* Remove cues from a track on video.js.
*
* @param {Double} start start of where we should remove the cue
* @param {Double} end end of where we should remove the cue
* @param {Object} track the text track to remove the cues from
* @private
*/
export const removeCuesFromTrack = function(start, end, track) {
let i;
let cue;
if (!track) {
return;
}
if (!track.cues) {
return;
}
i = track.cues.length;
while (i--) {
cue = track.cues[i];
// Remove any cue within the provided start and end time
if (cue.startTime >= start && cue.endTime <= end) {
track.removeCue(cue);
}
}
};
/**
* Remove duplicate cues from a track on video.js (a cue is considered a
* duplicate if it has the same time interval and text as another)
*
* @param {Object} track the text track to remove the duplicate cues from
* @private
*/
export const removeDuplicateCuesFromTrack = function(track) {
const cues = track.cues;
if (!cues) {
return;
}
for (let i = 0; i < cues.length; i++) {
const duplicates = [];
let occurrences = 0;
for (let j = 0; j < cues.length; j++) {
if (
cues[i].startTime === cues[j].startTime &&
cues[i].endTime === cues[j].endTime &&
cues[i].text === cues[j].text
) {
occurrences++;
if (occurrences > 1) {
duplicates.push(cues[j]);
}
}
}
if (duplicates.length) {
duplicates.forEach(dupe => track.removeCue(dupe));
}
}
};
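// Illustrative sketch, not part of the original source: the end-time
// stitching performed by addMetadata above, with hypothetical values. Cues
// starting at 5s and 9s on a 30s video end at 9s and 30s respectively, so
// each metadata cue stays "active" until the next one begins.
const exampleCueStartTimes = [5, 9];
const exampleVideoDuration = 30;
const exampleCueEndTimes = exampleCueStartTimes.map((startTime, idx) =>
  exampleCueStartTimes[idx + 1] || exampleVideoDuration); // [9, 30]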

402
node_modules/@videojs/http-streaming/src/util/time.js generated vendored Normal file
View file

@ -0,0 +1,402 @@
// TODO handle fmp4 case where the timing info is accurate and doesn't involve transmux
/**
* @file time.js
*/
import Playlist from '../playlist';
// Add 25% to the segment duration to account for small discrepancies in segment timing.
// 25% was arbitrarily chosen, and may need to be refined over time.
const SEGMENT_END_FUDGE_PERCENT = 0.25;
/**
* Converts a player time (any time that can be gotten/set from player.currentTime(),
* e.g., any time within player.seekable().start(0) to player.seekable().end(0)) to a
* program time (any time referencing the real world (e.g., EXT-X-PROGRAM-DATE-TIME)).
*
* The containing segment is required as the EXT-X-PROGRAM-DATE-TIME serves as an "anchor
* point" (a point where we have a mapping from program time to player time, with player
* time being the post transmux start of the segment).
*
* For more details, see [this doc](../../docs/program-time-from-player-time.md).
*
* @param {number} playerTime the player time
* @param {Object} segment the segment which contains the player time
* @return {Date} program time
*/
export const playerTimeToProgramTime = (playerTime, segment) => {
if (!segment.dateTimeObject) {
// Can't convert without an "anchor point" for the program time (i.e., a time that can
// be used to map the start of a segment with a real world time).
return null;
}
const transmuxerPrependedSeconds = segment.videoTimingInfo.transmuxerPrependedSeconds;
const transmuxedStart = segment.videoTimingInfo.transmuxedPresentationStart;
// get the start of the content from before old content is prepended
const startOfSegment = transmuxedStart + transmuxerPrependedSeconds;
const offsetFromSegmentStart = playerTime - startOfSegment;
return new Date(segment.dateTimeObject.getTime() + offsetFromSegmentStart * 1000);
};
export const originalSegmentVideoDuration = (videoTimingInfo) => {
return videoTimingInfo.transmuxedPresentationEnd -
videoTimingInfo.transmuxedPresentationStart -
videoTimingInfo.transmuxerPrependedSeconds;
};
/**
* Finds a segment that contains the time requested given as an ISO-8601 string. The
* returned segment might be an estimate or an accurate match.
*
* @param {string} programTime The ISO-8601 programTime to find a match for
* @param {Object} playlist A playlist object to search within
*/
export const findSegmentForProgramTime = (programTime, playlist) => {
// Assumptions:
// - verifyProgramDateTimeTags has already been run
// - live streams have been started
let dateTimeObject;
try {
dateTimeObject = new Date(programTime);
} catch (e) {
return null;
}
if (!playlist || !playlist.segments || playlist.segments.length === 0) {
return null;
}
let segment = playlist.segments[0];
if (dateTimeObject < segment.dateTimeObject) {
// Requested time is before stream start.
return null;
}
for (let i = 0; i < playlist.segments.length - 1; i++) {
segment = playlist.segments[i];
const nextSegmentStart = playlist.segments[i + 1].dateTimeObject;
if (dateTimeObject < nextSegmentStart) {
break;
}
}
const lastSegment = playlist.segments[playlist.segments.length - 1];
const lastSegmentStart = lastSegment.dateTimeObject;
const lastSegmentDuration = lastSegment.videoTimingInfo ?
originalSegmentVideoDuration(lastSegment.videoTimingInfo) :
lastSegment.duration + lastSegment.duration * SEGMENT_END_FUDGE_PERCENT;
const lastSegmentEnd =
new Date(lastSegmentStart.getTime() + lastSegmentDuration * 1000);
if (dateTimeObject > lastSegmentEnd) {
// Beyond the end of the stream, or our best guess of the end of the stream.
return null;
}
if (dateTimeObject > lastSegmentStart) {
segment = lastSegment;
}
return {
segment,
estimatedStart: segment.videoTimingInfo ?
segment.videoTimingInfo.transmuxedPresentationStart :
Playlist.duration(
playlist,
playlist.mediaSequence + playlist.segments.indexOf(segment)
),
// Although the selected segment should be accurate, given that all segments
// have accurate date time objects, the segment's "player time" (the start
// time in the player) can't be considered accurate unless the video has been
// transmuxed at some point (determined by the presence of the videoTimingInfo
// object).
type: segment.videoTimingInfo ? 'accurate' : 'estimate'
};
};
/**
* Finds a segment that contains the given player time(in seconds).
*
* @param {number} time The player time to find a match for
* @param {Object} playlist A playlist object to search within
*/
export const findSegmentForPlayerTime = (time, playlist) => {
// Assumptions:
// - there will always be a segment.duration
// - we can start from zero
// - segments are in time order
if (!playlist || !playlist.segments || playlist.segments.length === 0) {
return null;
}
let segmentEnd = 0;
let segment;
for (let i = 0; i < playlist.segments.length; i++) {
segment = playlist.segments[i];
// videoTimingInfo is set after the segment is downloaded and transmuxed, and
// should contain the most accurate values we have for the segment's player times.
//
// Use the accurate transmuxedPresentationEnd value if it is available, otherwise fall
// back to an estimate based on the manifest derived (inaccurate) segment.duration, to
// calculate an end value.
segmentEnd = segment.videoTimingInfo ?
segment.videoTimingInfo.transmuxedPresentationEnd : segmentEnd + segment.duration;
if (time <= segmentEnd) {
break;
}
}
const lastSegment = playlist.segments[playlist.segments.length - 1];
if (lastSegment.videoTimingInfo &&
lastSegment.videoTimingInfo.transmuxedPresentationEnd < time) {
// The time requested is beyond the stream end.
return null;
}
if (time > segmentEnd) {
// The time is within or beyond the last segment.
//
// Check to see if the time is beyond a reasonable guess of the end of the stream.
if (time > segmentEnd + (lastSegment.duration * SEGMENT_END_FUDGE_PERCENT)) {
// Technically, because the duration value is only an estimate, the time may still
// exist in the last segment, however, there isn't enough information to make even
// a reasonable estimate.
return null;
}
segment = lastSegment;
}
return {
segment,
estimatedStart: segment.videoTimingInfo ?
segment.videoTimingInfo.transmuxedPresentationStart : segmentEnd - segment.duration,
// Because videoTimingInfo is only set after transmux, it is the only way to get
// accurate timing values.
type: segment.videoTimingInfo ? 'accurate' : 'estimate'
};
};
/**
* Gives the offset of the comparisonTimestamp from the programTime timestamp in seconds.
* If the offset returned is positive, the programTime occurs after the
* comparisonTimestamp.
* If the offset is negative, the programTime occurs before the comparisonTimestamp.
*
* @param {string} comparisonTimeStamp An ISO-8601 timestamp to compare against
* @param {string} programTime The programTime as an ISO-8601 string
* @return {number} offset
*/
export const getOffsetFromTimestamp = (comparisonTimeStamp, programTime) => {
let segmentDateTime;
let programDateTime;
try {
segmentDateTime = new Date(comparisonTimeStamp);
programDateTime = new Date(programTime);
} catch (e) {
// TODO handle error
}
const segmentTimeEpoch = segmentDateTime.getTime();
const programTimeEpoch = programDateTime.getTime();
return (programTimeEpoch - segmentTimeEpoch) / 1000;
};
/**
* Checks that all segments in this playlist have programDateTime tags.
*
* @param {Object} playlist A playlist object
*/
export const verifyProgramDateTimeTags = (playlist) => {
if (!playlist.segments || playlist.segments.length === 0) {
return false;
}
for (let i = 0; i < playlist.segments.length; i++) {
const segment = playlist.segments[i];
if (!segment.dateTimeObject) {
return false;
}
}
return true;
};
/**
* Returns the programTime of the media given a playlist and a playerTime.
* The playlist must have programDateTime tags for a programDateTime tag to be returned.
* If the segments containing the time requested have not been buffered yet, an estimate
* may be returned to the callback.
*
* @param {Object} args
* @param {Object} args.playlist A playlist object to search within
* @param {number} time A playerTime in seconds
* @param {Function} callback(err, programTime)
* @return {string} err.message A detailed error message
* @return {Object} programTime
* @return {number} programTime.mediaSeconds The streamTime in seconds
* @return {string} programTime.programDateTime The programTime as an ISO-8601 String
*/
export const getProgramTime = ({
playlist,
time = undefined,
callback
}) => {
if (!callback) {
throw new Error('getProgramTime: callback must be provided');
}
if (!playlist || time === undefined) {
return callback({
message: 'getProgramTime: playlist and time must be provided'
});
}
const matchedSegment = findSegmentForPlayerTime(time, playlist);
if (!matchedSegment) {
return callback({
message: 'valid programTime was not found'
});
}
if (matchedSegment.type === 'estimate') {
return callback({
message:
'Accurate programTime could not be determined.' +
' Please seek to e.seekTime and try again',
seekTime: matchedSegment.estimatedStart
});
}
const programTimeObject = {
mediaSeconds: time
};
const programTime = playerTimeToProgramTime(time, matchedSegment.segment);
if (programTime) {
programTimeObject.programDateTime = programTime.toISOString();
}
return callback(null, programTimeObject);
};
/**
* Seeks in the player to a time that matches the given programTime ISO-8601 string.
*
* @param {Object} args
* @param {string} args.programTime A programTime to seek to as an ISO-8601 String
* @param {Object} args.playlist A playlist to look within
* @param {number} args.retryCount The number of times to try for an accurate seek. Default is 2.
* @param {Function} args.seekTo A method to perform a seek
* @param {boolean} args.pauseAfterSeek Whether to end in a paused state after seeking. Default is true.
* @param {Object} args.tech The tech to seek on
* @param {Function} args.callback(err, newTime) A callback to return the new time to
* @return {string} err.message A detailed error message
* @return {number} newTime The exact time that was seeked to in seconds
*/
export const seekToProgramTime = ({
programTime,
playlist,
retryCount = 2,
seekTo,
pauseAfterSeek = true,
tech,
callback
}) => {
if (!callback) {
throw new Error('seekToProgramTime: callback must be provided');
}
if (typeof programTime === 'undefined' || !playlist || !seekTo) {
return callback({
message: 'seekToProgramTime: programTime, seekTo and playlist must be provided'
});
}
if (!playlist.endList && !tech.hasStarted_) {
return callback({
message: 'player must be playing a live stream to start buffering'
});
}
if (!verifyProgramDateTimeTags(playlist)) {
return callback({
message: 'programDateTime tags must be provided in the manifest ' + playlist.resolvedUri
});
}
const matchedSegment = findSegmentForProgramTime(programTime, playlist);
// no match
if (!matchedSegment) {
return callback({
message: `${programTime} was not found in the stream`
});
}
const segment = matchedSegment.segment;
const mediaOffset = getOffsetFromTimestamp(
segment.dateTimeObject,
programTime
);
if (matchedSegment.type === 'estimate') {
// we've run out of retries
if (retryCount === 0) {
return callback({
message: `${programTime} is not buffered yet. Try again`
});
}
seekTo(matchedSegment.estimatedStart + mediaOffset);
tech.one('seeked', () => {
seekToProgramTime({
programTime,
playlist,
retryCount: retryCount - 1,
seekTo,
pauseAfterSeek,
tech,
callback
});
});
return;
}
// Since the segment.start value is determined from the buffered end or ending time
// of the prior segment, the seekToTime doesn't need to account for any transmuxer
// modifications.
const seekToTime = segment.start + mediaOffset;
const seekedCallback = () => {
return callback(null, tech.currentTime());
};
// listen for seeked event
tech.one('seeked', seekedCallback);
// pause before seeking as video.js will restore this state
if (pauseAfterSeek) {
tech.pause();
}
seekTo(seekToTime);
};
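// Illustrative sketch, not part of the original source: typical usage of the
// helpers above. `examplePlaylist` stands in for a media playlist whose
// segments carry EXT-X-PROGRAM-DATE-TIME tags.
// getProgramTime({
//   playlist: examplePlaylist,
//   time: 42,
//   callback: (err, programTime) => {
//     if (err && typeof err.seekTime === 'number') {
//       // Only an estimate was possible; seek to err.seekTime and retry.
//       return;
//     }
//     // programTime.programDateTime is an ISO-8601 string for player time 42.
//   }
// });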

View file

@ -0,0 +1,9 @@
const toTitleCase = function(string) {
if (typeof string !== 'string') {
return string;
}
return string.replace(/./, (w) => w.toUpperCase());
};
export default toTitleCase;

View file

@ -0,0 +1,42 @@
export const workerCallback = function(options) {
const transmuxer = options.transmuxer;
const endAction = options.endAction || options.action;
const callback = options.callback;
const message = Object.assign({}, options, {endAction: null, transmuxer: null, callback: null});
const listenForEndEvent = (event) => {
if (event.data.action !== endAction) {
return;
}
transmuxer.removeEventListener('message', listenForEndEvent);
// transfer ownership of bytes back to us.
if (event.data.data) {
event.data.data = new Uint8Array(
event.data.data,
options.byteOffset || 0,
options.byteLength || event.data.data.byteLength
);
if (options.data) {
options.data = event.data.data;
}
}
callback(event.data);
};
transmuxer.addEventListener('message', listenForEndEvent);
if (options.data) {
const isArrayBuffer = options.data instanceof ArrayBuffer;
message.byteOffset = isArrayBuffer ? 0 : options.data.byteOffset;
message.byteLength = options.data.byteLength;
const transfers = [isArrayBuffer ? options.data : options.data.buffer];
transmuxer.postMessage(message, transfers);
} else {
transmuxer.postMessage(message);
}
};
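// Illustrative sketch, not part of the original source: asking the transmuxer
// web worker to probe an mp4 segment via the helper above. `exampleWorker`
// and `exampleMp4Bytes` are hypothetical.
// workerCallback({
//   action: 'probeMp4Tracks', // doubles as endAction since none is given
//   transmuxer: exampleWorker,
//   data: exampleMp4Bytes, // transferred to the worker, handed back on completion
//   callback: ({ tracks, data }) => {
//     // `data` has been rewrapped as a Uint8Array and is usable again here.
//   }
// });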

File diff suppressed because it is too large

View file

@ -0,0 +1,491 @@
/**
* @file vtt-segment-loader.js
*/
import SegmentLoader from './segment-loader';
import videojs from 'video.js';
import window from 'global/window';
import { removeCuesFromTrack, removeDuplicateCuesFromTrack } from './util/text-tracks';
import { initSegmentId } from './bin-utils';
import { uint8ToUtf8 } from './util/string';
import { REQUEST_ERRORS } from './media-segment-request';
import { ONE_SECOND_IN_TS } from 'mux.js/lib/utils/clock';
const VTT_LINE_TERMINATORS =
new Uint8Array('\n\n'.split('').map(char => char.charCodeAt(0)));
/**
* An object that manages segment loading and appending.
*
* @class VTTSegmentLoader
* @param {Object} options required and optional options
* @extends videojs.EventTarget
*/
export default class VTTSegmentLoader extends SegmentLoader {
constructor(settings, options = {}) {
super(settings, options);
// SegmentLoader requires a MediaSource be specified or it will throw an error;
// however, VTTSegmentLoader has no need of a media source, so delete the reference
this.mediaSource_ = null;
this.subtitlesTrack_ = null;
this.loaderType_ = 'subtitle';
this.featuresNativeTextTracks_ = settings.featuresNativeTextTracks;
// The VTT segment will have its own time mappings. Saving VTT segment timing info in
// the sync controller leads to improper behavior.
this.shouldSaveSegmentTimingInfo_ = false;
}
createTransmuxer_() {
// don't need to transmux any subtitles
return null;
}
/**
* Indicates which time ranges are buffered
*
* @return {TimeRange}
* TimeRange object representing the current buffered ranges
*/
buffered_() {
if (!this.subtitlesTrack_ || !this.subtitlesTrack_.cues || !this.subtitlesTrack_.cues.length) {
return videojs.createTimeRanges();
}
const cues = this.subtitlesTrack_.cues;
const start = cues[0].startTime;
const end = cues[cues.length - 1].startTime;
return videojs.createTimeRanges([[start, end]]);
}
/**
* Gets and sets init segment for the provided map
*
* @param {Object} map
* The map object representing the init segment to get or set
* @param {boolean=} set
* If true, the init segment for the provided map should be saved
* @return {Object}
* map object for desired init segment
*/
initSegmentForMap(map, set = false) {
if (!map) {
return null;
}
const id = initSegmentId(map);
let storedMap = this.initSegments_[id];
if (set && !storedMap && map.bytes) {
// append WebVTT line terminators to the media initialization segment if it exists
// to follow the WebVTT spec (https://w3c.github.io/webvtt/#file-structure) that
// requires two or more WebVTT line terminators between the WebVTT header and the
// rest of the file
const combinedByteLength = VTT_LINE_TERMINATORS.byteLength + map.bytes.byteLength;
const combinedSegment = new Uint8Array(combinedByteLength);
combinedSegment.set(map.bytes);
combinedSegment.set(VTT_LINE_TERMINATORS, map.bytes.byteLength);
this.initSegments_[id] = storedMap = {
resolvedUri: map.resolvedUri,
byterange: map.byterange,
bytes: combinedSegment
};
}
return storedMap || map;
}
/**
* Returns true if all configuration required for loading is present, otherwise false.
*
* @return {boolean} True if the all configuration is ready for loading
* @private
*/
couldBeginLoading_() {
return this.playlist_ &&
this.subtitlesTrack_ &&
!this.paused();
}
/**
* Once all the starting parameters have been specified, begin
* operation. This method should only be invoked from the INIT
* state.
*
* @private
*/
init_() {
this.state = 'READY';
this.resetEverything();
return this.monitorBuffer_();
}
/**
* Set a subtitle track on the segment loader to add subtitles to
*
* @param {TextTrack=} track
* The text track to add loaded subtitles to
* @return {TextTrack}
* Returns the subtitles track
*/
track(track) {
if (typeof track === 'undefined') {
return this.subtitlesTrack_;
}
this.subtitlesTrack_ = track;
// if we were unpaused but waiting for a sourceUpdater, start
// buffering now
if (this.state === 'INIT' && this.couldBeginLoading_()) {
this.init_();
}
return this.subtitlesTrack_;
}
/**
* Remove any data in the source buffer between start and end times
*
* @param {number} start - the start time of the region to remove from the buffer
* @param {number} end - the end time of the region to remove from the buffer
*/
remove(start, end) {
removeCuesFromTrack(start, end, this.subtitlesTrack_);
}
/**
* fill the buffer with segments unless the sourceBuffers are
* currently updating
*
* Note: this function should only ever be called by monitorBuffer_
* and never directly
*
* @private
*/
fillBuffer_() {
// see if we need to begin loading immediately
const segmentInfo = this.chooseNextRequest_();
if (!segmentInfo) {
return;
}
if (this.syncController_.timestampOffsetForTimeline(segmentInfo.timeline) === null) {
// We don't have the timestamp offset that we need to sync subtitles.
// Rerun on a timestamp offset or user interaction.
const checkTimestampOffset = () => {
this.state = 'READY';
if (!this.paused()) {
// if not paused, queue a buffer check as soon as possible
this.monitorBuffer_();
}
};
this.syncController_.one('timestampoffset', checkTimestampOffset);
this.state = 'WAITING_ON_TIMELINE';
return;
}
this.loadSegment_(segmentInfo);
}
// never set a timestamp offset for vtt segments.
timestampOffsetForSegment_() {
return null;
}
chooseNextRequest_() {
return this.skipEmptySegments_(super.chooseNextRequest_());
}
/**
* Prevents the segment loader from requesting segments we know contain no subtitles
* by walking forward until we find the next segment that is not known to be
* empty.
*
* @param {Object} segmentInfo
* a segment info object that describes the current segment
* @return {Object}
* a segment info object that describes the current segment
*/
skipEmptySegments_(segmentInfo) {
while (segmentInfo && segmentInfo.segment.empty) {
// stop at the last possible segmentInfo
if (segmentInfo.mediaIndex + 1 >= segmentInfo.playlist.segments.length) {
segmentInfo = null;
break;
}
segmentInfo = this.generateSegmentInfo_({
playlist: segmentInfo.playlist,
mediaIndex: segmentInfo.mediaIndex + 1,
startOfSegment: segmentInfo.startOfSegment + segmentInfo.duration,
isSyncRequest: segmentInfo.isSyncRequest
});
}
return segmentInfo;
}
stopForError(error) {
this.error(error);
this.state = 'READY';
this.pause();
this.trigger('error');
}
/**
* append a decrypted segment to the SourceBuffer through a SourceUpdater
*
* @private
*/
segmentRequestFinished_(error, simpleSegment, result) {
if (!this.subtitlesTrack_) {
this.state = 'READY';
return;
}
this.saveTransferStats_(simpleSegment.stats);
// the request was aborted
if (!this.pendingSegment_) {
this.state = 'READY';
this.mediaRequestsAborted += 1;
return;
}
if (error) {
if (error.code === REQUEST_ERRORS.TIMEOUT) {
this.handleTimeout_();
}
if (error.code === REQUEST_ERRORS.ABORTED) {
this.mediaRequestsAborted += 1;
} else {
this.mediaRequestsErrored += 1;
}
this.stopForError(error);
return;
}
const segmentInfo = this.pendingSegment_;
// although the VTT segment loader bandwidth isn't really used, it's good to
// maintain functionality between segment loaders
this.saveBandwidthRelatedStats_(segmentInfo.duration, simpleSegment.stats);
this.state = 'APPENDING';
// used for tests
this.trigger('appending');
const segment = segmentInfo.segment;
if (segment.map) {
segment.map.bytes = simpleSegment.map.bytes;
}
segmentInfo.bytes = simpleSegment.bytes;
// Make sure that vttjs has loaded, otherwise, wait until it finishes loading
if (typeof window.WebVTT !== 'function' &&
this.subtitlesTrack_ &&
this.subtitlesTrack_.tech_) {
let loadHandler;
const errorHandler = () => {
this.subtitlesTrack_.tech_.off('vttjsloaded', loadHandler);
this.stopForError({
message: 'Error loading vtt.js'
});
return;
};
loadHandler = () => {
this.subtitlesTrack_.tech_.off('vttjserror', errorHandler);
this.segmentRequestFinished_(error, simpleSegment, result);
};
this.state = 'WAITING_ON_VTTJS';
this.subtitlesTrack_.tech_.one('vttjsloaded', loadHandler);
this.subtitlesTrack_.tech_.one('vttjserror', errorHandler);
return;
}
segment.requested = true;
try {
this.parseVTTCues_(segmentInfo);
} catch (e) {
this.stopForError({
message: e.message
});
return;
}
this.updateTimeMapping_(
segmentInfo,
this.syncController_.timelines[segmentInfo.timeline],
this.playlist_
);
if (segmentInfo.cues.length) {
segmentInfo.timingInfo = {
start: segmentInfo.cues[0].startTime,
end: segmentInfo.cues[segmentInfo.cues.length - 1].endTime
};
} else {
segmentInfo.timingInfo = {
start: segmentInfo.startOfSegment,
end: segmentInfo.startOfSegment + segmentInfo.duration
};
}
if (segmentInfo.isSyncRequest) {
this.trigger('syncinfoupdate');
this.pendingSegment_ = null;
this.state = 'READY';
return;
}
segmentInfo.byteLength = segmentInfo.bytes.byteLength;
this.mediaSecondsLoaded += segment.duration;
// Create VTTCue instances for each cue in the new segment and add them to
// the subtitle track
segmentInfo.cues.forEach((cue) => {
this.subtitlesTrack_.addCue(this.featuresNativeTextTracks_ ?
new window.VTTCue(cue.startTime, cue.endTime, cue.text) :
cue);
});
// Remove any duplicate cues from the subtitle track. The WebVTT spec allows
// cues to have identical time-intervals, but if the text is also identical
// we can safely assume it is a duplicate that can be removed (e.g. when a cue
// "overlaps" VTT segments)
removeDuplicateCuesFromTrack(this.subtitlesTrack_);
this.handleAppendsDone_();
}
handleData_() {
// noop as we shouldn't be getting video/audio data captions
// that we do not support here.
}
updateTimingInfoEnd_() {
// noop
}
/**
* Uses the WebVTT parser to parse the segment response
*
* @param {Object} segmentInfo
* a segment info object that describes the current segment
* @private
*/
parseVTTCues_(segmentInfo) {
let decoder;
let decodeBytesToString = false;
if (typeof window.TextDecoder === 'function') {
decoder = new window.TextDecoder('utf8');
} else {
decoder = window.WebVTT.StringDecoder();
decodeBytesToString = true;
}
const parser = new window.WebVTT.Parser(
window,
window.vttjs,
decoder
);
segmentInfo.cues = [];
segmentInfo.timestampmap = { MPEGTS: 0, LOCAL: 0 };
parser.oncue = segmentInfo.cues.push.bind(segmentInfo.cues);
parser.ontimestampmap = (map) => {
segmentInfo.timestampmap = map;
};
parser.onparsingerror = (error) => {
videojs.log.warn('Error encountered when parsing cues: ' + error.message);
};
if (segmentInfo.segment.map) {
let mapData = segmentInfo.segment.map.bytes;
if (decodeBytesToString) {
mapData = uint8ToUtf8(mapData);
}
parser.parse(mapData);
}
let segmentData = segmentInfo.bytes;
if (decodeBytesToString) {
segmentData = uint8ToUtf8(segmentData);
}
parser.parse(segmentData);
parser.flush();
}
/**
* Updates the start and end times of any cues parsed by the WebVTT parser using
* the information parsed from the X-TIMESTAMP-MAP header and a TS to media time mapping
* from the SyncController
*
* @param {Object} segmentInfo
* a segment info object that describes the current segment
* @param {Object} mappingObj
* object containing a mapping from TS to media time
* @param {Object} playlist
* the playlist object containing the segment
* @private
*/
updateTimeMapping_(segmentInfo, mappingObj, playlist) {
const segment = segmentInfo.segment;
if (!mappingObj) {
// If the sync controller does not have a mapping of TS to Media Time for the
// timeline, then we don't have enough information to update the cue
// start/end times
return;
}
if (!segmentInfo.cues.length) {
// If there are no cues, we also do not have enough information to figure out
// segment timing. Mark that the segment contains no cues so we don't re-request
// an empty segment.
segment.empty = true;
return;
}
const timestampmap = segmentInfo.timestampmap;
const diff = (timestampmap.MPEGTS / ONE_SECOND_IN_TS) - timestampmap.LOCAL + mappingObj.mapping;
segmentInfo.cues.forEach((cue) => {
// First convert cue time to TS time using the timestamp-map provided within the vtt
cue.startTime += diff;
cue.endTime += diff;
});
if (!playlist.syncInfo) {
const firstStart = segmentInfo.cues[0].startTime;
const lastStart = segmentInfo.cues[segmentInfo.cues.length - 1].startTime;
playlist.syncInfo = {
mediaSequence: playlist.mediaSequence + segmentInfo.mediaIndex,
time: Math.min(firstStart, lastStart - segment.duration)
};
}
}
}
133
node_modules/@videojs/http-streaming/src/xhr.js generated vendored Normal file
View file

@ -0,0 +1,133 @@
/**
 * @file xhr.js
 */
import videojs from 'video.js';
const {
xhr: videojsXHR,
mergeOptions
} = videojs;
/**
 * A wrapper for the callback passed to videojs.xhr that records timing and
 * bandwidth information on the request and normalizes error handling.
 *
 * @param {Object} request the xhr request object
 * @param {Error|null} error the error reported by videojs.xhr, if any
 * @param {Object} response the response object
 * @param {Function} callback the callback to call when done
 */
const callbackWrapper = function(request, error, response, callback) {
const reqResponse = request.responseType === 'arraybuffer' ? request.response : request.responseText;
if (!error && reqResponse) {
request.responseTime = Date.now();
request.roundTripTime = request.responseTime - request.requestTime;
request.bytesReceived = reqResponse.byteLength || reqResponse.length;
if (!request.bandwidth) {
request.bandwidth =
Math.floor((request.bytesReceived / request.roundTripTime) * 8 * 1000);
}
}
if (response.headers) {
request.responseHeaders = response.headers;
}
// videojs.xhr now uses a specific code on the error
// object to signal that a request has timed out instead
// of setting a boolean on the request object
if (error && error.code === 'ETIMEDOUT') {
request.timedout = true;
}
// videojs.xhr no longer considers status codes other than 200 and 0
// (for file URIs) to be errors, but the old XHR did, so emulate that
// behavior. Status 206 may be used in response to byterange requests.
if (!error &&
!request.aborted &&
response.statusCode !== 200 &&
response.statusCode !== 206 &&
response.statusCode !== 0) {
error = new Error('XHR Failed with a response of: ' +
(request && (reqResponse || request.responseText)));
}
callback(error, request);
};
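// Worked example of the bandwidth estimate above (hypothetical numbers): a
// 500000 byte response with a 2000 ms round trip yields
// Math.floor((500000 / 2000) * 8 * 1000) = 2000000 bits per second.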
const xhrFactory = function() {
/**
 * A wrapper for videojs.xhr that tracks bandwidth.
 *
 * @param {Object} options options for the XHR
 * @param {Function} callback the callback to call when done
 * @return {Request} the xhr request that is going to be made
 */
const xhr = function XhrFunction(options, callback) {
// Add a default timeout
options = mergeOptions({
timeout: 45e3
}, options);
// Allow an optional user-specified function to modify the options
// object before we construct the xhr request
const beforeRequest = XhrFunction.beforeRequest || videojs.Vhs.xhr.beforeRequest;
if (beforeRequest && typeof beforeRequest === 'function') {
const newOptions = beforeRequest(options);
if (newOptions) {
options = newOptions;
}
}
// Use the standard videojs.xhr() method unless `videojs.Vhs.xhr` has been overridden
// TODO: switch back to videojs.Vhs.xhr.name === 'XhrFunction' when we drop IE11
const xhrMethod = videojs.Vhs.xhr.original === true ? videojsXHR : videojs.Vhs.xhr;
const request = xhrMethod(options, function(error, response) {
return callbackWrapper(request, error, response, callback);
});
const originalAbort = request.abort;
request.abort = function() {
request.aborted = true;
return originalAbort.apply(request, arguments);
};
request.uri = options.uri;
request.requestTime = Date.now();
return request;
};
xhr.original = true;
return xhr;
};
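// Usage sketch (illustrative, not part of this module): the `beforeRequest`
// hook honored above lets integrators rewrite options before each request.
// The header below is a hypothetical example.
//
//   videojs.Vhs.xhr.beforeRequest = (options) => {
//     options.headers = Object.assign({}, options.headers, {
//       Authorization: 'Bearer some-token'
//     });
//     return options;
//   };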
/**
 * Turns a segment byterange into a string suitable for use in
 * HTTP Range requests
 *
 * @param {Object} byterange - an object with two values defining the start and end
 * of a byte-range
 * @return {string} the value for an HTTP Range request header
 */
const byterangeStr = function(byterange) {
// `byterangeEnd` is one less than `offset + length` because the HTTP range
// header uses inclusive ranges
const byterangeEnd = byterange.offset + byterange.length - 1;
const byterangeStart = byterange.offset;
return 'bytes=' + byterangeStart + '-' + byterangeEnd;
};
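// For example (illustrative values), { offset: 600, length: 300 } produces
// 'bytes=600-899': the end is inclusive, hence the `- 1` above.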
/**
 * Defines headers for use in the xhr request for a particular segment.
 *
 * @param {Object} segment - a simplified copy of the segmentInfo object
 * from SegmentLoader
 * @return {Object} the headers to use for the segment request
 */
const segmentXhrHeaders = function(segment) {
const headers = {};
if (segment.byterange) {
headers.Range = byterangeStr(segment.byterange);
}
return headers;
};
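// Illustrative call: segmentXhrHeaders({ byterange: { offset: 0, length: 1024 } })
// returns { Range: 'bytes=0-1023' }; a segment without a byterange yields {}.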
export {segmentXhrHeaders, callbackWrapper, xhrFactory};
export default xhrFactory;