First release

Owen Quinlan 2021-07-02 19:29:34 +10:00
commit fa6c85266e
2339 changed files with 761050 additions and 0 deletions

264
node_modules/@videojs/vhs-utils/es/byte-helpers.js generated vendored Normal file

@@ -0,0 +1,264 @@
import window from 'global/window'; // const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
var repeat = function repeat(str, len) {
var acc = '';
while (len--) {
acc += str;
}
return acc;
}; // count the number of bits it would take to represent a number
// we used to do this with log2 but BigInt does not support builtin math
// Math.ceil(log2(x));
export var countBits = function countBits(x) {
return x.toString(2).length;
}; // count the number of whole bytes it would take to represent a number
export var countBytes = function countBytes(x) {
return Math.ceil(countBits(x) / 8);
};
export var padStart = function padStart(b, len, str) {
if (str === void 0) {
str = ' ';
}
return (repeat(str, len) + b.toString()).slice(-len);
};
export var isTypedArray = function isTypedArray(obj) {
return ArrayBuffer.isView(obj);
};
export var toUint8 = function toUint8(bytes) {
if (bytes instanceof Uint8Array) {
return bytes;
}
if (!Array.isArray(bytes) && !isTypedArray(bytes) && !(bytes instanceof ArrayBuffer)) {
// any non-number or NaN leads to empty uint8array
// eslint-disable-next-line
if (typeof bytes !== 'number' || typeof bytes === 'number' && bytes !== bytes) {
bytes = 0;
} else {
bytes = [bytes];
}
}
return new Uint8Array(bytes && bytes.buffer || bytes, bytes && bytes.byteOffset || 0, bytes && bytes.byteLength || 0);
};
export var toHexString = function toHexString(bytes) {
bytes = toUint8(bytes);
var str = '';
for (var i = 0; i < bytes.length; i++) {
str += padStart(bytes[i].toString(16), 2, '0');
}
return str;
};
export var toBinaryString = function toBinaryString(bytes) {
bytes = toUint8(bytes);
var str = '';
for (var i = 0; i < bytes.length; i++) {
str += padStart(bytes[i].toString(2), 8, '0');
}
return str;
};
var BigInt = window.BigInt || Number;
var BYTE_TABLE = [BigInt('0x1'), BigInt('0x100'), BigInt('0x10000'), BigInt('0x1000000'), BigInt('0x100000000'), BigInt('0x10000000000'), BigInt('0x1000000000000'), BigInt('0x100000000000000'), BigInt('0x10000000000000000')];
export var ENDIANNESS = function () {
var a = new Uint16Array([0xFFCC]);
var b = new Uint8Array(a.buffer, a.byteOffset, a.byteLength);
if (b[0] === 0xFF) {
return 'big';
}
if (b[0] === 0xCC) {
return 'little';
}
return 'unknown';
}();
export var IS_BIG_ENDIAN = ENDIANNESS === 'big';
export var IS_LITTLE_ENDIAN = ENDIANNESS === 'little';
export var bytesToNumber = function bytesToNumber(bytes, _temp) {
var _ref = _temp === void 0 ? {} : _temp,
_ref$signed = _ref.signed,
signed = _ref$signed === void 0 ? false : _ref$signed,
_ref$le = _ref.le,
le = _ref$le === void 0 ? false : _ref$le;
bytes = toUint8(bytes);
var fn = le ? 'reduce' : 'reduceRight';
var obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
var number = obj.call(bytes, function (total, byte, i) {
var exponent = le ? i : Math.abs(i + 1 - bytes.length);
return total + BigInt(byte) * BYTE_TABLE[exponent];
}, BigInt(0));
if (signed) {
var max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
number = BigInt(number);
if (number > max) {
number -= max;
number -= max;
number -= BigInt(2);
}
}
return Number(number);
};
export var numberToBytes = function numberToBytes(number, _temp2) {
var _ref2 = _temp2 === void 0 ? {} : _temp2,
_ref2$le = _ref2.le,
le = _ref2$le === void 0 ? false : _ref2$le;
// eslint-disable-next-line
if (typeof number !== 'bigint' && typeof number !== 'number' || typeof number === 'number' && number !== number) {
number = 0;
}
number = BigInt(number);
var byteCount = countBytes(number);
var bytes = new Uint8Array(new ArrayBuffer(byteCount));
for (var i = 0; i < byteCount; i++) {
var byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
bytes[byteIndex] = Number(number / BYTE_TABLE[i] & BigInt(0xFF));
if (number < 0) {
bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
bytes[byteIndex] -= i === 0 ? 1 : 2;
}
}
return bytes;
};
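// Usage sketch (illustrative, not part of the vendored source): numberToBytes and
// bytesToNumber round-trip a value, here using the little-endian option.
var leBytes = numberToBytes(258, { le: true }); // -> Uint8Array [0x02, 0x01]
bytesToNumber(leBytes, { le: true }); // -> 258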
export var bytesToString = function bytesToString(bytes) {
if (!bytes) {
return '';
} // TODO: should toUint8 handle cases where we only have 8 bytes
// but report more since this is a Uint16+ Array?
bytes = Array.prototype.slice.call(bytes);
var string = String.fromCharCode.apply(null, toUint8(bytes));
try {
return decodeURIComponent(escape(string));
} catch (e) {// if decodeURIComponent/escape fails, we are dealing with partial
// or full non string data. Just return the potentially garbled string.
}
return string;
};
export var stringToBytes = function stringToBytes(string, stringIsBytes) {
if (typeof string !== 'string' && string && typeof string.toString === 'function') {
string = string.toString();
}
if (typeof string !== 'string') {
return new Uint8Array();
} // If the string is already bytes, we don't have to do this;
// otherwise we do it so that multi-byte characters are split
// into individual bytes
if (!stringIsBytes) {
string = unescape(encodeURIComponent(string));
}
var view = new Uint8Array(string.length);
for (var i = 0; i < string.length; i++) {
view[i] = string.charCodeAt(i);
}
return view;
};
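// Usage sketch (illustrative, not part of the vendored source): stringToBytes splits
// multi-byte characters into individual UTF-8 bytes and bytesToString reassembles them.
var encoded = stringToBytes('é'); // -> Uint8Array [0xC3, 0xA9]
bytesToString(encoded); // -> 'é'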
export var concatTypedArrays = function concatTypedArrays() {
for (var _len = arguments.length, buffers = new Array(_len), _key = 0; _key < _len; _key++) {
buffers[_key] = arguments[_key];
}
buffers = buffers.filter(function (b) {
return b && (b.byteLength || b.length) && typeof b !== 'string';
});
if (buffers.length <= 1) {
// for 0 length we will return empty uint8
// for 1 length we return the first uint8
return toUint8(buffers[0]);
}
var totalLen = buffers.reduce(function (total, buf, i) {
return total + (buf.byteLength || buf.length);
}, 0);
var tempBuffer = new Uint8Array(totalLen);
var offset = 0;
buffers.forEach(function (buf) {
buf = toUint8(buf);
tempBuffer.set(buf, offset);
offset += buf.byteLength;
});
return tempBuffer;
};
/**
* Check if the bytes "b" are contained within bytes "a".
*
* @param {Uint8Array|Array} a
* Bytes to check in
*
* @param {Uint8Array|Array} b
* Bytes to check for
*
* @param {Object} options
* options
*
* @param {number} [options.offset=0]
* offset to use when looking at bytes in a
*
* @param {Array|Uint8Array} [options.mask=[]]
* mask to use on bytes before comparison.
*
* @return {boolean}
* If all bytes in b are inside of a, taking into account
* bit masks.
*/
export var bytesMatch = function bytesMatch(a, b, _temp3) {
var _ref3 = _temp3 === void 0 ? {} : _temp3,
_ref3$offset = _ref3.offset,
offset = _ref3$offset === void 0 ? 0 : _ref3$offset,
_ref3$mask = _ref3.mask,
mask = _ref3$mask === void 0 ? [] : _ref3$mask;
a = toUint8(a);
b = toUint8(b); // ie 11 does not support uint8 every
var fn = b.every ? b.every : Array.prototype.every;
return b.length && a.length - offset >= b.length && // ie 11 doesn't support every on uint8
fn.call(b, function (bByte, i) {
var aByte = mask[i] ? mask[i] & a[offset + i] : a[offset + i];
return bByte === aByte;
});
};
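// Usage sketch (illustrative, not part of the vendored source): matching the 'ftyp'
// marker four bytes into an MP4 header, with and without a bit mask.
var header = toUint8([0x00, 0x00, 0x00, 0x18, 0x66, 0x74, 0x79, 0x70]); // box size + 'ftyp'
bytesMatch(header, [0x66, 0x74, 0x79, 0x70], { offset: 4 }); // -> true
bytesMatch(header, [0x60], { offset: 4, mask: [0xF0] }); // -> true, since 0x66 & 0xF0 === 0x60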
export var sliceBytes = function sliceBytes(src, start, end) {
if (Uint8Array.prototype.slice) {
return Uint8Array.prototype.slice.call(src, start, end);
}
return new Uint8Array(Array.prototype.slice.call(src, start, end));
};
export var reverseBytes = function reverseBytes(src) {
if (src.reverse) {
return src.reverse();
}
return Array.prototype.reverse.call(src);
};

96
node_modules/@videojs/vhs-utils/es/codec-helpers.js generated vendored Normal file

@@ -0,0 +1,96 @@
import { padStart, toHexString, toBinaryString } from './byte-helpers.js'; // https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-syntax
// https://developer.mozilla.org/en-US/docs/Web/Media/Formats/codecs_parameter#AV1
export var getAv1Codec = function getAv1Codec(bytes) {
var codec = '';
var profile = bytes[1] >>> 3;
var level = bytes[1] & 0x1F;
var tier = bytes[2] >>> 7;
var highBitDepth = (bytes[2] & 0x40) >> 6;
var twelveBit = (bytes[2] & 0x20) >> 5;
var monochrome = (bytes[2] & 0x10) >> 4;
var chromaSubsamplingX = (bytes[2] & 0x08) >> 3;
var chromaSubsamplingY = (bytes[2] & 0x04) >> 2;
var chromaSamplePosition = bytes[2] & 0x03;
codec += profile + "." + padStart(level, 2, '0');
if (tier === 0) {
codec += 'M';
} else if (tier === 1) {
codec += 'H';
}
var bitDepth;
if (profile === 2 && highBitDepth) {
bitDepth = twelveBit ? 12 : 10;
} else {
bitDepth = highBitDepth ? 10 : 8;
}
codec += "." + padStart(bitDepth, 2, '0'); // TODO: can we parse color range??
codec += "." + monochrome;
codec += "." + chromaSubsamplingX + chromaSubsamplingY + chromaSamplePosition;
return codec;
};
export var getAvcCodec = function getAvcCodec(bytes) {
var profileId = toHexString(bytes[1]);
var constraintFlags = toHexString(bytes[2] & 0xFC);
var levelId = toHexString(bytes[3]);
return "" + profileId + constraintFlags + levelId;
};
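// Usage sketch (illustrative, not part of the vendored source): the first four bytes of
// an AVCDecoderConfigurationRecord (version, profile, constraint flags, level) map
// directly onto the codec string.
getAvcCodec([0x01, 0x64, 0x00, 0x1F]); // -> '64001f', i.e. the suffix of 'avc1.64001f'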
export var getHvcCodec = function getHvcCodec(bytes) {
var codec = '';
var profileSpace = bytes[1] >> 6;
var profileId = bytes[1] & 0x1F;
var tierFlag = (bytes[1] & 0x20) >> 5;
var profileCompat = bytes.subarray(2, 6);
var constraintIds = bytes.subarray(6, 12);
var levelId = bytes[12];
if (profileSpace === 1) {
codec += 'A';
} else if (profileSpace === 2) {
codec += 'B';
} else if (profileSpace === 3) {
codec += 'C';
}
codec += profileId + "."; // ffmpeg does this in big endian
var profileCompatVal = parseInt(toBinaryString(profileCompat).split('').reverse().join(''), 2); // apple does this in little endian...
if (profileCompatVal > 255) {
profileCompatVal = parseInt(toBinaryString(profileCompat), 2);
}
codec += profileCompatVal.toString(16) + ".";
if (tierFlag === 0) {
codec += 'L';
} else {
codec += 'H';
}
codec += levelId;
var constraints = '';
for (var i = 0; i < constraintIds.length; i++) {
var v = constraintIds[i];
if (v) {
if (constraints) {
constraints += '.';
}
constraints += v.toString(16);
}
}
if (constraints) {
codec += "." + constraints;
}
return codec;
};

253
node_modules/@videojs/vhs-utils/es/codecs.js generated vendored Normal file

@@ -0,0 +1,253 @@
import window from 'global/window';
var regexs = {
// to determine mime types
mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
// to determine if a codec is audio or video
video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
text: /^(stpp.ttml.im1t)/,
// mux.js support regex
muxerVideo: /^(avc0?1)/,
muxerAudio: /^(mp4a)/,
// match nothing as muxer does not support text right now.
// there can never be a character before the start of a string
// so this matches nothing.
muxerText: /a^/
};
var mediaTypes = ['video', 'audio', 'text'];
var upperMediaTypes = ['Video', 'Audio', 'Text'];
/**
* Replace the old apple-style `avc1.<dd>.<dd>` codec string with the standard
* `avc1.<hhhhhh>`
*
* @param {string} codec
* Codec string to translate
* @return {string}
* The translated codec string
*/
export var translateLegacyCodec = function translateLegacyCodec(codec) {
if (!codec) {
return codec;
}
return codec.replace(/avc1\.(\d+)\.(\d+)/i, function (orig, profile, avcLevel) {
var profileHex = ('00' + Number(profile).toString(16)).slice(-2);
var avcLevelHex = ('00' + Number(avcLevel).toString(16)).slice(-2);
return 'avc1.' + profileHex + '00' + avcLevelHex;
});
};
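// Usage sketch (illustrative, not part of the vendored source): an old Apple-style
// codec string becomes the standard hex form.
translateLegacyCodec('avc1.66.30'); // -> 'avc1.42001e'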
/**
* Replace the old apple-style `avc1.<dd>.<dd>` codec strings with the standard
* `avc1.<hhhhhh>`
*
* @param {string[]} codecs
* An array of codec strings to translate
* @return {string[]}
* The translated array of codec strings
*/
export var translateLegacyCodecs = function translateLegacyCodecs(codecs) {
return codecs.map(translateLegacyCodec);
};
/**
* Replace codecs in the codec string with the old apple-style `avc1.<dd>.<dd>` to the
* standard `avc1.<hhhhhh>`.
*
* @param {string} codecString
* The codec string
* @return {string}
* The codec string with old apple-style codecs replaced
*
* @private
*/
export var mapLegacyAvcCodecs = function mapLegacyAvcCodecs(codecString) {
return codecString.replace(/avc1\.(\d+)\.(\d+)/i, function (match) {
return translateLegacyCodecs([match])[0];
});
};
/**
* @typedef {Object} ParsedCodecInfo
* @property {number} codecCount
* Number of codecs parsed
* @property {string} [videoCodec]
* Parsed video codec (if found)
* @property {string} [videoObjectTypeIndicator]
* Video object type indicator (if found)
* @property {string|null} audioProfile
* Audio profile
*/
/**
* Parses a codec string into an array of codec objects, one entry per codec, each
* describing the codec type, its details, and its media type.
*
* @param {string} [codecString]
* The codec string to parse
* @return {Object[]}
* An array of parsed codec objects with `type`, `details`, and `mediaType`
*/
export var parseCodecs = function parseCodecs(codecString) {
if (codecString === void 0) {
codecString = '';
}
var codecs = codecString.split(',');
var result = [];
codecs.forEach(function (codec) {
codec = codec.trim();
var codecType;
mediaTypes.forEach(function (name) {
var match = regexs[name].exec(codec.toLowerCase());
if (!match || match.length <= 1) {
return;
}
codecType = name; // maintain codec case
var type = codec.substring(0, match[1].length);
var details = codec.replace(type, '');
result.push({
type: type,
details: details,
mediaType: name
});
});
if (!codecType) {
result.push({
type: codec,
details: '',
mediaType: 'unknown'
});
}
});
return result;
};
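// Usage sketch (illustrative, not part of the vendored source): parseCodecs splits a
// codec string into one entry per codec.
parseCodecs('avc1.42001e, mp4a.40.2');
// -> [
//   { type: 'avc1', details: '.42001e', mediaType: 'video' },
//   { type: 'mp4a', details: '.40.2', mediaType: 'audio' }
// ]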
/**
* Returns a ParsedCodecInfo object for the default alternate audio playlist if there is
* a default alternate audio playlist for the provided audio group.
*
* @param {Object} master
* The master playlist
* @param {string} audioGroupId
* ID of the audio group for which to find the default codec info
* @return {ParsedCodecInfo}
* Parsed codec info
*/
export var codecsFromDefault = function codecsFromDefault(master, audioGroupId) {
if (!master.mediaGroups.AUDIO || !audioGroupId) {
return null;
}
var audioGroup = master.mediaGroups.AUDIO[audioGroupId];
if (!audioGroup) {
return null;
}
for (var name in audioGroup) {
var audioType = audioGroup[name];
if (audioType.default && audioType.playlists) {
// codec should be the same for all playlists within the audio type
return parseCodecs(audioType.playlists[0].attributes.CODECS);
}
}
return null;
};
export var isVideoCodec = function isVideoCodec(codec) {
if (codec === void 0) {
codec = '';
}
return regexs.video.test(codec.trim().toLowerCase());
};
export var isAudioCodec = function isAudioCodec(codec) {
if (codec === void 0) {
codec = '';
}
return regexs.audio.test(codec.trim().toLowerCase());
};
export var isTextCodec = function isTextCodec(codec) {
if (codec === void 0) {
codec = '';
}
return regexs.text.test(codec.trim().toLowerCase());
};
export var getMimeForCodec = function getMimeForCodec(codecString) {
if (!codecString || typeof codecString !== 'string') {
return;
}
var codecs = codecString.toLowerCase().split(',').map(function (c) {
return translateLegacyCodec(c.trim());
}); // default to video type
var type = 'video'; // only change to audio type if the only codec we have is
// audio
if (codecs.length === 1 && isAudioCodec(codecs[0])) {
type = 'audio';
} else if (codecs.length === 1 && isTextCodec(codecs[0])) {
// text uses application/<container> for now
type = 'application';
} // default the container to mp4
var container = 'mp4'; // every codec must be able to go into the container
// for that container to be the correct one
if (codecs.every(function (c) {
return regexs.mp4.test(c);
})) {
container = 'mp4';
} else if (codecs.every(function (c) {
return regexs.webm.test(c);
})) {
container = 'webm';
} else if (codecs.every(function (c) {
return regexs.ogg.test(c);
})) {
container = 'ogg';
}
return type + "/" + container + ";codecs=\"" + codecString + "\"";
};
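// Usage sketch (illustrative, not part of the vendored source): the container is
// inferred from the codecs and folded into a MediaSource-style mime type.
getMimeForCodec('avc1.4d400d,mp4a.40.2'); // -> 'video/mp4;codecs="avc1.4d400d,mp4a.40.2"'
getMimeForCodec('opus'); // -> 'audio/mp4;codecs="opus"'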
export var browserSupportsCodec = function browserSupportsCodec(codecString) {
if (codecString === void 0) {
codecString = '';
}
return window.MediaSource && window.MediaSource.isTypeSupported && window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
};
export var muxerSupportsCodec = function muxerSupportsCodec(codecString) {
if (codecString === void 0) {
codecString = '';
}
return codecString.toLowerCase().split(',').every(function (codec) {
codec = codec.trim(); // any match is supported.
for (var i = 0; i < upperMediaTypes.length; i++) {
var type = upperMediaTypes[i];
if (regexs["muxer" + type].test(codec)) {
return true;
}
}
return false;
});
};
export var DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
export var DEFAULT_VIDEO_CODEC = 'avc1.4d400d';

162
node_modules/@videojs/vhs-utils/es/containers.js generated vendored Normal file

@@ -0,0 +1,162 @@
import { toUint8, bytesMatch } from './byte-helpers.js';
import { findBox } from './mp4-helpers.js';
import { findEbml, EBML_TAGS } from './ebml-helpers.js';
import { getId3Offset } from './id3-helpers.js';
import { findH264Nal, findH265Nal } from './nal-helpers.js';
var CONSTANTS = {
// "webm" string literal in hex
'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
// "matroska" string literal in hex
'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
// "fLaC" string literal in hex
'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
// "OggS" string literal in hex
'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
// ac-3 sync byte, also works for ec-3 as that is simply a codec
// of ac-3
'ac3': toUint8([0x0b, 0x77]),
// "RIFF" string literal in hex used for wav and avi
'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
// "AVI" string literal in hex
'avi': toUint8([0x41, 0x56, 0x49]),
// "WAVE" string literal in hex
'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
// "ftyp3g" string literal in hex
'3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
// "ftyp" string literal in hex
'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
// "styp" string literal in hex
'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
// "ftyp" string literal in hex
'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74])
};
var _isLikely = {
aac: function aac(bytes) {
var offset = getId3Offset(bytes);
return bytesMatch(bytes, [0xFF, 0x10], {
offset: offset,
mask: [0xFF, 0x16]
});
},
mp3: function mp3(bytes) {
var offset = getId3Offset(bytes);
return bytesMatch(bytes, [0xFF, 0x02], {
offset: offset,
mask: [0xFF, 0x06]
});
},
webm: function webm(bytes) {
var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is webm
return bytesMatch(docType, CONSTANTS.webm);
},
mkv: function mkv(bytes) {
var docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]; // check if DocType EBML tag is matroska
return bytesMatch(docType, CONSTANTS.matroska);
},
mp4: function mp4(bytes) {
return !_isLikely['3gp'](bytes) && !_isLikely.mov(bytes) && (bytesMatch(bytes, CONSTANTS.mp4, {
offset: 4
}) || bytesMatch(bytes, CONSTANTS.fmp4, {
offset: 4
}));
},
mov: function mov(bytes) {
return bytesMatch(bytes, CONSTANTS.mov, {
offset: 4
});
},
'3gp': function gp(bytes) {
return bytesMatch(bytes, CONSTANTS['3gp'], {
offset: 4
});
},
ac3: function ac3(bytes) {
var offset = getId3Offset(bytes);
return bytesMatch(bytes, CONSTANTS.ac3, {
offset: offset
});
},
ts: function ts(bytes) {
if (bytes.length < 189 && bytes.length >= 1) {
return bytes[0] === 0x47;
}
var i = 0; // check the first 376 bytes for two matching sync bytes
while (i + 188 < bytes.length && i < 188) {
if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
return true;
}
i += 1;
}
return false;
},
flac: function flac(bytes) {
var offset = getId3Offset(bytes);
return bytesMatch(bytes, CONSTANTS.flac, {
offset: offset
});
},
ogg: function ogg(bytes) {
return bytesMatch(bytes, CONSTANTS.ogg);
},
avi: function avi(bytes) {
return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.avi, {
offset: 8
});
},
wav: function wav(bytes) {
return bytesMatch(bytes, CONSTANTS.riff) && bytesMatch(bytes, CONSTANTS.wav, {
offset: 8
});
},
'h264': function h264(bytes) {
// find seq_parameter_set_rbsp
return findH264Nal(bytes, 7, 3).length;
},
'h265': function h265(bytes) {
// find video_parameter_set_rbsp or seq_parameter_set_rbsp
return findH265Nal(bytes, [32, 33], 3).length;
}
}; // get all the isLikely functions
// but make sure 'ts' is above h264 and h265
// but below everything else as it is the least specific
var isLikelyTypes = Object.keys(_isLikely) // remove ts, h264, h265
.filter(function (t) {
return t !== 'ts' && t !== 'h264' && t !== 'h265';
}) // add it back to the bottom
.concat(['ts', 'h264', 'h265']); // make sure we are dealing with uint8 data.
isLikelyTypes.forEach(function (type) {
var isLikelyFn = _isLikely[type];
_isLikely[type] = function (bytes) {
return isLikelyFn(toUint8(bytes));
};
}); // export after wrapping
export var isLikely = _isLikely; // A useful list of file signatures can be found here
// https://en.wikipedia.org/wiki/List_of_file_signatures
export var detectContainerForBytes = function detectContainerForBytes(bytes) {
bytes = toUint8(bytes);
for (var i = 0; i < isLikelyTypes.length; i++) {
var type = isLikelyTypes[i];
if (isLikely[type](bytes)) {
return type;
}
}
return '';
}; // fmp4 is not a container
export var isLikelyFmp4MediaSegment = function isLikelyFmp4MediaSegment(bytes) {
return findBox(bytes, ['moof']).length > 0;
};
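// Usage sketch (illustrative, not part of the vendored source; `firstBytes` is assumed
// to hold the first few hundred bytes read from an unknown media file):
var container = detectContainerForBytes(firstBytes); // e.g. 'mp4', 'webm', 'ts', or '' when unknown
if (container === 'ts') {
// a transport stream demuxer would be the right next step here
}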

16
node_modules/@videojs/vhs-utils/es/decode-b64-to-uint8-array.js generated vendored Normal file

@@ -0,0 +1,16 @@
import window from 'global/window';
var atob = function atob(s) {
return window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary');
};
export default function decodeB64ToUint8Array(b64Text) {
var decodedString = atob(b64Text);
var array = new Uint8Array(decodedString.length);
for (var i = 0; i < decodedString.length; i++) {
array[i] = decodedString.charCodeAt(i);
}
return array;
}
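// Usage sketch (illustrative, not part of the vendored source): decoding a short
// base64 string, e.g. an EME init segment, into raw bytes.
decodeB64ToUint8Array('AAAB'); // -> Uint8Array [0x00, 0x00, 0x01]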

497
node_modules/@videojs/vhs-utils/es/ebml-helpers.js generated vendored Normal file

@@ -0,0 +1,497 @@
import { toUint8, bytesToNumber, bytesMatch, bytesToString, numberToBytes, padStart } from './byte-helpers';
import { getAvcCodec, getHvcCodec, getAv1Codec } from './codec-helpers.js'; // relevant specs for this parser:
// https://matroska-org.github.io/libebml/specs.html
// https://www.matroska.org/technical/elements.html
// https://www.webmproject.org/docs/container/
export var EBML_TAGS = {
EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
DocType: toUint8([0x42, 0x82]),
Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
Track: toUint8([0xAE]),
TrackNumber: toUint8([0xd7]),
DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
TrackEntry: toUint8([0xAE]),
TrackType: toUint8([0x83]),
FlagDefault: toUint8([0x88]),
CodecID: toUint8([0x86]),
CodecPrivate: toUint8([0x63, 0xA2]),
VideoTrack: toUint8([0xe0]),
AudioTrack: toUint8([0xe1]),
// Not used yet, but will be used for live webm/mkv
// see https://www.matroska.org/technical/basics.html#block-structure
// see https://www.matroska.org/technical/basics.html#simpleblock-structure
Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
Timestamp: toUint8([0xE7]),
TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
BlockGroup: toUint8([0xA0]),
BlockDuration: toUint8([0x9B]),
Block: toUint8([0xA1]),
SimpleBlock: toUint8([0xA3])
};
/**
* This is a simple table to determine the length
* of things in ebml. The length is one based (starts at 1,
* rather than zero) and for every zero bit before a one bit
* we add one to length. We also need this table because in some
* cases we have to xor all the length bits from another value.
*/
var LENGTH_TABLE = [128, 64, 32, 16, 8, 4, 2, 1];
var getLength = function getLength(byte) {
var len = 1;
for (var i = 0; i < LENGTH_TABLE.length; i++) {
if (byte & LENGTH_TABLE[i]) {
break;
}
len++;
}
return len;
}; // length in ebml is stored in the first 4 to 8 bits
// of the first byte. 4 for the id length and 8 for the
// data size length. Length is measured by converting the number to binary
// then 1 + the number of zeros before a 1 is encountered starting
// from the left.
var getvint = function getvint(bytes, offset, removeLength, signed) {
if (removeLength === void 0) {
removeLength = true;
}
if (signed === void 0) {
signed = false;
}
var length = getLength(bytes[offset]);
var valueBytes = bytes.subarray(offset, offset + length); // NOTE that we do **not** subarray here because we need to copy these bytes
// as they will be modified below to remove the dataSizeLen bits and we do not
// want to modify the original data. normally we could just call slice on
// uint8array but ie 11 does not support that...
if (removeLength) {
valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
valueBytes[0] ^= LENGTH_TABLE[length - 1];
}
return {
length: length,
value: bytesToNumber(valueBytes, {
signed: signed
}),
bytes: valueBytes
};
};
var normalizePath = function normalizePath(path) {
if (typeof path === 'string') {
return path.match(/.{1,2}/g).map(function (p) {
return normalizePath(p);
});
}
if (typeof path === 'number') {
return numberToBytes(path);
}
return path;
};
var normalizePaths = function normalizePaths(paths) {
if (!Array.isArray(paths)) {
return [normalizePath(paths)];
}
return paths.map(function (p) {
return normalizePath(p);
});
};
var getInfinityDataSize = function getInfinityDataSize(id, bytes, offset) {
if (offset >= bytes.length) {
return bytes.length;
}
var innerid = getvint(bytes, offset, false);
if (bytesMatch(id.bytes, innerid.bytes)) {
return offset;
}
var dataHeader = getvint(bytes, offset + innerid.length);
return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
};
/**
* Notes on the EBML format.
*
* EBML uses "vints" tags. Every vint tag contains
* two parts
*
* 1. The length from the first byte. You get this by
* converting the byte to binary and counting the zeros
* before a 1. Then you add 1 to that. Examples
* 00011111 = length 4 because there are 3 zeros before a 1.
* 00100000 = length 3 because there are 2 zeros before a 1.
* 00000011 = length 7 because there are 6 zeros before a 1.
*
* 2. The bits used for length are removed from the first byte
* Then all the bytes are merged into a value. NOTE: this
* is not the case for id ebml tags as their id includes
* length bits.
*
*/
export var findEbml = function findEbml(bytes, paths) {
paths = normalizePaths(paths);
bytes = toUint8(bytes);
var results = [];
if (!paths.length) {
return results;
}
var i = 0;
while (i < bytes.length) {
var id = getvint(bytes, i, false);
var dataHeader = getvint(bytes, i + id.length);
var dataStart = i + id.length + dataHeader.length; // dataSize is unknown or this is a live stream
if (dataHeader.value === 0x7f) {
dataHeader.value = getInfinityDataSize(id, bytes, dataStart);
if (dataHeader.value !== bytes.length) {
dataHeader.value -= dataStart;
}
}
var dataEnd = dataStart + dataHeader.value > bytes.length ? bytes.length : dataStart + dataHeader.value;
var data = bytes.subarray(dataStart, dataEnd);
if (bytesMatch(paths[0], id.bytes)) {
if (paths.length === 1) {
// this is the end of the paths and we've found the tag we were
// looking for
results.push(data);
} else {
// recursively search for the next tag inside of the data
// of this one
results = results.concat(findEbml(data, paths.slice(1)));
}
}
var totalLength = id.length + dataHeader.length + data.length; // move past this tag entirely, we are not looking for it
i += totalLength;
}
return results;
}; // see https://www.matroska.org/technical/basics.html#block-structure
export var decodeBlock = function decodeBlock(block, type, timestampScale, clusterTimestamp) {
var duration;
if (type === 'group') {
duration = findEbml(block, [EBML_TAGS.BlockDuration])[0];
if (duration) {
duration = bytesToNumber(duration);
duration = 1 / timestampScale * duration * timestampScale / 1000;
}
block = findEbml(block, [EBML_TAGS.Block])[0];
type = 'block'; // treat data as a block after this point
}
var dv = new DataView(block.buffer, block.byteOffset, block.byteLength);
var trackNumber = getvint(block, 0);
var timestamp = dv.getInt16(trackNumber.length, false);
var flags = block[trackNumber.length + 2];
var data = block.subarray(trackNumber.length + 3); // pts/dts in seconds
var ptsdts = 1 / timestampScale * (clusterTimestamp + timestamp) * timestampScale / 1000; // return the frame
var parsed = {
duration: duration,
trackNumber: trackNumber.value,
keyframe: type === 'simple' && flags >> 7 === 1,
invisible: (flags & 0x08) >> 3 === 1,
lacing: (flags & 0x06) >> 1,
discardable: type === 'simple' && (flags & 0x01) === 1,
frames: [],
pts: ptsdts,
dts: ptsdts,
timestamp: timestamp
};
if (!parsed.lacing) {
parsed.frames.push(data);
return parsed;
}
var numberOfFrames = data[0] + 1;
var frameSizes = [];
var offset = 1; // Fixed
if (parsed.lacing === 2) {
var sizeOfFrame = (data.length - offset) / numberOfFrames;
for (var i = 0; i < numberOfFrames; i++) {
frameSizes.push(sizeOfFrame);
}
} // xiph
if (parsed.lacing === 1) {
for (var _i = 0; _i < numberOfFrames - 1; _i++) {
var size = 0;
do {
size += data[offset];
offset++;
} while (data[offset - 1] === 0xFF);
frameSizes.push(size);
}
} // ebml
if (parsed.lacing === 3) {
// first vint is unsigned
// after that vints are signed and
// based on a compounding size
var _size = 0;
for (var _i2 = 0; _i2 < numberOfFrames - 1; _i2++) {
var vint = _i2 === 0 ? getvint(data, offset) : getvint(data, offset, true, true);
_size += vint.value;
frameSizes.push(_size);
offset += vint.length;
}
}
frameSizes.forEach(function (size) {
parsed.frames.push(data.subarray(offset, offset + size));
offset += size;
});
return parsed;
}; // VP9 Codec Feature Metadata (CodecPrivate)
// https://www.webmproject.org/docs/container/
var parseVp9Private = function parseVp9Private(bytes) {
var i = 0;
var params = {};
while (i < bytes.length) {
var id = bytes[i] & 0x7f;
var len = bytes[i + 1];
var val = void 0;
if (len === 1) {
val = bytes[i + 2];
} else {
val = bytes.subarray(i + 2, i + 2 + len);
}
if (id === 1) {
params.profile = val;
} else if (id === 2) {
params.level = val;
} else if (id === 3) {
params.bitDepth = val;
} else if (id === 4) {
params.chromaSubsampling = val;
} else {
params[id] = val;
}
i += 2 + len;
}
return params;
};
export var parseTracks = function parseTracks(bytes) {
bytes = toUint8(bytes);
var decodedTracks = [];
var tracks = findEbml(bytes, [EBML_TAGS.Segment, EBML_TAGS.Tracks, EBML_TAGS.Track]);
if (!tracks.length) {
tracks = findEbml(bytes, [EBML_TAGS.Tracks, EBML_TAGS.Track]);
}
if (!tracks.length) {
tracks = findEbml(bytes, [EBML_TAGS.Track]);
}
if (!tracks.length) {
return decodedTracks;
}
tracks.forEach(function (track) {
var trackType = findEbml(track, EBML_TAGS.TrackType)[0];
if (!trackType || !trackType.length) {
return;
} // 1 is video, 2 is audio, 17 is subtitle
// other values are unimportant in this context
if (trackType[0] === 1) {
trackType = 'video';
} else if (trackType[0] === 2) {
trackType = 'audio';
} else if (trackType[0] === 17) {
trackType = 'subtitle';
} else {
return;
} // todo parse language
var decodedTrack = {
rawCodec: bytesToString(findEbml(track, [EBML_TAGS.CodecID])[0]),
type: trackType,
codecPrivate: findEbml(track, [EBML_TAGS.CodecPrivate])[0],
number: bytesToNumber(findEbml(track, [EBML_TAGS.TrackNumber])[0]),
defaultDuration: bytesToNumber(findEbml(track, [EBML_TAGS.DefaultDuration])[0]),
default: findEbml(track, [EBML_TAGS.FlagDefault])[0],
rawData: track
};
var codec = '';
if (/V_MPEG4\/ISO\/AVC/.test(decodedTrack.rawCodec)) {
codec = "avc1." + getAvcCodec(decodedTrack.codecPrivate);
} else if (/V_MPEGH\/ISO\/HEVC/.test(decodedTrack.rawCodec)) {
codec = "hev1." + getHvcCodec(decodedTrack.codecPrivate);
} else if (/V_MPEG4\/ISO\/ASP/.test(decodedTrack.rawCodec)) {
if (decodedTrack.codecPrivate) {
codec = 'mp4v.20.' + decodedTrack.codecPrivate[4].toString();
} else {
codec = 'mp4v.20.9';
}
} else if (/^V_THEORA/.test(decodedTrack.rawCodec)) {
codec = 'theora';
} else if (/^V_VP8/.test(decodedTrack.rawCodec)) {
codec = 'vp8';
} else if (/^V_VP9/.test(decodedTrack.rawCodec)) {
if (decodedTrack.codecPrivate) {
var _parseVp9Private = parseVp9Private(decodedTrack.codecPrivate),
profile = _parseVp9Private.profile,
level = _parseVp9Private.level,
bitDepth = _parseVp9Private.bitDepth,
chromaSubsampling = _parseVp9Private.chromaSubsampling;
codec = 'vp09.';
codec += padStart(profile, 2, '0') + ".";
codec += padStart(level, 2, '0') + ".";
codec += padStart(bitDepth, 2, '0') + ".";
codec += "" + padStart(chromaSubsampling, 2, '0'); // Video -> Colour -> Ebml name
var matrixCoefficients = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xB1]])[0] || [];
var videoFullRangeFlag = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xB9]])[0] || [];
var transferCharacteristics = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xBA]])[0] || [];
var colourPrimaries = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xBB]])[0] || []; // if we find any optional codec parameter specify them all.
if (matrixCoefficients.length || videoFullRangeFlag.length || transferCharacteristics.length || colourPrimaries.length) {
codec += "." + padStart(colourPrimaries[0], 2, '0');
codec += "." + padStart(transferCharacteristics[0], 2, '0');
codec += "." + padStart(matrixCoefficients[0], 2, '0');
codec += "." + padStart(videoFullRangeFlag[0], 2, '0');
}
} else {
codec = 'vp9';
}
} else if (/^V_AV1/.test(decodedTrack.rawCodec)) {
codec = "av01." + getAv1Codec(decodedTrack.codecPrivate);
} else if (/A_ALAC/.test(decodedTrack.rawCodec)) {
codec = 'alac';
} else if (/A_MPEG\/L2/.test(decodedTrack.rawCodec)) {
codec = 'mp2';
} else if (/A_MPEG\/L3/.test(decodedTrack.rawCodec)) {
codec = 'mp3';
} else if (/^A_AAC/.test(decodedTrack.rawCodec)) {
if (decodedTrack.codecPrivate) {
codec = 'mp4a.40.' + (decodedTrack.codecPrivate[0] >>> 3).toString();
} else {
codec = 'mp4a.40.2';
}
} else if (/^A_AC3/.test(decodedTrack.rawCodec)) {
codec = 'ac-3';
} else if (/^A_PCM/.test(decodedTrack.rawCodec)) {
codec = 'pcm';
} else if (/^A_MS\/ACM/.test(decodedTrack.rawCodec)) {
codec = 'speex';
} else if (/^A_EAC3/.test(decodedTrack.rawCodec)) {
codec = 'ec-3';
} else if (/^A_VORBIS/.test(decodedTrack.rawCodec)) {
codec = 'vorbis';
} else if (/^A_FLAC/.test(decodedTrack.rawCodec)) {
codec = 'flac';
} else if (/^A_OPUS/.test(decodedTrack.rawCodec)) {
codec = 'opus';
}
decodedTrack.codec = codec;
decodedTracks.push(decodedTrack);
});
return decodedTracks.sort(function (a, b) {
return a.number - b.number;
});
};
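// Usage sketch (illustrative, not part of the vendored source; `webmBytes` is assumed
// to be a Uint8Array holding the start of a WebM/Matroska file): findEbml walks tag
// paths and parseTracks turns Track elements into codec descriptions.
var docType = bytesToString(findEbml(webmBytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0]); // 'webm' or 'matroska'
var tracks = parseTracks(webmBytes); // e.g. [{ number: 1, type: 'video', codec: 'vp09.00.10.08', ... }]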
export var parseData = function parseData(data, tracks) {
var allBlocks = [];
var segment = findEbml(data, [EBML_TAGS.Segment])[0];
var timestampScale = findEbml(segment, [EBML_TAGS.SegmentInfo, EBML_TAGS.TimestampScale])[0]; // in nanoseconds, defaults to 1ms
if (timestampScale && timestampScale.length) {
timestampScale = bytesToNumber(timestampScale);
} else {
timestampScale = 1000000;
}
var clusters = findEbml(segment, [EBML_TAGS.Cluster]);
if (!tracks) {
tracks = parseTracks(segment);
}
clusters.forEach(function (cluster, ci) {
var simpleBlocks = findEbml(cluster, [EBML_TAGS.SimpleBlock]).map(function (b) {
return {
type: 'simple',
data: b
};
});
var blockGroups = findEbml(cluster, [EBML_TAGS.BlockGroup]).map(function (b) {
return {
type: 'group',
data: b
};
});
var timestamp = findEbml(cluster, [EBML_TAGS.Timestamp])[0] || 0;
if (timestamp && timestamp.length) {
timestamp = bytesToNumber(timestamp);
} // get all blocks then sort them into the correct order
var blocks = simpleBlocks.concat(blockGroups).sort(function (a, b) {
return a.data.byteOffset - b.data.byteOffset;
});
blocks.forEach(function (block, bi) {
var decoded = decodeBlock(block.data, block.type, timestampScale, timestamp);
allBlocks.push(decoded);
});
});
return {
tracks: tracks,
blocks: allBlocks
};
};

388
node_modules/@videojs/vhs-utils/es/format-parser.js generated vendored Normal file

@@ -0,0 +1,388 @@
import { bytesToString, toUint8, toHexString, bytesMatch } from './byte-helpers.js';
import { parseTracks as parseEbmlTracks } from './ebml-helpers.js';
import { parseTracks as parseMp4Tracks } from './mp4-helpers.js';
import { findFourCC } from './riff-helpers.js';
import { getPages } from './ogg-helpers.js';
import { detectContainerForBytes } from './containers.js';
import { findH264Nal, findH265Nal } from './nal-helpers.js';
import { parseTs } from './m2ts-helpers.js';
import { getAvcCodec, getHvcCodec } from './codec-helpers.js';
import { getId3Offset } from './id3-helpers.js'; // https://docs.microsoft.com/en-us/windows/win32/medfound/audio-subtype-guids
// https://tools.ietf.org/html/rfc2361
var wFormatTagCodec = function wFormatTagCodec(wFormatTag) {
wFormatTag = toUint8(wFormatTag);
if (bytesMatch(wFormatTag, [0x00, 0x55])) {
return 'mp3';
} else if (bytesMatch(wFormatTag, [0x16, 0x00]) || bytesMatch(wFormatTag, [0x00, 0xFF])) {
return 'aac';
} else if (bytesMatch(wFormatTag, [0x70, 0x4f])) {
return 'opus';
} else if (bytesMatch(wFormatTag, [0x6C, 0x61])) {
return 'alac';
} else if (bytesMatch(wFormatTag, [0xF1, 0xAC])) {
return 'flac';
} else if (bytesMatch(wFormatTag, [0x20, 0x00])) {
return 'ac-3';
} else if (bytesMatch(wFormatTag, [0xFF, 0xFE])) {
return 'ec-3';
} else if (bytesMatch(wFormatTag, [0x00, 0x50])) {
return 'mp2';
} else if (bytesMatch(wFormatTag, [0x56, 0x6f])) {
return 'vorbis';
} else if (bytesMatch(wFormatTag, [0xA1, 0x09])) {
return 'speex';
}
return '';
};
var formatMimetype = function formatMimetype(name, codecs) {
var codecString = ['video', 'audio'].reduce(function (acc, type) {
if (codecs[type]) {
acc += (acc.length ? ',' : '') + codecs[type];
}
return acc;
}, '');
return (codecs.video ? 'video' : 'audio') + "/" + name + (codecString ? ";codecs=\"" + codecString + "\"" : '');
};
var parseCodecFrom = {
mov: function mov(bytes) {
// mov and mp4 both use a nearly identical box structure.
var retval = parseCodecFrom.mp4(bytes);
if (retval.mimetype) {
retval.mimetype = retval.mimetype.replace('mp4', 'quicktime');
}
return retval;
},
mp4: function mp4(bytes) {
bytes = toUint8(bytes);
var codecs = {};
var tracks = parseMp4Tracks(bytes);
for (var i = 0; i < tracks.length; i++) {
var track = tracks[i];
if (track.type === 'audio' && !codecs.audio) {
codecs.audio = track.codec;
}
if (track.type === 'video' && !codecs.video) {
codecs.video = track.codec;
}
}
return {
codecs: codecs,
mimetype: formatMimetype('mp4', codecs)
};
},
'3gp': function gp(bytes) {
return {
codecs: {},
mimetype: 'video/3gpp'
};
},
ogg: function ogg(bytes) {
var pages = getPages(bytes, 0, 4);
var codecs = {};
pages.forEach(function (page) {
if (bytesMatch(page, [0x4F, 0x70, 0x75, 0x73], {
offset: 28
})) {
codecs.audio = 'opus';
} else if (bytesMatch(page, [0x56, 0x50, 0x38, 0x30], {
offset: 29
})) {
codecs.video = 'vp8';
} else if (bytesMatch(page, [0x74, 0x68, 0x65, 0x6F, 0x72, 0x61], {
offset: 29
})) {
codecs.video = 'theora';
} else if (bytesMatch(page, [0x46, 0x4C, 0x41, 0x43], {
offset: 29
})) {
codecs.audio = 'flac';
} else if (bytesMatch(page, [0x53, 0x70, 0x65, 0x65, 0x78], {
offset: 28
})) {
codecs.audio = 'speex';
} else if (bytesMatch(page, [0x76, 0x6F, 0x72, 0x62, 0x69, 0x73], {
offset: 29
})) {
codecs.audio = 'vorbis';
}
});
return {
codecs: codecs,
mimetype: formatMimetype('ogg', codecs)
};
},
wav: function wav(bytes) {
var format = findFourCC(bytes, ['WAVE', 'fmt'])[0];
var wFormatTag = Array.prototype.slice.call(format, 0, 2).reverse();
var mimetype = 'audio/vnd.wave';
var codecs = {
audio: wFormatTagCodec(wFormatTag)
};
var codecString = wFormatTag.reduce(function (acc, v) {
if (v) {
acc += toHexString(v);
}
return acc;
}, '');
if (codecString) {
mimetype += ";codec=" + codecString;
}
if (codecString && !codecs.audio) {
codecs.audio = codecString;
}
return {
codecs: codecs,
mimetype: mimetype
};
},
avi: function avi(bytes) {
var movi = findFourCC(bytes, ['AVI', 'movi'])[0];
var strls = findFourCC(bytes, ['AVI', 'hdrl', 'strl']);
var codecs = {};
strls.forEach(function (strl) {
var strh = findFourCC(strl, ['strh'])[0];
var strf = findFourCC(strl, ['strf'])[0]; // now parse AVIStreamHeader to get codec and type:
// https://docs.microsoft.com/en-us/previous-versions/windows/desktop/api/avifmt/ns-avifmt-avistreamheader
var type = bytesToString(strh.subarray(0, 4));
var codec;
var codecType;
if (type === 'vids') {
// https://docs.microsoft.com/en-us/windows/win32/api/wingdi/ns-wingdi-bitmapinfoheader
var handler = bytesToString(strh.subarray(4, 8));
var compression = bytesToString(strf.subarray(16, 20)); // look for 00dc (compressed video fourcc code) or 00db (uncompressed video fourcc code)
var videoData = findFourCC(movi, ['00dc'])[0] || findFourCC(movi, ['00db'])[0];
if (handler === 'H264' || compression === 'H264') {
if (videoData && videoData.length) {
codec = parseCodecFrom.h264(videoData).codecs.video;
} else {
codec = 'avc1';
}
} else if (handler === 'HEVC' || compression === 'HEVC') {
if (videoData && videoData.length) {
codec = parseCodecFrom.h265(videoData).codecs.video;
} else {
codec = 'hev1';
}
} else if (handler === 'FMP4' || compression === 'FMP4') {
if (movi.length) {
codec = 'mp4v.20.' + movi[12].toString();
} else {
codec = 'mp4v.20';
}
} else if (handler === 'VP80' || compression === 'VP80') {
codec = 'vp8';
} else if (handler === 'VP90' || compression === 'VP90') {
codec = 'vp9';
} else if (handler === 'AV01' || compression === 'AV01') {
codec = 'av01';
} else if (handler === 'theo' || compression === 'theora') {
codec = 'theora';
} else {
if (videoData && videoData.length) {
var result = detectContainerForBytes(videoData);
if (result === 'h264') {
codec = parseCodecFrom.h264(movi).codecs.video;
}
if (result === 'h265') {
codec = parseCodecFrom.h265(movi).codecs.video;
}
}
if (!codec) {
codec = handler || compression;
}
}
codecType = 'video';
} else if (type === 'auds') {
codecType = 'audio'; // look for 00wb (audio data fourcc)
// const audioData = findFourCC(movi, ['01wb']);
var wFormatTag = Array.prototype.slice.call(strf, 0, 2).reverse();
codecs.audio = wFormatTagCodec(wFormatTag);
} else {
return;
}
if (codec) {
codecs[codecType] = codec;
}
});
return {
codecs: codecs,
mimetype: formatMimetype('avi', codecs)
};
},
ts: function ts(bytes) {
var result = parseTs(bytes, 2);
var codecs = {};
Object.keys(result.streams).forEach(function (esPid) {
var stream = result.streams[esPid];
if (stream.codec === 'avc1' && stream.packets.length) {
stream.codec = parseCodecFrom.h264(stream.packets[0]).codecs.video;
} else if (stream.codec === 'hev1' && stream.packets.length) {
stream.codec = parseCodecFrom.h265(stream.packets[0]).codecs.video;
}
codecs[stream.type] = stream.codec;
});
return {
codecs: codecs,
mimetype: formatMimetype('mp2t', codecs)
};
},
webm: function webm(bytes) {
// mkv and webm both use ebml to store codec info
var retval = parseCodecFrom.mkv(bytes);
if (retval.mimetype) {
retval.mimetype = retval.mimetype.replace('x-matroska', 'webm');
}
return retval;
},
mkv: function mkv(bytes) {
var codecs = {};
var tracks = parseEbmlTracks(bytes);
for (var i = 0; i < tracks.length; i++) {
var track = tracks[i];
if (track.type === 'audio' && !codecs.audio) {
codecs.audio = track.codec;
}
if (track.type === 'video' && !codecs.video) {
codecs.video = track.codec;
}
}
return {
codecs: codecs,
mimetype: formatMimetype('x-matroska', codecs)
};
},
aac: function aac(bytes) {
return {
codecs: {
audio: 'aac'
},
mimetype: 'audio/aac'
};
},
ac3: function ac3(bytes) {
// past id3 and syncword
var offset = getId3Offset(bytes) + 2; // default to ac-3
var codec = 'ac-3';
if (bytesMatch(bytes, [0xB8, 0xE0], {
offset: offset
})) {
codec = 'ac-3'; // 0x01, 0x7F
} else if (bytesMatch(bytes, [0x01, 0x7f], {
offset: offset
})) {
codec = 'ec-3';
}
return {
codecs: {
audio: codec
},
mimetype: 'audio/vnd.dolby.dd-raw'
};
},
mp3: function mp3(bytes) {
return {
codecs: {
audio: 'mp3'
},
mimetype: 'audio/mpeg'
};
},
flac: function flac(bytes) {
return {
codecs: {
audio: 'flac'
},
mimetype: 'audio/flac'
};
},
'h264': function h264(bytes) {
// find seq_parameter_set_rbsp to get encoding settings for codec
var nal = findH264Nal(bytes, 7, 3);
var retval = {
codecs: {
video: 'avc1'
},
mimetype: 'video/h264'
};
if (nal.length) {
retval.codecs.video += "." + getAvcCodec(nal);
}
return retval;
},
'h265': function h265(bytes) {
var retval = {
codecs: {
video: 'hev1'
},
mimetype: 'video/h265'
}; // find video_parameter_set_rbsp or seq_parameter_set_rbsp
// to get encoding settings for codec
var nal = findH265Nal(bytes, [32, 33], 3);
if (nal.length) {
var type = nal[0] >> 1 & 0x3F; // profile_tier_level starts at byte 5 for video_parameter_set_rbsp
// byte 2 for seq_parameter_set_rbsp
retval.codecs.video += "." + getHvcCodec(nal.subarray(type === 32 ? 5 : 2));
}
return retval;
}
};
export var parseFormatForBytes = function parseFormatForBytes(bytes) {
bytes = toUint8(bytes);
var result = {
codecs: {},
container: detectContainerForBytes(bytes),
mimetype: ''
};
var parseCodecFn = parseCodecFrom[result.container];
if (parseCodecFn) {
var parsed = parseCodecFn ? parseCodecFn(bytes) : {};
result.codecs = parsed.codecs || {};
result.mimetype = parsed.mimetype || '';
}
return result;
};
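// Usage sketch (illustrative, not part of the vendored source; `firstBytes` is assumed
// to hold the start of a media file): parseFormatForBytes ties container detection and
// codec parsing together.
var format = parseFormatForBytes(firstBytes);
// -> e.g. { container: 'mp4', codecs: { video: 'avc1.64001f', audio: 'mp4a.40.2' }, mimetype: 'video/mp4;codecs="avc1.64001f,mp4a.40.2"' }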

37
node_modules/@videojs/vhs-utils/es/id3-helpers.js generated vendored Normal file

@@ -0,0 +1,37 @@
import { toUint8, bytesMatch } from './byte-helpers.js';
var ID3 = toUint8([0x49, 0x44, 0x33]);
export var getId3Size = function getId3Size(bytes, offset) {
if (offset === void 0) {
offset = 0;
}
bytes = toUint8(bytes);
var flags = bytes[offset + 5];
var returnSize = bytes[offset + 6] << 21 | bytes[offset + 7] << 14 | bytes[offset + 8] << 7 | bytes[offset + 9];
var footerPresent = (flags & 16) >> 4;
if (footerPresent) {
return returnSize + 20;
}
return returnSize + 10;
};
export var getId3Offset = function getId3Offset(bytes, offset) {
if (offset === void 0) {
offset = 0;
}
bytes = toUint8(bytes);
if (bytes.length - offset < 10 || !bytesMatch(bytes, ID3, {
offset: offset
})) {
return offset;
}
offset += getId3Size(bytes, offset); // recursive check for id3 tags as some files
// have multiple ID3 tag sections even though
// they should not.
return getId3Offset(bytes, offset);
};
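// Usage sketch (illustrative, not part of the vendored source): a 10-byte ID3 header
// plus a 10-byte tag body means the media payload starts 20 bytes in.
var tagged = toUint8([0x49, 0x44, 0x33, 4, 0, 0, 0, 0, 0, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xFF]);
getId3Offset(tagged); // -> 20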

16
node_modules/@videojs/vhs-utils/es/index.js generated vendored Normal file

@@ -0,0 +1,16 @@
import * as codecs from './codecs';
import * as byteHelpers from './byte-helpers.js';
import * as containers from './containers.js';
import decodeB64ToUint8Array from './decode-b64-to-uint8-array.js';
import * as mediaGroups from './media-groups.js';
import resolveUrl from './resolve-url.js';
import Stream from './stream.js';
export default {
codecs: codecs,
byteHelpers: byteHelpers,
containers: containers,
decodeB64ToUint8Array: decodeB64ToUint8Array,
mediaGroups: mediaGroups,
resolveUrl: resolveUrl,
Stream: Stream
};

105
node_modules/@videojs/vhs-utils/es/m2ts-helpers.js generated vendored Normal file

@@ -0,0 +1,105 @@
import { bytesMatch, toUint8 } from './byte-helpers.js';
var SYNC_BYTE = 0x47;
export var parseTs = function parseTs(bytes, maxPes) {
if (maxPes === void 0) {
maxPes = Infinity;
}
bytes = toUint8(bytes);
var startIndex = 0;
var endIndex = 188;
var pmt = {};
var pesCount = 0;
while (endIndex < bytes.byteLength && pesCount < maxPes) {
if (bytes[startIndex] !== SYNC_BYTE && bytes[endIndex] !== SYNC_BYTE) {
endIndex += 1;
startIndex += 1;
continue;
}
var packet = bytes.subarray(startIndex, endIndex);
var pid = (packet[1] & 0x1f) << 8 | packet[2];
var hasPusi = !!(packet[1] & 0x40);
var hasAdaptationHeader = (packet[3] & 0x30) >>> 4 > 0x01;
var payloadOffset = 4 + (hasAdaptationHeader ? packet[4] + 1 : 0);
if (hasPusi) {
payloadOffset += packet[payloadOffset] + 1;
}
if (pid === 0 && !pmt.pid) {
pmt.pid = (packet[payloadOffset + 10] & 0x1f) << 8 | packet[payloadOffset + 11];
} else if (pmt.pid && pid === pmt.pid && !pmt.streams) {
var isNotForward = packet[payloadOffset + 5] & 0x01; // ignore forward pmt declarations
if (!isNotForward) {
continue;
}
pmt.streams = {};
var sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
var tableEnd = 3 + sectionLength - 4;
var programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
var offset = 12 + programInfoLength;
while (offset < tableEnd) {
// add an entry that maps the elementary_pid to the stream_type
var i = payloadOffset + offset;
var type = packet[i];
var esPid = (packet[i + 1] & 0x1F) << 8 | packet[i + 2];
var esLength = (packet[i + 3] & 0x0f) << 8 | packet[i + 4];
var esInfo = packet.subarray(i + 5, i + 5 + esLength);
var stream = pmt.streams[esPid] = {
esInfo: esInfo,
typeNumber: type,
packets: [],
type: '',
codec: ''
};
if (type === 0x06 && bytesMatch(esInfo, [0x4F, 0x70, 0x75, 0x73], {
offset: 2
})) {
stream.type = 'audio';
stream.codec = 'opus';
} else if (type === 0x1B || type === 0x20) {
stream.type = 'video';
stream.codec = 'avc1';
} else if (type === 0x24) {
stream.type = 'video';
stream.codec = 'hev1';
} else if (type === 0x10) {
stream.type = 'video';
stream.codec = 'mp4v.20';
} else if (type === 0x0F) {
stream.type = 'audio';
stream.codec = 'aac';
} else if (type === 0x81) {
stream.type = 'audio';
stream.codec = 'ac-3';
} else if (type === 0x87) {
stream.type = 'audio';
stream.codec = 'ec-3';
} else if (type === 0x03 || type === 0x04) {
stream.type = 'audio';
stream.codec = 'mp3';
}
offset += esLength + 5;
}
} else if (pmt.pid && pmt.streams) {
pmt.streams[pid].packets.push(packet.subarray(payloadOffset));
pesCount++;
}
startIndex += 188;
endIndex += 188;
}
if (!pmt.streams) {
pmt.streams = {};
}
return pmt;
};

21
node_modules/@videojs/vhs-utils/es/media-groups.js generated vendored Normal file

@@ -0,0 +1,21 @@
/**
* Loops through all supported media groups in master and calls the provided
* callback for each group
*
* @param {Object} master
* The parsed master manifest object
* @param {string[]} groups
* The media groups to call the callback for
* @param {Function} callback
* Callback to call for each media group
*/
export var forEachMediaGroup = function forEachMediaGroup(master, groups, callback) {
groups.forEach(function (mediaType) {
for (var groupKey in master.mediaGroups[mediaType]) {
for (var labelKey in master.mediaGroups[mediaType][groupKey]) {
var mediaProperties = master.mediaGroups[mediaType][groupKey][labelKey];
callback(mediaProperties, mediaType, groupKey, labelKey);
}
}
});
};
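// Usage sketch (illustrative, not part of the vendored source; `master` is assumed to be
// a parsed master manifest object):
forEachMediaGroup(master, ['AUDIO', 'SUBTITLES'], function (properties, mediaType, groupKey, labelKey) {
  console.log(mediaType, groupKey, labelKey, properties);
});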

36
node_modules/@videojs/vhs-utils/es/media-types.js generated vendored Normal file

@@ -0,0 +1,36 @@
var MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
var DASH_REGEX = /^application\/dash\+xml/i;
/**
* Returns a string that describes the type of source based on a video source object's
* media type.
*
* @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
*
* @param {string} type
* Video source object media type
* @return {('hls'|'dash'|'vhs-json'|null)}
* VHS source type string
*/
export var simpleTypeFromSourceType = function simpleTypeFromSourceType(type) {
if (MPEGURL_REGEX.test(type)) {
return 'hls';
}
if (DASH_REGEX.test(type)) {
return 'dash';
} // Denotes the special case of a manifest object passed to http-streaming instead of a
// source URL.
//
// See https://en.wikipedia.org/wiki/Media_type for details on specifying media types.
//
// In this case, vnd stands for vendor, video.js for the organization, VHS for this
// project, and the +json suffix identifies the structure of the media type.
if (type === 'application/vnd.videojs.vhs+json') {
return 'vhs-json';
}
return null;
};
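// Usage sketch (illustrative, not part of the vendored source):
simpleTypeFromSourceType('application/x-mpegURL'); // -> 'hls'
simpleTypeFromSourceType('application/dash+xml'); // -> 'dash'
simpleTypeFromSourceType('video/mp4'); // -> null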

553
node_modules/@videojs/vhs-utils/es/mp4-helpers.js generated vendored Normal file

@@ -0,0 +1,553 @@
import { stringToBytes, toUint8, bytesMatch, bytesToString, toHexString, padStart, bytesToNumber } from './byte-helpers.js';
import { getAvcCodec, getHvcCodec, getAv1Codec } from './codec-helpers.js';
import { parseOpusHead } from './opus-helpers.js';
var normalizePath = function normalizePath(path) {
if (typeof path === 'string') {
return stringToBytes(path);
}
if (typeof path === 'number') {
return path;
}
return path;
};
var normalizePaths = function normalizePaths(paths) {
if (!Array.isArray(paths)) {
return [normalizePath(paths)];
}
return paths.map(function (p) {
return normalizePath(p);
});
};
var DESCRIPTORS;
export var parseDescriptors = function parseDescriptors(bytes) {
bytes = toUint8(bytes);
var results = [];
var i = 0;
while (bytes.length > i) {
var tag = bytes[i];
var size = 0;
var headerSize = 0; // tag
headerSize++;
var byte = bytes[headerSize]; // first byte
headerSize++;
while (byte & 0x80) {
size = (byte & 0x7F) << 7;
byte = bytes[headerSize];
headerSize++;
}
size += byte & 0x7F;
for (var z = 0; z < DESCRIPTORS.length; z++) {
var _DESCRIPTORS$z = DESCRIPTORS[z],
id = _DESCRIPTORS$z.id,
parser = _DESCRIPTORS$z.parser;
if (tag === id) {
results.push(parser(bytes.subarray(headerSize, headerSize + size)));
break;
}
}
i += size + headerSize;
}
return results;
};
DESCRIPTORS = [{
id: 0x03,
parser: function parser(bytes) {
var desc = {
tag: 0x03,
id: bytes[0] << 8 | bytes[1],
flags: bytes[2],
size: 3,
dependsOnEsId: 0,
ocrEsId: 0,
descriptors: [],
url: ''
}; // depends on es id
if (desc.flags & 0x80) {
desc.dependsOnEsId = bytes[desc.size] << 8 | bytes[desc.size + 1];
desc.size += 2;
} // url
if (desc.flags & 0x40) {
var len = bytes[desc.size];
desc.url = bytesToString(bytes.subarray(desc.size + 1, desc.size + 1 + len));
desc.size += len;
} // ocr es id
if (desc.flags & 0x20) {
desc.ocrEsId = bytes[desc.size] << 8 | bytes[desc.size + 1];
desc.size += 2;
}
desc.descriptors = parseDescriptors(bytes.subarray(desc.size)) || [];
return desc;
}
}, {
id: 0x04,
parser: function parser(bytes) {
// DecoderConfigDescriptor
var desc = {
tag: 0x04,
oti: bytes[0],
streamType: bytes[1],
bufferSize: bytes[2] << 16 | bytes[3] << 8 | bytes[4],
maxBitrate: bytes[5] << 24 | bytes[6] << 16 | bytes[7] << 8 | bytes[8],
avgBitrate: bytes[9] << 24 | bytes[10] << 16 | bytes[11] << 8 | bytes[12],
descriptors: parseDescriptors(bytes.subarray(13))
};
return desc;
}
}, {
id: 0x05,
parser: function parser(bytes) {
// DecoderSpecificInfo
return {
tag: 0x05,
bytes: bytes
};
}
}, {
id: 0x06,
parser: function parser(bytes) {
// SLConfigDescriptor
return {
tag: 0x06,
bytes: bytes
};
}
}];
/**
* find any number of boxes by name given a path to it in an iso bmff
* such as mp4.
*
* @param {TypedArray} bytes
* bytes for the iso bmff to search for boxes in
*
* @param {Uint8Array[]|string[]|string|Uint8Array} paths
* An array of paths or a single path representing the name
* of boxes to search through in bytes. Paths may be
* uint8 (character codes) or strings.
*
* @param {boolean} [complete=false]
* Should we search only for complete boxes on the final path.
* This is very useful when you do not want to get back partial boxes
* in the case of streaming files.
*
* @return {Uint8Array[]}
* An array of the data payloads of the boxes found at the end of the path.
*/
export var findBox = function findBox(bytes, paths, complete) {
if (complete === void 0) {
complete = false;
}
paths = normalizePaths(paths);
bytes = toUint8(bytes);
var results = [];
if (!paths.length) {
// short-circuit the search for empty paths
return results;
}
var i = 0;
while (i < bytes.length) {
var size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;
var type = bytes.subarray(i + 4, i + 8); // invalid box format.
if (size === 0) {
break;
}
var end = i + size;
if (end > bytes.length) {
// this box is bigger than the number of bytes we have
// and complete is set, we cannot find any more boxes.
if (complete) {
break;
}
end = bytes.length;
}
var data = bytes.subarray(i + 8, end);
if (bytesMatch(type, paths[0])) {
if (paths.length === 1) {
// this is the end of the path and we've found the box we were
// looking for
results.push(data);
} else {
// recursively search for the next box along the path
results.push.apply(results, findBox(data, paths.slice(1), complete));
}
}
i = end;
} // we've finished searching all of bytes
return results;
};
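// Usage sketch (illustrative, not part of the vendored source; `mp4Bytes` is assumed to
// hold an ISO-BMFF file): findBox follows a path of box names and returns the payload of
// every box found at the end of that path.
var traks = findBox(mp4Bytes, ['moov', 'trak']); // one Uint8Array per track
var moofs = findBox(mp4Bytes, ['moof'], true); // `true` -> only complete boxes are returned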
/**
* Search for a single matching box by name in an iso bmff format like
* mp4. This function is useful for finding codec boxes which
* can be placed arbitrarily in sample descriptions depending
* on the version of the file or file type.
*
* @param {TypedArray} bytes
* bytes for the iso bmff to search for boxes in
*
* @param {string|Uint8Array} name
* The name of the box to find.
*
* @return {Uint8Array}
* A subarray of bytes representing the named box we found.
*/
export var findNamedBox = function findNamedBox(bytes, name) {
name = normalizePath(name);
if (!name.length) {
// short-circuit the search for empty paths
return bytes.subarray(bytes.length);
}
var i = 0;
while (i < bytes.length) {
if (bytesMatch(bytes.subarray(i, i + name.length), name)) {
var size = (bytes[i - 4] << 24 | bytes[i - 3] << 16 | bytes[i - 2] << 8 | bytes[i - 1]) >>> 0;
var end = size > 1 ? i + size : bytes.byteLength;
return bytes.subarray(i + 4, end);
}
i++;
} // we've finished searching all of bytes
return bytes.subarray(bytes.length);
};
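// Illustrative usage sketch (hypothetical `sampleEntryBytes`): unlike findBox
// this scans for the four character code anywhere in the buffer, which is how
// the codec configuration boxes nested inside sample entries are located.
//
//   var avcC = findNamedBox(sampleEntryBytes, 'avcC');
//   if (avcC.length) {
//     // avcC is the payload immediately following the matched name
//   }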
var parseSamples = function parseSamples(data, entrySize, parseEntry) {
if (entrySize === void 0) {
entrySize = 4;
}
if (parseEntry === void 0) {
parseEntry = function parseEntry(d) {
return bytesToNumber(d);
};
}
var entries = [];
if (!data || !data.length) {
return entries;
}
var entryCount = bytesToNumber(data.subarray(4, 8));
for (var i = 8; entryCount; i += entrySize, entryCount--) {
entries.push(parseEntry(data.subarray(i, i + entrySize)));
}
return entries;
};
export var buildFrameTable = function buildFrameTable(stbl, timescale) {
var keySamples = parseSamples(findBox(stbl, ['stss'])[0]);
var chunkOffsets = parseSamples(findBox(stbl, ['stco'])[0]);
var timeToSamples = parseSamples(findBox(stbl, ['stts'])[0], 8, function (entry) {
return {
sampleCount: bytesToNumber(entry.subarray(0, 4)),
sampleDelta: bytesToNumber(entry.subarray(4, 8))
};
});
var samplesToChunks = parseSamples(findBox(stbl, ['stsc'])[0], 12, function (entry) {
return {
firstChunk: bytesToNumber(entry.subarray(0, 4)),
samplesPerChunk: bytesToNumber(entry.subarray(4, 8)),
sampleDescriptionIndex: bytesToNumber(entry.subarray(8, 12))
};
});
var stsz = findBox(stbl, ['stsz'])[0]; // stsz starts with a 4 byte sampleSize which we don't need
var sampleSizes = parseSamples(stsz && stsz.length && stsz.subarray(4) || null);
var frames = [];
for (var chunkIndex = 0; chunkIndex < chunkOffsets.length; chunkIndex++) {
var samplesInChunk = void 0;
for (var i = 0; i < samplesToChunks.length; i++) {
var sampleToChunk = samplesToChunks[i];
var isThisOne = chunkIndex + 1 >= sampleToChunk.firstChunk && (i + 1 >= samplesToChunks.length || chunkIndex + 1 < samplesToChunks[i + 1].firstChunk);
if (isThisOne) {
samplesInChunk = sampleToChunk.samplesPerChunk;
break;
}
}
var chunkOffset = chunkOffsets[chunkIndex];
for (var _i = 0; _i < samplesInChunk; _i++) {
var frameEnd = sampleSizes[frames.length]; // if we don't have key samples every frame is a keyframe
var keyframe = !keySamples.length;
if (keySamples.length && keySamples.indexOf(frames.length + 1) !== -1) {
keyframe = true;
}
var frame = {
keyframe: keyframe,
start: chunkOffset,
end: chunkOffset + frameEnd
};
for (var k = 0; k < timeToSamples.length; k++) {
var _timeToSamples$k = timeToSamples[k],
sampleCount = _timeToSamples$k.sampleCount,
sampleDelta = _timeToSamples$k.sampleDelta;
if (frames.length <= sampleCount) {
// convert from timescale ticks to milliseconds
var lastTimestamp = frames.length ? frames[frames.length - 1].timestamp : 0;
frame.timestamp = lastTimestamp + sampleDelta / timescale * 1000;
frame.duration = sampleDelta;
break;
}
}
frames.push(frame);
chunkOffset += frameEnd;
}
}
return frames;
};
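// Illustrative usage sketch (hypothetical `mp4Bytes`): the frame table maps
// every sample to a byte range and a timestamp, which is what a demuxer needs
// to pull raw frames back out of the file.
//
//   var track = parseTracks(mp4Bytes)[0];
//   var firstFrame = track.frameTable[0];
//   // firstFrame.start / firstFrame.end are byte offsets taken from stco + stsz,
//   // firstFrame.timestamp is in milliseconds, firstFrame.keyframe is a boolean
//   var frameBytes = mp4Bytes.subarray(firstFrame.start, firstFrame.end);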
export var addSampleDescription = function addSampleDescription(track, bytes) {
var codec = bytesToString(bytes.subarray(0, 4));
if (track.type === 'video') {
track.info = track.info || {};
track.info.width = bytes[28] << 8 | bytes[29];
track.info.height = bytes[30] << 8 | bytes[31];
} else if (track.type === 'audio') {
track.info = track.info || {};
track.info.channels = bytes[20] << 8 | bytes[21];
track.info.bitDepth = bytes[22] << 8 | bytes[23];
track.info.sampleRate = bytes[28] << 8 | bytes[29];
}
if (codec === 'avc1') {
var avcC = findNamedBox(bytes, 'avcC'); // AVCDecoderConfigurationRecord
codec += "." + getAvcCodec(avcC);
track.info.avcC = avcC; // TODO: do we need to parse all this?
/* {
configurationVersion: avcC[0],
profile: avcC[1],
profileCompatibility: avcC[2],
level: avcC[3],
lengthSizeMinusOne: avcC[4] & 0x3
};
let spsNalUnitCount = avcC[5] & 0x1F;
const spsNalUnits = track.info.avc.spsNalUnits = [];
// past spsNalUnitCount
let offset = 6;
while (spsNalUnitCount--) {
const nalLen = avcC[offset] << 8 | avcC[offset + 1];
spsNalUnits.push(avcC.subarray(offset + 2, offset + 2 + nalLen));
offset += nalLen + 2;
}
let ppsNalUnitCount = avcC[offset];
const ppsNalUnits = track.info.avc.ppsNalUnits = [];
// past ppsNalUnitCount
offset += 1;
while (ppsNalUnitCount--) {
const nalLen = avcC[offset] << 8 | avcC[offset + 1];
ppsNalUnits.push(avcC.subarray(offset + 2, offset + 2 + nalLen));
offset += nalLen + 2;
}*/
// HEVCDecoderConfigurationRecord
} else if (codec === 'hvc1' || codec === 'hev1') {
codec += "." + getHvcCodec(findNamedBox(bytes, 'hvcC'));
} else if (codec === 'mp4a' || codec === 'mp4v') {
var esds = findNamedBox(bytes, 'esds');
var esDescriptor = parseDescriptors(esds.subarray(4))[0];
var decoderConfig = esDescriptor && esDescriptor.descriptors.filter(function (_ref) {
var tag = _ref.tag;
return tag === 0x04;
})[0];
if (decoderConfig) {
// most codecs do not have a further '.'
// such as 0xa5 for ac-3 and 0xa6 for e-ac-3
codec += '.' + toHexString(decoderConfig.oti);
if (decoderConfig.oti === 0x40) {
codec += '.' + (decoderConfig.descriptors[0].bytes[0] >> 3).toString();
} else if (decoderConfig.oti === 0x20) {
codec += '.' + decoderConfig.descriptors[0].bytes[4].toString();
} else if (decoderConfig.oti === 0xdd) {
codec = 'vorbis';
}
} else if (track.type === 'audio') {
codec += '.40.2';
} else {
codec += '.20.9';
}
} else if (codec === 'av01') {
// AV1DecoderConfigurationRecord
codec += "." + getAv1Codec(findNamedBox(bytes, 'av1C'));
} else if (codec === 'vp09') {
// VPCodecConfigurationRecord
var vpcC = findNamedBox(bytes, 'vpcC'); // https://www.webmproject.org/vp9/mp4/
var profile = vpcC[0];
var level = vpcC[1];
var bitDepth = vpcC[2] >> 4;
var chromaSubsampling = (vpcC[2] & 0x0F) >> 1;
var videoFullRangeFlag = vpcC[2] & 0x01; // full range flag is the lowest bit of this byte
var colourPrimaries = vpcC[3];
var transferCharacteristics = vpcC[4];
var matrixCoefficients = vpcC[5];
codec += "." + padStart(profile, 2, '0');
codec += "." + padStart(level, 2, '0');
codec += "." + padStart(bitDepth, 2, '0');
codec += "." + padStart(chromaSubsampling, 2, '0');
codec += "." + padStart(colourPrimaries, 2, '0');
codec += "." + padStart(transferCharacteristics, 2, '0');
codec += "." + padStart(matrixCoefficients, 2, '0');
codec += "." + padStart(videoFullRangeFlag, 2, '0');
} else if (codec === 'theo') {
codec = 'theora';
} else if (codec === 'spex') {
codec = 'speex';
} else if (codec === '.mp3') {
codec = 'mp4a.40.34';
} else if (codec === 'msVo') {
codec = 'vorbis';
} else if (codec === 'Opus') {
codec = 'opus';
var dOps = findNamedBox(bytes, 'dOps');
track.info.opus = parseOpusHead(dOps); // TODO: should this go into the webm code??
// Firefox requires a codecDelay for opus playback
// see https://bugzilla.mozilla.org/show_bug.cgi?id=1276238
track.info.codecDelay = 6500000;
} else {
codec = codec.toLowerCase();
}
/* eslint-enable */
// flac, ac-3, ec-3, opus
track.codec = codec;
};
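// Illustrative usage sketch (hypothetical `sampleEntry`, an stsd sample
// description starting at its four character coding name): this fills in
// track.codec (e.g. 'avc1.640028' or 'mp4a.40.2') plus basic width/height or
// channel/sampleRate info on track.info.
//
//   var track = { type: 'video' };
//   addSampleDescription(track, sampleEntry);
//   // track.codec and track.info are now populated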
export var parseTracks = function parseTracks(bytes, frameTable) {
if (frameTable === void 0) {
frameTable = true;
}
bytes = toUint8(bytes);
var traks = findBox(bytes, ['moov', 'trak'], true);
var tracks = [];
traks.forEach(function (trak) {
var track = {
bytes: trak
};
var mdia = findBox(trak, ['mdia'])[0];
var hdlr = findBox(mdia, ['hdlr'])[0];
var trakType = bytesToString(hdlr.subarray(8, 12));
if (trakType === 'soun') {
track.type = 'audio';
} else if (trakType === 'vide') {
track.type = 'video';
} else {
track.type = trakType;
}
var tkhd = findBox(trak, ['tkhd'])[0];
if (tkhd) {
var view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
var tkhdVersion = view.getUint8(0);
track.number = tkhdVersion === 0 ? view.getUint32(12) : view.getUint32(20);
}
var mdhd = findBox(mdia, ['mdhd'])[0];
if (mdhd) {
// mdhd is a FullBox, meaning it will have its own version as the first byte
var version = mdhd[0];
var index = version === 0 ? 12 : 20;
track.timescale = (mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]) >>> 0;
}
var stbl = findBox(mdia, ['minf', 'stbl'])[0];
var stsd = findBox(stbl, ['stsd'])[0];
var descriptionCount = bytesToNumber(stsd.subarray(4, 8));
var offset = 8; // add codec and codec info
while (descriptionCount--) {
var len = bytesToNumber(stsd.subarray(offset, offset + 4));
var sampleDescriptor = stsd.subarray(offset + 4, offset + 4 + len);
addSampleDescription(track, sampleDescriptor);
offset += 4 + len;
}
if (frameTable) {
track.frameTable = buildFrameTable(stbl, track.timescale);
} // codec has no sub parameters
tracks.push(track);
});
return tracks;
};
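// Illustrative usage sketch (hypothetical `mp4Bytes`):
//
//   var tracks = parseTracks(mp4Bytes);
//   tracks.forEach(function (track) {
//     // track.type: 'video', 'audio' or the raw handler type
//     // track.codec: e.g. 'avc1.640028' or 'mp4a.40.2'
//     // track.timescale: ticks per second from the mdhd box
//     // track.frameTable: omitted when called as parseTracks(mp4Bytes, false)
//   });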
export var parseMediaInfo = function parseMediaInfo(bytes) {
var mvhd = findBox(bytes, ['moov', 'mvhd'], true)[0];
if (!mvhd || !mvhd.length) {
return;
}
var info = {};
// mvhd v1 has 8 byte duration and other fields too
if (mvhd[0] === 1) {
info.timestampScale = bytesToNumber(mvhd.subarray(20, 24));
info.duration = bytesToNumber(mvhd.subarray(24, 32));
} else {
info.timestampScale = bytesToNumber(mvhd.subarray(12, 16));
info.duration = bytesToNumber(mvhd.subarray(16, 20));
}
info.bytes = mvhd;
return info;
};
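// Illustrative usage sketch (hypothetical `mp4Bytes`): duration is reported in
// timestampScale ticks, so divide to get seconds.
//
//   var info = parseMediaInfo(mp4Bytes);
//   if (info) {
//     var seconds = info.duration / info.timestampScale;
//   }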

112
node_modules/@videojs/vhs-utils/es/nal-helpers.js generated vendored Normal file
View file

@ -0,0 +1,112 @@
import { bytesMatch, toUint8 } from './byte-helpers.js';
export var NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
export var NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
export var EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);
/**
* Expunge any "Emulation Prevention" bytes from a "Raw Byte
* Sequence Payload"
*
 * @param {Uint8Array} bytes the bytes of a RBSP from a NAL
* unit
* @return {Uint8Array} the RBSP without any Emulation
* Prevention Bytes
*/
export var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(bytes) {
var positions = [];
var i = 1; // Find all `Emulation Prevention Bytes`
while (i < bytes.length - 2) {
if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
positions.push(i + 2);
i++;
}
i++;
} // If no Emulation Prevention Bytes were found just return the original
// array
if (positions.length === 0) {
return bytes;
} // Create a new array to hold the NAL unit data
var newLength = bytes.length - positions.length;
var newData = new Uint8Array(newLength);
var sourceIndex = 0;
for (i = 0; i < newLength; sourceIndex++, i++) {
if (sourceIndex === positions[0]) {
// Skip this byte
sourceIndex++; // Remove this position index
positions.shift();
}
newData[i] = bytes[sourceIndex];
}
return newData;
};
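// Illustrative sketch: a 0x00 0x00 0x03 sequence in a NAL unit carries an
// extra byte so the payload can never look like a start code; stripping the
// 0x03 recovers the original RBSP.
//
//   discardEmulationPreventionBytes(new Uint8Array([0x67, 0x00, 0x00, 0x03, 0x01]));
//   // -> Uint8Array [0x67, 0x00, 0x00, 0x01]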
export var findNal = function findNal(bytes, dataType, types, nalLimit) {
if (nalLimit === void 0) {
nalLimit = Infinity;
}
bytes = toUint8(bytes);
types = [].concat(types);
var i = 0;
var nalStart;
var nalsFound = 0; // keep searching until:
// we reach the end of bytes
// we reach the maximum number of nals they want to search
// NOTE: we disregard nalLimit when we have found the start
// of the nal we want so that we can find the end of the nal we want.
while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
var nalOffset = void 0;
if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
nalOffset = 4;
} else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
nalOffset = 3;
} // we are unsynced,
// find the next nal unit
if (!nalOffset) {
i++;
continue;
}
nalsFound++;
if (nalStart) {
return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
}
var nalType = void 0;
if (dataType === 'h264') {
nalType = bytes[i + nalOffset] & 0x1f;
} else if (dataType === 'h265') {
nalType = bytes[i + nalOffset] >> 1 & 0x3f;
}
if (types.indexOf(nalType) !== -1) {
nalStart = i + nalOffset;
} // nal header is 1 length for h264, and 2 for h265
i += nalOffset + (dataType === 'h264' ? 1 : 2);
}
return bytes.subarray(0, 0);
};
export var findH264Nal = function findH264Nal(bytes, type, nalLimit) {
return findNal(bytes, 'h264', type, nalLimit);
};
export var findH265Nal = function findH265Nal(bytes, type, nalLimit) {
return findNal(bytes, 'h265', type, nalLimit);
};
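// Illustrative usage sketch (hypothetical `annexBBytes` containing Annex B
// formatted data): NAL types are numeric, e.g. 7 is an h264 sequence parameter
// set and 8 a picture parameter set; for h265 the SPS is type 33. The return
// value is the first matching NAL unit with emulation prevention bytes
// removed, or an empty Uint8Array if none is found.
//
//   var sps = findH264Nal(annexBBytes, 7);
//   var pps = findH264Nal(annexBBytes, 8);
//   var hevcSps = findH265Nal(annexBBytes, 33);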

28
node_modules/@videojs/vhs-utils/es/ogg-helpers.js generated vendored Normal file
View file

@ -0,0 +1,28 @@
import { bytesMatch, toUint8 } from './byte-helpers';
var SYNC_WORD = toUint8([0x4f, 0x67, 0x67, 0x53]);
export var getPages = function getPages(bytes, start, end) {
if (end === void 0) {
end = Infinity;
}
bytes = toUint8(bytes);
var pages = [];
var i = 0;
while (i < bytes.length && pages.length < end) {
// we are unsynced,
// find the next syncword
if (!bytesMatch(bytes, SYNC_WORD, {
offset: i
})) {
i++;
continue;
}
var segmentLength = bytes[i + 27];
pages.push(bytes.subarray(i, i + 28 + segmentLength));
i += pages[pages.length - 1].length;
}
return pages.slice(start, end);
};
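// Illustrative usage sketch (hypothetical `oggBytes`): each returned entry is
// a subarray that starts at an 'OggS' capture pattern, and start/end select a
// window of pages.
//
//   var firstTwoPages = getPages(oggBytes, 0, 2);
//   var secondPage = getPages(oggBytes, 1, 2)[0];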

52
node_modules/@videojs/vhs-utils/es/opus-helpers.js generated vendored Normal file
View file

@ -0,0 +1,52 @@
export var OPUS_HEAD = new Uint8Array([// O, p, u, s
0x4f, 0x70, 0x75, 0x73, // H, e, a, d
0x48, 0x65, 0x61, 0x64]); // https://wiki.xiph.org/OggOpus
// https://vfrmaniac.fushizen.eu/contents/opus_in_isobmff.html
// https://opus-codec.org/docs/opusfile_api-0.7/structOpusHead.html
export var parseOpusHead = function parseOpusHead(bytes) {
var view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
var version = view.getUint8(0); // version 0, from mp4, does not use littleEndian.
var littleEndian = version !== 0;
var config = {
version: version,
channels: view.getUint8(1),
preSkip: view.getUint16(2, littleEndian),
sampleRate: view.getUint32(4, littleEndian),
    outputGain: view.getInt16(8, littleEndian), // output gain is a signed Q7.8 value
channelMappingFamily: view.getUint8(10)
};
if (config.channelMappingFamily > 0 && bytes.length > 10) {
config.streamCount = view.getUint8(11);
config.twoChannelStreamCount = view.getUint8(12);
config.channelMapping = [];
for (var c = 0; c < config.channels; c++) {
config.channelMapping.push(view.getUint8(13 + c));
}
}
return config;
};
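// Illustrative usage sketch (hypothetical `dOpsPayload`): the mp4 helpers in
// this package pass the payload of a dOps box straight in; an Ogg 'OpusHead'
// packet would need its 8 byte magic removed first.
//
//   var head = parseOpusHead(dOpsPayload);
//   // -> { version, channels, preSkip, sampleRate, outputGain,
//   //      channelMappingFamily, ... }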
export var setOpusHead = function setOpusHead(config) {
var size = config.channelMappingFamily <= 0 ? 11 : 12 + config.channels;
var view = new DataView(new ArrayBuffer(size));
var littleEndian = config.version !== 0;
view.setUint8(0, config.version);
view.setUint8(1, config.channels);
view.setUint16(2, config.preSkip, littleEndian);
view.setUint32(4, config.sampleRate, littleEndian);
view.setInt16(8, config.outputGain, littleEndian);
view.setUint8(10, config.channelMappingFamily);
if (config.channelMappingFamily > 0) {
view.setUint8(11, config.streamCount);
config.channelMapping.forEach(function (cm, i) {
view.setUint8(12 + i, cm);
});
}
return new Uint8Array(view.buffer);
};
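// Illustrative usage sketch (hypothetical values for a stereo,
// channelMappingFamily 0 stream): serializes the same field layout that
// parseOpusHead reads.
//
//   var payload = setOpusHead({
//     version: 0,
//     channels: 2,
//     preSkip: 312,
//     sampleRate: 48000,
//     outputGain: 0,
//     channelMappingFamily: 0
//   });
//   // -> an 11 byte Uint8Array suitable for a dOps payload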

47
node_modules/@videojs/vhs-utils/es/resolve-url.js generated vendored Normal file
View file

@ -0,0 +1,47 @@
import URLToolkit from 'url-toolkit';
import window from 'global/window';
var DEFAULT_LOCATION = 'http://example.com';
var resolveUrl = function resolveUrl(baseUrl, relativeUrl) {
// return early if we don't need to resolve
if (/^[a-z]+:/i.test(relativeUrl)) {
return relativeUrl;
} // if baseUrl is a data URI, ignore it and resolve everything relative to window.location
if (/^data:/.test(baseUrl)) {
baseUrl = window.location && window.location.href || '';
} // IE11 supports URL but not the URL constructor
// feature detect the behavior we want
var nativeURL = typeof window.URL === 'function';
var protocolLess = /^\/\//.test(baseUrl); // remove location if window.location isn't available (i.e. we're in node)
// and if baseUrl isn't an absolute url
var removeLocation = !window.location && !/\/\//i.test(baseUrl); // if the base URL is relative then combine with the current location
if (nativeURL) {
baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
} else if (!/\/\//i.test(baseUrl)) {
baseUrl = URLToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
}
if (nativeURL) {
var newUrl = new URL(relativeUrl, baseUrl); // if we're a protocol-less url, remove the protocol
// and if we're location-less, remove the location
// otherwise, return the url unmodified
if (removeLocation) {
return newUrl.href.slice(DEFAULT_LOCATION.length);
} else if (protocolLess) {
return newUrl.href.slice(newUrl.protocol.length);
}
return newUrl.href;
}
return URLToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
};
export default resolveUrl;
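// Illustrative usage sketch: standard URL resolution semantics apply.
//
//   resolveUrl('https://example.com/hls/main.m3u8', 'segment-01.ts');
//   // -> 'https://example.com/hls/segment-01.ts'
//   resolveUrl('https://example.com/hls/main.m3u8', '/other/audio.m3u8');
//   // -> 'https://example.com/other/audio.m3u8'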

74
node_modules/@videojs/vhs-utils/es/riff-helpers.js generated vendored Normal file
View file

@ -0,0 +1,74 @@
import { toUint8, stringToBytes, bytesMatch } from './byte-helpers.js';
var CONSTANTS = {
LIST: toUint8([0x4c, 0x49, 0x53, 0x54]),
RIFF: toUint8([0x52, 0x49, 0x46, 0x46]),
WAVE: toUint8([0x57, 0x41, 0x56, 0x45])
};
var normalizePath = function normalizePath(path) {
if (typeof path === 'string') {
return stringToBytes(path);
}
if (typeof path === 'number') {
return path;
}
return path;
};
var normalizePaths = function normalizePaths(paths) {
if (!Array.isArray(paths)) {
return [normalizePath(paths)];
}
return paths.map(function (p) {
return normalizePath(p);
});
};
export var findFourCC = function findFourCC(bytes, paths) {
paths = normalizePaths(paths);
bytes = toUint8(bytes);
var results = [];
if (!paths.length) {
// short-circuit the search for empty paths
return results;
}
var i = 0;
while (i < bytes.length) {
var type = bytes.subarray(i, i + 4);
var size = (bytes[i + 7] << 24 | bytes[i + 6] << 16 | bytes[i + 5] << 8 | bytes[i + 4]) >>> 0; // skip LIST/RIFF and get the actual type
if (bytesMatch(type, CONSTANTS.LIST) || bytesMatch(type, CONSTANTS.RIFF) || bytesMatch(type, CONSTANTS.WAVE)) {
type = bytes.subarray(i + 8, i + 12);
i += 4;
size -= 4;
}
var data = bytes.subarray(i + 8, i + 8 + size);
if (bytesMatch(type, paths[0])) {
if (paths.length === 1) {
// this is the end of the path and we've found the box we were
// looking for
results.push(data);
} else {
// recursively search for the next box along the path
var subresults = findFourCC(data, paths.slice(1));
if (subresults.length) {
results = results.concat(subresults);
}
}
}
i += 8 + data.length;
} // we've finished searching all of bytes
return results;
};
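// Illustrative usage sketch (hypothetical `wavBytes` holding a RIFF/WAVE
// file): the outer RIFF chunk is unwrapped to its form type ('WAVE'), so the
// path names the form followed by the chunks inside it.
//
//   var fmt = findFourCC(wavBytes, ['WAVE', 'fmt '])[0];
//   var data = findFourCC(wavBytes, ['WAVE', 'data'])[0];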

121
node_modules/@videojs/vhs-utils/es/stream.js generated vendored Normal file
View file

@ -0,0 +1,121 @@
/**
* @file stream.js
*/
/**
 * A lightweight readable stream implementation that handles event dispatching.
*
* @class Stream
*/
var Stream = /*#__PURE__*/function () {
function Stream() {
this.listeners = {};
}
/**
* Add a listener for a specified event type.
*
* @param {string} type the event name
* @param {Function} listener the callback to be invoked when an event of
* the specified type occurs
*/
var _proto = Stream.prototype;
_proto.on = function on(type, listener) {
if (!this.listeners[type]) {
this.listeners[type] = [];
}
this.listeners[type].push(listener);
}
/**
* Remove a listener for a specified event type.
*
* @param {string} type the event name
* @param {Function} listener a function previously registered for this
* type of event through `on`
* @return {boolean} if we could turn it off or not
*/
;
_proto.off = function off(type, listener) {
if (!this.listeners[type]) {
return false;
}
var index = this.listeners[type].indexOf(listener); // TODO: which is better?
// In Video.js we slice listener functions
// on trigger so that it does not mess up the order
// while we loop through.
//
// Here we slice on off so that the loop in trigger
// can continue using its old reference to loop without
// messing up the order.
this.listeners[type] = this.listeners[type].slice(0);
if (index > -1) {
  this.listeners[type].splice(index, 1);
}
return index > -1;
}
/**
* Trigger an event of the specified type on this stream. Any additional
* arguments to this function are passed as parameters to event listeners.
*
* @param {string} type the event name
*/
;
_proto.trigger = function trigger(type) {
var callbacks = this.listeners[type];
if (!callbacks) {
return;
} // Slicing the arguments on every invocation of this method
// can add a significant amount of overhead. Avoid the
// intermediate object creation for the common case of a
// single callback argument
if (arguments.length === 2) {
var length = callbacks.length;
for (var i = 0; i < length; ++i) {
callbacks[i].call(this, arguments[1]);
}
} else {
var args = Array.prototype.slice.call(arguments, 1);
var _length = callbacks.length;
for (var _i = 0; _i < _length; ++_i) {
callbacks[_i].apply(this, args);
}
}
}
/**
* Destroys the stream and cleans up.
*/
;
_proto.dispose = function dispose() {
this.listeners = {};
}
/**
* Forwards all `data` events on this stream to the destination stream. The
* destination stream should provide a method `push` to receive the data
* events as they arrive.
*
* @param {Stream} destination the stream that will receive all `data` events
* @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
*/
;
_proto.pipe = function pipe(destination) {
this.on('data', function (data) {
destination.push(data);
});
};
return Stream;
}();
export { Stream as default };
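// Illustrative usage sketch:
//
//   var stream = new Stream();
//   stream.on('data', function (chunk) {
//     console.log('got', chunk);
//   });
//   stream.trigger('data', new Uint8Array([1, 2, 3]));
//   stream.dispose();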