First release

This commit is contained in:
Owen Quinlan 2021-07-02 19:29:34 +10:00
commit fa6c85266e
2339 changed files with 761050 additions and 0 deletions

272
node_modules/@videojs/vhs-utils/src/byte-helpers.js generated vendored Normal file
View file

@ -0,0 +1,272 @@
import window from 'global/window';
// const log2 = Math.log2 ? Math.log2 : (x) => (Math.log(x) / Math.log(2));
// Build a string consisting of `str` repeated `len` times.
// The previous `while (len--)` loop never terminated when it was handed a
// negative count; a counted for-loop returns '' for len <= 0 instead.
const repeat = function(str, len) {
  let acc = '';

  for (let i = 0; i < len; i++) {
    acc += str;
  }

  return acc;
};
// Number of bits required to represent `x`. Implemented via the binary
// string length because BigInt values do not support Math.log2.
export const countBits = function(x) {
  return x.toString(2).length;
};

// Number of whole bytes required to represent `x`.
export const countBytes = function(x) {
  return Math.ceil(countBits(x) / 8);
};

// Left-pad the stringified `b` with `str` to a total width of `len`.
export const padStart = function(b, len, str = ' ') {
  const padded = repeat(str, len) + b.toString();

  return padded.slice(-len);
};

// Whether `obj` is any typed-array view (Uint8Array, Float32Array, ...).
export const isTypedArray = function(obj) {
  return ArrayBuffer.isView(obj);
};
// Coerce arrays, typed arrays, ArrayBuffers, and plain numbers into a
// Uint8Array view; anything else becomes an empty Uint8Array.
export const toUint8 = function(bytes) {
  if (bytes instanceof Uint8Array) {
    return bytes;
  }

  const isBufferLike = Array.isArray(bytes) || isTypedArray(bytes) || bytes instanceof ArrayBuffer;

  if (!isBufferLike) {
    // a finite number becomes a one-entry array; anything else
    // (including NaN) collapses to an empty Uint8Array
    // eslint-disable-next-line
    bytes = (typeof bytes === 'number' && bytes === bytes) ? [bytes] : 0;
  }

  return new Uint8Array(
    (bytes && bytes.buffer) || bytes,
    (bytes && bytes.byteOffset) || 0,
    (bytes && bytes.byteLength) || 0
  );
};
// Render bytes as a lowercase hex string, two characters per byte.
export const toHexString = function(bytes) {
  const view = toUint8(bytes);
  const pieces = [];

  for (let i = 0; i < view.length; i++) {
    pieces.push(padStart(view[i].toString(16), 2, '0'));
  }

  return pieces.join('');
};

// Render bytes as a binary string, eight characters per byte.
export const toBinaryString = function(bytes) {
  const view = toUint8(bytes);
  const pieces = [];

  for (let i = 0; i < view.length; i++) {
    pieces.push(padStart(view[i].toString(2), 8, '0'));
  }

  return pieces.join('');
};
// Fall back to Number where BigInt is unavailable (e.g. IE 11); the math
// below then loses precision above 2^53 but keeps working.
const BigInt = window.BigInt || Number;
// One multiplier per byte position (256^0 through 256^8), used when
// converting between byte arrays and numbers.
const BYTE_TABLE = [
BigInt('0x1'),
BigInt('0x100'),
BigInt('0x10000'),
BigInt('0x1000000'),
BigInt('0x100000000'),
BigInt('0x10000000000'),
BigInt('0x1000000000000'),
BigInt('0x100000000000000'),
BigInt('0x10000000000000000')
];
// Detect platform byte order by writing a 16-bit value and checking
// which of its bytes lands first in memory.
export const ENDIANNESS = (function() {
  const probe = new Uint16Array([0xFFCC]);
  const firstByte = new Uint8Array(probe.buffer, probe.byteOffset, probe.byteLength)[0];

  if (firstByte === 0xFF) {
    return 'big';
  }

  return firstByte === 0xCC ? 'little' : 'unknown';
})();

export const IS_BIG_ENDIAN = ENDIANNESS === 'big';
export const IS_LITTLE_ENDIAN = ENDIANNESS === 'little';
/**
 * Convert a byte array into its numeric value.
 *
 * @param {Uint8Array|Array|number} bytes
 *        Bytes to convert (run through toUint8 first).
 * @param {Object} [options]
 * @param {boolean} [options.signed=false]
 *        Interpret the bytes as a signed value.
 * @param {boolean} [options.le=false]
 *        Treat the bytes as little endian rather than big endian.
 * @return {number}
 *         The numeric value; precision is limited to 2^53 on platforms
 *         without BigInt.
 */
export const bytesToNumber = function(bytes, {signed = false, le = false} = {}) {
bytes = toUint8(bytes);
// little endian accumulates left to right, big endian right to left
const fn = le ? 'reduce' : 'reduceRight';
// ie 11 has no reduce/reduceRight on typed arrays
const obj = bytes[fn] ? bytes[fn] : Array.prototype[fn];
let number = obj.call(bytes, function(total, byte, i) {
const exponent = le ? i : Math.abs(i + 1 - bytes.length);
return total + (BigInt(byte) * BYTE_TABLE[exponent]);
}, BigInt(0));
// fold values above the signed midpoint back into the negative range
if (signed) {
const max = BYTE_TABLE[bytes.length] / BigInt(2) - BigInt(1);
number = BigInt(number);
if (number > max) {
number -= max;
number -= max;
number -= BigInt(2);
}
}
return Number(number);
};
/**
 * Convert a number (or BigInt) into the fewest bytes needed to hold it.
 *
 * @param {number|bigint} number
 *        Value to convert; NaN and non-numbers are treated as 0.
 * @param {Object} [options]
 * @param {boolean} [options.le=false]
 *        Output the bytes little endian rather than big endian.
 * @return {Uint8Array}
 *         The value as bytes.
 */
export const numberToBytes = function(number, {le = false} = {}) {
// eslint-disable-next-line
if ((typeof number !== 'bigint' && typeof number !== 'number') || (typeof number === 'number' && number !== number)) {
number = 0;
}
number = BigInt(number);
const byteCount = countBytes(number);
const bytes = new Uint8Array(new ArrayBuffer(byteCount));
for (let i = 0; i < byteCount; i++) {
// byte positions are counted from the opposite end for big endian
const byteIndex = le ? i : Math.abs(i + 1 - bytes.length);
bytes[byteIndex] = Number((number / BYTE_TABLE[i]) & BigInt(0xFF));
// adjust the byte for negative inputs (invert, then offset)
if (number < 0) {
bytes[byteIndex] = Math.abs(~bytes[byteIndex]);
bytes[byteIndex] -= i === 0 ? 1 : 2;
}
}
return bytes;
};
// Decode bytes into a string, interpreting them as UTF-8 when possible.
export const bytesToString = (bytes) => {
  if (!bytes) {
    return '';
  }

  // TODO: should toUint8 handle cases where we only have 8 bytes
  // but report more since this is a Uint16+ Array?
  const byteArray = Array.prototype.slice.call(bytes);
  const raw = String.fromCharCode.apply(null, toUint8(byteArray));

  try {
    return decodeURIComponent(escape(raw));
  } catch (e) {
    // if decodeURIComponent/escape fails, we are dealing with partial
    // or full non string data. Just return the potentially garbled string.
  }

  return raw;
};
// Encode a string (or anything with a toString) into a Uint8Array of
// its bytes.
export const stringToBytes = (string, stringIsBytes) => {
  if (typeof string !== 'string' && string && typeof string.toString === 'function') {
    string = string.toString();
  }

  if (typeof string !== 'string') {
    return new Uint8Array();
  }

  // Unless the caller says the string already holds raw bytes, UTF-8
  // encode it so multi-byte characters become individual bytes.
  const encoded = stringIsBytes ? string : unescape(encodeURIComponent(string));
  const bytes = new Uint8Array(encoded.length);

  for (let i = 0; i < encoded.length; i++) {
    bytes[i] = encoded.charCodeAt(i);
  }

  return bytes;
};
/**
 * Concatenate any number of byte sources into a single Uint8Array.
 * Strings and empty/zero-length inputs are skipped. (Also drops the
 * unused index parameter the old reduce callback declared.)
 *
 * @param {...(Uint8Array|Array|ArrayBuffer)} buffers
 *        Byte sources to join, in order.
 * @return {Uint8Array}
 *         One buffer containing every input back to back.
 */
export const concatTypedArrays = (...buffers) => {
  buffers = buffers.filter((b) => b && (b.byteLength || b.length) && typeof b !== 'string');

  if (buffers.length <= 1) {
    // zero inputs yield an empty Uint8Array; a single input is
    // returned directly (converted to Uint8Array)
    return toUint8(buffers[0]);
  }

  const totalLen = buffers.reduce((total, buf) => total + (buf.byteLength || buf.length), 0);
  const tempBuffer = new Uint8Array(totalLen);
  let offset = 0;

  buffers.forEach(function(buf) {
    buf = toUint8(buf);
    tempBuffer.set(buf, offset);
    offset += buf.byteLength;
  });

  return tempBuffer;
};
/**
 * Check if the bytes "b" are contained within bytes "a".
 *
 * @param {Uint8Array|Array} a
 *        Bytes to check in
 *
 * @param {Uint8Array|Array} b
 *        Bytes to check for
 *
 * @param {Object} options
 *        options
 *
 * @param {number} [options.offset=0]
 *        offset to use when looking at bytes in a
 *
 * @param {Array|Uint8Array} [options.mask=[]]
 *        mask to apply (bitwise AND) to bytes of a before comparison.
 *
 * @return {boolean}
 *         If all bytes in b are inside of a, taking into account
 *         bit masks. (Returns the falsy 0, not false, when b is empty.)
 */
export const bytesMatch = (a, b, {offset = 0, mask = []} = {}) => {
a = toUint8(a);
b = toUint8(b);
// ie 11 does not support uint8 every
const fn = b.every ? b.every : Array.prototype.every;
return b.length &&
a.length - offset >= b.length &&
// ie 11 doesn't support every on uin8
fn.call(b, (bByte, i) => {
// NOTE(review): a mask byte of 0x00 is treated as "no mask" by this
// truthiness check — confirm callers never need an all-zero mask.
const aByte = (mask[i] ? (mask[i] & a[offset + i]) : a[offset + i]);
return bByte === aByte;
});
};
// Uint8Array#slice with an Array.prototype fallback for environments
// (ie 11) that lack it.
export const sliceBytes = function(src, start, end) {
  if (!Uint8Array.prototype.slice) {
    return new Uint8Array(Array.prototype.slice.call(src, start, end));
  }

  return Uint8Array.prototype.slice.call(src, start, end);
};
// Reverse bytes in place, using Array.prototype.reverse when the source
// has no reverse method of its own.
export const reverseBytes = function(src) {
  return src.reverse ? src.reverse() : Array.prototype.reverse.call(src);
};

106
node_modules/@videojs/vhs-utils/src/codec-helpers.js generated vendored Normal file
View file

@ -0,0 +1,106 @@
import {padStart, toHexString, toBinaryString} from './byte-helpers.js';
// https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-syntax
// https://developer.mozilla.org/en-US/docs/Web/Media/Formats/codecs_parameter#AV1
/**
 * Build the codecs-parameter string for an AV1 track from its
 * AV1CodecConfigurationRecord (av1C) bytes.
 *
 * @param {Uint8Array} bytes
 *        av1C payload; bit fields are read from bytes 1 and 2.
 * @return {string}
 *         String of the form
 *         `<profile>.<level><tier>.<bitDepth>.<mono>.<subsampling>`.
 */
export const getAv1Codec = function(bytes) {
let codec = '';
const profile = bytes[1] >>> 3;
const level = bytes[1] & 0x1F;
const tier = bytes[2] >>> 7;
const highBitDepth = (bytes[2] & 0x40) >> 6;
const twelveBit = (bytes[2] & 0x20) >> 5;
const monochrome = (bytes[2] & 0x10) >> 4;
const chromaSubsamplingX = (bytes[2] & 0x08) >> 3;
const chromaSubsamplingY = (bytes[2] & 0x04) >> 2;
const chromaSamplePosition = bytes[2] & 0x03;
codec += `${profile}.${padStart(level, 2, '0')}`;
// tier 0 is rendered M, tier 1 is rendered H
if (tier === 0) {
codec += 'M';
} else if (tier === 1) {
codec += 'H';
}
// profile 2 with the high-bit-depth flag may be 10 or 12 bit;
// otherwise the flag alone distinguishes 10 from 8 bit
let bitDepth;
if (profile === 2 && highBitDepth) {
bitDepth = twelveBit ? 12 : 10;
} else {
bitDepth = highBitDepth ? 10 : 8;
}
codec += `.${padStart(bitDepth, 2, '0')}`;
// TODO: can we parse color range??
codec += `.${monochrome}`;
codec += `.${chromaSubsamplingX}${chromaSubsamplingY}${chromaSamplePosition}`;
return codec;
};
// Build the hex portion of an avc1 codec string from an
// AVCDecoderConfigurationRecord: profile, constraint flags, level.
export const getAvcCodec = function(bytes) {
  const profile = toHexString(bytes[1]);
  const constraints = toHexString(bytes[2] & 0xFC);
  const level = toHexString(bytes[3]);

  return `${profile}${constraints}${level}`;
};
/**
 * Build the codecs-parameter string for an HEVC track from its
 * decoder configuration record (hvcC) bytes.
 *
 * @param {Uint8Array} bytes
 *        hvcC payload starting at the configuration version byte.
 * @return {string}
 *         The string appended after 'hvc1.'/'hev1.'.
 */
export const getHvcCodec = function(bytes) {
let codec = '';
// byte 1 packs profile space (2 bits), tier flag (1 bit) and
// profile id (5 bits)
const profileSpace = bytes[1] >> 6;
const profileId = bytes[1] & 0x1F;
const tierFlag = (bytes[1] & 0x20) >> 5;
const profileCompat = bytes.subarray(2, 6);
const constraintIds = bytes.subarray(6, 12);
const levelId = bytes[12];
// profile space 1-3 maps to a leading A/B/C; 0 adds nothing
if (profileSpace === 1) {
codec += 'A';
} else if (profileSpace === 2) {
codec += 'B';
} else if (profileSpace === 3) {
codec += 'C';
}
codec += `${profileId}.`;
// ffmpeg does this in big endian
let profileCompatVal = parseInt(toBinaryString(profileCompat).split('').reverse().join(''), 2);
// apple does this in little endian...
if (profileCompatVal > 255) {
profileCompatVal = parseInt(toBinaryString(profileCompat), 2);
}
codec += `${profileCompatVal.toString(16)}.`;
// tier 0 renders as L, tier 1 as H
if (tierFlag === 0) {
codec += 'L';
} else {
codec += 'H';
}
codec += levelId;
// trailing constraint bytes, hex encoded; zero bytes are omitted
let constraints = '';
for (let i = 0; i < constraintIds.length; i++) {
const v = constraintIds[i];
if (v) {
if (constraints) {
constraints += '.';
}
constraints += v.toString(16);
}
}
if (constraints) {
codec += `.${constraints}`;
}
return codec;
};

225
node_modules/@videojs/vhs-utils/src/codecs.js generated vendored Normal file
View file

@ -0,0 +1,225 @@
import window from 'global/window';
// Regexes used both to map codecs to likely containers and to classify
// a codec's media type. All tests run against lower-cased codec names.
const regexs = {
// to determine mime types
mp4: /^(av0?1|avc0?[1234]|vp0?9|flac|opus|mp3|mp4a|mp4v|stpp.ttml.im1t)/,
webm: /^(vp0?[89]|av0?1|opus|vorbis)/,
ogg: /^(vp0?[89]|theora|flac|opus|vorbis)/,
// to determine if a codec is audio or video
video: /^(av0?1|avc0?[1234]|vp0?[89]|hvc1|hev1|theora|mp4v)/,
audio: /^(mp4a|flac|vorbis|opus|ac-[34]|ec-3|alac|mp3|speex|aac)/,
text: /^(stpp.ttml.im1t)/,
// mux.js support regex
muxerVideo: /^(avc0?1)/,
muxerAudio: /^(mp4a)/,
// match nothing as muxer does not support text right now.
// there cannot ever be a character before the start of a string,
// so this matches nothing.
muxerText: /a^/
};
// media type names, lower case for regex lookup and capitalized for the
// muxer regex keys above
const mediaTypes = ['video', 'audio', 'text'];
const upperMediaTypes = ['Video', 'Audio', 'Text'];
/**
 * Replace the old apple-style `avc1.<dd>.<dd>` codec string with the
 * standard `avc1.<hhhhhh>` form.
 *
 * @param {string} codec
 *        Codec string to translate
 * @return {string}
 *         The translated codec string
 */
export const translateLegacyCodec = function(codec) {
  if (!codec) {
    return codec;
  }

  return codec.replace(/avc1\.(\d+)\.(\d+)/i, function(orig, profile, avcLevel) {
    // decimal -> zero-padded two-digit hex
    const toHexPair = (value) => ('00' + Number(value).toString(16)).slice(-2);

    return 'avc1.' + toHexPair(profile) + '00' + toHexPair(avcLevel);
  });
};
/**
 * Translate every codec in an array from the old apple-style
 * `avc1.<dd>.<dd>` form to the standard `avc1.<hhhhhh>` form.
 *
 * @param {string[]} codecs
 *        An array of codec strings to translate
 * @return {string[]}
 *         The translated array of codec strings
 */
export const translateLegacyCodecs = function(codecs) {
  return codecs.map((codec) => translateLegacyCodec(codec));
};
/**
 * Replace apple-style `avc1.<dd>.<dd>` codecs inside a codec string with
 * the standard `avc1.<hhhhhh>` form.
 *
 * @param {string} codecString
 *        The codec string
 * @return {string}
 *         The codec string with old apple-style codecs replaced
 *
 * @private
 */
export const mapLegacyAvcCodecs = function(codecString) {
  return codecString.replace(/avc1\.(\d+)\.(\d+)/i, function(match) {
    return translateLegacyCodecs([match])[0];
  });
};
/**
 * A single parsed codec entry.
 *
 * @typedef {Object} ParsedCodecInfo
 * @property {string} type
 *           Codec name with the caller's casing preserved (e.g. 'avc1')
 * @property {string} details
 *           Remainder of the codec string after the name (e.g. '.4d400d')
 * @property {string} mediaType
 *           'video', 'audio', 'text' or 'unknown'
 */
/**
 * Parses a codec string into one entry per comma-separated codec.
 *
 * NOTE(review): older docs described a single object with a codecCount;
 * the implementation returns an array of entries.
 *
 * @param {string} [codecString]
 *        The codec string to parse
 * @return {ParsedCodecInfo[]}
 *         Parsed codec info, one entry per codec
 */
export const parseCodecs = function(codecString = '') {
const codecs = codecString.split(',');
const result = [];
codecs.forEach(function(codec) {
codec = codec.trim();
let codecType;
// try each media type's regex against the codec
mediaTypes.forEach(function(name) {
const match = regexs[name].exec(codec.toLowerCase());
if (!match || match.length <= 1) {
return;
}
codecType = name;
// maintain codec case
const type = codec.substring(0, match[1].length);
const details = codec.replace(type, '');
result.push({type, details, mediaType: name});
});
// nothing matched; record the whole codec as unknown
if (!codecType) {
result.push({type: codec, details: '', mediaType: 'unknown'});
}
});
return result;
};
/**
 * Returns parsed codec info for the default alternate audio playlist if
 * there is a default alternate audio playlist for the provided audio group.
 *
 * @param {Object} master
 *        The master playlist
 * @param {string} audioGroupId
 *        ID of the audio group for which to find the default codec info
 * @return {ParsedCodecInfo[]|null}
 *         Result of parseCodecs for the default playlist's CODECS
 *         attribute, or null when no default playlist can be found
 */
export const codecsFromDefault = (master, audioGroupId) => {
if (!master.mediaGroups.AUDIO || !audioGroupId) {
return null;
}
const audioGroup = master.mediaGroups.AUDIO[audioGroupId];
if (!audioGroup) {
return null;
}
for (const name in audioGroup) {
const audioType = audioGroup[name];
if (audioType.default && audioType.playlists) {
// codec should be the same for all playlists within the audio type
return parseCodecs(audioType.playlists[0].attributes.CODECS);
}
}
return null;
};
// Quick media-type probes; each trims and lower-cases the codec before
// testing it against the matching regex above.
export const isVideoCodec = (codec = '') => regexs.video.test(codec.trim().toLowerCase());
export const isAudioCodec = (codec = '') => regexs.audio.test(codec.trim().toLowerCase());
export const isTextCodec = (codec = '') => regexs.text.test(codec.trim().toLowerCase());
// Build a full mime type (type/container;codecs="...") for a codec
// string, or undefined for non-string input.
export const getMimeForCodec = (codecString) => {
  if (!codecString || typeof codecString !== 'string') {
    return;
  }

  const codecs = codecString
    .toLowerCase()
    .split(',')
    .map((c) => translateLegacyCodec(c.trim()));

  // default to video; a lone audio or text codec changes the type
  let type = 'video';

  if (codecs.length === 1) {
    if (isAudioCodec(codecs[0])) {
      type = 'audio';
    } else if (isTextCodec(codecs[0])) {
      // text uses application/<container> for now
      type = 'application';
    }
  }

  // pick the first container every codec fits into, defaulting to mp4
  let container = 'mp4';

  if (codecs.every((c) => regexs.mp4.test(c))) {
    container = 'mp4';
  } else if (codecs.every((c) => regexs.webm.test(c))) {
    container = 'webm';
  } else if (codecs.every((c) => regexs.ogg.test(c))) {
    container = 'ogg';
  }

  return `${type}/${container};codecs="${codecString}"`;
};
// True when MediaSource reports support for the codec string's mime
// type; false otherwise, including when MSE is unavailable.
export const browserSupportsCodec = (codecString = '') => window.MediaSource &&
window.MediaSource.isTypeSupported &&
window.MediaSource.isTypeSupported(getMimeForCodec(codecString)) || false;
// True when every codec in the comma-separated list matches one of the
// muxer support regexes above.
export const muxerSupportsCodec = (codecString = '') => codecString.toLowerCase().split(',').every((codec) => {
codec = codec.trim();
// any match is supported.
for (let i = 0; i < upperMediaTypes.length; i++) {
const type = upperMediaTypes[i];
if (regexs[`muxer${type}`].test(codec)) {
return true;
}
}
return false;
});
// fallback codec strings used when none are specified
export const DEFAULT_AUDIO_CODEC = 'mp4a.40.2';
export const DEFAULT_VIDEO_CODEC = 'avc1.4d400d';

172
node_modules/@videojs/vhs-utils/src/containers.js generated vendored Normal file
View file

@ -0,0 +1,172 @@
import {toUint8, bytesMatch} from './byte-helpers.js';
import {findBox} from './mp4-helpers.js';
import {findEbml, EBML_TAGS} from './ebml-helpers.js';
import {getId3Offset} from './id3-helpers.js';
import {findH264Nal, findH265Nal} from './nal-helpers.js';
// Magic-byte signatures used to sniff container formats; each entry is
// the literal described by the comment above it, as a Uint8Array.
const CONSTANTS = {
// "webm" string literal in hex
'webm': toUint8([0x77, 0x65, 0x62, 0x6d]),
// "matroska" string literal in hex
'matroska': toUint8([0x6d, 0x61, 0x74, 0x72, 0x6f, 0x73, 0x6b, 0x61]),
// "fLaC" string literal in hex
'flac': toUint8([0x66, 0x4c, 0x61, 0x43]),
// "OggS" string literal in hex
'ogg': toUint8([0x4f, 0x67, 0x67, 0x53]),
// ac-3 sync byte, also works for ec-3 as that is simply a codec
// of ac-3
'ac3': toUint8([0x0b, 0x77]),
// "RIFF" string literal in hex used for wav and avi
'riff': toUint8([0x52, 0x49, 0x46, 0x46]),
// "AVI" string literal in hex
'avi': toUint8([0x41, 0x56, 0x49]),
// "WAVE" string literal in hex
'wav': toUint8([0x57, 0x41, 0x56, 0x45]),
// "ftyp3g" string literal in hex
'3gp': toUint8([0x66, 0x74, 0x79, 0x70, 0x33, 0x67]),
// "ftyp" string literal in hex
'mp4': toUint8([0x66, 0x74, 0x79, 0x70]),
// "styp" string literal in hex
'fmp4': toUint8([0x73, 0x74, 0x79, 0x70]),
// "ftypqt" string literal in hex
'mov': toUint8([0x66, 0x74, 0x79, 0x70, 0x71, 0x74])
};
// One detector per container/codec. Each returns a truthy value when
// `bytes` look like that format; they are wrapped below so every
// detector receives a Uint8Array.
const _isLikely = {
aac(bytes) {
// masked sync-byte check after any leading ID3 tag
const offset = getId3Offset(bytes);
return bytesMatch(bytes, [0xFF, 0x10], {offset, mask: [0xFF, 0x16]});
},
mp3(bytes) {
// masked sync-byte check after any leading ID3 tag
const offset = getId3Offset(bytes);
return bytesMatch(bytes, [0xFF, 0x02], {offset, mask: [0xFF, 0x06]});
},
webm(bytes) {
const docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0];
// check if DocType EBML tag is webm
return bytesMatch(docType, CONSTANTS.webm);
},
mkv(bytes) {
const docType = findEbml(bytes, [EBML_TAGS.EBML, EBML_TAGS.DocType])[0];
// check if DocType EBML tag is matroska
return bytesMatch(docType, CONSTANTS.matroska);
},
mp4(bytes) {
// 3gp and mov are more specific ftyp brands, so rule them out first
return !_isLikely['3gp'](bytes) && !_isLikely.mov(bytes) &&
(bytesMatch(bytes, CONSTANTS.mp4, {offset: 4}) ||
bytesMatch(bytes, CONSTANTS.fmp4, {offset: 4}));
},
mov(bytes) {
return bytesMatch(bytes, CONSTANTS.mov, {offset: 4});
},
'3gp'(bytes) {
return bytesMatch(bytes, CONSTANTS['3gp'], {offset: 4});
},
ac3(bytes) {
// sync bytes after any leading ID3 tag
const offset = getId3Offset(bytes);
return bytesMatch(bytes, CONSTANTS.ac3, {offset});
},
ts(bytes) {
// too little data for two 188-byte packets; settle for one sync byte
if (bytes.length < 189 && bytes.length >= 1) {
return bytes[0] === 0x47;
}
let i = 0;
// check the first 376 bytes for two matching sync bytes
while (i + 188 < bytes.length && i < 188) {
if (bytes[i] === 0x47 && bytes[i + 188] === 0x47) {
return true;
}
i += 1;
}
return false;
},
flac(bytes) {
// "fLaC" marker after any leading ID3 tag
const offset = getId3Offset(bytes);
return bytesMatch(bytes, CONSTANTS.flac, {offset});
},
ogg(bytes) {
return bytesMatch(bytes, CONSTANTS.ogg);
},
avi(bytes) {
// RIFF header with an AVI form type
return bytesMatch(bytes, CONSTANTS.riff) &&
bytesMatch(bytes, CONSTANTS.avi, {offset: 8});
},
wav(bytes) {
// RIFF header with a WAVE form type
return bytesMatch(bytes, CONSTANTS.riff) &&
bytesMatch(bytes, CONSTANTS.wav, {offset: 8});
},
'h264'(bytes) {
// find seq_parameter_set_rbsp
return findH264Nal(bytes, 7, 3).length;
},
'h265'(bytes) {
// find video_parameter_set_rbsp or seq_parameter_set_rbsp
return findH265Nal(bytes, [32, 33], 3).length;
}
};
// get all the isLikely functions
// but make sure 'ts' is above h264 and h265
// but below everything else as it is the least specific
const isLikelyTypes = Object.keys(_isLikely)
// remove ts, h264, h265
.filter((t) => t !== 'ts' && t !== 'h264' && t !== 'h265')
// add it back to the bottom
.concat(['ts', 'h264', 'h265']);
// wrap every detector so it always receives uint8 data
isLikelyTypes.forEach(function(type) {
const isLikelyFn = _isLikely[type];
_isLikely[type] = (bytes) => isLikelyFn(toUint8(bytes));
});
// export after wrapping
export const isLikely = _isLikely;
// A useful list of file signatures can be found here
// https://en.wikipedia.org/wiki/List_of_file_signatures
// Run the detectors in specificity order and return the first matching
// container name, or '' when nothing matches.
export const detectContainerForBytes = (bytes) => {
  bytes = toUint8(bytes);

  for (const type of isLikelyTypes) {
    if (isLikely[type](bytes)) {
      return type;
    }
  }

  return '';
};
// fmp4 is not a container format of its own; detect a fragmented mp4
// media segment by the presence of a moof box.
export const isLikelyFmp4MediaSegment = (bytes) => {
return findBox(bytes, ['moof']).length > 0;
};

View file

@ -0,0 +1,13 @@
import window from 'global/window';
// Prefer the browser's atob; in node fall back to Buffer base64 decoding.
const atob = (s) => window.atob ? window.atob(s) : Buffer.from(s, 'base64').toString('binary');

/**
 * Decode a base64-encoded string into a Uint8Array of its raw bytes.
 *
 * @param {string} b64Text base64-encoded text
 * @return {Uint8Array} the decoded bytes
 */
export default function decodeB64ToUint8Array(b64Text) {
  const decoded = atob(b64Text);
  const bytes = new Uint8Array(decoded.length);

  for (let i = 0; i < decoded.length; i++) {
    bytes[i] = decoded.charCodeAt(i);
  }

  return bytes;
}

503
node_modules/@videojs/vhs-utils/src/ebml-helpers.js generated vendored Normal file
View file

@ -0,0 +1,503 @@
import {
toUint8,
bytesToNumber,
bytesMatch,
bytesToString,
numberToBytes,
padStart
} from './byte-helpers';
import {getAvcCodec, getHvcCodec, getAv1Codec} from './codec-helpers.js';
// relevant specs for this parser:
// https://matroska-org.github.io/libebml/specs.html
// https://www.matroska.org/technical/elements.html
// https://www.webmproject.org/docs/container/
// EBML element ids (as raw bytes) for the matroska/webm elements this
// parser reads. See https://www.matroska.org/technical/elements.html
export const EBML_TAGS = {
EBML: toUint8([0x1A, 0x45, 0xDF, 0xA3]),
DocType: toUint8([0x42, 0x82]),
Segment: toUint8([0x18, 0x53, 0x80, 0x67]),
SegmentInfo: toUint8([0x15, 0x49, 0xA9, 0x66]),
Tracks: toUint8([0x16, 0x54, 0xAE, 0x6B]),
Track: toUint8([0xAE]),
TrackNumber: toUint8([0xd7]),
DefaultDuration: toUint8([0x23, 0xe3, 0x83]),
TrackEntry: toUint8([0xAE]),
TrackType: toUint8([0x83]),
FlagDefault: toUint8([0x88]),
CodecID: toUint8([0x86]),
CodecPrivate: toUint8([0x63, 0xA2]),
VideoTrack: toUint8([0xe0]),
AudioTrack: toUint8([0xe1]),
// Not used yet, but will be used for live webm/mkv
// see https://www.matroska.org/technical/basics.html#block-structure
// see https://www.matroska.org/technical/basics.html#simpleblock-structure
Cluster: toUint8([0x1F, 0x43, 0xB6, 0x75]),
Timestamp: toUint8([0xE7]),
TimestampScale: toUint8([0x2A, 0xD7, 0xB1]),
BlockGroup: toUint8([0xA0]),
BlockDuration: toUint8([0x9B]),
Block: toUint8([0xA1]),
SimpleBlock: toUint8([0xA3])
};
/**
 * One entry per possible leading-bit position in an ebml vint's first
 * byte. A vint's length is one based: 1 plus the number of zero bits
 * before the first one bit. The table is also used to xor the
 * length-marker bit out of a value.
 */
const LENGTH_TABLE = [
  0b10000000,
  0b01000000,
  0b00100000,
  0b00010000,
  0b00001000,
  0b00000100,
  0b00000010,
  0b00000001
];

// vint length = 1 + count of leading zero bits in the first byte.
const getLength = function(byte) {
  let len = 1;

  for (const mask of LENGTH_TABLE) {
    if (byte & mask) {
      break;
    }

    len++;
  }

  return len;
};
// length in ebml is stored in the first 4 to 8 bits
// of the first byte. 4 for the id length and 8 for the
// data size length. Length is measured by converting the number to binary
// then 1 + the number of zeros before a 1 is encountered starting
// from the left.
/**
 * Read a variable-length integer (vint) from `bytes` at `offset`.
 *
 * @param {Uint8Array} bytes
 *        Source data.
 * @param {number} offset
 *        Index of the vint's first byte.
 * @param {boolean} [removeLength=true]
 *        Strip the length-marker bit before computing the value
 *        (ids keep it, data sizes do not).
 * @param {boolean} [signed=false]
 *        Interpret the value as signed (used for ebml lacing).
 * @return {{length: number, value: number, bytes: (Uint8Array|Array)}}
 */
const getvint = function(bytes, offset, removeLength = true, signed = false) {
const length = getLength(bytes[offset]);
let valueBytes = bytes.subarray(offset, offset + length);
// NOTE that we do **not** subarray here because we need to copy these bytes
// as they will be modified below to remove the dataSizeLen bits and we do not
// want to modify the original data. normally we could just call slice on
// uint8array but ie 11 does not support that...
if (removeLength) {
valueBytes = Array.prototype.slice.call(bytes, offset, offset + length);
valueBytes[0] ^= LENGTH_TABLE[length - 1];
}
return {
length,
value: bytesToNumber(valueBytes, {signed}),
bytes: valueBytes
};
};
// Normalize one path component to bytes: strings are split into
// two-character chunks and recursed, numbers run through numberToBytes,
// and byte arrays pass through untouched.
// NOTE(review): a 1-2 character string re-enters the string branch
// unchanged and would recurse forever — string paths appear unused by
// this file; verify before relying on them.
const normalizePath = function(path) {
if (typeof path === 'string') {
return path.match(/.{1,2}/g).map((p) => normalizePath(p));
}
if (typeof path === 'number') {
return numberToBytes(path);
}
return path;
};
// Ensure `paths` is an array of normalized path components.
const normalizePaths = function(paths) {
if (!Array.isArray(paths)) {
return [normalizePath(paths)];
}
return paths.map((p) => normalizePath(p));
};
// Recover the size of an element whose declared data size is "unknown"
// (live streams): walk forward from `offset`, element by element, until
// the next occurrence of tag `id` or the end of `bytes`.
const getInfinityDataSize = (id, bytes, offset) => {
if (offset >= bytes.length) {
return bytes.length;
}
const innerid = getvint(bytes, offset, false);
if (bytesMatch(id.bytes, innerid.bytes)) {
return offset;
}
const dataHeader = getvint(bytes, offset + innerid.length);
return getInfinityDataSize(id, bytes, offset + dataHeader.length + dataHeader.value + innerid.length);
};
/**
 * Notes on the EBML format.
 *
 * EBML uses "vint" tags. Every vint tag contains
 * two parts
 *
 * 1. The length from the first byte. You get this by
 *    converting the byte to binary and counting the zeros
 *    before a 1. Then you add 1 to that. Examples
 *    00011111 = length 4 because there are 3 zeros before a 1.
 *    00100000 = length 3 because there are 2 zeros before a 1.
 *    00000011 = length 7 because there are 6 zeros before a 1.
 *
 * 2. The bits used for length are removed from the first byte
 *    Then all the bytes are merged into a value. NOTE: this
 *    is not the case for id ebml tags as their id includes
 *    length bits.
 *
 */
/**
 * Find the data payloads of every element matching the given nested
 * tag path, searching recursively one path level at a time.
 *
 * @param {Uint8Array|Array} bytes
 *        ebml data to search.
 * @param {Array} paths
 *        Tag ids (bytes, numbers, or hex strings) from outermost to
 *        innermost element.
 * @return {Uint8Array[]}
 *         Data payloads of all matching innermost elements.
 */
export const findEbml = function(bytes, paths) {
paths = normalizePaths(paths);
bytes = toUint8(bytes);
let results = [];
if (!paths.length) {
return results;
}
let i = 0;
while (i < bytes.length) {
const id = getvint(bytes, i, false);
const dataHeader = getvint(bytes, i + id.length);
const dataStart = i + id.length + dataHeader.length;
// dataSize is unknown or this is a live stream
if (dataHeader.value === 0x7f) {
dataHeader.value = getInfinityDataSize(id, bytes, dataStart);
if (dataHeader.value !== bytes.length) {
dataHeader.value -= dataStart;
}
}
// clamp the payload to the available data
const dataEnd = (dataStart + dataHeader.value) > bytes.length ? bytes.length : (dataStart + dataHeader.value);
const data = bytes.subarray(dataStart, dataEnd);
if (bytesMatch(paths[0], id.bytes)) {
if (paths.length === 1) {
// this is the end of the paths and we've found the tag we were
// looking for
results.push(data);
} else {
// recursively search for the next tag inside of the data
// of this one
results = results.concat(findEbml(data, paths.slice(1)));
}
}
const totalLength = id.length + dataHeader.length + data.length;
// move past this tag entirely, we are not looking for it
i += totalLength;
}
return results;
};
// see https://www.matroska.org/technical/basics.html#block-structure
/**
 * Decode a Block/SimpleBlock (or a BlockGroup wrapping one) into a
 * parsed frame object, splitting laced frames apart.
 *
 * @param {Uint8Array} block
 *        Raw block payload.
 * @param {string} type
 *        'simple', 'block', or 'group' (BlockGroup).
 * @param {number} timestampScale
 *        Segment timestamp scale.
 * @param {number} clusterTimestamp
 *        Timestamp of the enclosing cluster.
 * @return {Object}
 *         Parsed frame: duration, trackNumber, keyframe, invisible,
 *         lacing, discardable, frames, pts, dts, timestamp.
 */
export const decodeBlock = function(block, type, timestampScale, clusterTimestamp) {
let duration;
if (type === 'group') {
// a BlockGroup may carry an explicit BlockDuration
duration = findEbml(block, [EBML_TAGS.BlockDuration])[0];
if (duration) {
duration = bytesToNumber(duration);
duration = (((1 / timestampScale) * (duration)) * timestampScale) / 1000;
}
block = findEbml(block, [EBML_TAGS.Block])[0];
type = 'block';
// treat data as a block after this point
}
const dv = new DataView(block.buffer, block.byteOffset, block.byteLength);
const trackNumber = getvint(block, 0);
// signed 16-bit timestamp relative to the cluster
const timestamp = dv.getInt16(trackNumber.length, false);
const flags = block[trackNumber.length + 2];
const data = block.subarray(trackNumber.length + 3);
// pts/dts in seconds
// NOTE(review): the scale factors cancel; this is effectively
// (clusterTimestamp + timestamp) / 1000 up to float rounding.
const ptsdts = (((1 / timestampScale) * (clusterTimestamp + timestamp)) * timestampScale) / 1000;
// return the frame
const parsed = {
duration,
trackNumber: trackNumber.value,
keyframe: type === 'simple' && (flags >> 7) === 1,
invisible: ((flags & 0x08) >> 3) === 1,
lacing: ((flags & 0x06) >> 1),
discardable: type === 'simple' && (flags & 0x01) === 1,
frames: [],
pts: ptsdts,
dts: ptsdts,
timestamp
};
if (!parsed.lacing) {
parsed.frames.push(data);
return parsed;
}
// laced data: the first payload byte is frame count minus one
const numberOfFrames = data[0] + 1;
const frameSizes = [];
let offset = 1;
// Fixed
if (parsed.lacing === 2) {
const sizeOfFrame = (data.length - offset) / numberOfFrames;
for (let i = 0; i < numberOfFrames; i++) {
frameSizes.push(sizeOfFrame);
}
}
// xiph
if (parsed.lacing === 1) {
for (let i = 0; i < numberOfFrames - 1; i++) {
let size = 0;
do {
size += data[offset];
offset++;
} while (data[offset - 1] === 0xFF);
frameSizes.push(size);
}
}
// ebml
if (parsed.lacing === 3) {
// first vint is unsigned
// after that vints are signed and
// based on a compounding size
let size = 0;
for (let i = 0; i < numberOfFrames - 1; i++) {
const vint = i === 0 ? getvint(data, offset) : getvint(data, offset, true, true);
size += vint.value;
frameSizes.push(size);
offset += vint.length;
}
}
// the final frame takes whatever bytes remain after the listed sizes
frameSizes.forEach(function(size) {
parsed.frames.push(data.subarray(offset, offset + size));
offset += size;
});
return parsed;
};
// VP9 Codec Feature Metadata (CodecPrivate)
// https://www.webmproject.org/docs/container/
// Walk the id/length/value triples of a VP9 CodecPrivate block and
// collect them into named parameters.
const parseVp9Private = (bytes) => {
  const params = {};
  let i = 0;

  while (i < bytes.length) {
    const id = bytes[i] & 0x7f;
    const len = bytes[i + 1];
    const val = len === 1 ? bytes[i + 2] : bytes.subarray(i + 2, i + 2 + len);

    switch (id) {
    case 1:
      params.profile = val;
      break;
    case 2:
      params.level = val;
      break;
    case 3:
      params.bitDepth = val;
      break;
    case 4:
      params.chromaSubsampling = val;
      break;
    default:
      // unknown features are kept under their numeric id
      params[id] = val;
    }

    i += 2 + len;
  }

  return params;
};
/**
 * Parse the track entries of an ebml (webm/mkv) file into decoded track
 * objects with mapped codec strings.
 *
 * @param {Uint8Array|Array} bytes
 *        ebml data, with or without the enclosing Segment element.
 * @return {Object[]}
 *         Tracks sorted by track number; each carries rawCodec, type,
 *         codecPrivate, number, defaultDuration, default, rawData, codec.
 */
export const parseTracks = function(bytes) {
bytes = toUint8(bytes);
const decodedTracks = [];
// Track entries may be nested under Segment, under a bare Tracks
// element, or appear directly
let tracks = findEbml(bytes, [EBML_TAGS.Segment, EBML_TAGS.Tracks, EBML_TAGS.Track]);
if (!tracks.length) {
tracks = findEbml(bytes, [EBML_TAGS.Tracks, EBML_TAGS.Track]);
}
if (!tracks.length) {
tracks = findEbml(bytes, [EBML_TAGS.Track]);
}
if (!tracks.length) {
return decodedTracks;
}
tracks.forEach(function(track) {
let trackType = findEbml(track, EBML_TAGS.TrackType)[0];
if (!trackType || !trackType.length) {
return;
}
// 1 is video, 2 is audio, 17 is subtitle
// other values are unimportant in this context
if (trackType[0] === 1) {
trackType = 'video';
} else if (trackType[0] === 2) {
trackType = 'audio';
} else if (trackType[0] === 17) {
trackType = 'subtitle';
} else {
return;
}
// todo parse language
const decodedTrack = {
rawCodec: bytesToString(findEbml(track, [EBML_TAGS.CodecID])[0]),
type: trackType,
codecPrivate: findEbml(track, [EBML_TAGS.CodecPrivate])[0],
number: bytesToNumber(findEbml(track, [EBML_TAGS.TrackNumber])[0]),
defaultDuration: bytesToNumber(findEbml(track, [EBML_TAGS.DefaultDuration])[0]),
default: findEbml(track, [EBML_TAGS.FlagDefault])[0],
rawData: track
};
// map the matroska CodecID onto a codecs-parameter style string
let codec = '';
if ((/V_MPEG4\/ISO\/AVC/).test(decodedTrack.rawCodec)) {
codec = `avc1.${getAvcCodec(decodedTrack.codecPrivate)}`;
} else if ((/V_MPEGH\/ISO\/HEVC/).test(decodedTrack.rawCodec)) {
codec = `hev1.${getHvcCodec(decodedTrack.codecPrivate)}`;
} else if ((/V_MPEG4\/ISO\/ASP/).test(decodedTrack.rawCodec)) {
if (decodedTrack.codecPrivate) {
codec = 'mp4v.20.' + decodedTrack.codecPrivate[4].toString();
} else {
codec = 'mp4v.20.9';
}
} else if ((/^V_THEORA/).test(decodedTrack.rawCodec)) {
codec = 'theora';
} else if ((/^V_VP8/).test(decodedTrack.rawCodec)) {
codec = 'vp8';
} else if ((/^V_VP9/).test(decodedTrack.rawCodec)) {
if (decodedTrack.codecPrivate) {
const {profile, level, bitDepth, chromaSubsampling} = parseVp9Private(decodedTrack.codecPrivate);
codec = 'vp09.';
codec += `${padStart(profile, 2, '0')}.`;
codec += `${padStart(level, 2, '0')}.`;
codec += `${padStart(bitDepth, 2, '0')}.`;
codec += `${padStart(chromaSubsampling, 2, '0')}`;
// Video -> Colour -> Ebml name
const matrixCoefficients = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xB1]])[0] || [];
const videoFullRangeFlag = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xB9]])[0] || [];
const transferCharacteristics = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xBA]])[0] || [];
const colourPrimaries = findEbml(track, [0xE0, [0x55, 0xB0], [0x55, 0xBB]])[0] || [];
// if we find any optional codec parameter specify them all.
if (matrixCoefficients.length ||
videoFullRangeFlag.length ||
transferCharacteristics.length ||
colourPrimaries.length) {
codec += `.${padStart(colourPrimaries[0], 2, '0')}`;
codec += `.${padStart(transferCharacteristics[0], 2, '0')}`;
codec += `.${padStart(matrixCoefficients[0], 2, '0')}`;
codec += `.${padStart(videoFullRangeFlag[0], 2, '0')}`;
}
} else {
codec = 'vp9';
}
} else if ((/^V_AV1/).test(decodedTrack.rawCodec)) {
codec = `av01.${getAv1Codec(decodedTrack.codecPrivate)}`;
} else if ((/A_ALAC/).test(decodedTrack.rawCodec)) {
codec = 'alac';
} else if ((/A_MPEG\/L2/).test(decodedTrack.rawCodec)) {
codec = 'mp2';
} else if ((/A_MPEG\/L3/).test(decodedTrack.rawCodec)) {
codec = 'mp3';
} else if ((/^A_AAC/).test(decodedTrack.rawCodec)) {
// audio object type lives in the top 5 bits of the first
// CodecPrivate byte; default to AAC-LC (2) without it
if (decodedTrack.codecPrivate) {
codec = 'mp4a.40.' + (decodedTrack.codecPrivate[0] >>> 3).toString();
} else {
codec = 'mp4a.40.2';
}
} else if ((/^A_AC3/).test(decodedTrack.rawCodec)) {
codec = 'ac-3';
} else if ((/^A_PCM/).test(decodedTrack.rawCodec)) {
codec = 'pcm';
} else if ((/^A_MS\/ACM/).test(decodedTrack.rawCodec)) {
codec = 'speex';
} else if ((/^A_EAC3/).test(decodedTrack.rawCodec)) {
codec = 'ec-3';
} else if ((/^A_VORBIS/).test(decodedTrack.rawCodec)) {
codec = 'vorbis';
} else if ((/^A_FLAC/).test(decodedTrack.rawCodec)) {
codec = 'flac';
} else if ((/^A_OPUS/).test(decodedTrack.rawCodec)) {
codec = 'opus';
}
decodedTrack.codec = codec;
decodedTracks.push(decodedTrack);
});
return decodedTracks.sort((a, b) => a.number - b.number);
};
/**
 * Parse the clusters of a matroska/webm segment into decoded blocks.
 *
 * @param {TypedArray} data
 *        The ebml bytes containing a Segment element.
 * @param {Object[]} [tracks]
 *        Previously parsed tracks; parsed from the segment when omitted.
 * @return {Object}
 *         `{tracks, blocks}` where blocks are decoded in file order.
 */
export const parseData = function(data, tracks) {
  const blocks = [];
  const segment = findEbml(data, [EBML_TAGS.Segment])[0];
  const rawScale = findEbml(segment, [EBML_TAGS.SegmentInfo, EBML_TAGS.TimestampScale])[0];

  // timestamp scale is in nanoseconds, defaults to 1ms
  const timestampScale = (rawScale && rawScale.length) ? bytesToNumber(rawScale) : 1000000;
  const clusters = findEbml(segment, [EBML_TAGS.Cluster]);

  if (!tracks) {
    tracks = parseTracks(segment);
  }

  clusters.forEach(function(cluster) {
    let timestamp = findEbml(cluster, [EBML_TAGS.Timestamp])[0] || 0;

    if (timestamp && timestamp.length) {
      timestamp = bytesToNumber(timestamp);
    }

    const simple = findEbml(cluster, [EBML_TAGS.SimpleBlock]).map((b) => ({type: 'simple', data: b}));
    const groups = findEbml(cluster, [EBML_TAGS.BlockGroup]).map((b) => ({type: 'group', data: b}));

    // interleave simple blocks and block groups back into file order
    // before decoding
    simple
      .concat(groups)
      .sort((a, b) => a.data.byteOffset - b.data.byteOffset)
      .forEach(function(block) {
        blocks.push(decodeBlock(block.data, block.type, timestampScale, timestamp));
      });
  });

  return {tracks, blocks};
};

338
node_modules/@videojs/vhs-utils/src/format-parser.js generated vendored Normal file
View file

@ -0,0 +1,338 @@
import {bytesToString, toUint8, toHexString, bytesMatch} from './byte-helpers.js';
import {parseTracks as parseEbmlTracks} from './ebml-helpers.js';
import {parseTracks as parseMp4Tracks} from './mp4-helpers.js';
import {findFourCC} from './riff-helpers.js';
import {getPages} from './ogg-helpers.js';
import {detectContainerForBytes} from './containers.js';
import {findH264Nal, findH265Nal} from './nal-helpers.js';
import {parseTs} from './m2ts-helpers.js';
import {getAvcCodec, getHvcCodec} from './codec-helpers.js';
import {getId3Offset} from './id3-helpers.js';
// https://docs.microsoft.com/en-us/windows/win32/medfound/audio-subtype-guids
// https://tools.ietf.org/html/rfc2361
// Map a RIFF/ACM wFormatTag (big-endian byte pair) to a codec string.
// Returns '' when the tag is not recognized.
// https://docs.microsoft.com/en-us/windows/win32/medfound/audio-subtype-guids
// https://tools.ietf.org/html/rfc2361
const wFormatTagCodec = function(wFormatTag) {
  wFormatTag = toUint8(wFormatTag);

  // ordered [tag bytes, codec] pairs; aac has two known tags
  const knownTags = [
    [[0x00, 0x55], 'mp3'],
    [[0x16, 0x00], 'aac'],
    [[0x00, 0xFF], 'aac'],
    [[0x70, 0x4f], 'opus'],
    [[0x6C, 0x61], 'alac'],
    [[0xF1, 0xAC], 'flac'],
    [[0x20, 0x00], 'ac-3'],
    [[0xFF, 0xFE], 'ec-3'],
    [[0x00, 0x50], 'mp2'],
    [[0x56, 0x6f], 'vorbis'],
    [[0xA1, 0x09], 'speex']
  ];

  for (const [tag, codec] of knownTags) {
    if (bytesMatch(wFormatTag, tag)) {
      return codec;
    }
  }

  return '';
};
// Build a mimetype string such as `video/mp4;codecs="avc1,mp4a"` from a
// container name and a `{video, audio}` codecs object. The type is
// `video/` whenever a video codec is present, otherwise `audio/`; the
// codecs parameter is omitted entirely when no codecs are known.
const formatMimetype = (name, codecs) => {
  const parts = [];

  if (codecs.video) {
    parts.push(codecs.video);
  }

  if (codecs.audio) {
    parts.push(codecs.audio);
  }

  const codecString = parts.join(',');
  const type = codecs.video ? 'video' : 'audio';

  if (!codecString) {
    return `${type}/${name}`;
  }

  return `${type}/${name};codecs="${codecString}"`;
};
// Codec detectors indexed by container name. Each takes the file's
// bytes and returns `{codecs, mimetype}`, where `codecs` may contain
// `audio` and/or `video` codec strings.
const parseCodecFrom = {
  mov(bytes) {
    // mov and mp4 both use a nearly identical box structure.
    const retval = parseCodecFrom.mp4(bytes);

    if (retval.mimetype) {
      retval.mimetype = retval.mimetype.replace('mp4', 'quicktime');
    }

    return retval;
  },
  mp4(bytes) {
    bytes = toUint8(bytes);
    const codecs = {};
    const tracks = parseMp4Tracks(bytes);

    // report the first audio and first video track found
    for (let i = 0; i < tracks.length; i++) {
      const track = tracks[i];

      if (track.type === 'audio' && !codecs.audio) {
        codecs.audio = track.codec;
      }

      if (track.type === 'video' && !codecs.video) {
        codecs.video = track.codec;
      }
    }

    return {codecs, mimetype: formatMimetype('mp4', codecs)};
  },
  '3gp'(bytes) {
    // codec detection for 3gp is not implemented; report container only
    return {codecs: {}, mimetype: 'video/3gpp'};
  },
  ogg(bytes) {
    const pages = getPages(bytes, 0, 4);
    const codecs = {};

    // each codec's magic bytes sit just past the ogg page header
    pages.forEach(function(page) {
      if (bytesMatch(page, [0x4F, 0x70, 0x75, 0x73], {offset: 28})) {
        // "Opus"
        codecs.audio = 'opus';
      } else if (bytesMatch(page, [0x56, 0x50, 0x38, 0x30], {offset: 29})) {
        // "VP80"
        codecs.video = 'vp8';
      } else if (bytesMatch(page, [0x74, 0x68, 0x65, 0x6F, 0x72, 0x61], {offset: 29})) {
        // "theora"
        codecs.video = 'theora';
      } else if (bytesMatch(page, [0x46, 0x4C, 0x41, 0x43], {offset: 29})) {
        // "FLAC"
        codecs.audio = 'flac';
      } else if (bytesMatch(page, [0x53, 0x70, 0x65, 0x65, 0x78], {offset: 28})) {
        // "Speex"
        codecs.audio = 'speex';
      } else if (bytesMatch(page, [0x76, 0x6F, 0x72, 0x62, 0x69, 0x73], {offset: 29})) {
        // "vorbis"
        codecs.audio = 'vorbis';
      }
    });

    return {codecs, mimetype: formatMimetype('ogg', codecs)};
  },
  wav(bytes) {
    const format = findFourCC(bytes, ['WAVE', 'fmt'])[0];
    // wFormatTag is stored little-endian; reverse for the lookup
    const wFormatTag = Array.prototype.slice.call(format, 0, 2).reverse();
    let mimetype = 'audio/vnd.wave';
    const codecs = {
      audio: wFormatTagCodec(wFormatTag)
    };
    const codecString = wFormatTag.reduce(function(acc, v) {
      if (v) {
        acc += toHexString(v);
      }

      return acc;
    }, '');

    if (codecString) {
      mimetype += `;codec=${codecString}`;
    }

    if (codecString && !codecs.audio) {
      // unknown format tag: surface the raw hex tag as the codec
      codecs.audio = codecString;
    }

    return {codecs, mimetype};
  },
  avi(bytes) {
    const movi = findFourCC(bytes, ['AVI', 'movi'])[0];
    const strls = findFourCC(bytes, ['AVI', 'hdrl', 'strl']);
    const codecs = {};

    strls.forEach(function(strl) {
      const strh = findFourCC(strl, ['strh'])[0];
      const strf = findFourCC(strl, ['strf'])[0];
      // now parse AVIStreamHeader to get codec and type:
      // https://docs.microsoft.com/en-us/previous-versions/windows/desktop/api/avifmt/ns-avifmt-avistreamheader
      const type = bytesToString(strh.subarray(0, 4));
      let codec;
      let codecType;

      if (type === 'vids') {
        // https://docs.microsoft.com/en-us/windows/win32/api/wingdi/ns-wingdi-bitmapinfoheader
        const handler = bytesToString(strh.subarray(4, 8));
        const compression = bytesToString(strf.subarray(16, 20));
        // look for 00dc (compressed video fourcc code) or 00db (uncompressed video fourcc code)
        // fixed: the 00db fallback was `findFourCC(movi, ['00db'][0])`,
        // which passed the bare string and dropped the result index, so
        // the uncompressed fallback never produced a value
        const videoData = findFourCC(movi, ['00dc'])[0] || findFourCC(movi, ['00db'])[0];

        if (handler === 'H264' || compression === 'H264') {
          if (videoData && videoData.length) {
            codec = parseCodecFrom.h264(videoData).codecs.video;
          } else {
            codec = 'avc1';
          }
        } else if (handler === 'HEVC' || compression === 'HEVC') {
          if (videoData && videoData.length) {
            codec = parseCodecFrom.h265(videoData).codecs.video;
          } else {
            codec = 'hev1';
          }
        } else if (handler === 'FMP4' || compression === 'FMP4') {
          if (movi.length) {
            codec = 'mp4v.20.' + movi[12].toString();
          } else {
            codec = 'mp4v.20';
          }
        } else if (handler === 'VP80' || compression === 'VP80') {
          codec = 'vp8';
        } else if (handler === 'VP90' || compression === 'VP90') {
          codec = 'vp9';
        } else if (handler === 'AV01' || compression === 'AV01') {
          codec = 'av01';
        } else if (handler === 'theo' || compression === 'theora') {
          codec = 'theora';
        } else {
          // NOTE(review): this branch sniffs videoData but then parses
          // `movi` rather than `videoData` - confirm which was intended
          if (videoData && videoData.length) {
            const result = detectContainerForBytes(videoData);

            if (result === 'h264') {
              codec = parseCodecFrom.h264(movi).codecs.video;
            }
            if (result === 'h265') {
              codec = parseCodecFrom.h265(movi).codecs.video;
            }
          }

          if (!codec) {
            codec = handler || compression;
          }
        }

        codecType = 'video';
      } else if (type === 'auds') {
        codecType = 'audio';
        // look for 00wb (audio data fourcc)
        // const audioData = findFourCC(movi, ['01wb']);
        const wFormatTag = Array.prototype.slice.call(strf, 0, 2).reverse();

        codecs.audio = wFormatTagCodec(wFormatTag);
      } else {
        // not an audio or video stream; nothing to record
        return;
      }

      if (codec) {
        codecs[codecType] = codec;
      }
    });

    return {codecs, mimetype: formatMimetype('avi', codecs)};
  },
  ts(bytes) {
    const result = parseTs(bytes, 2);
    const codecs = {};

    Object.keys(result.streams).forEach(function(esPid) {
      const stream = result.streams[esPid];

      // refine generic avc1/hev1 via the actual bitstream when we
      // have pes packets to inspect
      if (stream.codec === 'avc1' && stream.packets.length) {
        stream.codec = parseCodecFrom.h264(stream.packets[0]).codecs.video;
      } else if (stream.codec === 'hev1' && stream.packets.length) {
        stream.codec = parseCodecFrom.h265(stream.packets[0]).codecs.video;
      }

      codecs[stream.type] = stream.codec;
    });

    return {codecs, mimetype: formatMimetype('mp2t', codecs)};
  },
  webm(bytes) {
    // mkv and webm both use ebml to store code info
    const retval = parseCodecFrom.mkv(bytes);

    if (retval.mimetype) {
      retval.mimetype = retval.mimetype.replace('x-matroska', 'webm');
    }

    return retval;
  },
  mkv(bytes) {
    const codecs = {};
    const tracks = parseEbmlTracks(bytes);

    // report the first audio and first video track found
    for (let i = 0; i < tracks.length; i++) {
      const track = tracks[i];

      if (track.type === 'audio' && !codecs.audio) {
        codecs.audio = track.codec;
      }

      if (track.type === 'video' && !codecs.video) {
        codecs.video = track.codec;
      }
    }

    return {codecs, mimetype: formatMimetype('x-matroska', codecs)};
  },
  aac(bytes) {
    return {codecs: {audio: 'aac'}, mimetype: 'audio/aac'};
  },
  ac3(bytes) {
    // past id3 and syncword
    const offset = getId3Offset(bytes) + 2;
    // default to ac-3
    let codec = 'ac-3';

    if (bytesMatch(bytes, [0xB8, 0xE0], {offset})) {
      codec = 'ac-3';
    // 0x01, 0x7F
    } else if (bytesMatch(bytes, [0x01, 0x7f], {offset})) {
      codec = 'ec-3';
    }

    return {codecs: {audio: codec}, mimetype: 'audio/vnd.dolby.dd-raw'};
  },
  mp3(bytes) {
    return {codecs: {audio: 'mp3'}, mimetype: 'audio/mpeg'};
  },
  flac(bytes) {
    return {codecs: {audio: 'flac'}, mimetype: 'audio/flac'};
  },
  'h264'(bytes) {
    // find seq_parameter_set_rbsp to get encoding settings for codec
    const nal = findH264Nal(bytes, 7, 3);
    const retval = {codecs: {video: 'avc1'}, mimetype: 'video/h264'};

    if (nal.length) {
      retval.codecs.video += `.${getAvcCodec(nal)}`;
    }

    return retval;
  },
  'h265'(bytes) {
    const retval = {codecs: {video: 'hev1'}, mimetype: 'video/h265'};
    // find video_parameter_set_rbsp or seq_parameter_set_rbsp
    // to get encoding settings for codec
    const nal = findH265Nal(bytes, [32, 33], 3);

    if (nal.length) {
      const type = (nal[0] >> 1) & 0x3F;

      // profile_tier_level starts at byte 5 for video_parameter_set_rbsp
      // byte 2 for seq_parameter_set_rbsp
      retval.codecs.video += `.${getHvcCodec(nal.subarray(type === 32 ? 5 : 2))}`;
    }

    return retval;
  }
};
/**
 * Detect the container and codecs for an arbitrary media file.
 *
 * @param {TypedArray} bytes
 *        The media file bytes to inspect.
 * @return {Object}
 *         `{codecs, container, mimetype}`; codecs is `{}` and mimetype
 *         is '' when the container is unknown or has no detector.
 */
export const parseFormatForBytes = (bytes) => {
  bytes = toUint8(bytes);
  const result = {
    codecs: {},
    container: detectContainerForBytes(bytes),
    mimetype: ''
  };
  const parseCodecFn = parseCodecFrom[result.container];

  if (parseCodecFn) {
    // (removed a redundant `parseCodecFn ? … : {}` that re-checked the
    // guard condition inside the guarded branch)
    const parsed = parseCodecFn(bytes) || {};

    result.codecs = parsed.codecs || {};
    result.mimetype = parsed.mimetype || '';
  }

  return result;
};

35
node_modules/@videojs/vhs-utils/src/id3-helpers.js generated vendored Normal file
View file

@ -0,0 +1,35 @@
import {toUint8, bytesMatch} from './byte-helpers.js';
const ID3 = toUint8([0x49, 0x44, 0x33]);
// Return the total byte length of the ID3 tag starting at `offset`,
// including the 10 byte header and the optional 10 byte footer.
export const getId3Size = function(bytes, offset = 0) {
  bytes = toUint8(bytes);

  // the tag size is a 28-bit syncsafe integer: four bytes, 7 bits each
  const size = (bytes[offset + 6] << 21) |
    (bytes[offset + 7] << 14) |
    (bytes[offset + 8] << 7) |
    bytes[offset + 9];

  // bit 4 of the flags byte signals a footer after the tag body
  const hasFooter = ((bytes[offset + 5] & 16) >> 4) !== 0;

  return size + (hasFooter ? 20 : 10);
};
// Return the offset of the first byte past any ID3 tag(s) at `offset`.
// Walks repeated tags because some broken files stack several ID3
// sections back to back even though they should not.
export const getId3Offset = function(bytes, offset = 0) {
  bytes = toUint8(bytes);

  // need at least a full 10 byte header that starts with "ID3"
  const hasId3Header = (bytes.length - offset) >= 10 && bytesMatch(bytes, ID3, {offset});

  if (!hasId3Header) {
    return offset;
  }

  // skip this tag, then recursively check for another one
  return getId3Offset(bytes, offset + getId3Size(bytes, offset));
};

17
node_modules/@videojs/vhs-utils/src/index.js generated vendored Normal file
View file

@ -0,0 +1,17 @@
import * as codecs from './codecs';
import * as byteHelpers from './byte-helpers.js';
import * as containers from './containers.js';
import decodeB64ToUint8Array from './decode-b64-to-uint8-array.js';
import * as mediaGroups from './media-groups.js';
import resolveUrl from './resolve-url.js';
import Stream from './stream.js';
// aggregate the individual helper modules into a single default export
const vhsUtils = {
  codecs,
  byteHelpers,
  containers,
  decodeB64ToUint8Array,
  mediaGroups,
  resolveUrl,
  Stream
};

export default vhsUtils;

101
node_modules/@videojs/vhs-utils/src/m2ts-helpers.js generated vendored Normal file
View file

@ -0,0 +1,101 @@
import {bytesMatch, toUint8} from './byte-helpers.js';
// every mpeg2-ts packet starts with this sync byte
const SYNC_BYTE = 0x47;

/**
 * Parse an mpeg2-ts buffer far enough to read the program map table
 * (pmt) and collect elementary stream packets for each declared pid.
 *
 * @param {TypedArray} bytes
 *        The transport stream bytes.
 * @param {number} [maxPes=Infinity]
 *        Stop after collecting this many pes packets.
 * @return {Object}
 *         The pmt: `{pid, streams}` where `streams` maps each
 *         elementary pid to `{esInfo, typeNumber, packets, type, codec}`.
 */
export const parseTs = function(bytes, maxPes = Infinity) {
  bytes = toUint8(bytes);

  let startIndex = 0;
  let endIndex = 188;
  const pmt = {};
  let pesCount = 0;

  while (endIndex < bytes.byteLength && pesCount < maxPes) {
    // unsynced: slide forward one byte at a time until both packet
    // boundaries land on sync bytes
    if (bytes[startIndex] !== SYNC_BYTE && bytes[endIndex] !== SYNC_BYTE) {
      endIndex += 1;
      startIndex += 1;
      continue;
    }
    const packet = bytes.subarray(startIndex, endIndex);
    const pid = (((packet[1] & 0x1f) << 8) | packet[2]);
    const hasPusi = !!(packet[1] & 0x40);
    const hasAdaptationHeader = (((packet[3] & 0x30) >>> 4) > 0x01);
    let payloadOffset = 4 + (hasAdaptationHeader ? (packet[4] + 1) : 0);

    if (hasPusi) {
      payloadOffset += packet[payloadOffset] + 1;
    }

    if (pid === 0 && !pmt.pid) {
      // program association table: learn which pid carries the pmt
      pmt.pid = (packet[payloadOffset + 10] & 0x1f) << 8 | packet[payloadOffset + 11];
    } else if (pmt.pid && pid === pmt.pid && !pmt.streams) {
      const isNotForward = packet[payloadOffset + 5] & 0x01;

      // ignore forward pmt delarations.
      // fixed: the original `continue`d here without advancing
      // startIndex/endIndex, spinning forever on a forward declaration
      if (!isNotForward) {
        startIndex += 188;
        endIndex += 188;
        continue;
      }
      pmt.streams = {};
      const sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
      const tableEnd = 3 + sectionLength - 4;
      const programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];
      let offset = 12 + programInfoLength;

      while (offset < tableEnd) {
        // add an entry that maps the elementary_pid to the stream_type
        const i = payloadOffset + offset;
        const type = packet[i];
        const esPid = (packet[i + 1] & 0x1F) << 8 | packet[i + 2];
        const esLength = ((packet[i + 3] & 0x0f) << 8 | (packet[i + 4]));
        const esInfo = packet.subarray(i + 5, i + 5 + esLength);
        const stream = pmt.streams[esPid] = {
          esInfo,
          typeNumber: type,
          packets: [],
          type: '',
          codec: ''
        };

        if (type === 0x06 && bytesMatch(esInfo, [0x4F, 0x70, 0x75, 0x73], {offset: 2})) {
          // private data carrying "Opus" in its registration descriptor
          stream.type = 'audio';
          stream.codec = 'opus';
        } else if (type === 0x1B || type === 0x20) {
          stream.type = 'video';
          stream.codec = 'avc1';
        } else if (type === 0x24) {
          stream.type = 'video';
          stream.codec = 'hev1';
        } else if (type === 0x10) {
          stream.type = 'video';
          stream.codec = 'mp4v.20';
        } else if (type === 0x0F) {
          stream.type = 'audio';
          stream.codec = 'aac';
        } else if (type === 0x81) {
          stream.type = 'audio';
          stream.codec = 'ac-3';
        } else if (type === 0x87) {
          stream.type = 'audio';
          stream.codec = 'ec-3';
        } else if (type === 0x03 || type === 0x04) {
          stream.type = 'audio';
          stream.codec = 'mp3';
        }
        offset += esLength + 5;
      }
    } else if (pmt.pid && pmt.streams) {
      // fixed: only collect pes packets for pids declared in the pmt;
      // indexing blindly crashed on other pids (e.g. a repeated pat on
      // pid 0 or unrelated program pids)
      const stream = pmt.streams[pid];

      if (stream) {
        stream.packets.push(packet.subarray(payloadOffset));
        pesCount++;
      }
    }
    startIndex += 188;
    endIndex += 188;
  }

  if (!pmt.streams) {
    pmt.streams = {};
  }

  return pmt;
};

22
node_modules/@videojs/vhs-utils/src/media-groups.js generated vendored Normal file
View file

@ -0,0 +1,22 @@
/**
 * Loops through all supported media groups in master and calls the provided
 * callback for each group
 *
 * @param {Object} master
 *        The parsed master manifest object
 * @param {string[]} groups
 *        The media groups to call the callback for
 * @param {Function} callback
 *        Callback to call for each media group; receives
 *        (mediaProperties, mediaType, groupKey, labelKey)
 */
export const forEachMediaGroup = (master, groups, callback) => {
  groups.forEach((mediaType) => {
    // `|| {}` mirrors for...in's tolerance of a missing group type
    const groupsOfType = master.mediaGroups[mediaType] || {};

    Object.keys(groupsOfType).forEach((groupKey) => {
      const labels = groupsOfType[groupKey] || {};

      Object.keys(labels).forEach((labelKey) => {
        callback(labels[labelKey], mediaType, groupKey, labelKey);
      });
    });
  });
};

36
node_modules/@videojs/vhs-utils/src/media-types.js generated vendored Normal file
View file

@ -0,0 +1,36 @@
// case-insensitive matchers for hls and dash manifest media types
const MPEGURL_REGEX = /^(audio|video|application)\/(x-|vnd\.apple\.)?mpegurl/i;
const DASH_REGEX = /^application\/dash\+xml/i;

/**
 * Returns a string that describes the type of source based on a video source object's
 * media type.
 *
 * @see {@link https://dev.w3.org/html5/pf-summary/video.html#dom-source-type|Source Type}
 *
 * @param {string} type
 *        Video source object media type
 * @return {('hls'|'dash'|'vhs-json'|null)}
 *         VHS source type string
 */
export const simpleTypeFromSourceType = (type) => {
  if (MPEGURL_REGEX.test(type)) {
    return 'hls';
  }

  if (DASH_REGEX.test(type)) {
    return 'dash';
  }

  // the special case of a manifest object passed to http-streaming
  // instead of a source URL: `vnd` = vendor, `video.js` = the org,
  // `VHS` = this project and `+json` identifies the structure.
  // See https://en.wikipedia.org/wiki/Media_type for details.
  return type === 'application/vnd.videojs.vhs+json' ? 'vhs-json' : null;
};

564
node_modules/@videojs/vhs-utils/src/mp4-helpers.js generated vendored Normal file
View file

@ -0,0 +1,564 @@
import {
stringToBytes,
toUint8,
bytesMatch,
bytesToString,
toHexString,
padStart,
bytesToNumber
} from './byte-helpers.js';
import {getAvcCodec, getHvcCodec, getAv1Codec} from './codec-helpers.js';
import {parseOpusHead} from './opus-helpers.js';
// Normalize a single box path element: strings become byte arrays;
// numbers and byte arrays pass through unchanged. (The original had a
// separate `typeof path === 'number'` branch that returned the same
// value as the fallthrough - removed as redundant.)
const normalizePath = function(path) {
  if (typeof path === 'string') {
    return stringToBytes(path);
  }

  return path;
};
// Normalize one path or an array of paths into an array of
// normalized path elements.
const normalizePaths = function(paths) {
  const list = Array.isArray(paths) ? paths : [paths];

  return list.map((p) => normalizePath(p));
};
let DESCRIPTORS;

/**
 * Parse a buffer of concatenated mpeg-4 (esds style) descriptors.
 * Each descriptor is one tag byte followed by a variable-length size
 * (7 payload bits per byte; the high bit flags that another size byte
 * follows) and then `size` bytes of payload.
 *
 * @param {TypedArray} bytes
 *        The descriptor bytes to parse.
 * @return {Object[]}
 *         Parsed descriptor objects for every recognized tag
 *         (see DESCRIPTORS below).
 */
export const parseDescriptors = function(bytes) {
  bytes = toUint8(bytes);
  const results = [];
  let i = 0;

  while (bytes.length > i) {
    const tag = bytes[i];
    let size = 0;
    let headerSize = 0;

    // tag
    headerSize++;

    // fixed: size bytes are indexed relative to the start of this
    // descriptor (`i + headerSize`); the original indexed from the
    // start of the buffer, which misread every descriptor after the
    // first one
    let byte = bytes[i + headerSize];

    // first byte
    headerSize++;

    // fixed: accumulate 7 bits per size byte; the original overwrote
    // `size` each pass, which broke sizes encoded in 3+ bytes
    while (byte & 0x80) {
      size = (size << 7) | (byte & 0x7F);
      byte = bytes[i + headerSize];
      headerSize++;
    }
    size = (size << 7) | (byte & 0x7F);

    for (let z = 0; z < DESCRIPTORS.length; z++) {
      const {id, parser} = DESCRIPTORS[z];

      if (tag === id) {
        results.push(parser(bytes.subarray(i + headerSize, i + headerSize + size)));
        break;
      }
    }

    i += size + headerSize;
  }

  return results;
};
// Parsers for the mpeg-4 descriptor tags used by parseDescriptors,
// looked up by tag id.
DESCRIPTORS = [
  // 0x03 ES_Descriptor
  {id: 0x03, parser(bytes) {
    const desc = {
      tag: 0x03,
      id: bytes[0] << 8 | bytes[1],
      flags: bytes[2],
      size: 3,
      dependsOnEsId: 0,
      ocrEsId: 0,
      descriptors: [],
      url: ''
    };

    // depends on es id
    if (desc.flags & 0x80) {
      desc.dependsOnEsId = bytes[desc.size] << 8 | bytes[desc.size + 1];
      desc.size += 2;
    }

    // url
    if (desc.flags & 0x40) {
      const len = bytes[desc.size];

      desc.url = bytesToString(bytes.subarray(desc.size + 1, desc.size + 1 + len));
      // NOTE(review): this advances past the url text but not the
      // length byte itself - confirm whether `+= len + 1` was intended
      desc.size += len;
    }

    // ocr es id
    if (desc.flags & 0x20) {
      desc.ocrEsId = bytes[desc.size] << 8 | bytes[desc.size + 1];
      desc.size += 2;
    }

    // whatever remains is nested descriptors (decoder config etc)
    desc.descriptors = parseDescriptors(bytes.subarray(desc.size)) || [];

    return desc;
  }},
  // 0x04 DecoderConfigDescriptor: objectTypeIndication (oti),
  // stream type, buffer size and bitrates, then nested descriptors
  {id: 0x04, parser(bytes) {
    // DecoderConfigDescriptor
    const desc = {
      tag: 0x04,
      oti: bytes[0],
      streamType: bytes[1],
      bufferSize: bytes[2] << 16 | bytes [3] << 8 | bytes[4],
      maxBitrate: bytes[5] << 24 | bytes[6] << 16 | bytes [7] << 8 | bytes[8],
      avgBitrate: bytes[9] << 24 | bytes[10] << 16 | bytes [11] << 8 | bytes[12],
      descriptors: parseDescriptors(bytes.subarray(13))
    };

    return desc;
  }},
  // 0x05 DecoderSpecificInfo - kept as raw bytes for the caller
  {id: 0x05, parser(bytes) {
    // DecoderSpecificInfo
    return {tag: 0x05, bytes};
  }},
  // 0x06 SLConfigDescriptor - kept as raw bytes for the caller
  {id: 0x06, parser(bytes) {
    // SLConfigDescriptor
    return {tag: 0x06, bytes};
  }}
];
/**
 * find any number of boxes by name given a path to it in an iso bmff
 * such as mp4.
 *
 * @param {TypedArray} bytes
 *        bytes for the iso bmff to search for boxes in
 *
 * @param {Uint8Array[]|string[]|string|Uint8Array} paths
 *        An array of paths or a single path representing the name
 *        of boxes to search through in bytes. Paths may be
 *        uint8 (character codes) or strings.
 *
 * @param {boolean} [complete=false]
 *        Should we search only for complete boxes on the final path.
 *        This is very useful when you do not want to get back partial boxes
 *        in the case of streaming files.
 *
 * @return {Uint8Array[]}
 *         An array of the end paths that we found.
 */
export const findBox = function(bytes, paths, complete = false) {
  paths = normalizePaths(paths);
  bytes = toUint8(bytes);
  const results = [];

  if (!paths.length) {
    // short-circuit the search for empty paths
    return results;
  }
  let i = 0;

  while (i < bytes.length) {
    // box layout: 32-bit size (which includes the 8 byte header)
    // followed by the 4-character box type
    const size = (bytes[i] << 24 | bytes[i + 1] << 16 | bytes[i + 2] << 8 | bytes[i + 3]) >>> 0;
    const type = bytes.subarray(i + 4, i + 8);

    // invalid box format.
    if (size === 0) {
      break;
    }
    let end = i + size;

    if (end > bytes.length) {
      // this box is bigger than the number of bytes we have
      // and complete is set, we cannot find any more boxes.
      if (complete) {
        break;
      }
      end = bytes.length;
    }
    const data = bytes.subarray(i + 8, end);

    if (bytesMatch(type, paths[0])) {
      if (paths.length === 1) {
        // this is the end of the path and we've found the box we were
        // looking for
        results.push(data);
      } else {
        // recursively search for the next box along the path
        results.push.apply(results, findBox(data, paths.slice(1), complete));
      }
    }
    i = end;
  }

  // we've finished searching all of bytes
  return results;
};
/**
 * Search for a single matching box by name in an iso bmff format like
 * mp4. This function is useful for finding codec boxes which
 * can be placed arbitrarily in sample descriptions depending
 * on the version of the file or file type.
 *
 * @param {TypedArray} bytes
 *        bytes for the iso bmff to search for boxes in
 *
 * @param {string|Uint8Array} name
 *        The name of the box to find.
 *
 * @return {Uint8Array}
 *         a subarray of bytes containing the box contents, or an
 *         empty subarray when no box matches.
 */
export const findNamedBox = function(bytes, name) {
  name = normalizePath(name);

  if (!name.length) {
    // nothing to search for; return an empty match
    return bytes.subarray(bytes.length);
  }

  for (let i = 0; i < bytes.length; i++) {
    if (!bytesMatch(bytes.subarray(i, i + name.length), name)) {
      continue;
    }

    // the 32-bit box size immediately precedes the box name
    const size = (bytes[i - 4] << 24 | bytes[i - 3] << 16 | bytes[i - 2] << 8 | bytes[i - 1]) >>> 0;
    const end = size > 1 ? i + size : bytes.byteLength;

    return bytes.subarray(i + 4, end);
  }

  // we've finished searching all of bytes with no match
  return bytes.subarray(bytes.length);
};
// Parse a sample-table full box (stts/stsc/stco/stsz…) into an array of
// entries: bytes 4-8 hold the entry count, entries follow at byte 8,
// each `entrySize` bytes wide and decoded by `parseEntry`.
const parseSamples = function(data, entrySize = 4, parseEntry = (d) => bytesToNumber(d)) {
  const entries = [];

  if (!data || !data.length) {
    return entries;
  }

  const entryCount = bytesToNumber(data.subarray(4, 8));

  for (let n = 0; n < entryCount; n++) {
    const start = 8 + (n * entrySize);

    entries.push(parseEntry(data.subarray(start, start + entrySize)));
  }

  return entries;
};
// Build a frame table (byte range, keyframe flag, timestamp, duration
// per frame) for one track by combining the stbl child boxes:
//   stss - sync (key) sample numbers, stco - chunk byte offsets,
//   stts - time deltas per run of samples, stsc - samples-per-chunk
//   runs, stsz - per-sample byte sizes.
export const buildFrameTable = function(stbl, timescale) {
  const keySamples = parseSamples(findBox(stbl, ['stss'])[0]);
  const chunkOffsets = parseSamples(findBox(stbl, ['stco'])[0]);
  const timeToSamples = parseSamples(findBox(stbl, ['stts'])[0], 8, (entry) => ({
    sampleCount: bytesToNumber(entry.subarray(0, 4)),
    sampleDelta: bytesToNumber(entry.subarray(4, 8))
  }));
  const samplesToChunks = parseSamples(findBox(stbl, ['stsc'])[0], 12, (entry) => ({
    firstChunk: bytesToNumber(entry.subarray(0, 4)),
    samplesPerChunk: bytesToNumber(entry.subarray(4, 8)),
    sampleDescriptionIndex: bytesToNumber(entry.subarray(8, 12))
  }));
  const stsz = findBox(stbl, ['stsz'])[0];

  // stsz starts with a 4 byte sampleSize which we don't need
  const sampleSizes = parseSamples(stsz && stsz.length && stsz.subarray(4) || null);
  const frames = [];

  for (let chunkIndex = 0; chunkIndex < chunkOffsets.length; chunkIndex++) {
    let samplesInChunk;

    // find the stsc run covering this chunk to learn how many samples
    // it contains (stsc chunk numbers are 1-based)
    for (let i = 0; i < samplesToChunks.length; i++) {
      const sampleToChunk = samplesToChunks[i];
      const isThisOne = (chunkIndex + 1) >= sampleToChunk.firstChunk &&
        (i + 1 >= samplesToChunks.length || (chunkIndex + 1) < samplesToChunks[i + 1].firstChunk);

      if (isThisOne) {
        samplesInChunk = sampleToChunk.samplesPerChunk;
        break;
      }
    }

    let chunkOffset = chunkOffsets[chunkIndex];

    for (let i = 0; i < samplesInChunk; i++) {
      // size in bytes of this frame
      const frameEnd = sampleSizes[frames.length];

      // if we don't have key samples every frame is a keyframe
      let keyframe = !keySamples.length;

      // stss sample numbers are 1-based
      if (keySamples.length && keySamples.indexOf(frames.length + 1) !== -1) {
        keyframe = true;
      }
      const frame = {
        keyframe,
        start: chunkOffset,
        end: chunkOffset + frameEnd
      };

      // find the stts run this frame falls in to compute its
      // timestamp (in ms: sampleDelta / timescale seconds * 1000) and
      // duration (in timescale units).
      // NOTE(review): this compares the global frame index against a
      // single run's sampleCount rather than a running total across
      // runs - confirm multi-run stts boxes are handled as intended
      for (let k = 0; k < timeToSamples.length; k++) {
        const {sampleCount, sampleDelta} = timeToSamples[k];

        if ((frames.length) <= sampleCount) {
          const lastTimestamp = frames.length ? frames[frames.length - 1].timestamp : 0;

          frame.timestamp = lastTimestamp + ((sampleDelta / timescale) * 1000);
          frame.duration = sampleDelta;
          break;
        }
      }

      frames.push(frame);
      chunkOffset += frameEnd;
    }
  }

  return frames;
};
// Derive the full codec string for one sample description (stsd entry)
// and record basic stream info (dimensions or audio params) on `track`.
// `bytes` is a single sample description: the 4cc coding name followed
// by format-specific fields and nested codec configuration boxes.
export const addSampleDescription = function(track, bytes) {
  let codec = bytesToString(bytes.subarray(0, 4));

  if (track.type === 'video') {
    track.info = track.info || {};
    track.info.width = bytes[28] << 8 | bytes[29];
    track.info.height = bytes[30] << 8 | bytes[31];
  } else if (track.type === 'audio') {
    track.info = track.info || {};
    track.info.channels = bytes[20] << 8 | bytes[21];
    track.info.bitDepth = bytes[22] << 8 | bytes[23];
    track.info.sampleRate = bytes[28] << 8 | bytes[29];
  }

  if (codec === 'avc1') {
    const avcC = findNamedBox(bytes, 'avcC');

    // AVCDecoderConfigurationRecord
    codec += `.${getAvcCodec(avcC)}`;
    track.info.avcC = avcC;
    // TODO: do we need to parse all this?
    /* {
      configurationVersion: avcC[0],
      profile: avcC[1],
      profileCompatibility: avcC[2],
      level: avcC[3],
      lengthSizeMinusOne: avcC[4] & 0x3
    };
    let spsNalUnitCount = avcC[5] & 0x1F;
    const spsNalUnits = track.info.avc.spsNalUnits = [];
    // past spsNalUnitCount
    let offset = 6;
    while (spsNalUnitCount--) {
      const nalLen = avcC[offset] << 8 | avcC[offset + 1];
      spsNalUnits.push(avcC.subarray(offset + 2, offset + 2 + nalLen));
      offset += nalLen + 2;
    }
    let ppsNalUnitCount = avcC[offset];
    const ppsNalUnits = track.info.avc.ppsNalUnits = [];
    // past ppsNalUnitCount
    offset += 1;
    while (ppsNalUnitCount--) {
      const nalLen = avcC[offset] << 8 | avcC[offset + 1];
      ppsNalUnits.push(avcC.subarray(offset + 2, offset + 2 + nalLen));
      offset += nalLen + 2;
    }*/
    // HEVCDecoderConfigurationRecord
  } else if (codec === 'hvc1' || codec === 'hev1') {
    codec += `.${getHvcCodec(findNamedBox(bytes, 'hvcC'))}`;
  } else if (codec === 'mp4a' || codec === 'mp4v') {
    const esds = findNamedBox(bytes, 'esds');
    // skip the esds version/flags (4 bytes), then parse descriptors;
    // the first should be an ES_Descriptor (tag 0x03)
    const esDescriptor = parseDescriptors(esds.subarray(4))[0];
    // DecoderConfigDescriptor (tag 0x04) carries the oti
    const decoderConfig = esDescriptor && esDescriptor.descriptors.filter(({tag}) => tag === 0x04)[0];

    if (decoderConfig) {
      // most codecs do not have a further '.'
      // such as 0xa5 for ac-3 and 0xa6 for e-ac-3
      codec += '.' + toHexString(decoderConfig.oti);
      if (decoderConfig.oti === 0x40) {
        // mp4a.40: audioObjectType from the DecoderSpecificInfo
        codec += '.' + (decoderConfig.descriptors[0].bytes[0] >> 3).toString();
      } else if (decoderConfig.oti === 0x20) {
        // mp4v.20: profile/level indication byte
        codec += '.' + (decoderConfig.descriptors[0].bytes[4]).toString();
      } else if (decoderConfig.oti === 0xdd) {
        codec = 'vorbis';
      }
    } else if (track.type === 'audio') {
      // no decoder config: assume aac-lc
      codec += '.40.2';
    } else {
      codec += '.20.9';
    }
  } else if (codec === 'av01') {
    // AV1DecoderConfigurationRecord
    codec += `.${getAv1Codec(findNamedBox(bytes, 'av1C'))}`;
  } else if (codec === 'vp09') {
    // VPCodecConfigurationRecord
    const vpcC = findNamedBox(bytes, 'vpcC');

    // https://www.webmproject.org/vp9/mp4/
    const profile = vpcC[0];
    const level = vpcC[1];
    const bitDepth = vpcC[2] >> 4;
    const chromaSubsampling = (vpcC[2] & 0x0F) >> 1;
    // NOTE(review): per the VP9-in-mp4 spec videoFullRangeFlag is the
    // lowest bit of this byte (& 0x01); `(& 0x0F) >> 3` reads the top
    // bit of chromaSubsampling instead - confirm against the spec
    const videoFullRangeFlag = (vpcC[2] & 0x0F) >> 3;
    const colourPrimaries = vpcC[3];
    const transferCharacteristics = vpcC[4];
    const matrixCoefficients = vpcC[5];

    codec += `.${padStart(profile, 2, '0')}`;
    codec += `.${padStart(level, 2, '0')}`;
    codec += `.${padStart(bitDepth, 2, '0')}`;
    codec += `.${padStart(chromaSubsampling, 2, '0')}`;
    codec += `.${padStart(colourPrimaries, 2, '0')}`;
    codec += `.${padStart(transferCharacteristics, 2, '0')}`;
    codec += `.${padStart(matrixCoefficients, 2, '0')}`;
    codec += `.${padStart(videoFullRangeFlag, 2, '0')}`;
  } else if (codec === 'theo') {
    codec = 'theora';
  } else if (codec === 'spex') {
    codec = 'speex';
  } else if (codec === '.mp3') {
    codec = 'mp4a.40.34';
  } else if (codec === 'msVo') {
    codec = 'vorbis';
  } else if (codec === 'Opus') {
    codec = 'opus';
    const dOps = findNamedBox(bytes, 'dOps');

    track.info.opus = parseOpusHead(dOps);
    // TODO: should this go into the webm code??
    // Firefox requires a codecDelay for opus playback
    // see https://bugzilla.mozilla.org/show_bug.cgi?id=1276238
    track.info.codecDelay = 6500000;
  } else {
    codec = codec.toLowerCase();
  }
  /* eslint-enable */
  // flac, ac-3, ec-3, opus
  track.codec = codec;
};
/**
 * Parse every trak in an mp4's moov into a track object with type,
 * number, timescale, codec and (optionally) a frame table.
 *
 * @param {TypedArray} bytes
 *        mp4 bytes; must include a complete moov box.
 * @param {boolean} [frameTable=true]
 *        Whether to also build the (potentially large) frame table.
 * @return {Object[]}
 *         One track object per trak found.
 */
export const parseTracks = function(bytes, frameTable = true) {
  bytes = toUint8(bytes);
  const traks = findBox(bytes, ['moov', 'trak'], true);
  const tracks = [];

  traks.forEach(function(trak) {
    const track = {bytes: trak};
    const mdia = findBox(trak, ['mdia'])[0];
    const hdlr = findBox(mdia, ['hdlr'])[0];
    // handler type 4cc: 'soun' for audio, 'vide' for video
    const trakType = bytesToString(hdlr.subarray(8, 12));

    if (trakType === 'soun') {
      track.type = 'audio';
    } else if (trakType === 'vide') {
      track.type = 'video';
    } else {
      track.type = trakType;
    }
    const tkhd = findBox(trak, ['tkhd'])[0];

    if (tkhd) {
      const view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength);
      const tkhdVersion = view.getUint8(0);

      // version 1 tkhd uses 64-bit times, shifting track_ID to byte 20
      track.number = (tkhdVersion === 0) ? view.getUint32(12) : view.getUint32(20);
    }
    const mdhd = findBox(mdia, ['mdhd'])[0];

    if (mdhd) {
      // mdhd is a FullBox, meaning it will have its own version as the first byte
      const version = mdhd[0];
      const index = version === 0 ? 12 : 20;

      track.timescale = (
        mdhd[index] << 24 |
        mdhd[index + 1] << 16 |
        mdhd[index + 2] << 8 |
        mdhd[index + 3]
      ) >>> 0;
    }
    const stbl = findBox(mdia, ['minf', 'stbl'])[0];
    const stsd = findBox(stbl, ['stsd'])[0];
    let descriptionCount = bytesToNumber(stsd.subarray(4, 8));
    let offset = 8;

    // add codec and codec info
    while (descriptionCount--) {
      // each description is a length-prefixed box
      const len = bytesToNumber(stsd.subarray(offset, offset + 4));
      const sampleDescriptor = stsd.subarray(offset + 4, offset + 4 + len);

      addSampleDescription(track, sampleDescriptor);
      offset += 4 + len;
    }

    if (frameTable) {
      track.frameTable = buildFrameTable(stbl, track.timescale);
    }

    // codec has no sub parameters
    tracks.push(track);
  });

  return tracks;
};
// Read the movie header (mvhd) for overall timescale and duration.
// Returns undefined when no mvhd box can be found.
export const parseMediaInfo = function(bytes) {
  const mvhd = findBox(bytes, ['moov', 'mvhd'], true)[0];

  if (!mvhd || !mvhd.length) {
    return;
  }

  // mvhd is a FullBox; version 1 widens the creation/modification
  // times to 8 bytes, pushing timescale and duration further in
  const isVersion1 = mvhd[0] === 1;
  const info = {
    timestampScale: isVersion1 ?
      bytesToNumber(mvhd.subarray(20, 24)) :
      bytesToNumber(mvhd.subarray(12, 16)),
    duration: isVersion1 ?
      bytesToNumber(mvhd.subarray(24, 32)) :
      bytesToNumber(mvhd.subarray(16, 20)),
    bytes: mvhd
  };

  return info;
};

109
node_modules/@videojs/vhs-utils/src/nal-helpers.js generated vendored Normal file
View file

@ -0,0 +1,109 @@
import {bytesMatch, toUint8} from './byte-helpers.js';
// the two legal annex-b NAL unit start codes
export const NAL_TYPE_ONE = toUint8([0x00, 0x00, 0x00, 0x01]);
export const NAL_TYPE_TWO = toUint8([0x00, 0x00, 0x01]);
// a 0x000003 sequence marks an emulation prevention byte inside a NAL
export const EMULATION_PREVENTION = toUint8([0x00, 0x00, 0x03]);
/**
 * Expunge any "Emulation Prevention" bytes from a "Raw Byte
 * Sequence Payload"
 *
 * @param {Uint8Array} bytes
 *        the bytes of a RBSP from a NAL unit
 * @return {Uint8Array}
 *         the RBSP without any Emulation Prevention Bytes
 */
export const discardEmulationPreventionBytes = function(bytes) {
  const positions = [];
  let i = 1;

  // Find all `Emulation Prevention Bytes`
  while (i < bytes.length - 2) {
    if (bytesMatch(bytes.subarray(i, i + 3), EMULATION_PREVENTION)) {
      // record the position of the 0x03 byte itself
      positions.push(i + 2);
      i++;
    }
    i++;
  }

  // If no Emulation Prevention Bytes were found just return the original
  // array
  if (positions.length === 0) {
    return bytes;
  }

  // Create a new array to hold the NAL unit data
  const newLength = bytes.length - positions.length;
  const newData = new Uint8Array(newLength);
  let sourceIndex = 0;

  // copy everything except the recorded 0x03 positions
  for (i = 0; i < newLength; sourceIndex++, i++) {
    if (sourceIndex === positions[0]) {
      // Skip this byte
      sourceIndex++;
      // Remove this position index
      positions.shift();
    }
    newData[i] = bytes[sourceIndex];
  }

  return newData;
};
/**
 * Find the first NAL unit of the given type(s) in an annex-b stream.
 *
 * @param {TypedArray} bytes
 *        The bytes to scan for NAL units.
 * @param {string} dataType
 *        'h264' or 'h265'; controls how the nal type bits are read.
 * @param {number[]|number} types
 *        nal unit type(s) to look for.
 * @param {number} [nalLimit=Infinity]
 *        Maximum number of nals to scan before giving up.
 * @return {Uint8Array}
 *         The matched nal payload with emulation prevention bytes
 *         removed, or an empty subarray when no match is found.
 */
export const findNal = function(bytes, dataType, types, nalLimit = Infinity) {
  bytes = toUint8(bytes);
  types = [].concat(types);

  let i = 0;
  let nalStart;
  let nalsFound = 0;

  // keep searching until:
  // we reach the end of bytes
  // we reach the maximum number of nals they want to seach
  // NOTE: that we disregard nalLimit when we have found the start
  // of the nal we want so that we can find the end of the nal we want.
  while (i < bytes.length && (nalsFound < nalLimit || nalStart)) {
    let nalOffset;

    if (bytesMatch(bytes.subarray(i), NAL_TYPE_ONE)) {
      nalOffset = 4;
    } else if (bytesMatch(bytes.subarray(i), NAL_TYPE_TWO)) {
      nalOffset = 3;
    }

    // we are unsynced,
    // find the next nal unit
    if (!nalOffset) {
      i++;
      continue;
    }
    nalsFound++;

    // a start code following a matched nal marks the end of that nal
    if (nalStart) {
      return discardEmulationPreventionBytes(bytes.subarray(nalStart, i));
    }
    let nalType;

    if (dataType === 'h264') {
      // h264: nal type is the low 5 bits of the header byte
      nalType = (bytes[i + nalOffset] & 0x1f);
    } else if (dataType === 'h265') {
      // h265: nal type is bits 1-6 of the first header byte
      nalType = (bytes[i + nalOffset] >> 1) & 0x3f;
    }

    if (types.indexOf(nalType) !== -1) {
      nalStart = i + nalOffset;
    }

    // nal header is 1 length for h264, and 2 for h265
    i += nalOffset + (dataType === 'h264' ? 1 : 2);
  }

  // no matching nal was found
  return bytes.subarray(0, 0);
};
// codec-specific convenience wrappers around findNal
export const findH264Nal = function(bytes, type, nalLimit) {
  return findNal(bytes, 'h264', type, nalLimit);
};
export const findH265Nal = function(bytes, type, nalLimit) {
  return findNal(bytes, 'h265', type, nalLimit);
};

27
node_modules/@videojs/vhs-utils/src/ogg-helpers.js generated vendored Normal file
View file

@ -0,0 +1,27 @@
import {bytesMatch, toUint8} from './byte-helpers';
// 'OggS' in ASCII — the capture pattern that begins every Ogg page
const SYNC_WORD = toUint8([0x4f, 0x67, 0x67, 0x53]);
/**
 * Collect Ogg pages out of a byte stream.
 *
 * @param {Uint8Array} bytes the buffer to scan for pages
 * @param {number} start index of the first collected page to return
 * @param {number} [end=Infinity] stop scanning once this many pages have
 *        been collected; also bounds the returned slice
 * @return {Uint8Array[]} the pages found, sliced to [start, end)
 */
export const getPages = function(bytes, start, end = Infinity) {
  bytes = toUint8(bytes);
  const pages = [];
  let offset = 0;

  while (offset < bytes.length && pages.length < end) {
    // unsynced; resync byte-by-byte until an 'OggS' capture pattern
    if (!bytesMatch(bytes, SYNC_WORD, {offset})) {
      offset++;
      continue;
    }

    // the byte at offset 27 is read as the segment length and the page
    // is taken to span 28 bytes plus that length — presumably enough for
    // header-level inspection; TODO confirm this covers intended use
    const pageLength = 28 + bytes[offset + 27];

    pages.push(bytes.subarray(offset, offset + pageLength));
    offset += pages[pages.length - 1].length;
  }

  return pages.slice(start, end);
};

61
node_modules/@videojs/vhs-utils/src/opus-helpers.js generated vendored Normal file
View file

@ -0,0 +1,61 @@
// the 8-byte 'OpusHead' magic signature that starts an Opus
// identification header
export const OPUS_HEAD = new Uint8Array([
  // O, p, u, s
  0x4f, 0x70, 0x75, 0x73,
  // H, e, a, d
  0x48, 0x65, 0x61, 0x64
]);
// https://wiki.xiph.org/OggOpus
// https://vfrmaniac.fushizen.eu/contents/opus_in_isobmff.html
// https://opus-codec.org/docs/opusfile_api-0.7/structOpusHead.html
/**
 * Parse an OpusHead identification header into its fields.
 *
 * @param {Uint8Array} bytes the OpusHead bytes (without the magic signature)
 * @return {Object} version, channels, preSkip, sampleRate, outputGain and
 *         channelMappingFamily; for non-zero mapping families also
 *         streamCount, twoChannelStreamCount and channelMapping
 */
export const parseOpusHead = function(bytes) {
  const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  const version = view.getUint8(0);
  // version 0 (the mp4/ISO-BMFF flavor) stores multi-byte fields
  // big-endian; every other version stores them little-endian.
  const littleEndian = version !== 0;

  const head = {
    version,
    channels: view.getUint8(1),
    preSkip: view.getUint16(2, littleEndian),
    sampleRate: view.getUint32(4, littleEndian),
    outputGain: view.getUint16(8, littleEndian),
    channelMappingFamily: view.getUint8(10)
  };

  // a non-zero mapping family carries an explicit channel mapping table
  if (head.channelMappingFamily > 0 && bytes.length > 10) {
    head.streamCount = view.getUint8(11);
    head.twoChannelStreamCount = view.getUint8(12);
    head.channelMapping = Array.from(
      {length: head.channels},
      (_, channel) => view.getUint8(13 + channel)
    );
  }

  return head;
};
/**
 * Serialize an OpusHead config object (as produced by parseOpusHead)
 * back into bytes.
 *
 * @param {Object} config the OpusHead fields to write: version, channels,
 *        preSkip, sampleRate, outputGain, channelMappingFamily and, for
 *        non-zero mapping families, streamCount and channelMapping
 * @return {Uint8Array} the serialized OpusHead bytes
 */
export const setOpusHead = function(config) {
  // family 0 needs only the 11 fixed bytes; otherwise add room for the
  // stream count byte and one byte per channel of mapping
  const size = config.channelMappingFamily <= 0 ? 11 : (12 + config.channels);
  const view = new DataView(new ArrayBuffer(size));
  // version 0 (the mp4/ISO-BMFF flavor) is big-endian, everything else
  // is little-endian — mirrors parseOpusHead.
  const littleEndian = config.version !== 0;

  view.setUint8(0, config.version);
  view.setUint8(1, config.channels);
  view.setUint16(2, config.preSkip, littleEndian);
  view.setUint32(4, config.sampleRate, littleEndian);
  view.setUint16(8, config.outputGain, littleEndian);
  view.setUint8(10, config.channelMappingFamily);

  if (config.channelMappingFamily > 0) {
    view.setUint8(11, config.streamCount);
    // fix: Array.prototype has no `foreach`; the original `.foreach(...)`
    // call threw a TypeError whenever a channel mapping had to be written
    config.channelMapping.forEach(function(cm, i) {
      view.setUint8(12 + i, cm);
    });
    // NOTE(review): parseOpusHead reads twoChannelStreamCount at byte 12
    // and the mapping from byte 13 on, so this layout is one byte short
    // of a parse/set round-trip — confirm intended layout before changing.
  }

  return new Uint8Array(view.buffer);
};

51
node_modules/@videojs/vhs-utils/src/resolve-url.js generated vendored Normal file
View file

@ -0,0 +1,51 @@
import URLToolkit from 'url-toolkit';
import window from 'global/window';
const DEFAULT_LOCATION = 'http://example.com';
/**
 * Resolve a relative URL against a base URL, tolerating environments
 * without window.location (node) and without a URL constructor (IE11,
 * via the url-toolkit fallback).
 *
 * @param {string} baseUrl the URL to resolve against
 * @param {string} relativeUrl the URL to resolve
 * @return {string} the resolved URL
 */
const resolveUrl = (baseUrl, relativeUrl) => {
  // return early if we don't need to resolve: relativeUrl already has a
  // scheme (e.g. "http:", "data:"), so it is absolute as-is
  if ((/^[a-z]+:/i).test(relativeUrl)) {
    return relativeUrl;
  }
  // if baseUrl is a data URI, ignore it and resolve everything relative to window.location
  if ((/^data:/).test(baseUrl)) {
    baseUrl = window.location && window.location.href || '';
  }
  // IE11 supports URL but not the URL constructor
  // feature detect the behavior we want
  const nativeURL = typeof window.URL === 'function';
  // baseUrl beginning with "//" is protocol-relative; remember that so the
  // protocol the URL constructor fills in can be stripped back off below
  const protocolLess = (/^\/\//.test(baseUrl));
  // remove location if window.location isn't available (i.e. we're in node)
  // and if baseUrl isn't an absolute url — DEFAULT_LOCATION is used as a
  // placeholder origin and stripped from the result below
  const removeLocation = !window.location && !(/\/\//i).test(baseUrl);
  // if the base URL is relative then combine with the current location
  if (nativeURL) {
    baseUrl = new window.URL(baseUrl, window.location || DEFAULT_LOCATION);
  } else if (!(/\/\//i).test(baseUrl)) {
    baseUrl = URLToolkit.buildAbsoluteURL(window.location && window.location.href || '', baseUrl);
  }
  if (nativeURL) {
    const newUrl = new URL(relativeUrl, baseUrl);
    // if we're a protocol-less url, remove the protocol
    // and if we're location-less, remove the location
    // otherwise, return the url unmodified
    if (removeLocation) {
      return newUrl.href.slice(DEFAULT_LOCATION.length);
    } else if (protocolLess) {
      // note: URL#protocol does not include the "//" so the result keeps
      // its protocol-relative "//host/..." shape
      return newUrl.href.slice(newUrl.protocol.length);
    }
    return newUrl.href;
  }
  // no native URL constructor: fall back to url-toolkit entirely
  return URLToolkit.buildAbsoluteURL(baseUrl, relativeUrl);
};
export default resolveUrl;

75
node_modules/@videojs/vhs-utils/src/riff-helpers.js generated vendored Normal file
View file

@ -0,0 +1,75 @@
import {toUint8, stringToBytes, bytesMatch} from './byte-helpers.js';
// fourCC byte sequences for the container-level identifiers that wrap
// other chunks and are skipped over while walking the tree
const CONSTANTS = {
  LIST: toUint8([0x4c, 0x49, 0x53, 0x54]), // 'LIST'
  RIFF: toUint8([0x52, 0x49, 0x46, 0x46]), // 'RIFF'
  WAVE: toUint8([0x57, 0x41, 0x56, 0x45])  // 'WAVE'
};
// Coerce a single path element: strings become byte arrays so they can
// be compared with bytesMatch; numbers and byte arrays pass through
// untouched.
const normalizePath = function(path) {
  return typeof path === 'string' ? stringToBytes(path) : path;
};
// Coerce a path — or a list of path elements — into a list of
// normalized path elements.
const normalizePaths = function(paths) {
  if (Array.isArray(paths)) {
    return paths.map((p) => normalizePath(p));
  }
  return [normalizePath(paths)];
};
/**
 * Search RIFF-style bytes for chunks along a path of fourCC codes.
 *
 * @param {Uint8Array} bytes the bytes to search
 * @param {string|number|Array} paths a fourCC path (or list of path
 *        segments) leading to the chunk(s) of interest
 * @return {Uint8Array[]} the data of every chunk matching the full path
 */
export const findFourCC = function(bytes, paths) {
  paths = normalizePaths(paths);
  bytes = toUint8(bytes);
  let results = [];
  if (!paths.length) {
    // short-circuit the search for empty paths
    return results;
  }
  let i = 0;
  while (i < bytes.length) {
    // each chunk starts with a 4-byte fourCC type then a 4-byte
    // little-endian size of the data that follows
    let type = bytes.subarray(i, i + 4);
    // ">>> 0" forces the assembled 32-bit size to be read as unsigned
    let size = ((bytes[i + 7] << 24 | bytes[i + 6] << 16 | bytes[i + 5] << 8 | bytes[i + 4]) >>> 0);
    // skip LIST/RIFF and get the actual type
    // (these containers carry an extra 4-byte identifier before their
    // children, so shift past it and shrink the size to match)
    if (bytesMatch(type, CONSTANTS.LIST) || bytesMatch(type, CONSTANTS.RIFF) || bytesMatch(type, CONSTANTS.WAVE)) {
      type = bytes.subarray(i + 8, i + 12);
      i += 4;
      size -= 4;
    }
    const data = bytes.subarray(i + 8, i + 8 + size);
    if (bytesMatch(type, paths[0])) {
      if (paths.length === 1) {
        // this is the end of the path and we've found the box we were
        // looking for
        results.push(data);
      } else {
        // recursively search for the next box along the path
        const subresults = findFourCC(data, paths.slice(1));
        if (subresults.length) {
          results = results.concat(subresults);
        }
      }
    }
    // advance past this chunk's header and data to the next sibling
    i += 8 + data.length;
  }
  // we've finished searching all of bytes
  return results;
};

108
node_modules/@videojs/vhs-utils/src/stream.js generated vendored Normal file
View file

@ -0,0 +1,108 @@
/**
* @file stream.js
*/
/**
 * A lightweight readable stream implemention that handles event dispatching.
 *
 * @class Stream
 */
export default class Stream {
  constructor() {
    // map of event type -> array of listener functions
    this.listeners = {};
  }

  /**
   * Add a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener the callback to be invoked when an event of
   * the specified type occurs
   */
  on(type, listener) {
    if (!this.listeners[type]) {
      this.listeners[type] = [];
    }
    this.listeners[type].push(listener);
  }

  /**
   * Remove a listener for a specified event type.
   *
   * @param {string} type the event name
   * @param {Function} listener a function previously registered for this
   * type of event through `on`
   * @return {boolean} if we could turn it off or not
   */
  off(type, listener) {
    if (!this.listeners[type]) {
      return false;
    }

    const index = this.listeners[type].indexOf(listener);

    // TODO: which is better?
    // In Video.js we slice listener functions
    // on trigger so that it does not mess up the order
    // while we loop through.
    //
    // Here we slice on off so that the loop in trigger
    // can continue using it's old reference to loop without
    // messing up the order.
    this.listeners[type] = this.listeners[type].slice(0);

    // fix: only splice when the listener was actually registered;
    // splice(-1, 1) would otherwise remove the LAST listener whenever an
    // unregistered function was passed in
    if (index > -1) {
      this.listeners[type].splice(index, 1);
    }

    return index > -1;
  }

  /**
   * Trigger an event of the specified type on this stream. Any additional
   * arguments to this function are passed as parameters to event listeners.
   *
   * @param {string} type the event name
   */
  trigger(type) {
    const callbacks = this.listeners[type];

    if (!callbacks) {
      return;
    }

    // Slicing the arguments on every invocation of this method
    // can add a significant amount of overhead. Avoid the
    // intermediate object creation for the common case of a
    // single callback argument
    if (arguments.length === 2) {
      const length = callbacks.length;

      for (let i = 0; i < length; ++i) {
        callbacks[i].call(this, arguments[1]);
      }
    } else {
      const args = Array.prototype.slice.call(arguments, 1);
      const length = callbacks.length;

      for (let i = 0; i < length; ++i) {
        callbacks[i].apply(this, args);
      }
    }
  }

  /**
   * Destroys the stream and cleans up.
   */
  dispose() {
    this.listeners = {};
  }

  /**
   * Forwards all `data` events on this stream to the destination stream. The
   * destination stream should provide a method `push` to receive the data
   * events as they arrive.
   *
   * @param {Stream} destination the stream that will receive all `data` events
   * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
   */
  pipe(destination) {
    this.on('data', function(data) {
      destination.push(data);
    });
  }
}