First release

Owen Quinlan 2021-07-02 19:29:34 +10:00
commit fa6c85266e
2339 changed files with 761050 additions and 0 deletions

node_modules/mux.js/cjs/tools/caption-packet-parser.js generated vendored Normal file

@@ -0,0 +1,189 @@
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Reads in-band caption information from a video elementary
* stream. Captions must follow the CEA-708 standard for injection
* into an MPEG-2 transport stream.
* @see https://en.wikipedia.org/wiki/CEA-708
* @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
*/
'use strict'; // Supplemental enhancement information (SEI) NAL units have a
// payload type field to indicate how they are to be
// interpreted. CEA-708 caption content is always transmitted with
// payload type 0x04.
var USER_DATA_REGISTERED_ITU_T_T35 = 4,
RBSP_TRAILING_BITS = 128;
/**
* Parse a supplemental enhancement information (SEI) NAL unit.
* Stops parsing once a message of type ITU T T35 has been found.
*
* @param bytes {Uint8Array} the bytes of a SEI NAL unit
* @return {object} the parsed SEI payload
* @see Rec. ITU-T H.264, 7.3.2.3.1
*/
var parseSei = function parseSei(bytes) {
var i = 0,
result = {
payloadType: -1,
payloadSize: 0
},
payloadType = 0,
payloadSize = 0; // go through the sei_rbsp parsing each individual sei_message
while (i < bytes.byteLength) {
// stop once we have hit the end of the sei_rbsp
if (bytes[i] === RBSP_TRAILING_BITS) {
break;
} // Parse payload type
while (bytes[i] === 0xFF) {
payloadType += 255;
i++;
}
payloadType += bytes[i++]; // Parse payload size
while (bytes[i] === 0xFF) {
payloadSize += 255;
i++;
}
payloadSize += bytes[i++]; // this sei_message is a 608/708 caption so save it and break
// there can only ever be one caption message in a frame's sei
if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
var userIdentifier = String.fromCharCode(bytes[i + 3], bytes[i + 4], bytes[i + 5], bytes[i + 6]);
if (userIdentifier === 'GA94') {
result.payloadType = payloadType;
result.payloadSize = payloadSize;
result.payload = bytes.subarray(i, i + payloadSize);
break;
} else {
result.payload = void 0;
}
} // skip the payload and parse the next message
i += payloadSize;
payloadType = 0;
payloadSize = 0;
}
return result;
}; // see ANSI/SCTE 128-1 (2013), section 8.1
var parseUserData = function parseUserData(sei) {
// itu_t_t35_country_code must be 181 (United States) for
// captions
if (sei.payload[0] !== 181) {
return null;
} // itu_t_t35_provider_code should be 49 (ATSC) for captions
if ((sei.payload[1] << 8 | sei.payload[2]) !== 49) {
return null;
} // the user_identifier should be "GA94" to indicate ATSC1 data
if (String.fromCharCode(sei.payload[3], sei.payload[4], sei.payload[5], sei.payload[6]) !== 'GA94') {
return null;
} // finally, user_data_type_code should be 0x03 for caption data
if (sei.payload[7] !== 0x03) {
return null;
} // return the user_data_type_structure and strip the trailing
// marker bits
return sei.payload.subarray(8, sei.payload.length - 1);
}; // see CEA-708-D, section 4.4
var parseCaptionPackets = function parseCaptionPackets(pts, userData) {
var results = [],
i,
count,
offset,
data; // if this is just filler, return immediately
if (!(userData[0] & 0x40)) {
return results;
} // parse out the cc_data_1 and cc_data_2 fields
count = userData[0] & 0x1f;
for (i = 0; i < count; i++) {
offset = i * 3;
data = {
type: userData[offset + 2] & 0x03,
pts: pts
}; // capture cc data when cc_valid is 1
if (userData[offset + 2] & 0x04) {
data.ccData = userData[offset + 3] << 8 | userData[offset + 4];
results.push(data);
}
}
return results;
};
var discardEmulationPreventionBytes = function discardEmulationPreventionBytes(data) {
var length = data.byteLength,
emulationPreventionBytesPositions = [],
i = 1,
newLength,
newData; // Find all `Emulation Prevention Bytes`
while (i < length - 2) {
if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
emulationPreventionBytesPositions.push(i + 2);
i += 2;
} else {
i++;
}
} // If no Emulation Prevention Bytes were found just return the original
// array
if (emulationPreventionBytesPositions.length === 0) {
return data;
} // Create a new array to hold the NAL unit data
newLength = length - emulationPreventionBytesPositions.length;
newData = new Uint8Array(newLength);
var sourceIndex = 0;
for (i = 0; i < newLength; sourceIndex++, i++) {
if (sourceIndex === emulationPreventionBytesPositions[0]) {
// Skip this byte
sourceIndex++; // Remove this position index
emulationPreventionBytesPositions.shift();
}
newData[i] = data[sourceIndex];
}
return newData;
}; // exports
module.exports = {
parseSei: parseSei,
parseUserData: parseUserData,
parseCaptionPackets: parseCaptionPackets,
discardEmulationPreventionBytes: discardEmulationPreventionBytes,
USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
};
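// Usage sketch (illustrative, not part of the upstream file): wiring the
// exports together. Assumes `seiNal` is a Uint8Array holding one SEI NAL
// unit payload (emulation prevention bytes still present) and `pts` is its
// presentation timestamp on the 90kHz clock; both names are hypothetical.
var extractCaptionPackets = function extractCaptionPackets(seiNal, pts) {
  // strip 0x000003 emulation prevention sequences before parsing the RBSP
  var sei = parseSei(discardEmulationPreventionBytes(seiNal));
  if (sei.payloadType !== USER_DATA_REGISTERED_ITU_T_T35) {
    return []; // no caption payload found in this SEI
  }
  var userData = parseUserData(sei);
  if (!userData) {
    return []; // payload was not ATSC ('GA94') caption data
  }
  return parseCaptionPackets(pts, userData); // [{ type, pts, ccData }, ...]
};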

node_modules/mux.js/cjs/tools/flv-inspector.js generated vendored Normal file

@@ -0,0 +1,134 @@
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*/
'use strict';
var tagTypes = {
0x08: 'audio',
0x09: 'video',
0x12: 'metadata'
},
hex = function hex(val) {
return '0x' + ('00' + val.toString(16)).slice(-2).toUpperCase();
},
hexStringList = function hexStringList(data) {
var arr = [],
i;
for (i = 0; i < data.byteLength; i++) {
arr.push(hex(data[i]));
}
return arr.join(' ');
},
parseAVCTag = function parseAVCTag(tag, obj) {
var avcPacketTypes = ['AVC Sequence Header', 'AVC NALU', 'AVC End-of-Sequence'],
compositionTime = (tag[1] & parseInt('01111111', 2)) << 16 | tag[2] << 8 | tag[3]; // parenthesized so the sign-bit mask applies before the shift
obj = obj || {};
obj.avcPacketType = avcPacketTypes[tag[0]];
obj.CompositionTime = tag[1] & parseInt('10000000', 2) ? -compositionTime : compositionTime;
if (tag[0] === 1) {
obj.nalUnitTypeRaw = hexStringList(tag.subarray(4, 100));
} else {
obj.data = hexStringList(tag.subarray(4));
}
return obj;
},
parseVideoTag = function parseVideoTag(tag, obj) {
var frameTypes = ['Unknown', 'Keyframe (for AVC, a seekable frame)', 'Inter frame (for AVC, a nonseekable frame)', 'Disposable inter frame (H.263 only)', 'Generated keyframe (reserved for server use only)', 'Video info/command frame'],
codecID = tag[0] & parseInt('00001111', 2);
obj = obj || {};
obj.frameType = frameTypes[(tag[0] & parseInt('11110000', 2)) >>> 4];
obj.codecID = codecID;
if (codecID === 7) {
return parseAVCTag(tag.subarray(1), obj);
}
return obj;
},
parseAACTag = function parseAACTag(tag, obj) {
var packetTypes = ['AAC Sequence Header', 'AAC Raw'];
obj = obj || {};
obj.aacPacketType = packetTypes[tag[0]];
obj.data = hexStringList(tag.subarray(1));
return obj;
},
parseAudioTag = function parseAudioTag(tag, obj) {
var formatTable = ['Linear PCM, platform endian', 'ADPCM', 'MP3', 'Linear PCM, little endian', 'Nellymoser 16-kHz mono', 'Nellymoser 8-kHz mono', 'Nellymoser', 'G.711 A-law logarithmic PCM', 'G.711 mu-law logarithmic PCM', 'reserved', 'AAC', 'Speex', 'MP3 8-kHz', 'Device-specific sound'],
samplingRateTable = ['5.5-kHz', '11-kHz', '22-kHz', '44-kHz'],
soundFormat = (tag[0] & parseInt('11110000', 2)) >>> 4;
obj = obj || {};
obj.soundFormat = formatTable[soundFormat];
obj.soundRate = samplingRateTable[(tag[0] & parseInt('00001100', 2)) >>> 2];
obj.soundSize = (tag[0] & parseInt('00000010', 2)) >>> 1 ? '16-bit' : '8-bit';
obj.soundType = tag[0] & parseInt('00000001', 2) ? 'Stereo' : 'Mono';
if (soundFormat === 10) {
return parseAACTag(tag.subarray(1), obj);
}
return obj;
},
parseGenericTag = function parseGenericTag(tag) {
return {
tagType: tagTypes[tag[0]],
dataSize: tag[1] << 16 | tag[2] << 8 | tag[3],
timestamp: tag[7] << 24 | tag[4] << 16 | tag[5] << 8 | tag[6],
streamID: tag[8] << 16 | tag[9] << 8 | tag[10]
};
},
inspectFlvTag = function inspectFlvTag(tag) {
var header = parseGenericTag(tag);
switch (tag[0]) {
case 0x08:
parseAudioTag(tag.subarray(11), header);
break;
case 0x09:
parseVideoTag(tag.subarray(11), header);
break;
case 0x12:
}
return header;
},
inspectFlv = function inspectFlv(bytes) {
var i = 9,
// header
dataSize,
parsedResults = [],
tag; // traverse the tags
i += 4; // skip previous tag size
while (i < bytes.byteLength) {
dataSize = bytes[i + 1] << 16;
dataSize |= bytes[i + 2] << 8;
dataSize |= bytes[i + 3];
dataSize += 11;
tag = bytes.subarray(i, i + dataSize);
parsedResults.push(inspectFlvTag(tag));
i += dataSize + 4;
}
return parsedResults;
},
textifyFlv = function textifyFlv(flvTagArray) {
return JSON.stringify(flvTagArray, null, 2);
};
module.exports = {
inspectTag: inspectFlvTag,
inspect: inspectFlv,
textify: textifyFlv
};
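// Usage sketch (illustrative, not part of the upstream file): inspecting a
// hand-built FLV audio tag. The 11-byte tag header is followed by an AAC
// payload; the trailing payload bytes are placeholders.
var exampleAudioTag = new Uint8Array([
  0x08, // tagType: audio
  0x00, 0x00, 0x04, // dataSize: 4 bytes of tag data
  0x00, 0x00, 0x00, // timestamp, lower 24 bits
  0x00, // timestamp extension, upper 8 bits
  0x00, 0x00, 0x00, // streamID, always 0
  0xaf, // sound header: AAC (10), 44-kHz, 16-bit, Stereo
  0x01, // aacPacketType: AAC Raw
  0xde, 0xad // placeholder AAC payload
]);
// => { tagType: 'audio', dataSize: 4, soundFormat: 'AAC',
//      aacPacketType: 'AAC Raw', data: '0xDE 0xAD', ... }
console.log(inspectFlvTag(exampleAudioTag));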

node_modules/mux.js/cjs/tools/mp4-inspector.js generated vendored Normal file

@@ -0,0 +1,753 @@
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Parse the internal MP4 structure into an equivalent javascript
* object.
*/
'use strict';
var MAX_UINT32 = Math.pow(2, 32);
var inspectMp4,
_textifyMp,
parseMp4Date = function parseMp4Date(seconds) {
return new Date(seconds * 1000 - 2082844800000);
},
parseType = require('../mp4/parse-type'),
findBox = require('../mp4/find-box'),
nalParse = function nalParse(avcStream) {
var avcView = new DataView(avcStream.buffer, avcStream.byteOffset, avcStream.byteLength),
result = [],
i,
length;
for (i = 0; i + 4 < avcStream.length; i += length) {
length = avcView.getUint32(i);
i += 4; // bail if this doesn't appear to be an H264 stream
if (length <= 0) {
result.push('<span style=\'color:red;\'>MALFORMED DATA</span>');
continue;
}
switch (avcStream[i] & 0x1F) {
case 0x01:
result.push('slice_layer_without_partitioning_rbsp');
break;
case 0x05:
result.push('slice_layer_without_partitioning_rbsp_idr');
break;
case 0x06:
result.push('sei_rbsp');
break;
case 0x07:
result.push('seq_parameter_set_rbsp');
break;
case 0x08:
result.push('pic_parameter_set_rbsp');
break;
case 0x09:
result.push('access_unit_delimiter_rbsp');
break;
default:
result.push('UNKNOWN NAL - ' + (avcStream[i] & 0x1F)); // parenthesized so the mask applies before string concatenation
break;
}
}
return result;
},
// registry of handlers for individual mp4 box types
parse = {
// codingname, not a first-class box type. stsd entries share the
// same format as real boxes so the parsing infrastructure can be
// shared
avc1: function avc1(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
return {
dataReferenceIndex: view.getUint16(6),
width: view.getUint16(24),
height: view.getUint16(26),
horizresolution: view.getUint16(28) + view.getUint16(30) / 65536, // 16.16 fixed point
vertresolution: view.getUint16(32) + view.getUint16(34) / 65536, // 16.16 fixed point
frameCount: view.getUint16(40),
depth: view.getUint16(74),
config: inspectMp4(data.subarray(78, data.byteLength))
};
},
avcC: function avcC(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
configurationVersion: data[0],
avcProfileIndication: data[1],
profileCompatibility: data[2],
avcLevelIndication: data[3],
lengthSizeMinusOne: data[4] & 0x03,
sps: [],
pps: []
},
numOfSequenceParameterSets = data[5] & 0x1f,
numOfPictureParameterSets,
nalSize,
offset,
i; // iterate past any SPSs
offset = 6;
for (i = 0; i < numOfSequenceParameterSets; i++) {
nalSize = view.getUint16(offset);
offset += 2;
result.sps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
offset += nalSize;
} // iterate past any PPSs
numOfPictureParameterSets = data[offset];
offset++;
for (i = 0; i < numOfPictureParameterSets; i++) {
nalSize = view.getUint16(offset);
offset += 2;
result.pps.push(new Uint8Array(data.subarray(offset, offset + nalSize)));
offset += nalSize;
}
return result;
},
btrt: function btrt(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
return {
bufferSizeDB: view.getUint32(0),
maxBitrate: view.getUint32(4),
avgBitrate: view.getUint32(8)
};
},
edts: function edts(data) {
return {
boxes: inspectMp4(data)
};
},
elst: function elst(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4)),
edits: []
},
entryCount = view.getUint32(4),
i;
for (i = 8; entryCount; entryCount--) {
if (result.version === 0) {
result.edits.push({
segmentDuration: view.getUint32(i),
mediaTime: view.getInt32(i + 4),
mediaRate: view.getUint16(i + 8) + view.getUint16(i + 10) / (256 * 256)
});
i += 12;
} else {
result.edits.push({
segmentDuration: view.getUint32(i) * MAX_UINT32 + view.getUint32(i + 4),
mediaTime: view.getUint32(i + 8) * MAX_UINT32 + view.getUint32(i + 12),
mediaRate: view.getUint16(i + 16) + view.getUint16(i + 18) / (256 * 256)
});
i += 20;
}
}
return result;
},
esds: function esds(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
esId: data[6] << 8 | data[7],
streamPriority: data[8] & 0x1f,
decoderConfig: {
objectProfileIndication: data[11],
streamType: data[12] >>> 2 & 0x3f,
bufferSize: data[13] << 16 | data[14] << 8 | data[15],
maxBitrate: data[16] << 24 | data[17] << 16 | data[18] << 8 | data[19],
avgBitrate: data[20] << 24 | data[21] << 16 | data[22] << 8 | data[23],
decoderConfigDescriptor: {
tag: data[24],
length: data[25],
audioObjectType: data[26] >>> 3 & 0x1f,
samplingFrequencyIndex: (data[26] & 0x07) << 1 | data[27] >>> 7 & 0x01,
channelConfiguration: data[27] >>> 3 & 0x0f
}
}
};
},
ftyp: function ftyp(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
majorBrand: parseType(data.subarray(0, 4)),
minorVersion: view.getUint32(4),
compatibleBrands: []
},
i = 8;
while (i < data.byteLength) {
result.compatibleBrands.push(parseType(data.subarray(i, i + 4)));
i += 4;
}
return result;
},
dinf: function dinf(data) {
return {
boxes: inspectMp4(data)
};
},
dref: function dref(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
dataReferences: inspectMp4(data.subarray(8))
};
},
hdlr: function hdlr(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4)),
handlerType: parseType(data.subarray(8, 12)),
name: ''
},
i = 8; // parse out the name field
for (i = 24; i < data.byteLength; i++) {
if (data[i] === 0x00) {
// the name field is null-terminated
i++;
break;
}
result.name += String.fromCharCode(data[i]);
} // decode UTF-8 to javascript's internal representation
// see http://ecmanaut.blogspot.com/2006/07/encoding-decoding-utf8-in-javascript.html
result.name = decodeURIComponent(escape(result.name));
return result;
},
mdat: function mdat(data) {
return {
byteLength: data.byteLength,
nals: nalParse(data)
};
},
mdhd: function mdhd(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
i = 4,
language,
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4)),
language: ''
};
if (result.version === 1) {
i += 4;
result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 8;
result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 4;
result.timescale = view.getUint32(i);
i += 8;
result.duration = view.getUint32(i); // truncating top 4 bytes
} else {
result.creationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.modificationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.timescale = view.getUint32(i);
i += 4;
result.duration = view.getUint32(i);
}
i += 4; // language is stored as an ISO-639-2/T code in an array of three 5-bit fields
// each field is the packed difference between its ASCII value and 0x60
language = view.getUint16(i);
result.language += String.fromCharCode((language >> 10) + 0x60);
result.language += String.fromCharCode(((language & 0x03e0) >> 5) + 0x60);
result.language += String.fromCharCode((language & 0x1f) + 0x60);
return result;
},
mdia: function mdia(data) {
return {
boxes: inspectMp4(data)
};
},
mfhd: function mfhd(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
sequenceNumber: data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]
};
},
minf: function minf(data) {
return {
boxes: inspectMp4(data)
};
},
// codingname, not a first-class box type. stsd entries share the
// same format as real boxes so the parsing infrastructure can be
// shared
mp4a: function mp4a(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
// 6 bytes reserved
dataReferenceIndex: view.getUint16(6),
// 4 + 4 bytes reserved
channelcount: view.getUint16(16),
samplesize: view.getUint16(18),
// 2 bytes pre_defined
// 2 bytes reserved
samplerate: view.getUint16(24) + view.getUint16(26) / 65536
}; // if there are more bytes to process, assume this is an ISO/IEC
// 14496-14 MP4AudioSampleEntry and parse the ESDBox
if (data.byteLength > 28) {
result.streamDescriptor = inspectMp4(data.subarray(28))[0];
}
return result;
},
moof: function moof(data) {
return {
boxes: inspectMp4(data)
};
},
moov: function moov(data) {
return {
boxes: inspectMp4(data)
};
},
mvex: function mvex(data) {
return {
boxes: inspectMp4(data)
};
},
mvhd: function mvhd(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
i = 4,
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4))
};
if (result.version === 1) {
i += 4;
result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 8;
result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 4;
result.timescale = view.getUint32(i);
i += 8;
result.duration = view.getUint32(i); // truncating top 4 bytes
} else {
result.creationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.modificationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.timescale = view.getUint32(i);
i += 4;
result.duration = view.getUint32(i);
}
i += 4; // rate is 16.16 fixed point; convert back to a number
result.rate = view.getUint16(i) + view.getUint16(i + 2) / 65536;
i += 4; // volume is 8.8 fixed point
result.volume = view.getUint8(i) + view.getUint8(i + 1) / 256;
i += 2;
i += 2;
i += 2 * 4;
result.matrix = new Uint32Array(data.subarray(i, i + 9 * 4));
i += 9 * 4;
i += 6 * 4;
result.nextTrackId = view.getUint32(i);
return result;
},
pdin: function pdin(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
return {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4)),
rate: view.getUint32(4),
initialDelay: view.getUint32(8)
};
},
sdtp: function sdtp(data) {
var result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
samples: []
},
i;
for (i = 4; i < data.byteLength; i++) {
result.samples.push({
dependsOn: (data[i] & 0x30) >> 4,
isDependedOn: (data[i] & 0x0c) >> 2,
hasRedundancy: data[i] & 0x03
});
}
return result;
},
sidx: require('./parse-sidx.js'),
smhd: function smhd(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
balance: data[4] + data[5] / 256
};
},
stbl: function stbl(data) {
return {
boxes: inspectMp4(data)
};
},
ctts: function ctts(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4)),
compositionOffsets: []
},
entryCount = view.getUint32(4),
i;
for (i = 8; entryCount; i += 8, entryCount--) {
result.compositionOffsets.push({
sampleCount: view.getUint32(i),
sampleOffset: view[result.version === 0 ? 'getUint32' : 'getInt32'](i + 4)
});
}
return result;
},
stss: function stss(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4)),
syncSamples: []
},
entryCount = view.getUint32(4),
i;
for (i = 8; entryCount; i += 4, entryCount--) {
result.syncSamples.push(view.getUint32(i));
}
return result;
},
stco: function stco(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
chunkOffsets: []
},
entryCount = view.getUint32(4),
i;
for (i = 8; entryCount; i += 4, entryCount--) {
result.chunkOffsets.push(view.getUint32(i));
}
return result;
},
stsc: function stsc(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
entryCount = view.getUint32(4),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
sampleToChunks: []
},
i;
for (i = 8; entryCount; i += 12, entryCount--) {
result.sampleToChunks.push({
firstChunk: view.getUint32(i),
samplesPerChunk: view.getUint32(i + 4),
sampleDescriptionIndex: view.getUint32(i + 8)
});
}
return result;
},
stsd: function stsd(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
sampleDescriptions: inspectMp4(data.subarray(8))
};
},
stsz: function stsz(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
sampleSize: view.getUint32(4),
entries: []
},
i;
for (i = 12; i < data.byteLength; i += 4) {
result.entries.push(view.getUint32(i));
}
return result;
},
stts: function stts(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
timeToSamples: []
},
entryCount = view.getUint32(4),
i;
for (i = 8; entryCount; i += 8, entryCount--) {
result.timeToSamples.push({
sampleCount: view.getUint32(i),
sampleDelta: view.getUint32(i + 4)
});
}
return result;
},
styp: function styp(data) {
return parse.ftyp(data);
},
tfdt: require('./parse-tfdt.js'),
tfhd: require('./parse-tfhd.js'),
tkhd: function tkhd(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
i = 4,
result = {
version: view.getUint8(0),
flags: new Uint8Array(data.subarray(1, 4))
};
if (result.version === 1) {
i += 4;
result.creationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 8;
result.modificationTime = parseMp4Date(view.getUint32(i)); // truncating top 4 bytes
i += 4;
result.trackId = view.getUint32(i);
i += 4;
i += 8;
result.duration = view.getUint32(i); // truncating top 4 bytes
} else {
result.creationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.modificationTime = parseMp4Date(view.getUint32(i));
i += 4;
result.trackId = view.getUint32(i);
i += 4;
i += 4;
result.duration = view.getUint32(i);
}
i += 4;
i += 2 * 4;
result.layer = view.getUint16(i);
i += 2;
result.alternateGroup = view.getUint16(i);
i += 2; // volume is 8.8 fixed point; convert back to a number
result.volume = view.getUint8(i) + view.getUint8(i + 1) / 256;
i += 2;
i += 2;
result.matrix = new Uint32Array(data.subarray(i, i + 9 * 4));
i += 9 * 4;
result.width = view.getUint16(i) + view.getUint16(i + 2) / 65536;
i += 4;
result.height = view.getUint16(i) + view.getUint16(i + 2) / 65536;
return result;
},
traf: function traf(data) {
return {
boxes: inspectMp4(data)
};
},
trak: function trak(data) {
return {
boxes: inspectMp4(data)
};
},
trex: function trex(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
trackId: view.getUint32(4),
defaultSampleDescriptionIndex: view.getUint32(8),
defaultSampleDuration: view.getUint32(12),
defaultSampleSize: view.getUint32(16),
sampleDependsOn: data[20] & 0x03,
sampleIsDependedOn: (data[21] & 0xc0) >> 6,
sampleHasRedundancy: (data[21] & 0x30) >> 4,
samplePaddingValue: (data[21] & 0x0e) >> 1,
sampleIsDifferenceSample: !!(data[21] & 0x01),
sampleDegradationPriority: view.getUint16(22)
};
},
trun: require('./parse-trun.js'),
'url ': function url(data) {
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4))
};
},
vmhd: function vmhd(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength);
return {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
graphicsmode: view.getUint16(4),
opcolor: new Uint16Array([view.getUint16(6), view.getUint16(8), view.getUint16(10)])
};
}
};
/**
* Return a javascript array of box objects parsed from an ISO base
* media file.
* @param data {Uint8Array} the binary data of the media to be inspected
* @return {array} a javascript array of potentially nested box objects
*/
inspectMp4 = function inspectMp4(data) {
var i = 0,
result = [],
view,
size,
type,
end,
box; // Convert data from Uint8Array to ArrayBuffer to follow the DataView API
var ab = new ArrayBuffer(data.length);
var v = new Uint8Array(ab);
for (var z = 0; z < data.length; ++z) {
v[z] = data[z];
}
view = new DataView(ab);
while (i < data.byteLength) {
// parse box data
size = view.getUint32(i);
type = parseType(data.subarray(i + 4, i + 8));
end = size > 1 ? i + size : data.byteLength; // parse type-specific data
box = (parse[type] || function (data) {
return {
data: data
};
})(data.subarray(i + 8, end));
box.size = size;
box.type = type; // store this box and move to the next
result.push(box);
i = end;
}
return result;
};
/**
* Returns a textual representation of the javascript representation
* of an MP4 file. You can use it as an alternative to
* JSON.stringify() to compare inspected MP4s.
* @param inspectedMp4 {array} the parsed array of boxes in an MP4
* file
* @param depth {number} (optional) the number of ancestor boxes of
* the elements of inspectedMp4. Assumed to be zero if unspecified.
* @return {string} a text representation of the parsed MP4
*/
_textifyMp = function textifyMp4(inspectedMp4, depth) {
var indent;
depth = depth || 0;
indent = new Array(depth * 2 + 1).join(' '); // iterate over all the boxes
return inspectedMp4.map(function (box, index) {
// list the box type first at the current indentation level
return indent + box.type + '\n' + // the type is printed above; child boxes are handled separately below
Object.keys(box).filter(function (key) {
return key !== 'type' && key !== 'boxes'; // output all the box properties
}).map(function (key) {
var prefix = indent + ' ' + key + ': ',
value = box[key]; // print out raw bytes as hexadecimal
if (value instanceof Uint8Array || value instanceof Uint32Array) {
var bytes = Array.prototype.slice.call(new Uint8Array(value.buffer, value.byteOffset, value.byteLength)).map(function (byte) {
return ' ' + ('00' + byte.toString(16)).slice(-2);
}).join('').match(/.{1,24}/g);
if (!bytes) {
return prefix + '<>';
}
if (bytes.length === 1) {
return prefix + '<' + bytes.join('').slice(1) + '>';
}
return prefix + '<\n' + bytes.map(function (line) {
return indent + ' ' + line;
}).join('\n') + '\n' + indent + ' >';
} // stringify generic objects
return prefix + JSON.stringify(value, null, 2).split('\n').map(function (line, index) {
if (index === 0) {
return line;
}
return indent + ' ' + line;
}).join('\n');
}).join('\n') + ( // recursively textify the child boxes
box.boxes ? '\n' + _textifyMp(box.boxes, depth + 1) : '');
}).join('\n');
};
module.exports = {
inspect: inspectMp4,
textify: _textifyMp,
parseType: parseType,
findBox: findBox,
parseTraf: parse.traf,
parseTfdt: parse.tfdt,
parseHdlr: parse.hdlr,
parseTfhd: parse.tfhd,
parseTrun: parse.trun,
parseSidx: parse.sidx
};
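// Usage sketch (illustrative, not part of the upstream file): inspecting a
// minimal hand-built 'ftyp' box and pretty-printing the parsed result.
var exampleFtyp = new Uint8Array([
  0x00, 0x00, 0x00, 0x18, // box size: 24 bytes
  0x66, 0x74, 0x79, 0x70, // box type: 'ftyp'
  0x69, 0x73, 0x6f, 0x6d, // majorBrand: 'isom'
  0x00, 0x00, 0x00, 0x01, // minorVersion: 1
  0x69, 0x73, 0x6f, 0x6d, // compatibleBrands[0]: 'isom'
  0x61, 0x76, 0x63, 0x31 // compatibleBrands[1]: 'avc1'
]);
var exampleBoxes = inspectMp4(exampleFtyp);
// => [{ majorBrand: 'isom', minorVersion: 1,
//       compatibleBrands: ['isom', 'avc1'], size: 24, type: 'ftyp' }]
console.log(_textifyMp(exampleBoxes));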

node_modules/mux.js/cjs/tools/parse-sample-flags.js generated vendored Normal file

@@ -0,0 +1,15 @@
"use strict";
var parseSampleFlags = function parseSampleFlags(flags) {
return {
isLeading: (flags[0] & 0x0c) >>> 2,
dependsOn: flags[0] & 0x03,
isDependedOn: (flags[1] & 0xc0) >>> 6,
hasRedundancy: (flags[1] & 0x30) >>> 4,
paddingValue: (flags[1] & 0x0e) >>> 1,
isNonSyncSample: flags[1] & 0x01,
degradationPriority: flags[2] << 8 | flags[3]
};
};
module.exports = parseSampleFlags;
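// Usage sketch (illustrative, not part of the upstream file): decoding the
// 4-byte sample_flags field as it appears in trex/tfhd/trun boxes. Here
// 0x02010000 marks a non-sync sample that depends on other samples.
// => { isLeading: 0, dependsOn: 2, isDependedOn: 0, hasRedundancy: 0,
//      paddingValue: 0, isNonSyncSample: 1, degradationPriority: 0 }
console.log(parseSampleFlags(new Uint8Array([0x02, 0x01, 0x00, 0x00])));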

node_modules/mux.js/cjs/tools/parse-sidx.js generated vendored Normal file

@@ -0,0 +1,46 @@
"use strict";
var MAX_UINT32 = Math.pow(2, 32);
var parseSidx = function parseSidx(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
references: [],
referenceId: view.getUint32(4),
timescale: view.getUint32(8)
},
i = 12;
if (result.version === 0) {
result.earliestPresentationTime = view.getUint32(i);
result.firstOffset = view.getUint32(i + 4);
i += 8;
} else {
// read 64 bits
result.earliestPresentationTime = view.getUint32(i) * MAX_UINT32 + view.getUint32(i + 4);
result.firstOffset = view.getUint32(i + 8) * MAX_UINT32 + view.getUint32(i + 12);
i += 16;
}
i += 2; // reserved
var referenceCount = view.getUint16(i);
i += 2; // start of references
for (; referenceCount > 0; i += 12, referenceCount--) {
result.references.push({
referenceType: (data[i] & 0x80) >>> 7,
referencedSize: view.getUint32(i) & 0x7FFFFFFF,
subsegmentDuration: view.getUint32(i + 4),
startsWithSap: !!(data[i + 8] & 0x80),
sapType: (data[i + 8] & 0x70) >>> 4,
sapDeltaTime: view.getUint32(i + 8) & 0x0FFFFFFF
});
}
return result;
};
module.exports = parseSidx;
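// Usage sketch (illustrative, not part of the upstream file): building a
// version-0 sidx payload with a single reference and parsing it. Offsets
// mirror the reads above: referenceId@4, timescale@8, EPT@12, firstOffset@16,
// reserved@20, reference_count@22, then 12 bytes per reference.
var exampleSidx = new Uint8Array(36);
var exampleView = new DataView(exampleSidx.buffer);
exampleView.setUint32(4, 1); // referenceId
exampleView.setUint32(8, 90000); // timescale: 90kHz
exampleView.setUint16(22, 1); // reference_count
exampleView.setUint32(24, 1000); // reference_type 0, referenced_size 1000
exampleView.setUint32(28, 180000); // subsegment_duration: 2 seconds
exampleView.setUint32(32, 0x90000000); // starts_with_SAP 1, SAP_type 1
// => { version: 0, referenceId: 1, timescale: 90000, references: [{
//      referenceType: 0, referencedSize: 1000, subsegmentDuration: 180000,
//      startsWithSap: true, sapType: 1, sapDeltaTime: 0 }], ... }
console.log(parseSidx(exampleSidx));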

node_modules/mux.js/cjs/tools/parse-tfdt.js generated vendored Normal file

@@ -0,0 +1,20 @@
"use strict";
var toUnsigned = require('../utils/bin').toUnsigned;
var tfdt = function tfdt(data) {
var result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
baseMediaDecodeTime: toUnsigned(data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7])
};
if (result.version === 1) {
result.baseMediaDecodeTime *= Math.pow(2, 32);
result.baseMediaDecodeTime += toUnsigned(data[8] << 24 | data[9] << 16 | data[10] << 8 | data[11]);
}
return result;
};
module.exports = tfdt;
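// Usage sketch (illustrative, not part of the upstream file): a version-1
// tfdt carries a 64-bit baseMediaDecodeTime; the high 32 bits are scaled by
// 2^32 and added to the low 32 bits.
// => { version: 1, flags: <0, 0, 0>, baseMediaDecodeTime: 4294967296 }
console.log(tfdt(new Uint8Array([
  0x01, // version 1
  0x00, 0x00, 0x00, // flags
  0x00, 0x00, 0x00, 0x01, // high 32 bits of baseMediaDecodeTime
  0x00, 0x00, 0x00, 0x00 // low 32 bits of baseMediaDecodeTime
])));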

node_modules/mux.js/cjs/tools/parse-tfhd.js generated vendored Normal file

@@ -0,0 +1,58 @@
"use strict";
var tfhd = function tfhd(data) {
var view = new DataView(data.buffer, data.byteOffset, data.byteLength),
result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
trackId: view.getUint32(4)
},
baseDataOffsetPresent = result.flags[2] & 0x01, // flag 0x000001
sampleDescriptionIndexPresent = result.flags[2] & 0x02, // flag 0x000002
defaultSampleDurationPresent = result.flags[2] & 0x08, // flag 0x000008
defaultSampleSizePresent = result.flags[2] & 0x10, // flag 0x000010
defaultSampleFlagsPresent = result.flags[2] & 0x20, // flag 0x000020
durationIsEmpty = result.flags[0] & 0x01, // flag 0x010000 lives in the high flag byte
defaultBaseIsMoof = result.flags[0] & 0x02, // flag 0x020000 lives in the high flag byte
i;
i = 8;
if (baseDataOffsetPresent) {
i += 4; // truncate top 4 bytes
// FIXME: should we read the full 64 bits?
result.baseDataOffset = view.getUint32(12);
i += 4;
}
if (sampleDescriptionIndexPresent) {
result.sampleDescriptionIndex = view.getUint32(i);
i += 4;
}
if (defaultSampleDurationPresent) {
result.defaultSampleDuration = view.getUint32(i);
i += 4;
}
if (defaultSampleSizePresent) {
result.defaultSampleSize = view.getUint32(i);
i += 4;
}
if (defaultSampleFlagsPresent) {
result.defaultSampleFlags = view.getUint32(i);
}
if (durationIsEmpty) {
result.durationIsEmpty = true;
}
if (!baseDataOffsetPresent && defaultBaseIsMoof) {
result.baseDataOffsetIsMoof = true;
}
return result;
};
module.exports = tfhd;
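// Usage sketch (illustrative, not part of the upstream file): a tfhd whose
// flags (0x000008) declare only default-sample-duration-present.
// => { version: 0, flags: <0, 0, 8>, trackId: 1, defaultSampleDuration: 3600 }
console.log(tfhd(new Uint8Array([
  0x00, // version
  0x00, 0x00, 0x08, // flags: default-sample-duration-present
  0x00, 0x00, 0x00, 0x01, // trackId: 1
  0x00, 0x00, 0x0e, 0x10 // defaultSampleDuration: 3600 (90kHz ticks)
])));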

node_modules/mux.js/cjs/tools/parse-trun.js generated vendored Normal file

@@ -0,0 +1,101 @@
"use strict";
var parseSampleFlags = require('./parse-sample-flags.js');
var trun = function trun(data) {
var result = {
version: data[0],
flags: new Uint8Array(data.subarray(1, 4)),
samples: []
},
view = new DataView(data.buffer, data.byteOffset, data.byteLength),
// Flag interpretation: flags is a big-endian 24-bit field split across three
// bytes, so e.g. flag 0x000100 is tested against bit 0x01 of flags[1]
dataOffsetPresent = result.flags[2] & 0x01, // flag 0x000001
firstSampleFlagsPresent = result.flags[2] & 0x04, // flag 0x000004
sampleDurationPresent = result.flags[1] & 0x01, // flag 0x000100
sampleSizePresent = result.flags[1] & 0x02, // flag 0x000200
sampleFlagsPresent = result.flags[1] & 0x04, // flag 0x000400
sampleCompositionTimeOffsetPresent = result.flags[1] & 0x08, // flag 0x000800
sampleCount = view.getUint32(4),
offset = 8,
sample;
if (dataOffsetPresent) {
// 32 bit signed integer
result.dataOffset = view.getInt32(offset);
offset += 4;
} // Overrides the flags for the first sample only. The order of
// optional values will be: duration, size, compositionTimeOffset
if (firstSampleFlagsPresent && sampleCount) {
sample = {
flags: parseSampleFlags(data.subarray(offset, offset + 4))
};
offset += 4;
if (sampleDurationPresent) {
sample.duration = view.getUint32(offset);
offset += 4;
}
if (sampleSizePresent) {
sample.size = view.getUint32(offset);
offset += 4;
}
if (sampleCompositionTimeOffsetPresent) {
if (result.version === 1) {
sample.compositionTimeOffset = view.getInt32(offset);
} else {
sample.compositionTimeOffset = view.getUint32(offset);
}
offset += 4;
}
result.samples.push(sample);
sampleCount--;
}
while (sampleCount--) {
sample = {};
if (sampleDurationPresent) {
sample.duration = view.getUint32(offset);
offset += 4;
}
if (sampleSizePresent) {
sample.size = view.getUint32(offset);
offset += 4;
}
if (sampleFlagsPresent) {
sample.flags = parseSampleFlags(data.subarray(offset, offset + 4));
offset += 4;
}
if (sampleCompositionTimeOffsetPresent) {
if (result.version === 1) {
sample.compositionTimeOffset = view.getInt32(offset);
} else {
sample.compositionTimeOffset = view.getUint32(offset);
}
offset += 4;
}
result.samples.push(sample);
}
return result;
};
module.exports = trun;
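// Usage sketch (illustrative, not part of the upstream file): a version-0
// trun with data-offset (0x000001), sample-duration (0x000100) and
// sample-size (0x000200) present, describing a single sample.
// => { version: 0, dataOffset: 16, samples: [{ duration: 3600, size: 1024 }], ... }
console.log(trun(new Uint8Array([
  0x00, // version
  0x00, 0x03, 0x01, // flags
  0x00, 0x00, 0x00, 0x01, // sample_count: 1
  0x00, 0x00, 0x00, 0x10, // data_offset: 16
  0x00, 0x00, 0x0e, 0x10, // sample_duration: 3600
  0x00, 0x00, 0x04, 0x00 // sample_size: 1024
])));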

node_modules/mux.js/cjs/tools/ts-inspector.js generated vendored Normal file

@@ -0,0 +1,555 @@
/**
* mux.js
*
* Copyright (c) Brightcove
* Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
*
* Parse mpeg2 transport stream packets to extract basic timing information
*/
'use strict';
var StreamTypes = require('../m2ts/stream-types.js');
var handleRollover = require('../m2ts/timestamp-rollover-stream.js').handleRollover;
var probe = {};
probe.ts = require('../m2ts/probe.js');
probe.aac = require('../aac/utils.js');
var ONE_SECOND_IN_TS = require('../utils/clock').ONE_SECOND_IN_TS;
var MP2T_PACKET_LENGTH = 188,
// bytes
SYNC_BYTE = 0x47;
/**
* walks through segment data looking for pat and pmt packets to parse out
* program map table information
*/
var parsePsi_ = function parsePsi_(bytes, pmt) {
var startIndex = 0,
endIndex = MP2T_PACKET_LENGTH,
packet,
type;
while (endIndex < bytes.byteLength) {
// Look for a pair of start and end sync bytes in the data.
if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
// We found a packet
packet = bytes.subarray(startIndex, endIndex);
type = probe.ts.parseType(packet, pmt.pid);
switch (type) {
case 'pat':
pmt.pid = probe.ts.parsePat(packet);
break;
case 'pmt':
var table = probe.ts.parsePmt(packet);
pmt.table = pmt.table || {};
Object.keys(table).forEach(function (key) {
pmt.table[key] = table[key];
});
break;
default:
break;
}
startIndex += MP2T_PACKET_LENGTH;
endIndex += MP2T_PACKET_LENGTH;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// forward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex++;
endIndex++;
}
};
/**
* walks through the segment data from the start and end to get timing information
* for the first and last audio pes packets
*/
var parseAudioPes_ = function parseAudioPes_(bytes, pmt, result) {
var startIndex = 0,
endIndex = MP2T_PACKET_LENGTH,
packet,
type,
pesType,
pusi,
parsed;
var endLoop = false; // Start walking from start of segment to get first audio packet
while (endIndex <= bytes.byteLength) {
// Look for a pair of start and end sync bytes in the data.
if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
// We found a packet
packet = bytes.subarray(startIndex, endIndex);
type = probe.ts.parseType(packet, pmt.pid);
switch (type) {
case 'pes':
pesType = probe.ts.parsePesType(packet, pmt.table);
pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
if (pesType === 'audio' && pusi) {
parsed = probe.ts.parsePesTime(packet);
if (parsed) {
parsed.type = 'audio';
result.audio.push(parsed);
endLoop = true;
}
}
break;
default:
break;
}
if (endLoop) {
break;
}
startIndex += MP2T_PACKET_LENGTH;
endIndex += MP2T_PACKET_LENGTH;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// forward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex++;
endIndex++;
} // Start walking from end of segment to get last audio packet
endIndex = bytes.byteLength;
startIndex = endIndex - MP2T_PACKET_LENGTH;
endLoop = false;
while (startIndex >= 0) {
// Look for a pair of start and end sync bytes in the data.
if (bytes[startIndex] === SYNC_BYTE && (bytes[endIndex] === SYNC_BYTE || endIndex === bytes.byteLength)) {
// We found a packet
packet = bytes.subarray(startIndex, endIndex);
type = probe.ts.parseType(packet, pmt.pid);
switch (type) {
case 'pes':
pesType = probe.ts.parsePesType(packet, pmt.table);
pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
if (pesType === 'audio' && pusi) {
parsed = probe.ts.parsePesTime(packet);
if (parsed) {
parsed.type = 'audio';
result.audio.push(parsed);
endLoop = true;
}
}
break;
default:
break;
}
if (endLoop) {
break;
}
startIndex -= MP2T_PACKET_LENGTH;
endIndex -= MP2T_PACKET_LENGTH;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// backward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex--;
endIndex--;
}
};
/**
* walks through the segment data from the start and end to get timing information
* for the first and last video pes packets as well as timing information for the first
* key frame.
*/
var parseVideoPes_ = function parseVideoPes_(bytes, pmt, result) {
var startIndex = 0,
endIndex = MP2T_PACKET_LENGTH,
packet,
type,
pesType,
pusi,
parsed,
frame,
i,
pes;
var endLoop = false;
var currentFrame = {
data: [],
size: 0
}; // Start walking from start of segment to get first video packet
while (endIndex < bytes.byteLength) {
// Look for a pair of start and end sync bytes in the data.
if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
// We found a packet
packet = bytes.subarray(startIndex, endIndex);
type = probe.ts.parseType(packet, pmt.pid);
switch (type) {
case 'pes':
pesType = probe.ts.parsePesType(packet, pmt.table);
pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
if (pesType === 'video') {
if (pusi && !endLoop) {
parsed = probe.ts.parsePesTime(packet);
if (parsed) {
parsed.type = 'video';
result.video.push(parsed);
endLoop = true;
}
}
if (!result.firstKeyFrame) {
if (pusi) {
if (currentFrame.size !== 0) {
frame = new Uint8Array(currentFrame.size);
i = 0;
while (currentFrame.data.length) {
pes = currentFrame.data.shift();
frame.set(pes, i);
i += pes.byteLength;
}
if (probe.ts.videoPacketContainsKeyFrame(frame)) {
var firstKeyFrame = probe.ts.parsePesTime(frame); // PTS/DTS may not be available. Simply *not* setting
// the keyframe seems to work fine with HLS playback
// and definitely preferable to a crash with TypeError...
if (firstKeyFrame) {
result.firstKeyFrame = firstKeyFrame;
result.firstKeyFrame.type = 'video';
} else {
// eslint-disable-next-line
console.warn('Failed to extract PTS/DTS from PES at first keyframe. ' + 'This could be an unusual TS segment, or else mux.js did not ' + 'parse your TS segment correctly. If you know your TS ' + 'segments do contain PTS/DTS on keyframes please file a bug ' + 'report! You can try ffprobe to double check for yourself.');
}
}
currentFrame.size = 0;
}
}
currentFrame.data.push(packet);
currentFrame.size += packet.byteLength;
}
}
break;
default:
break;
}
if (endLoop && result.firstKeyFrame) {
break;
}
startIndex += MP2T_PACKET_LENGTH;
endIndex += MP2T_PACKET_LENGTH;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// forward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex++;
endIndex++;
} // Start walking from end of segment to get last video packet
endIndex = bytes.byteLength;
startIndex = endIndex - MP2T_PACKET_LENGTH;
endLoop = false;
while (startIndex >= 0) {
// Look for a pair of start and end sync bytes in the data.
if (bytes[startIndex] === SYNC_BYTE && bytes[endIndex] === SYNC_BYTE) {
// We found a packet
packet = bytes.subarray(startIndex, endIndex);
type = probe.ts.parseType(packet, pmt.pid);
switch (type) {
case 'pes':
pesType = probe.ts.parsePesType(packet, pmt.table);
pusi = probe.ts.parsePayloadUnitStartIndicator(packet);
if (pesType === 'video' && pusi) {
parsed = probe.ts.parsePesTime(packet);
if (parsed) {
parsed.type = 'video';
result.video.push(parsed);
endLoop = true;
}
}
break;
default:
break;
}
if (endLoop) {
break;
}
startIndex -= MP2T_PACKET_LENGTH;
endIndex -= MP2T_PACKET_LENGTH;
continue;
} // If we get here, we have somehow become de-synchronized and we need to step
// backward one byte at a time until we find a pair of sync bytes that denote
// a packet
startIndex--;
endIndex--;
}
};
/**
* Adjusts the timestamp information for the segment to account for
* rollover and convert to seconds based on pes packet timescale (90kHz clock)
*/
var adjustTimestamp_ = function adjustTimestamp_(segmentInfo, baseTimestamp) {
if (segmentInfo.audio && segmentInfo.audio.length) {
var audioBaseTimestamp = baseTimestamp;
if (typeof audioBaseTimestamp === 'undefined' || isNaN(audioBaseTimestamp)) {
audioBaseTimestamp = segmentInfo.audio[0].dts;
}
segmentInfo.audio.forEach(function (info) {
info.dts = handleRollover(info.dts, audioBaseTimestamp);
info.pts = handleRollover(info.pts, audioBaseTimestamp); // time in seconds
info.dtsTime = info.dts / ONE_SECOND_IN_TS;
info.ptsTime = info.pts / ONE_SECOND_IN_TS;
});
}
if (segmentInfo.video && segmentInfo.video.length) {
var videoBaseTimestamp = baseTimestamp;
if (typeof videoBaseTimestamp === 'undefined' || isNaN(videoBaseTimestamp)) {
videoBaseTimestamp = segmentInfo.video[0].dts;
}
segmentInfo.video.forEach(function (info) {
info.dts = handleRollover(info.dts, videoBaseTimestamp);
info.pts = handleRollover(info.pts, videoBaseTimestamp); // time in seconds
info.dtsTime = info.dts / ONE_SECOND_IN_TS;
info.ptsTime = info.pts / ONE_SECOND_IN_TS;
});
if (segmentInfo.firstKeyFrame) {
var frame = segmentInfo.firstKeyFrame;
frame.dts = handleRollover(frame.dts, videoBaseTimestamp);
frame.pts = handleRollover(frame.pts, videoBaseTimestamp); // time in seconds
frame.dtsTime = frame.dts / ONE_SECOND_IN_TS;
frame.ptsTime = frame.pts / ONE_SECOND_IN_TS;
}
}
};
/**
* inspects the aac data stream for start and end time information
*/
var inspectAac_ = function inspectAac_(bytes) {
var endLoop = false,
audioCount = 0,
sampleRate = null,
timestamp = null,
frameSize = 0,
byteIndex = 0,
packet;
while (bytes.length - byteIndex >= 3) {
var type = probe.aac.parseType(bytes, byteIndex);
switch (type) {
case 'timed-metadata':
// Exit early because we don't have enough to parse
// the ID3 tag header
if (bytes.length - byteIndex < 10) {
endLoop = true;
break;
}
frameSize = probe.aac.parseId3TagSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
// to emit a full packet
if (frameSize > bytes.length) {
endLoop = true;
break;
}
if (timestamp === null) {
packet = bytes.subarray(byteIndex, byteIndex + frameSize);
timestamp = probe.aac.parseAacTimestamp(packet);
}
byteIndex += frameSize;
break;
case 'audio':
// Exit early because we don't have enough to parse
// the ADTS frame header
if (bytes.length - byteIndex < 7) {
endLoop = true;
break;
}
frameSize = probe.aac.parseAdtsSize(bytes, byteIndex); // Exit early if we don't have enough in the buffer
// to emit a full packet
if (frameSize > bytes.length) {
endLoop = true;
break;
}
if (sampleRate === null) {
packet = bytes.subarray(byteIndex, byteIndex + frameSize);
sampleRate = probe.aac.parseSampleRate(packet);
}
audioCount++;
byteIndex += frameSize;
break;
default:
byteIndex++;
break;
}
if (endLoop) {
return null;
}
}
if (sampleRate === null || timestamp === null) {
return null;
}
var audioTimescale = ONE_SECOND_IN_TS / sampleRate;
var result = {
audio: [{
type: 'audio',
dts: timestamp,
pts: timestamp
}, {
type: 'audio',
dts: timestamp + audioCount * 1024 * audioTimescale,
pts: timestamp + audioCount * 1024 * audioTimescale
}]
};
return result;
};
/**
* inspects the transport stream segment data for start and end time information
* of the audio and video tracks (when present) as well as the first key frame's
* start time.
*/
var inspectTs_ = function inspectTs_(bytes) {
var pmt = {
pid: null,
table: null
};
var result = {};
parsePsi_(bytes, pmt);
for (var pid in pmt.table) {
if (pmt.table.hasOwnProperty(pid)) {
var type = pmt.table[pid];
switch (type) {
case StreamTypes.H264_STREAM_TYPE:
result.video = [];
parseVideoPes_(bytes, pmt, result);
if (result.video.length === 0) {
delete result.video;
}
break;
case StreamTypes.ADTS_STREAM_TYPE:
result.audio = [];
parseAudioPes_(bytes, pmt, result);
if (result.audio.length === 0) {
delete result.audio;
}
break;
default:
break;
}
}
}
return result;
};
/**
* Inspects segment byte data and returns an object with start and end timing information
*
* @param {Uint8Array} bytes The segment byte data
* @param {Number} baseTimestamp Relative reference timestamp used when adjusting frame
* timestamps for rollover. This value must be on the 90kHz clock.
* @return {Object} Object containing start and end frame timing info of segment.
*/
var inspect = function inspect(bytes, baseTimestamp) {
var isAacData = probe.aac.isLikelyAacData(bytes);
var result;
if (isAacData) {
result = inspectAac_(bytes);
} else {
result = inspectTs_(bytes);
}
if (!result || !result.audio && !result.video) {
return null;
}
adjustTimestamp_(result, baseTimestamp);
return result;
};
module.exports = {
inspect: inspect,
parseAudioPes_: parseAudioPes_
};
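// Usage sketch (illustrative, not part of the upstream file): probing a TS
// or ADTS/AAC segment for start/end timing in Node. 'segment.ts' is a
// hypothetical file name; inspect() returns null when no timing is found.
var fs = require('fs');
if (fs.existsSync('segment.ts')) {
  var segmentBytes = new Uint8Array(fs.readFileSync('segment.ts'));
  var timingInfo = inspect(segmentBytes, 0); // baseTimestamp on the 90kHz clock
  if (timingInfo && timingInfo.video) {
    console.log('first video pts (seconds):', timingInfo.video[0].ptsTime);
  }
}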