First release
commit fa6c85266e
2339 changed files with 761050 additions and 0 deletions
node_modules/mux.js/lib/m2ts/caption-stream.js (generated, vendored, new file, 1676 lines)
File diff suppressed because it is too large.
node_modules/mux.js/lib/m2ts/index.js (generated, vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 */
module.exports = require('./m2ts');
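For orientation (illustrative, not part of this commit): the entry point above simply re-exports ./m2ts, so consumers require the directory and pick the individual streams off the returned object.

    // hypothetical consumer code; assumes mux.js is installed under node_modules
    var m2ts = require('mux.js/lib/m2ts');
    var packetStream = new m2ts.TransportPacketStream();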
node_modules/mux.js/lib/m2ts/m2ts.js (generated, vendored, new file, 547 lines)
@@ -0,0 +1,547 @@
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * A stream-based mp2t to mp4 converter. This utility can be used to
 * deliver mp4s to a SourceBuffer on platforms that support native
 * Media Source Extensions.
 */
'use strict';
var Stream = require('../utils/stream.js'),
  CaptionStream = require('./caption-stream'),
  StreamTypes = require('./stream-types'),
  TimestampRolloverStream = require('./timestamp-rollover-stream').TimestampRolloverStream;

// object types
var TransportPacketStream, TransportParseStream, ElementaryStream;

// constants
var
  MP2T_PACKET_LENGTH = 188, // bytes
  SYNC_BYTE = 0x47;

/**
 * Splits an incoming stream of binary data into MPEG-2 Transport
 * Stream packets.
 */
TransportPacketStream = function() {
  var
    buffer = new Uint8Array(MP2T_PACKET_LENGTH),
    bytesInBuffer = 0;

  TransportPacketStream.prototype.init.call(this);

  // Deliver new bytes to the stream.

  /**
   * Split a stream of data into M2TS packets
   **/
  this.push = function(bytes) {
    var
      startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      everything;

    // If there are bytes remaining from the last segment, prepend them to the
    // bytes that were pushed in
    if (bytesInBuffer) {
      everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
      everything.set(buffer.subarray(0, bytesInBuffer));
      everything.set(bytes, bytesInBuffer);
      bytesInBuffer = 0;
    } else {
      everything = bytes;
    }

    // While we have enough data for a packet
    while (endIndex < everything.byteLength) {
      // Look for a pair of start and end sync bytes in the data..
      if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
        // We found a packet so emit it and jump one whole packet forward in
        // the stream
        this.trigger('data', everything.subarray(startIndex, endIndex));
        startIndex += MP2T_PACKET_LENGTH;
        endIndex += MP2T_PACKET_LENGTH;
        continue;
      }
      // If we get here, we have somehow become de-synchronized and we need to step
      // forward one byte at a time until we find a pair of sync bytes that denote
      // a packet
      startIndex++;
      endIndex++;
    }

    // If there was some data left over at the end of the segment that couldn't
    // possibly be a whole packet, keep it because it might be the start of a packet
    // that continues in the next segment
    if (startIndex < everything.byteLength) {
      buffer.set(everything.subarray(startIndex), 0);
      bytesInBuffer = everything.byteLength - startIndex;
    }
  };

  /**
   * Passes identified M2TS packets to the TransportParseStream to be parsed
   **/
  this.flush = function() {
    // If the buffer contains a whole packet when we are being flushed, emit it
    // and empty the buffer. Otherwise hold onto the data because it may be
    // important for decoding the next segment
    if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
      this.trigger('data', buffer);
      bytesInBuffer = 0;
    }
    this.trigger('done');
  };

  this.endTimeline = function() {
    this.flush();
    this.trigger('endedtimeline');
  };

  this.reset = function() {
    bytesInBuffer = 0;
    this.trigger('reset');
  };
};
TransportPacketStream.prototype = new Stream();

/**
 * Accepts an MP2T TransportPacketStream and emits data events with parsed
 * forms of the individual transport stream packets.
 */
TransportParseStream = function() {
  var parsePsi, parsePat, parsePmt, self;
  TransportParseStream.prototype.init.call(this);
  self = this;

  this.packetsWaitingForPmt = [];
  this.programMapTable = undefined;

  parsePsi = function(payload, psi) {
    var offset = 0;

    // PSI packets may be split into multiple sections and those
    // sections may be split into multiple packets. If a PSI
    // section starts in this packet, the payload_unit_start_indicator
    // will be true and the first byte of the payload will indicate
    // the offset from the current position to the start of the
    // section.
    if (psi.payloadUnitStartIndicator) {
      offset += payload[offset] + 1;
    }

    if (psi.type === 'pat') {
      parsePat(payload.subarray(offset), psi);
    } else {
      parsePmt(payload.subarray(offset), psi);
    }
  };

  parsePat = function(payload, pat) {
    pat.section_number = payload[7]; // eslint-disable-line camelcase
    pat.last_section_number = payload[8]; // eslint-disable-line camelcase

    // skip the PSI header and parse the first PMT entry
    self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
    pat.pmtPid = self.pmtPid;
  };

  /**
   * Parse out the relevant fields of a Program Map Table (PMT).
   * @param payload {Uint8Array} the PMT-specific portion of an MP2T
   * packet. The first byte in this array should be the table_id
   * field.
   * @param pmt {object} the object that should be decorated with
   * fields parsed from the PMT.
   */
  parsePmt = function(payload, pmt) {
    var sectionLength, tableEnd, programInfoLength, offset;

    // PMTs can be sent ahead of the time when they should actually
    // take effect. We don't believe this should ever be the case
    // for HLS but we'll ignore "forward" PMT declarations if we see
    // them. Future PMT declarations have the current_next_indicator
    // set to zero.
    if (!(payload[5] & 0x01)) {
      return;
    }

    // overwrite any existing program map table
    self.programMapTable = {
      video: null,
      audio: null,
      'timed-metadata': {}
    };

    // the mapping table ends at the end of the current section
    sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
    tableEnd = 3 + sectionLength - 4;

    // to determine where the table is, we have to figure out how
    // long the program info descriptors are
    programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];

    // advance the offset to the first entry in the mapping table
    offset = 12 + programInfoLength;
    while (offset < tableEnd) {
      var streamType = payload[offset];
      var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2];

      // only map a single elementary_pid for audio and video stream types
      // TODO: should this be done for metadata too? for now maintain behavior of
      // multiple metadata streams
      if (streamType === StreamTypes.H264_STREAM_TYPE &&
          self.programMapTable.video === null) {
        self.programMapTable.video = pid;
      } else if (streamType === StreamTypes.ADTS_STREAM_TYPE &&
                 self.programMapTable.audio === null) {
        self.programMapTable.audio = pid;
      } else if (streamType === StreamTypes.METADATA_STREAM_TYPE) {
        // map pid to stream type for metadata streams
        self.programMapTable['timed-metadata'][pid] = streamType;
      }

      // move to the next table entry
      // skip past the elementary stream descriptors, if present
      offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
    }

    // record the map on the packet as well
    pmt.programMapTable = self.programMapTable;
  };

  /**
   * Deliver a new MP2T packet to the next stream in the pipeline.
   */
  this.push = function(packet) {
    var
      result = {},
      offset = 4;

    result.payloadUnitStartIndicator = !!(packet[1] & 0x40);

    // pid is a 13-bit field starting at the last bit of packet[1]
    result.pid = packet[1] & 0x1f;
    result.pid <<= 8;
    result.pid |= packet[2];

    // if an adaptation field is present, its length is specified by the
    // fifth byte of the TS packet header. The adaptation field is
    // used to add stuffing to PES packets that don't fill a complete
    // TS packet, and to specify some forms of timing and control data
    // that we do not currently use.
    if (((packet[3] & 0x30) >>> 4) > 0x01) {
      offset += packet[offset] + 1;
    }

    // parse the rest of the packet based on the type
    if (result.pid === 0) {
      result.type = 'pat';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);
    } else if (result.pid === this.pmtPid) {
      result.type = 'pmt';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);

      // if there are any packets waiting for a PMT to be found, process them now
      while (this.packetsWaitingForPmt.length) {
        this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
      }
    } else if (this.programMapTable === undefined) {
      // When we have not seen a PMT yet, defer further processing of
      // PES packets until one has been parsed
      this.packetsWaitingForPmt.push([packet, offset, result]);
    } else {
      this.processPes_(packet, offset, result);
    }
  };

  this.processPes_ = function(packet, offset, result) {
    // set the appropriate stream type
    if (result.pid === this.programMapTable.video) {
      result.streamType = StreamTypes.H264_STREAM_TYPE;
    } else if (result.pid === this.programMapTable.audio) {
      result.streamType = StreamTypes.ADTS_STREAM_TYPE;
    } else {
      // if not video or audio, it is timed-metadata or unknown
      // if unknown, streamType will be undefined
      result.streamType = this.programMapTable['timed-metadata'][result.pid];
    }

    result.type = 'pes';
    result.data = packet.subarray(offset);
    this.trigger('data', result);
  };
};
TransportParseStream.prototype = new Stream();
TransportParseStream.STREAM_TYPES = {
  h264: 0x1b,
  adts: 0x0f
};

/**
 * Reconstitutes program elementary stream (PES) packets from parsed
 * transport stream packets. That is, if you pipe an
 * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
 * events will be events which capture the bytes for individual PES
 * packets plus relevant metadata that has been extracted from the
 * container.
 */
ElementaryStream = function() {
  var
    self = this,
    // PES packet fragments
    video = {
      data: [],
      size: 0
    },
    audio = {
      data: [],
      size: 0
    },
    timedMetadata = {
      data: [],
      size: 0
    },
    programMapTable,
    parsePes = function(payload, pes) {
      var ptsDtsFlags;
      const startPrefix = payload[0] << 16 | payload[1] << 8 | payload[2];
      // default to an empty array
      pes.data = new Uint8Array();
      // In certain live streams, the start of a TS fragment has ts packets
      // that are frame data that is continuing from the previous fragment. This
      // is to check that the pes data is the start of a new pes payload
      if (startPrefix !== 1) {
        return;
      }
      // get the packet length, this will be 0 for video
      pes.packetLength = 6 + ((payload[4] << 8) | payload[5]);

      // find out if this packet starts a new keyframe
      pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;
      // PES packets may be annotated with a PTS value, or a PTS value
      // and a DTS value. Determine what combination of values is
      // available to work with.
      ptsDtsFlags = payload[7];

      // PTS and DTS are normally stored as a 33-bit number. Javascript
      // performs all bitwise operations on 32-bit integers but javascript
      // supports a much greater range (52-bits) of integer using standard
      // mathematical operations.
      // We construct a 31-bit value using bitwise operators over the 31
      // most significant bits and then multiply by 4 (equal to a left-shift
      // of 2) before we add the final 2 least significant bits of the
      // timestamp (equal to an OR.)
      if (ptsDtsFlags & 0xC0) {
        // the PTS and DTS are not written out directly. For information
        // on how they are encoded, see
        // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
        pes.pts = (payload[9] & 0x0E) << 27 |
          (payload[10] & 0xFF) << 20 |
          (payload[11] & 0xFE) << 12 |
          (payload[12] & 0xFF) << 5 |
          (payload[13] & 0xFE) >>> 3;
        pes.pts *= 4; // Left shift by 2
        pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
        pes.dts = pes.pts;
        if (ptsDtsFlags & 0x40) {
          pes.dts = (payload[14] & 0x0E) << 27 |
            (payload[15] & 0xFF) << 20 |
            (payload[16] & 0xFE) << 12 |
            (payload[17] & 0xFF) << 5 |
            (payload[18] & 0xFE) >>> 3;
          pes.dts *= 4; // Left shift by 2
          pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
        }
      }
      // the data section starts immediately after the PES header.
      // pes_header_data_length specifies the number of header bytes
      // that follow the last byte of the field.
      pes.data = payload.subarray(9 + payload[8]);
    },
    /**
     * Pass completely parsed PES packets to the next stream in the pipeline
     **/
    flushStream = function(stream, type, forceFlush) {
      var
        packetData = new Uint8Array(stream.size),
        event = {
          type: type
        },
        i = 0,
        offset = 0,
        packetFlushable = false,
        fragment;

      // do nothing if there is not enough buffered data for a complete
      // PES header
      if (!stream.data.length || stream.size < 9) {
        return;
      }
      event.trackId = stream.data[0].pid;

      // reassemble the packet
      for (i = 0; i < stream.data.length; i++) {
        fragment = stream.data[i];

        packetData.set(fragment.data, offset);
        offset += fragment.data.byteLength;
      }

      // parse assembled packet's PES header
      parsePes(packetData, event);

      // non-video PES packets MUST have a non-zero PES_packet_length
      // check that there is enough stream data to fill the packet
      packetFlushable = type === 'video' || event.packetLength <= stream.size;

      // flush pending packets if the conditions are right
      if (forceFlush || packetFlushable) {
        stream.size = 0;
        stream.data.length = 0;
      }

      // only emit packets that are complete. this is to avoid assembling
      // incomplete PES packets due to poor segmentation
      if (packetFlushable) {
        self.trigger('data', event);
      }
    };

  ElementaryStream.prototype.init.call(this);

  /**
   * Identifies M2TS packet types and parses PES packets using metadata
   * parsed from the PMT
   **/
  this.push = function(data) {
    ({
      pat: function() {
        // we have to wait for the PMT to arrive as well before we
        // have any meaningful metadata
      },
      pes: function() {
        var stream, streamType;

        switch (data.streamType) {
        case StreamTypes.H264_STREAM_TYPE:
          stream = video;
          streamType = 'video';
          break;
        case StreamTypes.ADTS_STREAM_TYPE:
          stream = audio;
          streamType = 'audio';
          break;
        case StreamTypes.METADATA_STREAM_TYPE:
          stream = timedMetadata;
          streamType = 'timed-metadata';
          break;
        default:
          // ignore unknown stream types
          return;
        }

        // if a new packet is starting, we can flush the completed
        // packet
        if (data.payloadUnitStartIndicator) {
          flushStream(stream, streamType, true);
        }

        // buffer this fragment until we are sure we've received the
        // complete payload
        stream.data.push(data);
        stream.size += data.data.byteLength;
      },
      pmt: function() {
        var
          event = {
            type: 'metadata',
            tracks: []
          };

        programMapTable = data.programMapTable;

        // translate audio and video streams to tracks
        if (programMapTable.video !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.video,
            codec: 'avc',
            type: 'video'
          });
        }
        if (programMapTable.audio !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.audio,
            codec: 'adts',
            type: 'audio'
          });
        }

        self.trigger('data', event);
      }
    })[data.type]();
  };

  this.reset = function() {
    video.size = 0;
    video.data.length = 0;
    audio.size = 0;
    audio.data.length = 0;
    this.trigger('reset');
  };

  /**
   * Flush any remaining input. Video PES packets may be of variable
   * length. Normally, the start of a new video packet can trigger the
   * finalization of the previous packet. That is not possible if no
   * more video is forthcoming, however. In that case, some other
   * mechanism (like the end of the file) has to be employed. When it is
   * clear that no additional data is forthcoming, calling this method
   * will flush the buffered packets.
   */
  this.flushStreams_ = function() {
    // !!THIS ORDER IS IMPORTANT!!
    // video first then audio
    flushStream(video, 'video');
    flushStream(audio, 'audio');
    flushStream(timedMetadata, 'timed-metadata');
  };

  this.flush = function() {
    this.flushStreams_();
    this.trigger('done');
  };
};
ElementaryStream.prototype = new Stream();

var m2ts = {
  PAT_PID: 0x0000,
  MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
  TransportPacketStream: TransportPacketStream,
  TransportParseStream: TransportParseStream,
  ElementaryStream: ElementaryStream,
  TimestampRolloverStream: TimestampRolloverStream,
  CaptionStream: CaptionStream.CaptionStream,
  Cea608Stream: CaptionStream.Cea608Stream,
  Cea708Stream: CaptionStream.Cea708Stream,
  MetadataStream: require('./metadata-stream')
};

for (var type in StreamTypes) {
  if (StreamTypes.hasOwnProperty(type)) {
    m2ts[type] = StreamTypes[type];
  }
}

module.exports = m2ts;
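A minimal wiring sketch for the three streams defined above (illustrative, not part of this commit; it assumes the pipe()/on() helpers provided by the shared ../utils/stream base class and a Uint8Array of transport-stream bytes named segmentBytes):

    var m2ts = require('mux.js/lib/m2ts');

    var packetStream = new m2ts.TransportPacketStream();
    var parseStream = new m2ts.TransportParseStream();
    var elementaryStream = new m2ts.ElementaryStream();

    // pipe() forwards each stream's 'data' events into the next stream's push()
    packetStream.pipe(parseStream).pipe(elementaryStream);

    elementaryStream.on('data', function(event) {
      // event.type is 'metadata' for the PMT-derived track list, or
      // 'video' / 'audio' / 'timed-metadata' for reassembled PES packets
      console.log(event.type);
    });

    packetStream.push(segmentBytes); // segmentBytes: Uint8Array of 188-byte TS packets
    packetStream.flush();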
node_modules/mux.js/lib/m2ts/metadata-stream.js (generated, vendored, new file, 253 lines)
@@ -0,0 +1,253 @@
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * Accepts program elementary stream (PES) data events and parses out
 * ID3 metadata from them, if present.
 * @see http://id3.org/id3v2.3.0
 */
'use strict';
var
  Stream = require('../utils/stream'),
  StreamTypes = require('./stream-types'),
  // return a percent-encoded representation of the specified byte range
  // @see http://en.wikipedia.org/wiki/Percent-encoding
  percentEncode = function(bytes, start, end) {
    var i, result = '';
    for (i = start; i < end; i++) {
      result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
    }
    return result;
  },
  // return the string representation of the specified byte range,
  // interpreted as UTF-8.
  parseUtf8 = function(bytes, start, end) {
    return decodeURIComponent(percentEncode(bytes, start, end));
  },
  // return the string representation of the specified byte range,
  // interpreted as ISO-8859-1.
  parseIso88591 = function(bytes, start, end) {
    return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
  },
  parseSyncSafeInteger = function(data) {
    return (data[0] << 21) |
           (data[1] << 14) |
           (data[2] << 7) |
           (data[3]);
  },
  tagParsers = {
    TXXX: function(tag) {
      var i;
      if (tag.data[0] !== 3) {
        // ignore frames with unrecognized character encodings
        return;
      }

      for (i = 1; i < tag.data.length; i++) {
        if (tag.data[i] === 0) {
          // parse the text fields
          tag.description = parseUtf8(tag.data, 1, i);
          // do not include the null terminator in the tag value
          tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
          break;
        }
      }
      tag.data = tag.value;
    },
    WXXX: function(tag) {
      var i;
      if (tag.data[0] !== 3) {
        // ignore frames with unrecognized character encodings
        return;
      }

      for (i = 1; i < tag.data.length; i++) {
        if (tag.data[i] === 0) {
          // parse the description and URL fields
          tag.description = parseUtf8(tag.data, 1, i);
          tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
          break;
        }
      }
    },
    PRIV: function(tag) {
      var i;

      for (i = 0; i < tag.data.length; i++) {
        if (tag.data[i] === 0) {
          // parse the description and URL fields
          tag.owner = parseIso88591(tag.data, 0, i);
          break;
        }
      }
      tag.privateData = tag.data.subarray(i + 1);
      tag.data = tag.privateData;
    }
  },
  MetadataStream;

MetadataStream = function(options) {
  var
    settings = {
      debug: !!(options && options.debug),

      // the bytes of the program-level descriptor field in MP2T
      // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
      // program element descriptors"
      descriptor: options && options.descriptor
    },
    // the total size in bytes of the ID3 tag being parsed
    tagSize = 0,
    // tag data that is not complete enough to be parsed
    buffer = [],
    // the total number of bytes currently in the buffer
    bufferSize = 0,
    i;

  MetadataStream.prototype.init.call(this);

  // calculate the text track in-band metadata track dispatch type
  // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
  this.dispatchType = StreamTypes.METADATA_STREAM_TYPE.toString(16);
  if (settings.descriptor) {
    for (i = 0; i < settings.descriptor.length; i++) {
      this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
    }
  }

  this.push = function(chunk) {
    var tag, frameStart, frameSize, frame, i, frameHeader;
    if (chunk.type !== 'timed-metadata') {
      return;
    }

    // if data_alignment_indicator is set in the PES header,
    // we must have the start of a new ID3 tag. Assume anything
    // remaining in the buffer was malformed and throw it out
    if (chunk.dataAlignmentIndicator) {
      bufferSize = 0;
      buffer.length = 0;
    }

    // ignore events that don't look like ID3 data
    if (buffer.length === 0 &&
        (chunk.data.length < 10 ||
          chunk.data[0] !== 'I'.charCodeAt(0) ||
          chunk.data[1] !== 'D'.charCodeAt(0) ||
          chunk.data[2] !== '3'.charCodeAt(0))) {
      if (settings.debug) {
        // eslint-disable-next-line no-console
        console.log('Skipping unrecognized metadata packet');
      }
      return;
    }

    // add this chunk to the data we've collected so far

    buffer.push(chunk);
    bufferSize += chunk.data.byteLength;

    // grab the size of the entire frame from the ID3 header
    if (buffer.length === 1) {
      // the frame size is transmitted as a 28-bit integer in the
      // last four bytes of the ID3 header.
      // The most significant bit of each byte is dropped and the
      // results concatenated to recover the actual value.
      tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));

      // ID3 reports the tag size excluding the header but it's more
      // convenient for our comparisons to include it
      tagSize += 10;
    }

    // if the entire frame has not arrived, wait for more data
    if (bufferSize < tagSize) {
      return;
    }

    // collect the entire frame so it can be parsed
    tag = {
      data: new Uint8Array(tagSize),
      frames: [],
      pts: buffer[0].pts,
      dts: buffer[0].dts
    };
    for (i = 0; i < tagSize;) {
      tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
      i += buffer[0].data.byteLength;
      bufferSize -= buffer[0].data.byteLength;
      buffer.shift();
    }

    // find the start of the first frame and the end of the tag
    frameStart = 10;
    if (tag.data[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field
      frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));

      // clip any padding off the end
      tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
    }

    // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview
    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
      if (frameSize < 1) {
        // eslint-disable-next-line no-console
        return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
      }
      frameHeader = String.fromCharCode(tag.data[frameStart],
                                        tag.data[frameStart + 1],
                                        tag.data[frameStart + 2],
                                        tag.data[frameStart + 3]);

      frame = {
        id: frameHeader,
        data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
      };
      frame.key = frame.id;
      if (tagParsers[frame.id]) {
        tagParsers[frame.id](frame);

        // handle the special PRIV frame used to indicate the start
        // time for raw AAC data
        if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
          var
            d = frame.data,
            size = ((d[3] & 0x01) << 30) |
                    (d[4] << 22) |
                    (d[5] << 14) |
                    (d[6] << 6) |
                    (d[7] >>> 2);

          size *= 4;
          size += d[7] & 0x03;
          frame.timeStamp = size;
          // in raw AAC, all subsequent data will be timestamped based
          // on the value of this frame
          // we couldn't have known the appropriate pts and dts before
          // parsing this ID3 tag so set those values now
          if (tag.pts === undefined && tag.dts === undefined) {
            tag.pts = frame.timeStamp;
            tag.dts = frame.timeStamp;
          }
          this.trigger('timestamp', frame);
        }
      }
      tag.frames.push(frame);

      frameStart += 10; // advance past the frame header
      frameStart += frameSize; // advance past the frame body
    } while (frameStart < tagSize);
    this.trigger('data', tag);
  };
};
MetadataStream.prototype = new Stream();

module.exports = MetadataStream;
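A hedged usage sketch (not part of this commit): MetadataStream only reacts to chunks whose type is 'timed-metadata', so it is normally fed from an ElementaryStream; the elementaryStream variable below is assumed to be wired as in the earlier sketch.

    var MetadataStream = require('mux.js/lib/m2ts/metadata-stream');

    var metadataStream = new MetadataStream();
    elementaryStream.pipe(metadataStream);

    metadataStream.on('data', function(tag) {
      tag.frames.forEach(function(frame) {
        // TXXX frames carry frame.description / frame.value,
        // PRIV frames carry frame.owner / frame.privateData
        console.log(frame.id, frame.key);
      });
    });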
node_modules/mux.js/lib/m2ts/probe.js (generated, vendored, new file, 287 lines)
@@ -0,0 +1,287 @@
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * Utilities to detect basic properties and metadata about TS Segments.
 */
'use strict';

var StreamTypes = require('./stream-types.js');

var parsePid = function(packet) {
  var pid = packet[1] & 0x1f;
  pid <<= 8;
  pid |= packet[2];
  return pid;
};

var parsePayloadUnitStartIndicator = function(packet) {
  return !!(packet[1] & 0x40);
};

var parseAdaptionField = function(packet) {
  var offset = 0;
  // if an adaption field is present, its length is specified by the
  // fifth byte of the TS packet header. The adaptation field is
  // used to add stuffing to PES packets that don't fill a complete
  // TS packet, and to specify some forms of timing and control data
  // that we do not currently use.
  if (((packet[3] & 0x30) >>> 4) > 0x01) {
    offset += packet[4] + 1;
  }
  return offset;
};

var parseType = function(packet, pmtPid) {
  var pid = parsePid(packet);
  if (pid === 0) {
    return 'pat';
  } else if (pid === pmtPid) {
    return 'pmt';
  } else if (pmtPid) {
    return 'pes';
  }
  return null;
};

var parsePat = function(packet) {
  var pusi = parsePayloadUnitStartIndicator(packet);
  var offset = 4 + parseAdaptionField(packet);

  if (pusi) {
    offset += packet[offset] + 1;
  }

  return (packet[offset + 10] & 0x1f) << 8 | packet[offset + 11];
};

var parsePmt = function(packet) {
  var programMapTable = {};
  var pusi = parsePayloadUnitStartIndicator(packet);
  var payloadOffset = 4 + parseAdaptionField(packet);

  if (pusi) {
    payloadOffset += packet[payloadOffset] + 1;
  }

  // PMTs can be sent ahead of the time when they should actually
  // take effect. We don't believe this should ever be the case
  // for HLS but we'll ignore "forward" PMT declarations if we see
  // them. Future PMT declarations have the current_next_indicator
  // set to zero.
  if (!(packet[payloadOffset + 5] & 0x01)) {
    return;
  }

  var sectionLength, tableEnd, programInfoLength;
  // the mapping table ends at the end of the current section
  sectionLength = (packet[payloadOffset + 1] & 0x0f) << 8 | packet[payloadOffset + 2];
  tableEnd = 3 + sectionLength - 4;

  // to determine where the table is, we have to figure out how
  // long the program info descriptors are
  programInfoLength = (packet[payloadOffset + 10] & 0x0f) << 8 | packet[payloadOffset + 11];

  // advance the offset to the first entry in the mapping table
  var offset = 12 + programInfoLength;
  while (offset < tableEnd) {
    var i = payloadOffset + offset;
    // add an entry that maps the elementary_pid to the stream_type
    programMapTable[(packet[i + 1] & 0x1F) << 8 | packet[i + 2]] = packet[i];

    // move to the next table entry
    // skip past the elementary stream descriptors, if present
    offset += ((packet[i + 3] & 0x0F) << 8 | packet[i + 4]) + 5;
  }
  return programMapTable;
};

var parsePesType = function(packet, programMapTable) {
  var pid = parsePid(packet);
  var type = programMapTable[pid];
  switch (type) {
  case StreamTypes.H264_STREAM_TYPE:
    return 'video';
  case StreamTypes.ADTS_STREAM_TYPE:
    return 'audio';
  case StreamTypes.METADATA_STREAM_TYPE:
    return 'timed-metadata';
  default:
    return null;
  }
};

var parsePesTime = function(packet) {
  var pusi = parsePayloadUnitStartIndicator(packet);
  if (!pusi) {
    return null;
  }

  var offset = 4 + parseAdaptionField(packet);

  if (offset >= packet.byteLength) {
    // From the H 222.0 MPEG-TS spec
    // "For transport stream packets carrying PES packets, stuffing is needed when there
    //  is insufficient PES packet data to completely fill the transport stream packet
    //  payload bytes. Stuffing is accomplished by defining an adaptation field longer than
    //  the sum of the lengths of the data elements in it, so that the payload bytes
    //  remaining after the adaptation field exactly accommodates the available PES packet
    //  data."
    //
    // If the offset is >= the length of the packet, then the packet contains no data
    // and instead is just adaption field stuffing bytes
    return null;
  }

  var pes = null;
  var ptsDtsFlags;

  // PES packets may be annotated with a PTS value, or a PTS value
  // and a DTS value. Determine what combination of values is
  // available to work with.
  ptsDtsFlags = packet[offset + 7];

  // PTS and DTS are normally stored as a 33-bit number. Javascript
  // performs all bitwise operations on 32-bit integers but javascript
  // supports a much greater range (52-bits) of integer using standard
  // mathematical operations.
  // We construct a 31-bit value using bitwise operators over the 31
  // most significant bits and then multiply by 4 (equal to a left-shift
  // of 2) before we add the final 2 least significant bits of the
  // timestamp (equal to an OR.)
  if (ptsDtsFlags & 0xC0) {
    pes = {};
    // the PTS and DTS are not written out directly. For information
    // on how they are encoded, see
    // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
    pes.pts = (packet[offset + 9] & 0x0E) << 27 |
      (packet[offset + 10] & 0xFF) << 20 |
      (packet[offset + 11] & 0xFE) << 12 |
      (packet[offset + 12] & 0xFF) << 5 |
      (packet[offset + 13] & 0xFE) >>> 3;
    pes.pts *= 4; // Left shift by 2
    pes.pts += (packet[offset + 13] & 0x06) >>> 1; // OR by the two LSBs
    pes.dts = pes.pts;
    if (ptsDtsFlags & 0x40) {
      pes.dts = (packet[offset + 14] & 0x0E) << 27 |
        (packet[offset + 15] & 0xFF) << 20 |
        (packet[offset + 16] & 0xFE) << 12 |
        (packet[offset + 17] & 0xFF) << 5 |
        (packet[offset + 18] & 0xFE) >>> 3;
      pes.dts *= 4; // Left shift by 2
      pes.dts += (packet[offset + 18] & 0x06) >>> 1; // OR by the two LSBs
    }
  }
  return pes;
};

var parseNalUnitType = function(type) {
  switch (type) {
  case 0x05:
    return 'slice_layer_without_partitioning_rbsp_idr';
  case 0x06:
    return 'sei_rbsp';
  case 0x07:
    return 'seq_parameter_set_rbsp';
  case 0x08:
    return 'pic_parameter_set_rbsp';
  case 0x09:
    return 'access_unit_delimiter_rbsp';
  default:
    return null;
  }
};

var videoPacketContainsKeyFrame = function(packet) {
  var offset = 4 + parseAdaptionField(packet);
  var frameBuffer = packet.subarray(offset);
  var frameI = 0;
  var frameSyncPoint = 0;
  var foundKeyFrame = false;
  var nalType;

  // advance the sync point to a NAL start, if necessary
  for (; frameSyncPoint < frameBuffer.byteLength - 3; frameSyncPoint++) {
    if (frameBuffer[frameSyncPoint + 2] === 1) {
      // the sync point is properly aligned
      frameI = frameSyncPoint + 5;
      break;
    }
  }

  while (frameI < frameBuffer.byteLength) {
    // look at the current byte to determine if we've hit the end of
    // a NAL unit boundary
    switch (frameBuffer[frameI]) {
    case 0:
      // skip past non-sync sequences
      if (frameBuffer[frameI - 1] !== 0) {
        frameI += 2;
        break;
      } else if (frameBuffer[frameI - 2] !== 0) {
        frameI++;
        break;
      }

      if (frameSyncPoint + 3 !== frameI - 2) {
        nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
        if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
          foundKeyFrame = true;
        }
      }

      // drop trailing zeroes
      do {
        frameI++;
      } while (frameBuffer[frameI] !== 1 && frameI < frameBuffer.length);
      frameSyncPoint = frameI - 2;
      frameI += 3;
      break;
    case 1:
      // skip past non-sync sequences
      if (frameBuffer[frameI - 1] !== 0 ||
          frameBuffer[frameI - 2] !== 0) {
        frameI += 3;
        break;
      }

      nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
      if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
        foundKeyFrame = true;
      }
      frameSyncPoint = frameI - 2;
      frameI += 3;
      break;
    default:
      // the current byte isn't a one or zero, so it cannot be part
      // of a sync sequence
      frameI += 3;
      break;
    }
  }
  frameBuffer = frameBuffer.subarray(frameSyncPoint);
  frameI -= frameSyncPoint;
  frameSyncPoint = 0;
  // parse the final nal
  if (frameBuffer && frameBuffer.byteLength > 3) {
    nalType = parseNalUnitType(frameBuffer[frameSyncPoint + 3] & 0x1f);
    if (nalType === 'slice_layer_without_partitioning_rbsp_idr') {
      foundKeyFrame = true;
    }
  }

  return foundKeyFrame;
};


module.exports = {
  parseType: parseType,
  parsePat: parsePat,
  parsePmt: parsePmt,
  parsePayloadUnitStartIndicator: parsePayloadUnitStartIndicator,
  parsePesType: parsePesType,
  parsePesTime: parsePesTime,
  videoPacketContainsKeyFrame: videoPacketContainsKeyFrame
};
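For reference, a sketch of how these probe helpers are typically driven over a segment (illustrative, not part of this commit; segmentBytes is an assumed Uint8Array aligned on 188-byte packet boundaries):

    var probe = require('mux.js/lib/m2ts/probe');

    var PACKET_LENGTH = 188;
    var pmtPid;
    var programMapTable;

    for (var offset = 0; offset + PACKET_LENGTH <= segmentBytes.byteLength; offset += PACKET_LENGTH) {
      var packet = segmentBytes.subarray(offset, offset + PACKET_LENGTH);
      switch (probe.parseType(packet, pmtPid)) {
      case 'pat':
        pmtPid = probe.parsePat(packet);
        break;
      case 'pmt':
        // parsePmt returns undefined for "forward" PMTs, so keep the last good table
        programMapTable = probe.parsePmt(packet) || programMapTable;
        break;
      case 'pes':
        if (probe.parsePesType(packet, programMapTable || {}) === 'video' &&
            probe.parsePayloadUnitStartIndicator(packet)) {
          console.log('video PES time:', probe.parsePesTime(packet));
        }
        break;
      }
    }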
node_modules/mux.js/lib/m2ts/stream-types.js (generated, vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 */
'use strict';

module.exports = {
  H264_STREAM_TYPE: 0x1B,
  ADTS_STREAM_TYPE: 0x0F,
  METADATA_STREAM_TYPE: 0x15
};
node_modules/mux.js/lib/m2ts/timestamp-rollover-stream.js (generated, vendored, new file, 101 lines)
@@ -0,0 +1,101 @@
/**
 * mux.js
 *
 * Copyright (c) Brightcove
 * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
 *
 * Accepts program elementary stream (PES) data events and corrects
 * decode and presentation time stamps to account for a rollover
 * of the 33 bit value.
 */

'use strict';

var Stream = require('../utils/stream');

var MAX_TS = 8589934592;

var RO_THRESH = 4294967296;

var TYPE_SHARED = 'shared';

var handleRollover = function(value, reference) {
  var direction = 1;

  if (value > reference) {
    // If the current timestamp value is greater than our reference timestamp and we detect a
    // timestamp rollover, this means the roll over is happening in the opposite direction.
    // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
    // point will be set to a small number, e.g. 1. The user then seeks backwards over the
    // rollover point. In loading this segment, the timestamp values will be very large,
    // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
    // the time stamp to be `value - 2^33`.
    direction = -1;
  }

  // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
  // cause an incorrect adjustment.
  while (Math.abs(reference - value) > RO_THRESH) {
    value += (direction * MAX_TS);
  }

  return value;
};

var TimestampRolloverStream = function(type) {
  var lastDTS, referenceDTS;

  TimestampRolloverStream.prototype.init.call(this);

  // The "shared" type is used in cases where a stream will contain muxed
  // video and audio. We could use `undefined` here, but having a string
  // makes debugging a little clearer.
  this.type_ = type || TYPE_SHARED;

  this.push = function(data) {

    // Any "shared" rollover streams will accept _all_ data. Otherwise,
    // streams will only accept data that matches their type.
    if (this.type_ !== TYPE_SHARED && data.type !== this.type_) {
      return;
    }

    if (referenceDTS === undefined) {
      referenceDTS = data.dts;
    }

    data.dts = handleRollover(data.dts, referenceDTS);
    data.pts = handleRollover(data.pts, referenceDTS);

    lastDTS = data.dts;

    this.trigger('data', data);
  };

  this.flush = function() {
    referenceDTS = lastDTS;
    this.trigger('done');
  };

  this.endTimeline = function() {
    this.flush();
    this.trigger('endedtimeline');
  };

  this.discontinuity = function() {
    referenceDTS = void 0;
    lastDTS = void 0;
  };

  this.reset = function() {
    this.discontinuity();
    this.trigger('reset');
  };
};

TimestampRolloverStream.prototype = new Stream();

module.exports = {
  TimestampRolloverStream: TimestampRolloverStream,
  handleRollover: handleRollover
};
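A worked example of the rollover math above (illustrative, not part of this commit): a timestamp read just after the 33-bit wrap is shifted up by 2^33 so it stays monotonic against a pre-wrap reference, and the reverse happens when seeking back across the wrap.

    var handleRollover = require('mux.js/lib/m2ts/timestamp-rollover-stream').handleRollover;

    var MAX_TS = 8589934592; // 2^33, the PTS/DTS wrap point

    // forward rollover: small post-wrap value, pre-wrap reference
    console.log(handleRollover(10, MAX_TS - 100)); // 8589934602, i.e. 10 + 2^33

    // backward rollover: pre-wrap value, small post-wrap reference
    console.log(handleRollover(MAX_TS - 100, 10)); // -100, i.e. value - 2^33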