muxflv

A bundled build of mux.js, packaged as a library for the muxflv userscript (FLV remuxing).

此脚本不应直接安装。它是供其他脚本使用的外部库,要使用该库请加入元指令 // @require https://update.gf.qytechs.cn/scripts/386218/709245/muxflv.js

// Browserify UMD bootstrap: exposes the bundle as `muxjs` via CommonJS
// (`module.exports`), AMD (`define`), or a detected global object
// (window/global/self/this). The inner IIFE implements a minimal CommonJS
// loader: its module table maps numeric ids to [factory, dependency-map]
// pairs, `s(id)` instantiates and caches a module on first require.
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.muxjs = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
  2. /**
  3. * mux.js
  4. *
  5. * Copyright (c) Brightcove
  6. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  7. */
  8. 'use strict';
  9.  
  10. var Stream = require(17);
  11. var ONE_SECOND_IN_TS = require(15).ONE_SECOND_IN_TS;
  12.  
var AdtsStream;

// Sample-rate lookup table indexed by the 4-bit ADTS
// `sampling_frequency_index` header field (ISO/IEC 14496-3).
// Indexes 13-15 are reserved and intentionally absent.
var
ADTS_SAMPLING_FREQUENCIES = [
  96000,
  88200,
  64000,
  48000,
  44100,
  32000,
  24000,
  22050,
  16000,
  12000,
  11025,
  8000,
  7350
];
  31.  
/*
 * Accepts a ElementaryStream and emits data events with parsed
 * AAC Audio Frames of the individual packets. Input audio in ADTS
 * format is unpacked and re-emitted as AAC frames.
 *
 * @param {boolean} handlePartialSegments - when true, the running frame
 *   counter survives across push() calls so pts/dts extrapolation
 *   continues over partial segments; otherwise it resets on every push
 * @see http://wiki.multimedia.cx/index.php?title=ADTS
 * @see http://wiki.multimedia.cx/?title=Understanding_AAC
 */
AdtsStream = function(handlePartialSegments) {
  var
    // unconsumed bytes carried over between push() calls so that ADTS
    // frames which straddle a PES packet boundary can be reassembled
    buffer,
    // number of AAC frames emitted since the current packet timestamp;
    // used to offset pts/dts of successive frames within one packet
    frameNum = 0;

  AdtsStream.prototype.init.call(this);

  this.push = function(packet) {
    var
      i = 0,
      frameLength,
      protectionSkipBytes,
      frameEnd,
      oldBuffer,
      sampleCount,
      adtsFrameDuration;

    if (!handlePartialSegments) {
      frameNum = 0;
    }

    if (packet.type !== 'audio') {
      // ignore non-audio data
      return;
    }

    // Prepend any data in the buffer to the input data so that we can parse
    // aac frames that cross a PES packet boundary
    if (buffer) {
      oldBuffer = buffer;
      buffer = new Uint8Array(oldBuffer.byteLength + packet.data.byteLength);
      buffer.set(oldBuffer);
      buffer.set(packet.data, oldBuffer.byteLength);
    } else {
      buffer = packet.data;
    }

    // unpack any ADTS frames which have been fully received
    // for details on the ADTS header, see http://wiki.multimedia.cx/index.php?title=ADTS
    while (i + 5 < buffer.length) {

      // Look for the start of an ADTS header (12-bit sync word 0xFFF;
      // 0xF6 masks out the MPEG-version and protection bits of byte 1)
      if ((buffer[i] !== 0xFF) || (buffer[i + 1] & 0xF6) !== 0xF0) {
        // If a valid header was not found, jump one forward and attempt to
        // find a valid ADTS header starting at the next byte
        i++;
        continue;
      }

      // The protection skip bit tells us if we have 2 bytes of CRC data at the
      // end of the ADTS header (protection_absent == 0 means CRC present)
      protectionSkipBytes = (~buffer[i + 1] & 0x01) * 2;

      // Frame length is a 13 bit integer starting 16 bits from the
      // end of the sync sequence; it includes the header itself
      frameLength = ((buffer[i + 3] & 0x03) << 11) |
        (buffer[i + 4] << 3) |
        ((buffer[i + 5] & 0xe0) >> 5);

      // number_of_raw_data_blocks_in_frame + 1, times 1024 samples/block
      sampleCount = ((buffer[i + 6] & 0x03) + 1) * 1024;
      // frame duration expressed in 90kHz MPEG-TS clock ticks
      adtsFrameDuration = (sampleCount * ONE_SECOND_IN_TS) /
        ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2];

      frameEnd = i + frameLength;

      // If we don't have enough data to actually finish this ADTS frame, return
      // and wait for more data (the partial frame stays in `buffer`)
      if (buffer.byteLength < frameEnd) {
        return;
      }

      // Otherwise, deliver the complete AAC frame
      this.trigger('data', {
        // extrapolate timestamps for frames after the first in this packet
        pts: packet.pts + (frameNum * adtsFrameDuration),
        dts: packet.dts + (frameNum * adtsFrameDuration),
        sampleCount: sampleCount,
        audioobjecttype: ((buffer[i + 2] >>> 6) & 0x03) + 1,
        channelcount: ((buffer[i + 2] & 1) << 2) |
          ((buffer[i + 3] & 0xc0) >>> 6),
        samplerate: ADTS_SAMPLING_FREQUENCIES[(buffer[i + 2] & 0x3c) >>> 2],
        samplingfrequencyindex: (buffer[i + 2] & 0x3c) >>> 2,
        // assume ISO/IEC 14496-12 AudioSampleEntry default of 16
        samplesize: 16,
        // payload starts after the 7-byte header plus optional CRC
        data: buffer.subarray(i + 7 + protectionSkipBytes, frameEnd)
      });

      frameNum++;

      // If the buffer is empty, clear it and return
      if (buffer.byteLength === frameEnd) {
        buffer = undefined;
        return;
      }

      // Remove the finished frame from the buffer and start the process again
      buffer = buffer.subarray(frameEnd);
    }
  };

  this.flush = function() {
    frameNum = 0;
    this.trigger('done');
  };

  this.reset = function() {
    buffer = void 0;
    this.trigger('reset');
  };

  this.endTimeline = function() {
    buffer = void 0;
    this.trigger('endedtimeline');
  };
};
  154.  
// Inherit the on/off/trigger/pipe event plumbing from Stream.
AdtsStream.prototype = new Stream();

module.exports = AdtsStream;
  158.  
  159. },{"15":15,"17":17}],2:[function(require,module,exports){
  160. /**
  161. * mux.js
  162. *
  163. * Copyright (c) Brightcove
  164. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  165. */
  166. 'use strict';
  167.  
  168. var Stream = require(17);
  169. var ExpGolomb = require(16);
  170.  
var H264Stream, NalByteStream;
var PROFILES_WITH_OPTIONAL_SPS_DATA;

/**
 * Accepts a NAL unit byte stream and unpacks the embedded NAL units.
 */
NalByteStream = function() {
  var
    syncPoint = 0, // index just before the 0x000001 start code of the current NAL
    i,             // scan position within `buffer`; persists across push() calls
    buffer;        // unconsumed bytes carried over between push() calls
  NalByteStream.prototype.init.call(this);

  /*
   * Scans a byte stream and triggers a data event with the NAL units found.
   * @param {Object} data Event received from H264Stream
   * @param {Uint8Array} data.data The h264 byte stream to be scanned
   *
   * @see H264Stream.push
   */
  this.push = function(data) {
    var swapBuffer;

    // append the incoming bytes to any leftover from the previous push
    if (!buffer) {
      buffer = data.data;
    } else {
      swapBuffer = new Uint8Array(buffer.byteLength + data.data.byteLength);
      swapBuffer.set(buffer);
      swapBuffer.set(data.data, buffer.byteLength);
      buffer = swapBuffer;
    }

    // Rec. ITU-T H.264, Annex B
    // scan for NAL unit boundaries

    // a match looks like this:
    // 0 0 1 .. NAL .. 0 0 1
    // ^ sync point ^ i
    // or this:
    // 0 0 1 .. NAL .. 0 0 0
    // ^ sync point ^ i

    // advance the sync point to a NAL start, if necessary
    // NOTE(review): if no 0x000001 is found, `i` keeps its previous value
    // (initially undefined) and the while loop below does not run; verify
    // this is the intended resynchronization behavior.
    for (; syncPoint < buffer.byteLength - 3; syncPoint++) {
      if (buffer[syncPoint + 2] === 1) {
        // the sync point is properly aligned
        i = syncPoint + 5;
        break;
      }
    }

    // `i` advances in steps of up to 3 because a start code needs three
    // specific bytes; a non-zero/one byte rules out the surrounding window
    while (i < buffer.byteLength) {
      // look at the current byte to determine if we've hit the end of
      // a NAL unit boundary
      switch (buffer[i]) {
      case 0:
        // skip past non-sync sequences
        if (buffer[i - 1] !== 0) {
          i += 2;
          break;
        } else if (buffer[i - 2] !== 0) {
          i++;
          break;
        }

        // deliver the NAL unit if it isn't empty
        if (syncPoint + 3 !== i - 2) {
          this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
        }

        // drop trailing zeroes
        do {
          i++;
        } while (buffer[i] !== 1 && i < buffer.length);
        syncPoint = i - 2;
        i += 3;
        break;
      case 1:
        // skip past non-sync sequences
        if (buffer[i - 1] !== 0 ||
            buffer[i - 2] !== 0) {
          i += 3;
          break;
        }

        // deliver the NAL unit
        this.trigger('data', buffer.subarray(syncPoint + 3, i - 2));
        syncPoint = i - 2;
        i += 3;
        break;
      default:
        // the current byte isn't a one or zero, so it cannot be part
        // of a sync sequence
        i += 3;
        break;
      }
    }
    // filter out the NAL units that were delivered
    buffer = buffer.subarray(syncPoint);
    i -= syncPoint;
    syncPoint = 0;
  };

  this.reset = function() {
    buffer = null;
    syncPoint = 0;
    this.trigger('reset');
  };

  this.flush = function() {
    // deliver the last buffered NAL unit
    if (buffer && buffer.byteLength > 3) {
      this.trigger('data', buffer.subarray(syncPoint + 3));
    }
    // reset the stream state
    buffer = null;
    syncPoint = 0;
    this.trigger('done');
  };

  this.endTimeline = function() {
    this.flush();
    this.trigger('endedtimeline');
  };
};
NalByteStream.prototype = new Stream();
  297.  
  298. // values of profile_idc that indicate additional fields are included in the SPS
  299. // see Recommendation ITU-T H.264 (4/2013),
  300. // 7.3.2.1.1 Sequence parameter set data syntax
  301. PROFILES_WITH_OPTIONAL_SPS_DATA = {
  302. 100: true,
  303. 110: true,
  304. 122: true,
  305. 244: true,
  306. 44: true,
  307. 83: true,
  308. 86: true,
  309. 118: true,
  310. 128: true,
  311. 138: true,
  312. 139: true,
  313. 134: true
  314. };
  315.  
/**
 * Accepts input from a ElementaryStream and produces H.264 NAL unit data
 * events.
 */
H264Stream = function() {
  var
    nalByteStream = new NalByteStream(),
    self,
    trackId,
    currentPts,
    currentDts,

    discardEmulationPreventionBytes,
    readSequenceParameterSet,
    skipScalingList;

  H264Stream.prototype.init.call(this);
  self = this;

  /*
   * Pushes a packet from a stream onto the NalByteStream
   *
   * @param {Object} packet - A packet received from a stream
   * @param {Uint8Array} packet.data - The raw bytes of the packet
   * @param {Number} packet.dts - Decode timestamp of the packet
   * @param {Number} packet.pts - Presentation timestamp of the packet
   * @param {Number} packet.trackId - The id of the h264 track this packet came from
   * @param {('video'|'audio')} packet.type - The type of packet
   *
   */
  this.push = function(packet) {
    if (packet.type !== 'video') {
      return;
    }
    // remember the most recent packet's identity and timing so the NAL
    // units parsed out of it can be stamped with the same values
    trackId = packet.trackId;
    currentPts = packet.pts;
    currentDts = packet.dts;

    nalByteStream.push(packet);
  };

  /*
   * Identify NAL unit types and pass on the NALU, trackId, presentation and decode timestamps
   * for the NALUs to the next stream component.
   * Also, preprocess caption and sequence parameter NALUs.
   *
   * @param {Uint8Array} data - A NAL unit identified by `NalByteStream.push`
   * @see NalByteStream.push
   */
  nalByteStream.on('data', function(data) {
    var
      event = {
        trackId: trackId,
        pts: currentPts,
        dts: currentDts,
        data: data
      };

    // the low 5 bits of the first byte are the nal_unit_type
    switch (data[0] & 0x1f) {
    case 0x05:
      event.nalUnitType = 'slice_layer_without_partitioning_rbsp_idr';
      break;
    case 0x06:
      event.nalUnitType = 'sei_rbsp';
      event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
      break;
    case 0x07:
      event.nalUnitType = 'seq_parameter_set_rbsp';
      event.escapedRBSP = discardEmulationPreventionBytes(data.subarray(1));
      event.config = readSequenceParameterSet(event.escapedRBSP);
      break;
    case 0x08:
      event.nalUnitType = 'pic_parameter_set_rbsp';
      break;
    case 0x09:
      event.nalUnitType = 'access_unit_delimiter_rbsp';
      break;

    default:
      break;
    }
    // This triggers data on the H264Stream
    self.trigger('data', event);
  });
  nalByteStream.on('done', function() {
    self.trigger('done');
  });
  nalByteStream.on('partialdone', function() {
    self.trigger('partialdone');
  });
  nalByteStream.on('reset', function() {
    self.trigger('reset');
  });
  nalByteStream.on('endedtimeline', function() {
    self.trigger('endedtimeline');
  });

  this.flush = function() {
    nalByteStream.flush();
  };

  // NOTE(review): NalByteStream in this bundle does not define
  // `partialFlush`, so calling this would throw a TypeError -- presumably
  // this entry point is unused in the FLV pipeline; confirm before relying
  // on it.
  this.partialFlush = function() {
    nalByteStream.partialFlush();
  };

  this.reset = function() {
    nalByteStream.reset();
  };

  this.endTimeline = function() {
    nalByteStream.endTimeline();
  };

  /**
   * Advance the ExpGolomb decoder past a scaling list. The scaling
   * list is optionally transmitted as part of a sequence parameter
   * set and is not relevant to transmuxing.
   * @param count {number} the number of entries in this scaling list
   * @param expGolombDecoder {object} an ExpGolomb pointed to the
   * start of a scaling list
   * @see Recommendation ITU-T H.264, Section 7.3.2.1.1.1
   */
  skipScalingList = function(count, expGolombDecoder) {
    var
      lastScale = 8,
      nextScale = 8,
      j,
      deltaScale;

    for (j = 0; j < count; j++) {
      if (nextScale !== 0) {
        deltaScale = expGolombDecoder.readExpGolomb();
        nextScale = (lastScale + deltaScale + 256) % 256;
      }

      lastScale = (nextScale === 0) ? lastScale : nextScale;
    }
  };

  /**
   * Expunge any "Emulation Prevention" bytes from a "Raw Byte
   * Sequence Payload"
   * @param data {Uint8Array} the bytes of a RBSP from a NAL
   * unit
   * @return {Uint8Array} the RBSP without any Emulation
   * Prevention Bytes
   */
  discardEmulationPreventionBytes = function(data) {
    var
      length = data.byteLength,
      emulationPreventionBytesPositions = [],
      i = 1,
      newLength, newData;

    // Find all `Emulation Prevention Bytes` (the 0x03 in a 0x000003 run)
    while (i < length - 2) {
      if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
        emulationPreventionBytesPositions.push(i + 2);
        i += 2;
      } else {
        i++;
      }
    }

    // If no Emulation Prevention Bytes were found just return the original
    // array
    if (emulationPreventionBytesPositions.length === 0) {
      return data;
    }

    // Create a new array to hold the NAL unit data
    newLength = length - emulationPreventionBytesPositions.length;
    newData = new Uint8Array(newLength);
    var sourceIndex = 0;

    // copy everything except the recorded emulation-prevention positions
    for (i = 0; i < newLength; sourceIndex++, i++) {
      if (sourceIndex === emulationPreventionBytesPositions[0]) {
        // Skip this byte
        sourceIndex++;
        // Remove this position index
        emulationPreventionBytesPositions.shift();
      }
      newData[i] = data[sourceIndex];
    }

    return newData;
  };

  /**
   * Read a sequence parameter set and return some interesting video
   * properties. A sequence parameter set is the H264 metadata that
   * describes the properties of upcoming video frames.
   * @param data {Uint8Array} the bytes of a sequence parameter set
   * @return {object} an object with configuration parsed from the
   * sequence parameter set, including the dimensions of the
   * associated video frames.
   */
  readSequenceParameterSet = function(data) {
    var
      frameCropLeftOffset = 0,
      frameCropRightOffset = 0,
      frameCropTopOffset = 0,
      frameCropBottomOffset = 0,
      sarScale = 1,
      expGolombDecoder, profileIdc, levelIdc, profileCompatibility,
      chromaFormatIdc, picOrderCntType,
      numRefFramesInPicOrderCntCycle, picWidthInMbsMinus1,
      picHeightInMapUnitsMinus1,
      frameMbsOnlyFlag,
      scalingListCount,
      sarRatio,
      aspectRatioIdc,
      i;

    expGolombDecoder = new ExpGolomb(data);
    profileIdc = expGolombDecoder.readUnsignedByte(); // profile_idc
    profileCompatibility = expGolombDecoder.readUnsignedByte(); // constraint_set[0-5]_flag
    levelIdc = expGolombDecoder.readUnsignedByte(); // level_idc u(8)
    expGolombDecoder.skipUnsignedExpGolomb(); // seq_parameter_set_id

    // some profiles have more optional data we don't need
    if (PROFILES_WITH_OPTIONAL_SPS_DATA[profileIdc]) {
      chromaFormatIdc = expGolombDecoder.readUnsignedExpGolomb();
      if (chromaFormatIdc === 3) {
        expGolombDecoder.skipBits(1); // separate_colour_plane_flag
      }
      expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_luma_minus8
      expGolombDecoder.skipUnsignedExpGolomb(); // bit_depth_chroma_minus8
      expGolombDecoder.skipBits(1); // qpprime_y_zero_transform_bypass_flag
      if (expGolombDecoder.readBoolean()) { // seq_scaling_matrix_present_flag
        scalingListCount = (chromaFormatIdc !== 3) ? 8 : 12;
        for (i = 0; i < scalingListCount; i++) {
          if (expGolombDecoder.readBoolean()) { // seq_scaling_list_present_flag[ i ]
            if (i < 6) {
              skipScalingList(16, expGolombDecoder);
            } else {
              skipScalingList(64, expGolombDecoder);
            }
          }
        }
      }
    }

    expGolombDecoder.skipUnsignedExpGolomb(); // log2_max_frame_num_minus4
    picOrderCntType = expGolombDecoder.readUnsignedExpGolomb();

    if (picOrderCntType === 0) {
      expGolombDecoder.readUnsignedExpGolomb(); // log2_max_pic_order_cnt_lsb_minus4
    } else if (picOrderCntType === 1) {
      expGolombDecoder.skipBits(1); // delta_pic_order_always_zero_flag
      expGolombDecoder.skipExpGolomb(); // offset_for_non_ref_pic
      expGolombDecoder.skipExpGolomb(); // offset_for_top_to_bottom_field
      numRefFramesInPicOrderCntCycle = expGolombDecoder.readUnsignedExpGolomb();
      for (i = 0; i < numRefFramesInPicOrderCntCycle; i++) {
        expGolombDecoder.skipExpGolomb(); // offset_for_ref_frame[ i ]
      }
    }

    expGolombDecoder.skipUnsignedExpGolomb(); // max_num_ref_frames
    expGolombDecoder.skipBits(1); // gaps_in_frame_num_value_allowed_flag

    picWidthInMbsMinus1 = expGolombDecoder.readUnsignedExpGolomb();
    picHeightInMapUnitsMinus1 = expGolombDecoder.readUnsignedExpGolomb();

    frameMbsOnlyFlag = expGolombDecoder.readBits(1);
    if (frameMbsOnlyFlag === 0) {
      expGolombDecoder.skipBits(1); // mb_adaptive_frame_field_flag
    }

    expGolombDecoder.skipBits(1); // direct_8x8_inference_flag
    if (expGolombDecoder.readBoolean()) { // frame_cropping_flag
      frameCropLeftOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropRightOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropTopOffset = expGolombDecoder.readUnsignedExpGolomb();
      frameCropBottomOffset = expGolombDecoder.readUnsignedExpGolomb();
    }
    if (expGolombDecoder.readBoolean()) {
      // vui_parameters_present_flag
      if (expGolombDecoder.readBoolean()) {
        // aspect_ratio_info_present_flag
        // map aspect_ratio_idc to a sample aspect ratio (Table E-1)
        aspectRatioIdc = expGolombDecoder.readUnsignedByte();
        switch (aspectRatioIdc) {
        case 1: sarRatio = [1, 1]; break;
        case 2: sarRatio = [12, 11]; break;
        case 3: sarRatio = [10, 11]; break;
        case 4: sarRatio = [16, 11]; break;
        case 5: sarRatio = [40, 33]; break;
        case 6: sarRatio = [24, 11]; break;
        case 7: sarRatio = [20, 11]; break;
        case 8: sarRatio = [32, 11]; break;
        case 9: sarRatio = [80, 33]; break;
        case 10: sarRatio = [18, 11]; break;
        case 11: sarRatio = [15, 11]; break;
        case 12: sarRatio = [64, 33]; break;
        case 13: sarRatio = [160, 99]; break;
        case 14: sarRatio = [4, 3]; break;
        case 15: sarRatio = [3, 2]; break;
        case 16: sarRatio = [2, 1]; break;
        case 255: {
          // Extended_SAR: width and height transmitted as two 16-bit values
          sarRatio = [expGolombDecoder.readUnsignedByte() << 8 |
            expGolombDecoder.readUnsignedByte(),
            expGolombDecoder.readUnsignedByte() << 8 |
            expGolombDecoder.readUnsignedByte() ];
          break;
        }
        }
        if (sarRatio) {
          sarScale = sarRatio[0] / sarRatio[1];
        }
      }
    }
    return {
      profileIdc: profileIdc,
      levelIdc: levelIdc,
      profileCompatibility: profileCompatibility,
      // dimensions in luma samples: macroblock count * 16, minus cropping,
      // with the width additionally scaled by the sample aspect ratio
      width: Math.ceil((((picWidthInMbsMinus1 + 1) * 16) - frameCropLeftOffset * 2 - frameCropRightOffset * 2) * sarScale),
      height: ((2 - frameMbsOnlyFlag) * (picHeightInMapUnitsMinus1 + 1) * 16) - (frameCropTopOffset * 2) - (frameCropBottomOffset * 2)
    };
  };

};
// Inherit the on/off/trigger/pipe event plumbing from Stream.
H264Stream.prototype = new Stream();

module.exports = {
  H264Stream: H264Stream,
  NalByteStream: NalByteStream
};
  643.  
  644. },{"16":16,"17":17}],3:[function(require,module,exports){
  645. /**
  646. * mux.js
  647. *
  648. * Copyright (c) Brightcove
  649. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  650. */
  651. 'use strict';
  652.  
  653. var Stream = require(17);
  654.  
  655. /**
  656. * The final stage of the transmuxer that emits the flv tags
  657. * for audio, video, and metadata. Also tranlates in time and
  658. * outputs caption data and id3 cues.
  659. */
  660. var CoalesceStream = function(options) {
  661. // Number of Tracks per output segment
  662. // If greater than 1, we combine multiple
  663. // tracks into a single segment
  664. this.numberOfTracks = 0;
  665. this.metadataStream = options.metadataStream;
  666.  
  667. this.videoTags = [];
  668. this.audioTags = [];
  669. this.videoTrack = null;
  670. this.audioTrack = null;
  671. this.pendingCaptions = [];
  672. this.pendingMetadata = [];
  673. this.pendingTracks = 0;
  674. this.processedTracks = 0;
  675.  
  676. CoalesceStream.prototype.init.call(this);
  677.  
  678. // Take output from multiple
  679. this.push = function(output) {
  680. // buffer incoming captions until the associated video segment
  681. // finishes
  682. if (output.text) {
  683. return this.pendingCaptions.push(output);
  684. }
  685. // buffer incoming id3 tags until the final flush
  686. if (output.frames) {
  687. return this.pendingMetadata.push(output);
  688. }
  689.  
  690. if (output.track.type === 'video') {
  691. this.videoTrack = output.track;
  692. this.videoTags = output.tags;
  693. this.pendingTracks++;
  694. }
  695. if (output.track.type === 'audio') {
  696. this.audioTrack = output.track;
  697. this.audioTags = output.tags;
  698. this.pendingTracks++;
  699. }
  700. };
  701. };
  702.  
CoalesceStream.prototype = new Stream();
// Assemble the buffered tags, captions and id3 metadata into a single
// 'data' event once every expected track has flushed for this segment.
CoalesceStream.prototype.flush = function(flushSource) {
  var
    id3,
    caption,
    i,
    timelineStartPts,
    event = {
      tags: {},
      captions: [],
      captionStreams: {},
      metadata: []
    };

  if (this.pendingTracks < this.numberOfTracks) {
    if (flushSource !== 'VideoSegmentStream' &&
        flushSource !== 'AudioSegmentStream') {
      // Return because we haven't received a flush from a data-generating
      // portion of the segment (meaning that we have only received meta-data
      // or captions.)
      return;
    } else if (this.pendingTracks === 0) {
      // In the case where we receive a flush without any data having been
      // received we consider it an emitted track for the purposes of coalescing
      // `done` events.
      // We do this for the case where there is an audio and video track in the
      // segment but no audio data. (seen in several playlists with alternate
      // audio tracks and no audio present in the main TS segments.)
      this.processedTracks++;

      if (this.processedTracks < this.numberOfTracks) {
        return;
      }
    }
  }

  this.processedTracks += this.pendingTracks;
  this.pendingTracks = 0;

  if (this.processedTracks < this.numberOfTracks) {
    return;
  }

  // Prefer the video timeline origin; fall back to audio.
  // NOTE(review): if neither track is set, timelineStartPts stays undefined
  // and the cue-time arithmetic below yields NaN -- presumably unreachable
  // because at least one track must have flushed; confirm.
  if (this.videoTrack) {
    timelineStartPts = this.videoTrack.timelineStartInfo.pts;
  } else if (this.audioTrack) {
    timelineStartPts = this.audioTrack.timelineStartInfo.pts;
  }

  event.tags.videoTags = this.videoTags;
  event.tags.audioTags = this.audioTags;

  // Translate caption PTS times into second offsets into the
  // video timeline for the segment, and add track info
  for (i = 0; i < this.pendingCaptions.length; i++) {
    caption = this.pendingCaptions[i];
    caption.startTime = caption.startPts - timelineStartPts;
    caption.startTime /= 90e3; // 90kHz clock ticks -> seconds
    caption.endTime = caption.endPts - timelineStartPts;
    caption.endTime /= 90e3;
    event.captionStreams[caption.stream] = true;
    event.captions.push(caption);
  }

  // Translate ID3 frame PTS times into second offsets into the
  // video timeline for the segment
  for (i = 0; i < this.pendingMetadata.length; i++) {
    id3 = this.pendingMetadata[i];
    id3.cueTime = id3.pts - timelineStartPts;
    id3.cueTime /= 90e3;
    event.metadata.push(id3);
  }
  // We add this to every single emitted segment even though we only need
  // it for the first
  event.metadata.dispatchType = this.metadataStream.dispatchType;

  // Reset stream state
  this.videoTrack = null;
  this.audioTrack = null;
  this.videoTags = [];
  this.audioTags = [];
  this.pendingCaptions.length = 0;
  this.pendingMetadata.length = 0;
  this.pendingTracks = 0;
  this.processedTracks = 0;

  // Emit the final segment
  this.trigger('data', event);

  this.trigger('done');
};

module.exports = CoalesceStream;
  796.  
  797. },{"17":17}],4:[function(require,module,exports){
  798. /**
  799. * mux.js
  800. *
  801. * Copyright (c) Brightcove
  802. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  803. */
  804. 'use strict';
  805.  
  806. var FlvTag = require(5);
  807.  
  808. // For information on the FLV format, see
  809. // http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf.
  810. // Technically, this function returns the header and a metadata FLV tag
  811. // if duration is greater than zero
  812. // duration in seconds
  813. // @return {object} the bytes of the FLV header as a Uint8Array
  814. var getFlvHeader = function(duration, audio, video) { // :ByteArray {
  815. var
  816. headBytes = new Uint8Array(3 + 1 + 1 + 4),
  817. head = new DataView(headBytes.buffer),
  818. metadata,
  819. result,
  820. metadataLength;
  821.  
  822. // default arguments
  823. duration = duration || 0;
  824. audio = audio === undefined ? true : audio;
  825. video = video === undefined ? true : video;
  826.  
  827. // signature
  828. head.setUint8(0, 0x46); // 'F'
  829. head.setUint8(1, 0x4c); // 'L'
  830. head.setUint8(2, 0x56); // 'V'
  831.  
  832. // version
  833. head.setUint8(3, 0x01);
  834.  
  835. // flags
  836. head.setUint8(4, (audio ? 0x04 : 0x00) | (video ? 0x01 : 0x00));
  837.  
  838. // data offset, should be 9 for FLV v1
  839. head.setUint32(5, headBytes.byteLength);
  840.  
  841. // init the first FLV tag
  842. if (duration <= 0) {
  843. // no duration available so just write the first field of the first
  844. // FLV tag
  845. result = new Uint8Array(headBytes.byteLength + 4);
  846. result.set(headBytes);
  847. result.set([0, 0, 0, 0], headBytes.byteLength);
  848. return result;
  849. }
  850.  
  851. // write out the duration metadata tag
  852. metadata = new FlvTag(FlvTag.METADATA_TAG);
  853. metadata.pts = metadata.dts = 0;
  854. metadata.writeMetaDataDouble('duration', duration);
  855. metadataLength = metadata.finalize().length;
  856. result = new Uint8Array(headBytes.byteLength + metadataLength);
  857. result.set(headBytes);
  858. result.set(head.byteLength, metadataLength);
  859.  
  860. return result;
  861. };
  862.  
  863. module.exports = getFlvHeader;
  864.  
  865. },{"5":5}],5:[function(require,module,exports){
  866. /**
  867. * mux.js
  868. *
  869. * Copyright (c) Brightcove
  870. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  871. *
  872. * An object that stores the bytes of an FLV tag and methods for
  873. * querying and manipulating that data.
  874. * @see http://download.macromedia.com/f4v/video_file_format_spec_v10_1.pdf
  875. */
  876. 'use strict';
  877.  
  878. var FlvTag;
  879.  
  880. // (type:uint, extraData:Boolean = false) extends ByteArray
  881. FlvTag = function(type, extraData) {
  882. var
  883. // Counter if this is a metadata tag, nal start marker if this is a video
  884. // tag. unused if this is an audio tag
  885. adHoc = 0, // :uint
  886.  
  887. // The default size is 16kb but this is not enough to hold iframe
  888. // data and the resizing algorithm costs a bit so we create a larger
  889. // starting buffer for video tags
  890. bufferStartSize = 16384,
  891.  
// checks whether the FLV tag has enough capacity to accept the proposed
// write and re-allocates the internal buffers if necessary
// (flv:FlvTag, count:uint) — grows the backing store geometrically (2x)
prepareWrite = function(flv, count) {
  var
    bytes,
    minLength = flv.position + count;
  if (minLength < flv.bytes.byteLength) {
    // there's enough capacity so do nothing
    return;
  }

  // allocate a new buffer and copy over the data that will not be modified
  bytes = new Uint8Array(minLength * 2);
  bytes.set(flv.bytes.subarray(0, flv.position), 0);
  flv.bytes = bytes;
  // the DataView wraps the old ArrayBuffer, so it must be rebuilt too
  flv.view = new DataView(flv.bytes.buffer);
},

// commonly used metadata properties
// NOTE(review): cached on the FlvTag function itself so every tag instance
// shares the same pre-encoded property-name byte arrays
widthBytes = FlvTag.widthBytes || new Uint8Array('width'.length),
heightBytes = FlvTag.heightBytes || new Uint8Array('height'.length),
videocodecidBytes = FlvTag.videocodecidBytes || new Uint8Array('videocodecid'.length),
i;

if (!FlvTag.widthBytes) {
  // calculating the bytes of common metadata names ahead of time makes the
  // corresponding writes faster because we don't have to loop over the
  // characters
  // re-test with test/perf.html if you're planning on changing this
  for (i = 0; i < 'width'.length; i++) {
    widthBytes[i] = 'width'.charCodeAt(i);
  }
  for (i = 0; i < 'height'.length; i++) {
    heightBytes[i] = 'height'.charCodeAt(i);
  }
  for (i = 0; i < 'videocodecid'.length; i++) {
    videocodecidBytes[i] = 'videocodecid'.charCodeAt(i);
  }

  FlvTag.widthBytes = widthBytes;
  FlvTag.heightBytes = heightBytes;
  FlvTag.videocodecidBytes = videocodecidBytes;
}

this.keyFrame = false; // :Boolean

// `length` counts the bytes currently considered part of the tag, header
// included; the initial value reserves room for the type-specific header
// that finalize() fills in later
switch (type) {
case FlvTag.VIDEO_TAG:
  this.length = 16;
  // Start the buffer at 256k
  bufferStartSize *= 6;
  break;
case FlvTag.AUDIO_TAG:
  this.length = 13;
  this.keyFrame = true;
  break;
case FlvTag.METADATA_TAG:
  this.length = 29;
  this.keyFrame = true;
  break;
default:
  throw new Error('Unknown FLV tag type');
}

// backing storage; grown on demand by prepareWrite()
this.bytes = new Uint8Array(bufferStartSize);
this.view = new DataView(this.bytes.buffer);
this.bytes[0] = type;
this.position = this.length;
this.keyFrame = extraData; // Defaults to false

// presentation timestamp
this.pts = 0;
// decoder timestamp
this.dts = 0;
  966.  
  967. // ByteArray#writeBytes(bytes:ByteArray, offset:uint = 0, length:uint = 0)
  968. this.writeBytes = function(bytes, offset, length) {
  969. var
  970. start = offset || 0,
  971. end;
  972. length = length || bytes.byteLength;
  973. end = start + length;
  974.  
  975. prepareWrite(this, length);
  976. this.bytes.set(bytes.subarray(start, end), this.position);
  977.  
  978. this.position += length;
  979. this.length = Math.max(this.length, this.position);
  980. };
  981.  
  982. // ByteArray#writeByte(value:int):void
  983. this.writeByte = function(byte) {
  984. prepareWrite(this, 1);
  985. this.bytes[this.position] = byte;
  986. this.position++;
  987. this.length = Math.max(this.length, this.position);
  988. };
  989.  
  990. // ByteArray#writeShort(value:int):void
  991. this.writeShort = function(short) {
  992. prepareWrite(this, 2);
  993. this.view.setUint16(this.position, short);
  994. this.position += 2;
  995. this.length = Math.max(this.length, this.position);
  996. };
  997.  
// Negative index into array
// (pos:uint):int
// Returns the byte `pos` positions back from the end of the tag data.
this.negIndex = function(pos) {
  return this.bytes[this.length - pos];
};
  1003.  
  1004. // The functions below ONLY work when this[0] == VIDEO_TAG.
  1005. // We are not going to check for that because we dont want the overhead
  1006. // (nal:ByteArray = null):int
  1007. this.nalUnitSize = function() {
  1008. if (adHoc === 0) {
  1009. return 0;
  1010. }
  1011.  
  1012. return this.length - (adHoc + 4);
  1013. };
  1014.  
  1015. this.startNalUnit = function() {
  1016. // remember position and add 4 bytes
  1017. if (adHoc > 0) {
  1018. throw new Error('Attempted to create new NAL wihout closing the old one');
  1019. }
  1020.  
  1021. // reserve 4 bytes for nal unit size
  1022. adHoc = this.length;
  1023. this.length += 4;
  1024. this.position = this.length;
  1025. };
  1026.  
// (nal:ByteArray = null):void
// Close the NAL unit opened by startNalUnit(): back-fill the reserved
// 4-byte length prefix and, optionally, push a view of the NAL payload
// onto `nalContainer`.
this.endNalUnit = function(nalContainer) {
  var
    nalStart, // :uint
    nalLength; // :uint

  // Rewind to the marker and write the size
  if (this.length === adHoc + 4) {
    // we started a nal unit, but didnt write one, so roll back the 4 byte size value
    this.length -= 4;
  } else if (adHoc > 0) {
    nalStart = adHoc + 4;
    nalLength = this.length - nalStart;

    // jump back to the reserved slot, write the size, then restore position
    this.position = adHoc;
    this.view.setUint32(this.position, nalLength);
    this.position = this.length;

    if (nalContainer) {
      // Add the tag to the NAL unit
      // NOTE: subarray shares the backing buffer — a later reallocation by
      // prepareWrite() will not affect this view, but in-place writes would
      nalContainer.push(this.bytes.subarray(nalStart, nalStart + nalLength));
    }
  }

  // no NAL unit is in progress any more
  adHoc = 0;
};
  1053.  
/**
 * Write out a 64-bit floating point valued metadata property. This method is
 * called frequently during a typical parse and needs to be fast.
 */
// (key:String, val:Number):void
this.writeMetaDataDouble = function(key, val) {
  var i;
  // 2 bytes of name length + the name itself + 1 type byte + 8-byte double
  prepareWrite(this, 2 + key.length + 9);

  // write size of property name
  this.view.setUint16(this.position, key.length);
  this.position += 2;

  // this next part looks terrible but it improves parser throughput by
  // 10kB/s in my testing

  // write property name (the common names use byte arrays pre-computed in
  // the constructor; increments are the hard-coded name lengths)
  if (key === 'width') {
    this.bytes.set(widthBytes, this.position);
    this.position += 5;
  } else if (key === 'height') {
    this.bytes.set(heightBytes, this.position);
    this.position += 6;
  } else if (key === 'videocodecid') {
    this.bytes.set(videocodecidBytes, this.position);
    this.position += 12;
  } else {
    for (i = 0; i < key.length; i++) {
      this.bytes[this.position] = key.charCodeAt(i);
      this.position++;
    }
  }

  // skip null byte
  // NOTE(review): the skipped byte is left as whatever the buffer holds;
  // Uint8Array starts zero-filled so it reads as 0x00 unless this region
  // was previously written — confirm callers never rewind over it
  this.position++;

  // write property value
  this.view.setFloat64(this.position, val);
  this.position += 8;

  // update flv tag length
  this.length = Math.max(this.length, this.position);
  // count this property for the array header that finalize() writes
  ++adHoc;
};
  1098.  
// (key:String, val:Boolean):void
// Write out a boolean-valued metadata property: 2-byte name length, the
// name bytes, a 0x01 type byte, then a 0x00/0x01 value byte.
this.writeMetaDataBoolean = function(key, val) {
  var i;
  prepareWrite(this, 2);
  // write size of property name
  this.view.setUint16(this.position, key.length);
  this.position += 2;
  for (i = 0; i < key.length; i++) {
    // if key.charCodeAt(i) >= 255, handle error
    // capacity is grown one byte at a time here, unlike writeMetaDataDouble
    prepareWrite(this, 1);
    this.bytes[this.position] = key.charCodeAt(i);
    this.position++;
  }
  prepareWrite(this, 2);
  // type byte
  this.view.setUint8(this.position, 0x01);
  this.position++;
  // value byte
  this.view.setUint8(this.position, val ? 0x01 : 0x00);
  this.position++;
  this.length = Math.max(this.length, this.position);
  // count this property for the array header that finalize() writes
  ++adHoc;
};
  1119.  
// ():ByteArray
// Fill in the FLV tag header (DataSize, Timestamp, StreamID), the
// type-specific body prefix, and the trailing PreviousTagSize field, then
// trim the backing buffer down to the bytes actually used.
this.finalize = function() {
  var
    dtsDelta, // :int
    len; // :int

  switch (this.bytes[0]) {
  // Video Data
  case FlvTag.VIDEO_TAG:
    // We only support AVC, 1 = key frame (for AVC, a seekable
    // frame), 2 = inter frame (for AVC, a non-seekable frame)
    this.bytes[11] = ((this.keyFrame || extraData) ? 0x10 : 0x20) | 0x07;
    // AVCPacketType: 0x00 = sequence header (extra data), 0x01 = NALUs
    this.bytes[12] = extraData ? 0x00 : 0x01;

    // composition time: 24-bit big-endian offset of PTS from DTS
    dtsDelta = this.pts - this.dts;
    this.bytes[13] = (dtsDelta & 0x00FF0000) >>> 16;
    this.bytes[14] = (dtsDelta & 0x0000FF00) >>> 8;
    this.bytes[15] = (dtsDelta & 0x000000FF) >>> 0;
    break;

  case FlvTag.AUDIO_TAG:
    this.bytes[11] = 0xAF; // 44 kHz, 16-bit stereo
    // 0x00 = AAC sequence header, 0x01 = AAC raw
    this.bytes[12] = extraData ? 0x00 : 0x01;
    break;

  case FlvTag.METADATA_TAG:
    // rewrite the reserved header area with the "onMetaData" preamble
    this.position = 11;
    this.view.setUint8(this.position, 0x02); // String type
    this.position++;
    this.view.setUint16(this.position, 0x0A); // 10 Bytes
    this.position += 2;
    // set "onMetaData"
    this.bytes.set([0x6f, 0x6e, 0x4d, 0x65,
      0x74, 0x61, 0x44, 0x61,
      0x74, 0x61], this.position);
    this.position += 10;
    this.bytes[this.position] = 0x08; // Array type
    this.position++;
    // adHoc counts the properties written via writeMetaData*()
    this.view.setUint32(this.position, adHoc);
    this.position = this.length;
    this.bytes.set([0, 0, 9], this.position);
    this.position += 3; // End Data Tag
    this.length = this.position;
    break;
  }

  len = this.length - 11;

  // write the DataSize field
  this.bytes[ 1] = (len & 0x00FF0000) >>> 16;
  this.bytes[ 2] = (len & 0x0000FF00) >>> 8;
  this.bytes[ 3] = (len & 0x000000FF) >>> 0;
  // write the Timestamp (lower 24 bits, then the extended upper byte)
  this.bytes[ 4] = (this.dts & 0x00FF0000) >>> 16;
  this.bytes[ 5] = (this.dts & 0x0000FF00) >>> 8;
  this.bytes[ 6] = (this.dts & 0x000000FF) >>> 0;
  this.bytes[ 7] = (this.dts & 0xFF000000) >>> 24;
  // write the StreamID (always zero)
  this.bytes[ 8] = 0;
  this.bytes[ 9] = 0;
  this.bytes[10] = 0;

  // Sometimes we're at the end of the view and have one slot to write a
  // uint32, so, prepareWrite of count 4, since, view is uint8
  prepareWrite(this, 4);
  // trailing PreviousTagSize field
  this.view.setUint32(this.length, this.length);
  this.length += 4;
  this.position += 4;

  // trim down the byte buffer to what is actually being used
  this.bytes = this.bytes.subarray(0, this.length);
  this.frameTime = FlvTag.frameTime(this.bytes);
  // if bytes.bytelength isn't equal to this.length, handle error
  return this;
};
  1195. };
  1196.  
// FLV tag type markers: the first byte of every tag identifies its kind
FlvTag.AUDIO_TAG = 0x08; // == 8, :uint
FlvTag.VIDEO_TAG = 0x09; // == 9, :uint
FlvTag.METADATA_TAG = 0x12; // == 18, :uint
  1200.  
  1201. // (tag:ByteArray):Boolean {
  1202. FlvTag.isAudioFrame = function(tag) {
  1203. return FlvTag.AUDIO_TAG === tag[0];
  1204. };
  1205.  
  1206. // (tag:ByteArray):Boolean {
  1207. FlvTag.isVideoFrame = function(tag) {
  1208. return FlvTag.VIDEO_TAG === tag[0];
  1209. };
  1210.  
  1211. // (tag:ByteArray):Boolean {
  1212. FlvTag.isMetaData = function(tag) {
  1213. return FlvTag.METADATA_TAG === tag[0];
  1214. };
  1215.  
  1216. // (tag:ByteArray):Boolean {
  1217. FlvTag.isKeyFrame = function(tag) {
  1218. if (FlvTag.isVideoFrame(tag)) {
  1219. return tag[11] === 0x17;
  1220. }
  1221.  
  1222. if (FlvTag.isAudioFrame(tag)) {
  1223. return true;
  1224. }
  1225.  
  1226. if (FlvTag.isMetaData(tag)) {
  1227. return true;
  1228. }
  1229.  
  1230. return false;
  1231. };
  1232.  
  1233. // (tag:ByteArray):uint {
  1234. FlvTag.frameTime = function(tag) {
  1235. var pts = tag[ 4] << 16; // :uint
  1236. pts |= tag[ 5] << 8;
  1237. pts |= tag[ 6] << 0;
  1238. pts |= tag[ 7] << 24;
  1239. return pts;
  1240. };
  1241.  
  1242. module.exports = FlvTag;
  1243.  
  1244. },{}],6:[function(require,module,exports){
  1245. /**
  1246. * mux.js
  1247. *
  1248. * Copyright (c) Brightcove
  1249. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  1250. */
// public surface of the FLV transmuxing sub-package
module.exports = {
  tag: require(5), // the FlvTag constructor (module 5 above)
  Transmuxer: require(8),
  getFlvHeader: require(4)
};
  1256.  
  1257. },{"4":4,"5":5,"8":8}],7:[function(require,module,exports){
  1258. /**
  1259. * mux.js
  1260. *
  1261. * Copyright (c) Brightcove
  1262. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  1263. */
  1264. 'use strict';
  1265.  
  1266. var TagList = function() {
  1267. var self = this;
  1268.  
  1269. this.list = [];
  1270.  
  1271. this.push = function(tag) {
  1272. this.list.push({
  1273. bytes: tag.bytes,
  1274. dts: tag.dts,
  1275. pts: tag.pts,
  1276. keyFrame: tag.keyFrame,
  1277. metaDataTag: tag.metaDataTag
  1278. });
  1279. };
  1280.  
  1281. Object.defineProperty(this, 'length', {
  1282. get: function() {
  1283. return self.list.length;
  1284. }
  1285. });
  1286. };
  1287.  
  1288. module.exports = TagList;
  1289.  
  1290. },{}],8:[function(require,module,exports){
  1291. /**
  1292. * mux.js
  1293. *
  1294. * Copyright (c) Brightcove
  1295. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  1296. */
  1297. 'use strict';
  1298.  
  1299. var Stream = require(17);
  1300. var FlvTag = require(5);
  1301. var m2ts = require(10);
  1302. var AdtsStream = require(1);
  1303. var H264Stream = require(2).H264Stream;
  1304. var CoalesceStream = require(3);
  1305. var TagList = require(7);
  1306.  
  1307. var
  1308. Transmuxer,
  1309. VideoSegmentStream,
  1310. AudioSegmentStream,
  1311. collectTimelineInfo,
  1312. metaDataTag,
  1313. extraDataTag;
  1314.  
  1315. /**
  1316. * Store information about the start and end of the tracka and the
  1317. * duration for each frame/sample we process in order to calculate
  1318. * the baseMediaDecodeTime
  1319. */
  1320. collectTimelineInfo = function(track, data) {
  1321. if (typeof data.pts === 'number') {
  1322. if (track.timelineStartInfo.pts === undefined) {
  1323. track.timelineStartInfo.pts = data.pts;
  1324. } else {
  1325. track.timelineStartInfo.pts =
  1326. Math.min(track.timelineStartInfo.pts, data.pts);
  1327. }
  1328. }
  1329.  
  1330. if (typeof data.dts === 'number') {
  1331. if (track.timelineStartInfo.dts === undefined) {
  1332. track.timelineStartInfo.dts = data.dts;
  1333. } else {
  1334. track.timelineStartInfo.dts =
  1335. Math.min(track.timelineStartInfo.dts, data.dts);
  1336. }
  1337. }
  1338. };
  1339.  
  1340. metaDataTag = function(track, pts) {
  1341. var
  1342. tag = new FlvTag(FlvTag.METADATA_TAG); // :FlvTag
  1343.  
  1344. tag.dts = pts;
  1345. tag.pts = pts;
  1346.  
  1347. tag.writeMetaDataDouble('videocodecid', 7);
  1348. tag.writeMetaDataDouble('width', track.width);
  1349. tag.writeMetaDataDouble('height', track.height);
  1350.  
  1351. return tag;
  1352. };
  1353.  
  1354. extraDataTag = function(track, pts) {
  1355. var
  1356. i,
  1357. tag = new FlvTag(FlvTag.VIDEO_TAG, true);
  1358.  
  1359. tag.dts = pts;
  1360. tag.pts = pts;
  1361.  
  1362. tag.writeByte(0x01);// version
  1363. tag.writeByte(track.profileIdc);// profile
  1364. tag.writeByte(track.profileCompatibility);// compatibility
  1365. tag.writeByte(track.levelIdc);// level
  1366. tag.writeByte(0xFC | 0x03); // reserved (6 bits), NULA length size - 1 (2 bits)
  1367. tag.writeByte(0xE0 | 0x01); // reserved (3 bits), num of SPS (5 bits)
  1368. tag.writeShort(track.sps[0].length); // data of SPS
  1369. tag.writeBytes(track.sps[0]); // SPS
  1370.  
  1371. tag.writeByte(track.pps.length); // num of PPS (will there ever be more that 1 PPS?)
  1372. for (i = 0; i < track.pps.length; ++i) {
  1373. tag.writeShort(track.pps[i].length); // 2 bytes for length of PPS
  1374. tag.writeBytes(track.pps[i]); // data of PPS
  1375. }
  1376.  
  1377. return tag;
  1378. };
  1379.  
/**
 * Constructs a single-track, media segment from AAC data
 * events. The output of this stream can be fed to flash.
 */
AudioSegmentStream = function(track) {
  var
    adtsFrames = [],       // frames buffered between push() and flush()
    videoKeyFrames = [],   // PTS values reported via onVideoKeyFrame()
    oldExtraData;          // last AudioSpecificConfig value written out

  AudioSegmentStream.prototype.init.call(this);

  // Buffer an ADTS frame and refresh the track's audio configuration
  // from it; timestamps are converted from 90kHz ticks to milliseconds.
  this.push = function(data) {
    collectTimelineInfo(track, data);

    if (track) {
      track.audioobjecttype = data.audioobjecttype;
      track.channelcount = data.channelcount;
      track.samplerate = data.samplerate;
      track.samplingfrequencyindex = data.samplingfrequencyindex;
      track.samplesize = data.samplesize;
      // pack the audio configuration into a single 16-bit value
      track.extraData = (track.audioobjecttype << 11) |
        (track.samplingfrequencyindex << 7) |
        (track.channelcount << 3);
    }

    data.pts = Math.round(data.pts / 90);
    data.dts = Math.round(data.dts / 90);

    // buffer audio data until end() is called
    adtsFrames.push(data);
  };

  // Drain buffered frames into FLV audio tags, interleaving metadata tags
  // at each reported video key frame, whenever the audio configuration
  // changes, and at least once per second.
  this.flush = function() {
    var currentFrame, adtsFrame, lastMetaPts, tags = new TagList();
    // return early if no audio data has been observed
    if (adtsFrames.length === 0) {
      this.trigger('done', 'AudioSegmentStream');
      return;
    }

    lastMetaPts = -Infinity;

    while (adtsFrames.length) {
      currentFrame = adtsFrames.shift();

      // write out a metadata frame at every video key frame
      if (videoKeyFrames.length && currentFrame.pts >= videoKeyFrames[0]) {
        lastMetaPts = videoKeyFrames.shift();
        this.writeMetaDataTags(tags, lastMetaPts);
      }

      // also write out metadata tags every 1 second so that the decoder
      // is re-initialized quickly after seeking into a different
      // audio configuration.
      if (track.extraData !== oldExtraData || currentFrame.pts - lastMetaPts >= 1000) {
        this.writeMetaDataTags(tags, currentFrame.pts);
        oldExtraData = track.extraData;
        lastMetaPts = currentFrame.pts;
      }

      adtsFrame = new FlvTag(FlvTag.AUDIO_TAG);
      adtsFrame.pts = currentFrame.pts;
      adtsFrame.dts = currentFrame.dts;

      adtsFrame.writeBytes(currentFrame.data);

      tags.push(adtsFrame.finalize());
    }

    // reset per-segment state before emitting
    videoKeyFrames.length = 0;
    oldExtraData = null;
    this.trigger('data', {track: track, tags: tags.list});

    this.trigger('done', 'AudioSegmentStream');
  };

  // Emit the tag pair that (re-)initializes the audio decoder at `pts`:
  // an onMetaData tag plus an audio tag carrying the packed configuration.
  this.writeMetaDataTags = function(tags, pts) {
    var adtsFrame;

    adtsFrame = new FlvTag(FlvTag.METADATA_TAG);
    // For audio, DTS is always the same as PTS. We want to set the DTS
    // however so we can compare with video DTS to determine approximate
    // packet order
    adtsFrame.pts = pts;
    adtsFrame.dts = pts;

    // AAC is always 10
    adtsFrame.writeMetaDataDouble('audiocodecid', 10);
    adtsFrame.writeMetaDataBoolean('stereo', track.channelcount === 2);
    adtsFrame.writeMetaDataDouble('audiosamplerate', track.samplerate);
    // Is AAC always 16 bit?
    adtsFrame.writeMetaDataDouble('audiosamplesize', 16);

    tags.push(adtsFrame.finalize());

    adtsFrame = new FlvTag(FlvTag.AUDIO_TAG, true);
    // For audio, DTS is always the same as PTS. We want to set the DTS
    // however so we can compare with video DTS to determine approximate
    // packet order
    adtsFrame.pts = pts;
    adtsFrame.dts = pts;

    // write the configuration value packed in push() straight into the body
    adtsFrame.view.setUint16(adtsFrame.position, track.extraData);
    adtsFrame.position += 2;
    adtsFrame.length = Math.max(adtsFrame.length, adtsFrame.position);

    tags.push(adtsFrame.finalize());
  };

  // Record a video key frame PTS so a metadata tag pair can be emitted
  // at (or just after) that point in the audio timeline.
  this.onVideoKeyFrame = function(pts) {
    videoKeyFrames.push(pts);
  };
};
  1494. AudioSegmentStream.prototype = new Stream();
  1495.  
/**
 * Store FlvTags for the h264 stream
 * @param track {object} track metadata configuration
 */
VideoSegmentStream = function(track) {
  var
    nalUnits = [],  // NAL units buffered between push() and flush()
    config,         // most recent SPS-derived configuration
    h264Frame;      // FlvTag for the frame currently being assembled
  VideoSegmentStream.prototype.init.call(this);

  // Finalize a completed frame tag, prefixing it with fresh metadata and
  // decoder-configuration tags when the track configuration has changed.
  this.finishFrame = function(tags, frame) {
    if (!frame) {
      return;
    }
    // Check if keyframe and the length of tags.
    // This makes sure we write metadata on the first frame of a segment.
    if (config && track && track.newMetadata &&
        (frame.keyFrame || tags.length === 0)) {
      // Push extra data on every IDR frame in case we did a stream change + seek
      var metaTag = metaDataTag(config, frame.dts).finalize();
      var extraTag = extraDataTag(track, frame.dts).finalize();

      // mark these so consumers can tell them apart from media tags
      metaTag.metaDataTag = extraTag.metaDataTag = true;

      tags.push(metaTag);
      tags.push(extraTag);
      track.newMetadata = false;

      // lets the audio pipeline align its metadata tags with this key frame
      this.trigger('keyframe', frame.dts);
    }

    frame.endNalUnit();
    tags.push(frame.finalize());
    h264Frame = null;
  };

  // Buffer a NAL unit until flush() groups them into frames; timestamps
  // are converted from 90kHz ticks to milliseconds.
  this.push = function(data) {
    collectTimelineInfo(track, data);

    data.pts = Math.round(data.pts / 90);
    data.dts = Math.round(data.dts / 90);

    // buffer video until flush() is called
    nalUnits.push(data);
  };

  // Group buffered NAL units into frames (delimited by access unit
  // delimiters) and emit the resulting FLV tags.
  this.flush = function() {
    var
      currentNal,
      tags = new TagList();

    // Throw away nalUnits at the start of the byte stream until we find
    // the first AUD
    while (nalUnits.length) {
      if (nalUnits[0].nalUnitType === 'access_unit_delimiter_rbsp') {
        break;
      }
      nalUnits.shift();
    }

    // return early if no video data has been observed
    if (nalUnits.length === 0) {
      this.trigger('done', 'VideoSegmentStream');
      return;
    }

    while (nalUnits.length) {
      currentNal = nalUnits.shift();

      // record the track config
      if (currentNal.nalUnitType === 'seq_parameter_set_rbsp') {
        track.newMetadata = true;
        config = currentNal.config;
        track.width = config.width;
        track.height = config.height;
        track.sps = [currentNal.data];
        track.profileIdc = config.profileIdc;
        track.levelIdc = config.levelIdc;
        track.profileCompatibility = config.profileCompatibility;
        h264Frame.endNalUnit();
      } else if (currentNal.nalUnitType === 'pic_parameter_set_rbsp') {
        track.newMetadata = true;
        track.pps = [currentNal.data];
        h264Frame.endNalUnit();
      } else if (currentNal.nalUnitType === 'access_unit_delimiter_rbsp') {
        // an AUD starts a new frame; finish the previous one, if any
        if (h264Frame) {
          this.finishFrame(tags, h264Frame);
        }
        h264Frame = new FlvTag(FlvTag.VIDEO_TAG);
        h264Frame.pts = currentNal.pts;
        h264Frame.dts = currentNal.dts;
      } else {
        if (currentNal.nalUnitType === 'slice_layer_without_partitioning_rbsp_idr') {
          // the current sample is a key frame
          h264Frame.keyFrame = true;
        }
        h264Frame.endNalUnit();
      }
      // append the current NAL's bytes to the in-progress frame tag
      h264Frame.startNalUnit();
      h264Frame.writeBytes(currentNal.data);
    }
    // flush out the final, still-open frame
    if (h264Frame) {
      this.finishFrame(tags, h264Frame);
    }

    this.trigger('data', {track: track, tags: tags.list});

    // Continue with the flush process now
    this.trigger('done', 'VideoSegmentStream');
  };
};
  1608.  
  1609. VideoSegmentStream.prototype = new Stream();
  1610.  
/**
 * An object that incrementally transmuxes MPEG2 Transport Stream
 * chunks into an FLV.
 */
Transmuxer = function(options) {
  var
    self = this,

    packetStream, parseStream, elementaryStream,
    videoTimestampRolloverStream, audioTimestampRolloverStream,
    timedMetadataTimestampRolloverStream,
    adtsStream, h264Stream,
    videoSegmentStream, audioSegmentStream, captionStream,
    coalesceStream;

  Transmuxer.prototype.init.call(this);

  options = options || {};

  // expose the metadata stream
  this.metadataStream = new m2ts.MetadataStream();

  options.metadataStream = this.metadataStream;

  // set up the parsing pipeline
  packetStream = new m2ts.TransportPacketStream();
  parseStream = new m2ts.TransportParseStream();
  elementaryStream = new m2ts.ElementaryStream();
  videoTimestampRolloverStream = new m2ts.TimestampRolloverStream('video');
  audioTimestampRolloverStream = new m2ts.TimestampRolloverStream('audio');
  timedMetadataTimestampRolloverStream = new m2ts.TimestampRolloverStream('timed-metadata');

  adtsStream = new AdtsStream();
  h264Stream = new H264Stream();
  coalesceStream = new CoalesceStream(options);

  // disassemble MPEG2-TS packets into elementary streams
  packetStream
    .pipe(parseStream)
    .pipe(elementaryStream);

  // !!THIS ORDER IS IMPORTANT!!
  // demux the streams
  elementaryStream
    .pipe(videoTimestampRolloverStream)
    .pipe(h264Stream);
  elementaryStream
    .pipe(audioTimestampRolloverStream)
    .pipe(adtsStream);

  elementaryStream
    .pipe(timedMetadataTimestampRolloverStream)
    .pipe(this.metadataStream)
    .pipe(coalesceStream);
  // if CEA-708 parsing is available, hook up a caption stream
  captionStream = new m2ts.CaptionStream();
  h264Stream.pipe(captionStream)
    .pipe(coalesceStream);

  // hook up the segment streams once track metadata is delivered;
  // the segment streams are created lazily because the track list is
  // only known after the PMT has been parsed
  elementaryStream.on('data', function(data) {
    var i, videoTrack, audioTrack;

    if (data.type === 'metadata') {
      i = data.tracks.length;

      // scan the tracks listed in the metadata
      while (i--) {
        if (data.tracks[i].type === 'video') {
          videoTrack = data.tracks[i];
        } else if (data.tracks[i].type === 'audio') {
          audioTrack = data.tracks[i];
        }
      }

      // hook up the video segment stream to the first track with h264 data
      if (videoTrack && !videoSegmentStream) {
        coalesceStream.numberOfTracks++;
        videoSegmentStream = new VideoSegmentStream(videoTrack);

        // Set up the final part of the video pipeline
        h264Stream
          .pipe(videoSegmentStream)
          .pipe(coalesceStream);
      }

      if (audioTrack && !audioSegmentStream) {
        // hook up the audio segment stream to the first track with aac data
        coalesceStream.numberOfTracks++;
        audioSegmentStream = new AudioSegmentStream(audioTrack);

        // Set up the final part of the audio pipeline
        adtsStream
          .pipe(audioSegmentStream)
          .pipe(coalesceStream);

        if (videoSegmentStream) {
          // audio metadata tags are re-emitted at video key frames
          videoSegmentStream.on('keyframe', audioSegmentStream.onVideoKeyFrame);
        }
      }
    }
  });

  // feed incoming data to the front of the parsing pipeline
  this.push = function(data) {
    packetStream.push(data);
  };

  // flush any buffered data
  this.flush = function() {
    // Start at the top of the pipeline and flush all pending work
    packetStream.flush();
  };

  // Caption data has to be reset when seeking outside buffered range
  this.resetCaptions = function() {
    captionStream.reset();
  };

  // Re-emit any data coming from the coalesce stream to the outside world
  coalesceStream.on('data', function(event) {
    self.trigger('data', event);
  });

  // Let the consumer know we have finished flushing the entire pipeline
  coalesceStream.on('done', function() {
    self.trigger('done');
  });
};
  1740. Transmuxer.prototype = new Stream();
  1741.  
  1742. // forward compatibility
  1743. module.exports = Transmuxer;
  1744.  
  1745. },{"1":1,"10":10,"17":17,"2":2,"3":3,"5":5,"7":7}],9:[function(require,module,exports){
  1746. /**
  1747. * mux.js
  1748. *
  1749. * Copyright (c) Brightcove
  1750. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  1751. *
  1752. * Reads in-band caption information from a video elementary
  1753. * stream. Captions must follow the CEA-708 standard for injection
  1754. * into an MPEG-2 transport streams.
  1755. * @see https://en.wikipedia.org/wiki/CEA-708
  1756. * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
  1757. */
  1758.  
  1759. 'use strict';
  1760.  
  1761. // -----------------
  1762. // Link To Transport
  1763. // -----------------
  1764.  
  1765. var Stream = require(17);
  1766. var cea708Parser = require(14);
  1767.  
  1768. var CaptionStream = function() {
  1769.  
  1770. CaptionStream.prototype.init.call(this);
  1771.  
  1772. this.captionPackets_ = [];
  1773.  
  1774. this.ccStreams_ = [
  1775. new Cea608Stream(0, 0), // eslint-disable-line no-use-before-define
  1776. new Cea608Stream(0, 1), // eslint-disable-line no-use-before-define
  1777. new Cea608Stream(1, 0), // eslint-disable-line no-use-before-define
  1778. new Cea608Stream(1, 1) // eslint-disable-line no-use-before-define
  1779. ];
  1780.  
  1781. this.reset();
  1782.  
  1783. // forward data and done events from CCs to this CaptionStream
  1784. this.ccStreams_.forEach(function(cc) {
  1785. cc.on('data', this.trigger.bind(this, 'data'));
  1786. cc.on('partialdone', this.trigger.bind(this, 'partialdone'));
  1787. cc.on('done', this.trigger.bind(this, 'done'));
  1788. }, this);
  1789.  
  1790. };
  1791.  
  1792. CaptionStream.prototype = new Stream();
// Extract caption packets from an SEI NAL unit event, de-duplicating
// data from segments that were downloaded more than once.
CaptionStream.prototype.push = function(event) {
  var sei, userData, newCaptionPackets;

  // only examine SEI NALs
  if (event.nalUnitType !== 'sei_rbsp') {
    return;
  }

  // parse the sei
  sei = cea708Parser.parseSei(event.escapedRBSP);

  // ignore everything but user_data_registered_itu_t_t35
  if (sei.payloadType !== cea708Parser.USER_DATA_REGISTERED_ITU_T_T35) {
    return;
  }

  // parse out the user data payload
  userData = cea708Parser.parseUserData(sei);

  // ignore unrecognized userData
  if (!userData) {
    return;
  }

  // Sometimes, the same segment # will be downloaded twice. To stop the
  // caption data from being processed twice, we track the latest dts we've
  // received and ignore everything with a dts before that. However, since
  // data for a specific dts can be split across packets on either side of
  // a segment boundary, we need to make sure we *don't* ignore the packets
  // from the *next* segment that have dts === this.latestDts_. By constantly
  // tracking the number of packets received with dts === this.latestDts_, we
  // know how many should be ignored once we start receiving duplicates.
  if (event.dts < this.latestDts_) {
    // We've started getting older data, so set the flag.
    this.ignoreNextEqualDts_ = true;
    return;
  } else if ((event.dts === this.latestDts_) && (this.ignoreNextEqualDts_)) {
    this.numSameDts_--;
    if (!this.numSameDts_) {
      // We've received the last duplicate packet, time to start processing again
      this.ignoreNextEqualDts_ = false;
    }
    return;
  }

  // parse out CC data packets and save them for later
  newCaptionPackets = cea708Parser.parseCaptionPackets(event.pts, userData);
  this.captionPackets_ = this.captionPackets_.concat(newCaptionPackets);
  // update the duplicate-tracking counters for this dts
  if (this.latestDts_ !== event.dts) {
    this.numSameDts_ = 0;
  }
  this.numSameDts_++;
  this.latestDts_ = event.dts;
};
  1847.  
  1848. CaptionStream.prototype.flushCCStreams = function(flushType) {
  1849. this.ccStreams_.forEach(function(cc) {
  1850. return flushType === 'flush' ? cc.flush() : cc.partialFlush();
  1851. }, this);
  1852. };
  1853.  
  1854. CaptionStream.prototype.flushStream = function(flushType) {
  1855. // make sure we actually parsed captions before proceeding
  1856. if (!this.captionPackets_.length) {
  1857. this.flushCCStreams(flushType);
  1858. return;
  1859. }
  1860.  
  1861. // In Chrome, the Array#sort function is not stable so add a
  1862. // presortIndex that we can use to ensure we get a stable-sort
  1863. this.captionPackets_.forEach(function(elem, idx) {
  1864. elem.presortIndex = idx;
  1865. });
  1866.  
  1867. // sort caption byte-pairs based on their PTS values
  1868. this.captionPackets_.sort(function(a, b) {
  1869. if (a.pts === b.pts) {
  1870. return a.presortIndex - b.presortIndex;
  1871. }
  1872. return a.pts - b.pts;
  1873. });
  1874.  
  1875. this.captionPackets_.forEach(function(packet) {
  1876. if (packet.type < 2) {
  1877. // Dispatch packet to the right Cea608Stream
  1878. this.dispatchCea608Packet(packet);
  1879. }
  1880. // this is where an 'else' would go for a dispatching packets
  1881. // to a theoretical Cea708Stream that handles SERVICEn data
  1882. }, this);
  1883.  
  1884. this.captionPackets_.length = 0;
  1885. this.flushCCStreams(flushType);
  1886. };
  1887.  
// Fully flush buffered caption packets and the underlying 608 streams.
CaptionStream.prototype.flush = function() {
  return this.flushStream('flush');
};
  1891.  
// Only called if handling partial data — delegates to flushStream with
// the 'partialFlush' mode so each Cea608Stream partialFlush()es instead.
CaptionStream.prototype.partialFlush = function() {
  return this.flushStream('partialFlush');
};
  1896.  
  1897. CaptionStream.prototype.reset = function() {
  1898. this.latestDts_ = null;
  1899. this.ignoreNextEqualDts_ = false;
  1900. this.numSameDts_ = 0;
  1901. this.activeCea608Channel_ = [null, null];
  1902. this.ccStreams_.forEach(function(ccStream) {
  1903. ccStream.reset();
  1904. });
  1905. };
  1906.  
  1907. // From the CEA-608 spec:
  1908. /*
  1909. * When XDS sub-packets are interleaved with other services, the end of each sub-packet shall be followed
  1910. * by a control pair to change to a different service. When any of the control codes from 0x10 to 0x1F is
  1911. * used to begin a control code pair, it indicates the return to captioning or Text data. The control code pair
  1912. * and subsequent data should then be processed according to the FCC rules. It may be necessary for the
  1913. * line 21 data encoder to automatically insert a control code pair (i.e. RCL, RU2, RU3, RU4, RDC, or RTD)
  1914. * to switch to captioning or Text.
  1915. */
  1916. // With that in mind, we ignore any data between an XDS control code and a
  1917. // subsequent closed-captioning control code.
  1918. CaptionStream.prototype.dispatchCea608Packet = function(packet) {
  1919. // NOTE: packet.type is the CEA608 field
  1920. if (this.setsTextOrXDSActive(packet)) {
  1921. this.activeCea608Channel_[packet.type] = null;
  1922. } else if (this.setsChannel1Active(packet)) {
  1923. this.activeCea608Channel_[packet.type] = 0;
  1924. } else if (this.setsChannel2Active(packet)) {
  1925. this.activeCea608Channel_[packet.type] = 1;
  1926. }
  1927. if (this.activeCea608Channel_[packet.type] === null) {
  1928. // If we haven't received anything to set the active channel, or the
  1929. // packets are Text/XDS data, discard the data; we don't want jumbled
  1930. // captions
  1931. return;
  1932. }
  1933. this.ccStreams_[(packet.type << 1) + this.activeCea608Channel_[packet.type]].push(packet);
  1934. };
  1935.  
  1936. CaptionStream.prototype.setsChannel1Active = function(packet) {
  1937. return ((packet.ccData & 0x7800) === 0x1000);
  1938. };
  1939. CaptionStream.prototype.setsChannel2Active = function(packet) {
  1940. return ((packet.ccData & 0x7800) === 0x1800);
  1941. };
  1942. CaptionStream.prototype.setsTextOrXDSActive = function(packet) {
  1943. return ((packet.ccData & 0x7100) === 0x0100) ||
  1944. ((packet.ccData & 0x78fe) === 0x102a) ||
  1945. ((packet.ccData & 0x78fe) === 0x182a);
  1946. };
  1947.  
// ----------------------
// Session to Application
// ----------------------

// This hash maps non-ASCII, special, and extended character codes to their
// proper Unicode equivalent. The first keys that are only a single byte
// are the non-standard ASCII characters, which simply map the CEA608 byte
// to the standard ASCII/Unicode. The two-byte keys that follow are the CEA608
// character codes, but have their MSB bitmasked with 0x03 so that a lookup
// can be performed regardless of the field and data channel on which the
// character code was received.
var CHARACTER_TRANSLATION = {
  0x2a: 0xe1,     // á
  0x5c: 0xe9,     // é
  0x5e: 0xed,     // í
  0x5f: 0xf3,     // ó
  0x60: 0xfa,     // ú
  0x7b: 0xe7,     // ç
  0x7c: 0xf7,     // ÷
  0x7d: 0xd1,     // Ñ
  0x7e: 0xf1,     // ñ
  0x7f: 0x2588,   // █ (full block)
  0x0130: 0xae,   // ®
  0x0131: 0xb0,   // °
  0x0132: 0xbd,   // ½
  0x0133: 0xbf,   // ¿
  0x0134: 0x2122, // ™
  0x0135: 0xa2,   // ¢
  0x0136: 0xa3,   // £
  0x0137: 0x266a, // ♪
  0x0138: 0xe0,   // à
  0x0139: 0xa0,   // non-breaking space
  0x013a: 0xe8,   // è
  0x013b: 0xe2,   // â
  0x013c: 0xea,   // ê
  0x013d: 0xee,   // î
  0x013e: 0xf4,   // ô
  0x013f: 0xfb,   // û
  0x0220: 0xc1,   // Á
  0x0221: 0xc9,   // É
  0x0222: 0xd3,   // Ó
  0x0223: 0xda,   // Ú
  0x0224: 0xdc,   // Ü
  0x0225: 0xfc,   // ü
  0x0226: 0x2018, // ‘ (left single quote)
  0x0227: 0xa1,   // ¡
  0x0228: 0x2a,   // *
  0x0229: 0x27,   // '
  0x022a: 0x2014, // — (em dash)
  0x022b: 0xa9,   // ©
  0x022c: 0x2120, // ℠
  0x022d: 0x2022, // • (bullet)
  0x022e: 0x201c, // “ (left double quote)
  0x022f: 0x201d, // ” (right double quote)
  0x0230: 0xc0,   // À
  0x0231: 0xc2,   // Â
  0x0232: 0xc7,   // Ç
  0x0233: 0xc8,   // È
  0x0234: 0xca,   // Ê
  0x0235: 0xcb,   // Ë
  0x0236: 0xeb,   // ë
  0x0237: 0xce,   // Î
  0x0238: 0xcf,   // Ï
  0x0239: 0xef,   // ï
  0x023a: 0xd4,   // Ô
  0x023b: 0xd9,   // Ù
  0x023c: 0xf9,   // ù
  0x023d: 0xdb,   // Û
  0x023e: 0xab,   // «
  0x023f: 0xbb,   // »
  0x0320: 0xc3,   // Ã
  0x0321: 0xe3,   // ã
  0x0322: 0xcd,   // Í
  0x0323: 0xcc,   // Ì
  0x0324: 0xec,   // ì
  0x0325: 0xd2,   // Ò
  0x0326: 0xf2,   // ò
  0x0327: 0xd5,   // Õ
  0x0328: 0xf5,   // õ
  0x0329: 0x7b,   // {
  0x032a: 0x7d,   // }
  0x032b: 0x5c,   // \
  0x032c: 0x5e,   // ^
  0x032d: 0x5f,   // _
  0x032e: 0x7c,   // |
  0x032f: 0x7e,   // ~
  0x0330: 0xc4,   // Ä
  0x0331: 0xe4,   // ä
  0x0332: 0xd6,   // Ö
  0x0333: 0xf6,   // ö
  0x0334: 0xdf,   // ß
  0x0335: 0xa5,   // ¥
  0x0336: 0xa4,   // ¤
  0x0337: 0x2502, // │ (box drawing: vertical)
  0x0338: 0xc5,   // Å
  0x0339: 0xe5,   // å
  0x033a: 0xd8,   // Ø
  0x033b: 0xf8,   // ø
  0x033c: 0x250c, // ┌ (box drawing: down and right)
  0x033d: 0x2510, // ┐ (box drawing: down and left)
  0x033e: 0x2514, // └ (box drawing: up and right)
  0x033f: 0x2518  // ┘ (box drawing: up and left)
};
  2051.  
  2052. var getCharFromCode = function(code) {
  2053. if (code === null) {
  2054. return '';
  2055. }
  2056. code = CHARACTER_TRANSLATION[code] || code;
  2057. return String.fromCharCode(code);
  2058. };
  2059.  
// the index of the last row in a CEA-608 display buffer (rows 0..14)
var BOTTOM_ROW = 14;

// This array is used for mapping PACs -> row #, since there's no way of
// getting it through bit logic: the index of the matched code (data & 0x1f20)
// in this array is the target row.
var ROWS = [0x1100, 0x1120, 0x1200, 0x1220, 0x1500, 0x1520, 0x1600, 0x1620,
  0x1700, 0x1720, 0x1000, 0x1300, 0x1320, 0x1400, 0x1420];
  2067.  
  2068. // CEA-608 captions are rendered onto a 34x15 matrix of character
  2069. // cells. The "bottom" row is the last element in the outer array.
  2070. var createDisplayBuffer = function() {
  2071. var result = [], i = BOTTOM_ROW + 1;
  2072. while (i--) {
  2073. result.push('');
  2074. }
  2075. return result;
  2076. };
  2077.  
/**
 * A stream decoding one CEA-608 caption channel.
 *
 * @param field {number} the CEA-608 field (0 or 1); defaults to 0
 * @param dataChannel {number} the data channel within the field (0 or 1);
 *        defaults to 0
 *
 * Together field and dataChannel select one of CC1-CC4, reflected in name_.
 * Emits 'data' events (via flushDisplayed) describing completed captions.
 */
var Cea608Stream = function(field, dataChannel) {
  Cea608Stream.prototype.init.call(this);

  this.field_ = field || 0;
  this.dataChannel_ = dataChannel || 0;

  // CC1..CC4 label derived from field/channel bits
  this.name_ = 'CC' + (((this.field_ << 1) | this.dataChannel_) + 1);

  this.setConstants();
  this.reset();

  /**
   * Decode one caption byte-pair packet ({ pts, ccData }) and update the
   * display/non-display buffers and mode accordingly.
   */
  this.push = function(packet) {
    var data, swap, char0, char1, text;
    // remove the parity bits
    data = packet.ccData & 0x7f7f;

    // ignore duplicate control codes; the spec demands they're sent twice
    if (data === this.lastControlCode_) {
      this.lastControlCode_ = null;
      return;
    }

    // Store control codes (first byte 0x10-0x1f) so the duplicate can be
    // recognized; padding does not clear the remembered code.
    if ((data & 0xf000) === 0x1000) {
      this.lastControlCode_ = data;
    } else if (data !== this.PADDING_) {
      this.lastControlCode_ = null;
    }

    char0 = data >>> 8;
    char1 = data & 0xff;

    if (data === this.PADDING_) {
      return;

    } else if (data === this.RESUME_CAPTION_LOADING_) {
      this.mode_ = 'popOn';

    } else if (data === this.END_OF_CAPTION_) {
      // If an EOC is received while in paint-on mode, the displayed caption
      // text should be swapped to non-displayed memory as if it was a pop-on
      // caption. Because of that, we should explicitly switch back to pop-on
      // mode
      this.mode_ = 'popOn';
      this.clearFormatting(packet.pts);
      // if a caption was being displayed, it's gone now
      this.flushDisplayed(packet.pts);

      // flip memory
      swap = this.displayed_;
      this.displayed_ = this.nonDisplayed_;
      this.nonDisplayed_ = swap;

      // start measuring the time to display the caption
      this.startPts_ = packet.pts;

    } else if (data === this.ROLL_UP_2_ROWS_) {
      this.rollUpRows_ = 2;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_3_ROWS_) {
      this.rollUpRows_ = 3;
      this.setRollUp(packet.pts);
    } else if (data === this.ROLL_UP_4_ROWS_) {
      this.rollUpRows_ = 4;
      this.setRollUp(packet.pts);
    } else if (data === this.CARRIAGE_RETURN_) {
      // CR in roll-up: emit the current window, then scroll it up a row
      this.clearFormatting(packet.pts);
      this.flushDisplayed(packet.pts);
      this.shiftRowsUp_();
      this.startPts_ = packet.pts;

    } else if (data === this.BACKSPACE_) {
      // remove the last character from the active buffer's current row
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      }
    } else if (data === this.ERASE_DISPLAYED_MEMORY_) {
      this.flushDisplayed(packet.pts);
      this.displayed_ = createDisplayBuffer();
    } else if (data === this.ERASE_NON_DISPLAYED_MEMORY_) {
      this.nonDisplayed_ = createDisplayBuffer();

    } else if (data === this.RESUME_DIRECT_CAPTIONING_) {
      if (this.mode_ !== 'paintOn') {
        // NOTE: This should be removed when proper caption positioning is
        // implemented
        this.flushDisplayed(packet.pts);
        this.displayed_ = createDisplayBuffer();
      }
      this.mode_ = 'paintOn';
      this.startPts_ = packet.pts;

    // Append special characters to caption text
    } else if (this.isSpecialCharacter(char0, char1)) {
      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++;

    // Append extended characters to caption text
    } else if (this.isExtCharacter(char0, char1)) {
      // Extended characters always follow their "non-extended" equivalents.
      // IE if a "è" is desired, you'll always receive "eè"; non-compliant
      // decoders are supposed to drop the "è", while compliant decoders
      // backspace the "e" and insert "è".

      // Delete the previous character
      if (this.mode_ === 'popOn') {
        this.nonDisplayed_[this.row_] = this.nonDisplayed_[this.row_].slice(0, -1);
      } else {
        this.displayed_[this.row_] = this.displayed_[this.row_].slice(0, -1);
      }

      // Bitmask char0 so that we can apply character transformations
      // regardless of field and data channel.
      // Then byte-shift to the left and OR with char1 so we can pass the
      // entire character code to `getCharFromCode`.
      char0 = (char0 & 0x03) << 8;
      text = getCharFromCode(char0 | char1);
      this[this.mode_](packet.pts, text);
      this.column_++;

    // Process mid-row codes
    } else if (this.isMidRowCode(char0, char1)) {
      // Attributes are not additive, so clear all formatting
      this.clearFormatting(packet.pts);

      // According to the standard, mid-row codes
      // should be replaced with spaces, so add one now
      this[this.mode_](packet.pts, ' ');
      this.column_++;

      if ((char1 & 0xe) === 0xe) {
        this.addFormatting(packet.pts, ['i']);
      }

      if ((char1 & 0x1) === 0x1) {
        this.addFormatting(packet.pts, ['u']);
      }

    // Detect offset control codes and adjust cursor
    } else if (this.isOffsetControlCode(char0, char1)) {
      // Cursor position is set by indent PAC (see below) in 4-column
      // increments, with an additional offset code of 1-3 to reach any
      // of the 32 columns specified by CEA-608. So all we need to do
      // here is increment the column cursor by the given offset.
      this.column_ += (char1 & 0x03);

    // Detect PACs (Preamble Address Codes)
    } else if (this.isPAC(char0, char1)) {

      // There's no logic for PAC -> row mapping, so we have to just
      // find the row code in an array and use its index :(
      var row = ROWS.indexOf(data & 0x1f20);

      // Configure the caption window if we're in roll-up mode
      if (this.mode_ === 'rollUp') {
        // This implies that the base row is incorrectly set.
        // As per the recommendation in CEA-608(Base Row Implementation),
        // defer to the number of roll-up rows set.
        if (row - this.rollUpRows_ + 1 < 0) {
          row = this.rollUpRows_ - 1;
        }

        this.setRollUp(packet.pts, row);
      }

      if (row !== this.row_) {
        // formatting is only persistent for current row
        this.clearFormatting(packet.pts);
        this.row_ = row;
      }
      // All PACs can apply underline, so detect and apply
      // (All odd-numbered second bytes set underline)
      if ((char1 & 0x1) && (this.formatting_.indexOf('u') === -1)) {
        this.addFormatting(packet.pts, ['u']);
      }

      if ((data & 0x10) === 0x10) {
        // We've got an indent level code. Each successive even number
        // increments the column cursor by 4, so we can get the desired
        // column position by bit-shifting to the right (to get n/2)
        // and multiplying by 4.
        this.column_ = ((data & 0xe) >> 1) * 4;
      }

      if (this.isColorPAC(char1)) {
        // it's a color code, though we only support white, which
        // can be either normal or italicized. white italics can be
        // either 0x4e or 0x6e depending on the row, so we just
        // bitwise-and with 0xe to see if italics should be turned on
        if ((char1 & 0xe) === 0xe) {
          this.addFormatting(packet.pts, ['i']);
        }
      }

    // We have a normal character in char0, and possibly one in char1
    } else if (this.isNormalChar(char0)) {
      if (char1 === 0x00) {
        char1 = null;
      }
      text = getCharFromCode(char0);
      text += getCharFromCode(char1);
      this[this.mode_](packet.pts, text);
      this.column_ += text.length;

    } // finish data processing

  };
};
  2293. Cea608Stream.prototype = new Stream();
  2294. // Trigger a cue point that captures the current state of the
  2295. // display buffer
  2296. Cea608Stream.prototype.flushDisplayed = function(pts) {
  2297. var content = this.displayed_
  2298. // remove spaces from the start and end of the string
  2299. .map(function(row) {
  2300. try {
  2301. return row.trim();
  2302. } catch (e) {
  2303. // Ordinarily, this shouldn't happen. However, caption
  2304. // parsing errors should not throw exceptions and
  2305. // break playback.
  2306. // eslint-disable-next-line no-console
  2307. console.error('Skipping malformed caption.');
  2308. return '';
  2309. }
  2310. })
  2311. // combine all text rows to display in one cue
  2312. .join('\n')
  2313. // and remove blank rows from the start and end, but not the middle
  2314. .replace(/^\n+|\n+$/g, '');
  2315.  
  2316. if (content.length) {
  2317. this.trigger('data', {
  2318. startPts: this.startPts_,
  2319. endPts: pts,
  2320. text: content,
  2321. stream: this.name_
  2322. });
  2323. }
  2324. };
  2325.  
/**
 * Zero out the data, used for startup and on seek
 */
Cea608Stream.prototype.reset = function() {
  // Decoding mode: 'popOn', 'rollUp', or 'paintOn'
  this.mode_ = 'popOn';
  // When in roll-up mode, the index of the last row that will
  // actually display captions. If a caption is shifted to a row
  // with a lower index than this, it is cleared from the display
  // buffer
  this.topRow_ = 0;
  // PTS at which the current caption began accumulating
  this.startPts_ = 0;
  // on-screen and off-screen (composition) row buffers
  this.displayed_ = createDisplayBuffer();
  this.nonDisplayed_ = createDisplayBuffer();
  // last control code seen, used to drop the spec-mandated duplicate
  this.lastControlCode_ = null;

  // Track row and column for proper line-breaking and spacing
  this.column_ = 0;
  this.row_ = BOTTOM_ROW;
  this.rollUpRows_ = 2;

  // This variable holds currently-applied formatting tags (e.g. 'i', 'u')
  this.formatting_ = [];
};
  2349.  
/**
 * Sets up control code and related constants for this instance
 */
Cea608Stream.prototype.setConstants = function() {
  // The following attributes have these uses:
  //  ext_ :    char0 for mid-row codes, and the base for extended
  //            chars (ext_+0, ext_+1, and ext_+2 are char0s for
  //            extended codes)
  //  control_: char0 for control codes, except byte-shifted to the
  //            left so that we can do this.control_ | CONTROL_CODE
  //  offset_:  char0 for tab offset codes
  //
  // It's also worth noting that control codes, and _only_ control codes,
  // differ between field 1 and field2. Field 2 control codes are always
  // their field 1 value plus 1. That's why there's the "| field" on the
  // control value.
  if (this.dataChannel_ === 0) {
    this.BASE_ = 0x10;
    this.EXT_ = 0x11;
    this.CONTROL_ = (0x14 | this.field_) << 8;
    this.OFFSET_ = 0x17;
  } else if (this.dataChannel_ === 1) {
    this.BASE_ = 0x18;
    this.EXT_ = 0x19;
    this.CONTROL_ = (0x1c | this.field_) << 8;
    this.OFFSET_ = 0x1f;
  }

  // Constants for the LSByte command codes recognized by Cea608Stream. This
  // list is not exhaustive. For a more comprehensive listing and semantics see
  // http://www.gpo.gov/fdsys/pkg/CFR-2010-title47-vol1/pdf/CFR-2010-title47-vol1-sec15-119.pdf
  // Padding
  this.PADDING_ = 0x0000;
  // Pop-on Mode
  this.RESUME_CAPTION_LOADING_ = this.CONTROL_ | 0x20;
  this.END_OF_CAPTION_ = this.CONTROL_ | 0x2f;
  // Roll-up Mode
  this.ROLL_UP_2_ROWS_ = this.CONTROL_ | 0x25;
  this.ROLL_UP_3_ROWS_ = this.CONTROL_ | 0x26;
  this.ROLL_UP_4_ROWS_ = this.CONTROL_ | 0x27;
  this.CARRIAGE_RETURN_ = this.CONTROL_ | 0x2d;
  // paint-on mode
  this.RESUME_DIRECT_CAPTIONING_ = this.CONTROL_ | 0x29;
  // Erasure
  this.BACKSPACE_ = this.CONTROL_ | 0x21;
  this.ERASE_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2c;
  this.ERASE_NON_DISPLAYED_MEMORY_ = this.CONTROL_ | 0x2e;
};
  2398.  
  2399. /**
  2400. * Detects if the 2-byte packet data is a special character
  2401. *
  2402. * Special characters have a second byte in the range 0x30 to 0x3f,
  2403. * with the first byte being 0x11 (for data channel 1) or 0x19 (for
  2404. * data channel 2).
  2405. *
  2406. * @param {Integer} char0 The first byte
  2407. * @param {Integer} char1 The second byte
  2408. * @return {Boolean} Whether the 2 bytes are an special character
  2409. */
  2410. Cea608Stream.prototype.isSpecialCharacter = function(char0, char1) {
  2411. return (char0 === this.EXT_ && char1 >= 0x30 && char1 <= 0x3f);
  2412. };
  2413.  
  2414. /**
  2415. * Detects if the 2-byte packet data is an extended character
  2416. *
  2417. * Extended characters have a second byte in the range 0x20 to 0x3f,
  2418. * with the first byte being 0x12 or 0x13 (for data channel 1) or
  2419. * 0x1a or 0x1b (for data channel 2).
  2420. *
  2421. * @param {Integer} char0 The first byte
  2422. * @param {Integer} char1 The second byte
  2423. * @return {Boolean} Whether the 2 bytes are an extended character
  2424. */
  2425. Cea608Stream.prototype.isExtCharacter = function(char0, char1) {
  2426. return ((char0 === (this.EXT_ + 1) || char0 === (this.EXT_ + 2)) &&
  2427. (char1 >= 0x20 && char1 <= 0x3f));
  2428. };
  2429.  
  2430. /**
  2431. * Detects if the 2-byte packet is a mid-row code
  2432. *
  2433. * Mid-row codes have a second byte in the range 0x20 to 0x2f, with
  2434. * the first byte being 0x11 (for data channel 1) or 0x19 (for data
  2435. * channel 2).
  2436. *
  2437. * @param {Integer} char0 The first byte
  2438. * @param {Integer} char1 The second byte
  2439. * @return {Boolean} Whether the 2 bytes are a mid-row code
  2440. */
  2441. Cea608Stream.prototype.isMidRowCode = function(char0, char1) {
  2442. return (char0 === this.EXT_ && (char1 >= 0x20 && char1 <= 0x2f));
  2443. };
  2444.  
  2445. /**
  2446. * Detects if the 2-byte packet is an offset control code
  2447. *
  2448. * Offset control codes have a second byte in the range 0x21 to 0x23,
  2449. * with the first byte being 0x17 (for data channel 1) or 0x1f (for
  2450. * data channel 2).
  2451. *
  2452. * @param {Integer} char0 The first byte
  2453. * @param {Integer} char1 The second byte
  2454. * @return {Boolean} Whether the 2 bytes are an offset control code
  2455. */
  2456. Cea608Stream.prototype.isOffsetControlCode = function(char0, char1) {
  2457. return (char0 === this.OFFSET_ && (char1 >= 0x21 && char1 <= 0x23));
  2458. };
  2459.  
  2460. /**
  2461. * Detects if the 2-byte packet is a Preamble Address Code
  2462. *
  2463. * PACs have a first byte in the range 0x10 to 0x17 (for data channel 1)
  2464. * or 0x18 to 0x1f (for data channel 2), with the second byte in the
  2465. * range 0x40 to 0x7f.
  2466. *
  2467. * @param {Integer} char0 The first byte
  2468. * @param {Integer} char1 The second byte
  2469. * @return {Boolean} Whether the 2 bytes are a PAC
  2470. */
  2471. Cea608Stream.prototype.isPAC = function(char0, char1) {
  2472. return (char0 >= this.BASE_ && char0 < (this.BASE_ + 8) &&
  2473. (char1 >= 0x40 && char1 <= 0x7f));
  2474. };
  2475.  
  2476. /**
  2477. * Detects if a packet's second byte is in the range of a PAC color code
  2478. *
  2479. * PAC color codes have the second byte be in the range 0x40 to 0x4f, or
  2480. * 0x60 to 0x6f.
  2481. *
  2482. * @param {Integer} char1 The second byte
  2483. * @return {Boolean} Whether the byte is a color PAC
  2484. */
  2485. Cea608Stream.prototype.isColorPAC = function(char1) {
  2486. return ((char1 >= 0x40 && char1 <= 0x4f) || (char1 >= 0x60 && char1 <= 0x7f));
  2487. };
  2488.  
  2489. /**
  2490. * Detects if a single byte is in the range of a normal character
  2491. *
  2492. * Normal text bytes are in the range 0x20 to 0x7f.
  2493. *
  2494. * @param {Integer} char The byte
  2495. * @return {Boolean} Whether the byte is a normal character
  2496. */
  2497. Cea608Stream.prototype.isNormalChar = function(char) {
  2498. return (char >= 0x20 && char <= 0x7f);
  2499. };
  2500.  
/**
 * Configures roll-up
 *
 * @param {Integer} pts Current PTS
 * @param {Integer} newBaseRow Used by PACs to slide the current window to
 *   a new position
 */
Cea608Stream.prototype.setRollUp = function(pts, newBaseRow) {
  // Reset the base row to the bottom row when switching modes
  if (this.mode_ !== 'rollUp') {
    this.row_ = BOTTOM_ROW;
    this.mode_ = 'rollUp';
    // Spec says to wipe memories when switching to roll-up
    this.flushDisplayed(pts);
    this.nonDisplayed_ = createDisplayBuffer();
    this.displayed_ = createDisplayBuffer();
  }

  if (newBaseRow !== undefined && newBaseRow !== this.row_) {
    // move currently displayed captions (up or down) to the new base row
    for (var i = 0; i < this.rollUpRows_; i++) {
      this.displayed_[newBaseRow - i] = this.displayed_[this.row_ - i];
      this.displayed_[this.row_ - i] = '';
    }
  }

  if (newBaseRow === undefined) {
    newBaseRow = this.row_;
  }

  // topRow_ marks the highest row the roll-up window may occupy
  this.topRow_ = newBaseRow - this.rollUpRows_ + 1;
};
  2533.  
  2534. // Adds the opening HTML tag for the passed character to the caption text,
  2535. // and keeps track of it for later closing
  2536. Cea608Stream.prototype.addFormatting = function(pts, format) {
  2537. this.formatting_ = this.formatting_.concat(format);
  2538. var text = format.reduce(function(text, format) {
  2539. return text + '<' + format + '>';
  2540. }, '');
  2541. this[this.mode_](pts, text);
  2542. };
  2543.  
  2544. // Adds HTML closing tags for current formatting to caption text and
  2545. // clears remembered formatting
  2546. Cea608Stream.prototype.clearFormatting = function(pts) {
  2547. if (!this.formatting_.length) {
  2548. return;
  2549. }
  2550. var text = this.formatting_.reverse().reduce(function(text, format) {
  2551. return text + '</' + format + '>';
  2552. }, '');
  2553. this.formatting_ = [];
  2554. this[this.mode_](pts, text);
  2555. };
  2556.  
  2557. // Mode Implementations
  2558. Cea608Stream.prototype.popOn = function(pts, text) {
  2559. var baseRow = this.nonDisplayed_[this.row_];
  2560.  
  2561. // buffer characters
  2562. baseRow += text;
  2563. this.nonDisplayed_[this.row_] = baseRow;
  2564. };
  2565.  
  2566. Cea608Stream.prototype.rollUp = function(pts, text) {
  2567. var baseRow = this.displayed_[this.row_];
  2568.  
  2569. baseRow += text;
  2570. this.displayed_[this.row_] = baseRow;
  2571.  
  2572. };
  2573.  
  2574. Cea608Stream.prototype.shiftRowsUp_ = function() {
  2575. var i;
  2576. // clear out inactive rows
  2577. for (i = 0; i < this.topRow_; i++) {
  2578. this.displayed_[i] = '';
  2579. }
  2580. for (i = this.row_ + 1; i < BOTTOM_ROW + 1; i++) {
  2581. this.displayed_[i] = '';
  2582. }
  2583. // shift displayed rows up
  2584. for (i = this.topRow_; i < this.row_; i++) {
  2585. this.displayed_[i] = this.displayed_[i + 1];
  2586. }
  2587. // clear out the bottom row
  2588. this.displayed_[this.row_] = '';
  2589. };
  2590.  
  2591. Cea608Stream.prototype.paintOn = function(pts, text) {
  2592. var baseRow = this.displayed_[this.row_];
  2593.  
  2594. baseRow += text;
  2595. this.displayed_[this.row_] = baseRow;
  2596. };
  2597.  
// exports: the top-level caption demuxer and the per-channel CEA-608 decoder
module.exports = {
  CaptionStream: CaptionStream,
  Cea608Stream: Cea608Stream
};
  2603.  
  2604. },{"14":14,"17":17}],10:[function(require,module,exports){
  2605. /**
  2606. * mux.js
  2607. *
  2608. * Copyright (c) Brightcove
  2609. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  2610. *
  2611. * A stream-based mp2t to mp4 converter. This utility can be used to
  2612. * deliver mp4s to a SourceBuffer on platforms that support native
  2613. * Media Source Extensions.
  2614. */
  2615. 'use strict';
  2616. var Stream = require(17),
  2617. CaptionStream = require(9),
  2618. StreamTypes = require(12),
  2619. TimestampRolloverStream = require(13).TimestampRolloverStream;
  2620.  
  2621. var m2tsStreamTypes = require(12);
  2622.  
// object types (assigned below)
var TransportPacketStream, TransportParseStream, ElementaryStream;

// constants
var
  MP2T_PACKET_LENGTH = 188, // bytes per MPEG-2 transport stream packet
  SYNC_BYTE = 0x47;         // first byte of every TS packet
  2630.  
/**
 * Splits an incoming stream of binary data into MPEG-2 Transport
 * Stream packets.
 *
 * Maintains a 188-byte carry-over buffer so packets split across
 * push() calls are reassembled. Emits 'data' with one whole packet.
 */
TransportPacketStream = function() {
  var
    // holds a partial packet left over from the previous push()
    buffer = new Uint8Array(MP2T_PACKET_LENGTH),
    // number of valid bytes currently in `buffer`
    bytesInBuffer = 0;

  TransportPacketStream.prototype.init.call(this);

  // Deliver new bytes to the stream.

  /**
   * Split a stream of data into M2TS packets
   **/
  this.push = function(bytes) {
    var
      startIndex = 0,
      endIndex = MP2T_PACKET_LENGTH,
      everything;

    // If there are bytes remaining from the last segment, prepend them to the
    // bytes that were pushed in
    if (bytesInBuffer) {
      everything = new Uint8Array(bytes.byteLength + bytesInBuffer);
      everything.set(buffer.subarray(0, bytesInBuffer));
      everything.set(bytes, bytesInBuffer);
      bytesInBuffer = 0;
    } else {
      everything = bytes;
    }

    // While we have enough data for a packet
    while (endIndex < everything.byteLength) {
      // Look for a pair of start and end sync bytes in the data..
      if (everything[startIndex] === SYNC_BYTE && everything[endIndex] === SYNC_BYTE) {
        // We found a packet so emit it and jump one whole packet forward in
        // the stream
        this.trigger('data', everything.subarray(startIndex, endIndex));
        startIndex += MP2T_PACKET_LENGTH;
        endIndex += MP2T_PACKET_LENGTH;
        continue;
      }
      // If we get here, we have somehow become de-synchronized and we need
      // to step forward one byte at a time until we find a pair of sync
      // bytes that denote a packet
      startIndex++;
      endIndex++;
    }

    // If there was some data left over at the end of the segment that
    // couldn't possibly be a whole packet, keep it because it might be the
    // start of a packet that continues in the next segment
    if (startIndex < everything.byteLength) {
      buffer.set(everything.subarray(startIndex), 0);
      bytesInBuffer = everything.byteLength - startIndex;
    }
  };

  /**
   * Passes identified M2TS packets to the TransportParseStream to be parsed
   **/
  this.flush = function() {
    // If the buffer contains a whole packet when we are being flushed, emit
    // it and empty the buffer. Otherwise hold onto the data because it may
    // be important for decoding the next segment
    if (bytesInBuffer === MP2T_PACKET_LENGTH && buffer[0] === SYNC_BYTE) {
      this.trigger('data', buffer);
      bytesInBuffer = 0;
    }
    this.trigger('done');
  };

  // Flush any buffered packet and signal the end of the timeline.
  this.endTimeline = function() {
    this.flush();
    this.trigger('endedtimeline');
  };

  // Discard any carried-over bytes (used on seek/discontinuity).
  this.reset = function() {
    bytesInBuffer = 0;
    this.trigger('reset');
  };
};
TransportPacketStream.prototype = new Stream();
  2716.  
  2717. /**
  2718. * Accepts an MP2T TransportPacketStream and emits data events with parsed
  2719. * forms of the individual transport stream packets.
  2720. */
/**
 * Accepts whole 188-byte MP2T packets (from TransportPacketStream) and
 * emits 'data' events with parsed forms of the individual transport
 * stream packets. PAT/PMT packets are parsed to build a program map
 * table; PES packets are annotated with the stream type found there.
 */
TransportParseStream = function() {
  var parsePsi, parsePat, parsePmt, self;
  TransportParseStream.prototype.init.call(this);
  self = this;

  // PES packets seen before any PMT arrived; they cannot be typed
  // until the program map table is known, so they are queued here
  this.packetsWaitingForPmt = [];
  this.programMapTable = undefined;

  // Dispatch a program-specific information (PSI) payload to the PAT
  // or PMT parser based on the type already recorded on `psi`.
  parsePsi = function(payload, psi) {
    var offset = 0;

    // PSI packets may be split into multiple sections and those
    // sections may be split into multiple packets. If a PSI
    // section starts in this packet, the payload_unit_start_indicator
    // will be true and the first byte of the payload will indicate
    // the offset from the current position to the start of the
    // section.
    if (psi.payloadUnitStartIndicator) {
      offset += payload[offset] + 1;
    }

    if (psi.type === 'pat') {
      parsePat(payload.subarray(offset), psi);
    } else {
      parsePmt(payload.subarray(offset), psi);
    }
  };

  // Parse a Program Association Table to find the PID that carries
  // the Program Map Table; only the first PMT entry is used.
  parsePat = function(payload, pat) {
    pat.section_number = payload[7]; // eslint-disable-line camelcase
    pat.last_section_number = payload[8]; // eslint-disable-line camelcase

    // skip the PSI header and parse the first PMT entry
    self.pmtPid = (payload[10] & 0x1F) << 8 | payload[11];
    pat.pmtPid = self.pmtPid;
  };

  /**
   * Parse out the relevant fields of a Program Map Table (PMT).
   * @param payload {Uint8Array} the PMT-specific portion of an MP2T
   * packet. The first byte in this array should be the table_id
   * field.
   * @param pmt {object} the object that should be decorated with
   * fields parsed from the PMT.
   */
  parsePmt = function(payload, pmt) {
    var sectionLength, tableEnd, programInfoLength, offset;

    // PMTs can be sent ahead of the time when they should actually
    // take effect. We don't believe this should ever be the case
    // for HLS but we'll ignore "forward" PMT declarations if we see
    // them. Future PMT declarations have the current_next_indicator
    // set to zero.
    if (!(payload[5] & 0x01)) {
      return;
    }

    // overwrite any existing program map table
    self.programMapTable = {
      video: null,
      audio: null,
      'timed-metadata': {}
    };

    // the mapping table ends at the end of the current section
    sectionLength = (payload[1] & 0x0f) << 8 | payload[2];
    // section data starts after the 3-byte PSI header; the final 4
    // bytes (CRC_32) are not part of the mapping table, hence the -4
    tableEnd = 3 + sectionLength - 4;

    // to determine where the table is, we have to figure out how
    // long the program info descriptors are
    programInfoLength = (payload[10] & 0x0f) << 8 | payload[11];

    // advance the offset to the first entry in the mapping table
    offset = 12 + programInfoLength;
    while (offset < tableEnd) {
      var streamType = payload[offset];
      var pid = (payload[offset + 1] & 0x1F) << 8 | payload[offset + 2];

      // only map a single elementary_pid for audio and video stream types
      // TODO: should this be done for metadata too? for now maintain behavior of
      // multiple metadata streams
      if (streamType === StreamTypes.H264_STREAM_TYPE &&
          self.programMapTable.video === null) {
        self.programMapTable.video = pid;
      } else if (streamType === StreamTypes.ADTS_STREAM_TYPE &&
                 self.programMapTable.audio === null) {
        self.programMapTable.audio = pid;
      } else if (streamType === StreamTypes.METADATA_STREAM_TYPE) {
        // map pid to stream type for metadata streams
        self.programMapTable['timed-metadata'][pid] = streamType;
      }

      // move to the next table entry
      // skip past the elementary stream descriptors, if present
      offset += ((payload[offset + 3] & 0x0F) << 8 | payload[offset + 4]) + 5;
    }

    // record the map on the packet as well
    pmt.programMapTable = self.programMapTable;
  };

  /**
   * Deliver a new MP2T packet to the next stream in the pipeline.
   */
  this.push = function(packet) {
    var
      result = {},
      offset = 4;

    result.payloadUnitStartIndicator = !!(packet[1] & 0x40);

    // pid is a 13-bit field starting at the last bit of packet[1]
    result.pid = packet[1] & 0x1f;
    result.pid <<= 8;
    result.pid |= packet[2];

    // if an adaption field is present, its length is specified by the
    // fifth byte of the TS packet header. The adaptation field is
    // used to add stuffing to PES packets that don't fill a complete
    // TS packet, and to specify some forms of timing and control data
    // that we do not currently use.
    if (((packet[3] & 0x30) >>> 4) > 0x01) {
      offset += packet[offset] + 1;
    }

    // parse the rest of the packet based on the type
    if (result.pid === 0) {
      // PID 0 always carries the Program Association Table
      result.type = 'pat';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);
    } else if (result.pid === this.pmtPid) {
      result.type = 'pmt';
      parsePsi(packet.subarray(offset), result);
      this.trigger('data', result);

      // if there are any packets waiting for a PMT to be found, process them now
      while (this.packetsWaitingForPmt.length) {
        this.processPes_.apply(this, this.packetsWaitingForPmt.shift());
      }
    } else if (this.programMapTable === undefined) {
      // When we have not seen a PMT yet, defer further processing of
      // PES packets until one has been parsed
      this.packetsWaitingForPmt.push([packet, offset, result]);
    } else {
      this.processPes_(packet, offset, result);
    }
  };

  // Annotate a PES packet with the stream type recorded for its PID in
  // the program map table, then emit it downstream.
  this.processPes_ = function(packet, offset, result) {
    // set the appropriate stream type
    if (result.pid === this.programMapTable.video) {
      result.streamType = StreamTypes.H264_STREAM_TYPE;
    } else if (result.pid === this.programMapTable.audio) {
      result.streamType = StreamTypes.ADTS_STREAM_TYPE;
    } else {
      // if not video or audio, it is timed-metadata or unknown
      // if unknown, streamType will be undefined
      result.streamType = this.programMapTable['timed-metadata'][result.pid];
    }

    result.type = 'pes';
    result.data = packet.subarray(offset);
    this.trigger('data', result);
  };
};
TransportParseStream.prototype = new Stream();
// stream_type values duplicated here for consumer convenience
TransportParseStream.STREAM_TYPES = {
  h264: 0x1b,
  adts: 0x0f
};
  2891.  
  2892. /**
  2893. * Reconsistutes program elementary stream (PES) packets from parsed
  2894. * transport stream packets. That is, if you pipe an
  2895. * mp2t.TransportParseStream into a mp2t.ElementaryStream, the output
  2896. * events will be events which capture the bytes for individual PES
  2897. * packets plus relevant metadata that has been extracted from the
  2898. * container.
  2899. */
/**
 * Buffers PES fragments per elementary stream (video, audio,
 * timed-metadata), reassembles complete PES packets, parses their
 * headers (pts/dts, alignment), and emits them as 'data' events.
 * PMT packets are translated into a 'metadata' event describing the
 * available tracks.
 */
ElementaryStream = function() {
  var
    self = this,
    // PES packet fragments
    video = {
      data: [],
      size: 0
    },
    audio = {
      data: [],
      size: 0
    },
    timedMetadata = {
      data: [],
      size: 0
    },
    // most recently seen program map table; used to build track info
    programMapTable,
    // Parse PES header fields out of an assembled packet and attach
    // them to `pes` (packetLength, dataAlignmentIndicator, pts/dts,
    // and the payload in pes.data).
    parsePes = function(payload, pes) {
      var ptsDtsFlags;

      // get the packet length, this will be 0 for video
      pes.packetLength = 6 + ((payload[4] << 8) | payload[5]);

      // find out if this packets starts a new keyframe
      pes.dataAlignmentIndicator = (payload[6] & 0x04) !== 0;
      // PES packets may be annotated with a PTS value, or a PTS value
      // and a DTS value. Determine what combination of values is
      // available to work with.
      ptsDtsFlags = payload[7];

      // PTS and DTS are normally stored as a 33-bit number. Javascript
      // performs all bitwise operations on 32-bit integers but javascript
      // supports a much greater range (52-bits) of integer using standard
      // mathematical operations.
      // We construct a 31-bit value using bitwise operators over the 31
      // most significant bits and then multiply by 4 (equal to a left-shift
      // of 2) before we add the final 2 least significant bits of the
      // timestamp (equal to an OR.)
      if (ptsDtsFlags & 0xC0) {
        // the PTS and DTS are not written out directly. For information
        // on how they are encoded, see
        // http://dvd.sourceforge.net/dvdinfo/pes-hdr.html
        pes.pts = (payload[9] & 0x0E) << 27 |
          (payload[10] & 0xFF) << 20 |
          (payload[11] & 0xFE) << 12 |
          (payload[12] & 0xFF) << 5 |
          (payload[13] & 0xFE) >>> 3;
        pes.pts *= 4; // Left shift by 2
        pes.pts += (payload[13] & 0x06) >>> 1; // OR by the two LSBs
        pes.dts = pes.pts;
        if (ptsDtsFlags & 0x40) {
          pes.dts = (payload[14] & 0x0E) << 27 |
            (payload[15] & 0xFF) << 20 |
            (payload[16] & 0xFE) << 12 |
            (payload[17] & 0xFF) << 5 |
            (payload[18] & 0xFE) >>> 3;
          pes.dts *= 4; // Left shift by 2
          pes.dts += (payload[18] & 0x06) >>> 1; // OR by the two LSBs
        }
      }
      // the data section starts immediately after the PES header.
      // pes_header_data_length specifies the number of header bytes
      // that follow the last byte of the field.
      pes.data = payload.subarray(9 + payload[8]);
    },
    /**
     * Pass completely parsed PES packets to the next stream in the pipeline
     **/
    flushStream = function(stream, type, forceFlush) {
      var
        packetData = new Uint8Array(stream.size),
        event = {
          type: type
        },
        i = 0,
        offset = 0,
        packetFlushable = false,
        fragment;

      // do nothing if there is not enough buffered data for a complete
      // PES header
      if (!stream.data.length || stream.size < 9) {
        return;
      }
      event.trackId = stream.data[0].pid;

      // reassemble the packet
      for (i = 0; i < stream.data.length; i++) {
        fragment = stream.data[i];

        packetData.set(fragment.data, offset);
        offset += fragment.data.byteLength;
      }

      // parse assembled packet's PES header
      parsePes(packetData, event);

      // non-video PES packets MUST have a non-zero PES_packet_length
      // check that there is enough stream data to fill the packet
      packetFlushable = type === 'video' || event.packetLength <= stream.size;

      // flush pending packets if the conditions are right
      if (forceFlush || packetFlushable) {
        stream.size = 0;
        stream.data.length = 0;
      }

      // only emit packets that are complete. this is to avoid assembling
      // incomplete PES packets due to poor segmentation
      if (packetFlushable) {
        self.trigger('data', event);
      }
    };

  ElementaryStream.prototype.init.call(this);

  /**
   * Identifies M2TS packet types and parses PES packets using metadata
   * parsed from the PMT
   **/
  this.push = function(data) {
    // dispatch table keyed by packet type ('pat' | 'pes' | 'pmt');
    // an unrecognized data.type would throw here
    ({
      pat: function() {
        // we have to wait for the PMT to arrive as well before we
        // have any meaningful metadata
      },
      pes: function() {
        var stream, streamType;

        switch (data.streamType) {
          case StreamTypes.H264_STREAM_TYPE:
          // NOTE(review): m2tsStreamTypes is defined elsewhere in this
          // file; presumably it aliases the same H264 constant — confirm
          case m2tsStreamTypes.H264_STREAM_TYPE:
            stream = video;
            streamType = 'video';
            break;
          case StreamTypes.ADTS_STREAM_TYPE:
            stream = audio;
            streamType = 'audio';
            break;
          case StreamTypes.METADATA_STREAM_TYPE:
            stream = timedMetadata;
            streamType = 'timed-metadata';
            break;
          default:
            // ignore unknown stream types
            return;
        }

        // if a new packet is starting, we can flush the completed
        // packet
        if (data.payloadUnitStartIndicator) {
          flushStream(stream, streamType, true);
        }

        // buffer this fragment until we are sure we've received the
        // complete payload
        stream.data.push(data);
        stream.size += data.data.byteLength;
      },
      pmt: function() {
        var
          event = {
            type: 'metadata',
            tracks: []
          };

        programMapTable = data.programMapTable;

        // translate audio and video streams to tracks
        if (programMapTable.video !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.video,
            codec: 'avc',
            type: 'video'
          });
        }
        if (programMapTable.audio !== null) {
          event.tracks.push({
            timelineStartInfo: {
              baseMediaDecodeTime: 0
            },
            id: +programMapTable.audio,
            codec: 'adts',
            type: 'audio'
          });
        }

        self.trigger('data', event);
      }
    })[data.type]();
  };

  // Drop any buffered video/audio fragments.
  // NOTE(review): timedMetadata buffers are not cleared here — confirm
  // that is intentional.
  this.reset = function() {
    video.size = 0;
    video.data.length = 0;
    audio.size = 0;
    audio.data.length = 0;
    this.trigger('reset');
  };

  /**
   * Flush any remaining input. Video PES packets may be of variable
   * length. Normally, the start of a new video packet can trigger the
   * finalization of the previous packet. That is not possible if no
   * more video is forthcoming, however. In that case, some other
   * mechanism (like the end of the file) has to be employed. When it is
   * clear that no additional data is forthcoming, calling this method
   * will flush the buffered packets.
   */
  this.flushStreams_ = function() {
    // !!THIS ORDER IS IMPORTANT!!
    // video first then audio
    flushStream(video, 'video');
    flushStream(audio, 'audio');
    flushStream(timedMetadata, 'timed-metadata');
  };

  this.flush = function() {
    this.flushStreams_();
    this.trigger('done');
  };
};
ElementaryStream.prototype = new Stream();
  3126.  
// public interface of the m2ts module: the transport/elementary stream
// pipeline pieces plus caption and metadata helpers
var m2ts = {
  PAT_PID: 0x0000,
  MP2T_PACKET_LENGTH: MP2T_PACKET_LENGTH,
  TransportPacketStream: TransportPacketStream,
  TransportParseStream: TransportParseStream,
  ElementaryStream: ElementaryStream,
  TimestampRolloverStream: TimestampRolloverStream,
  CaptionStream: CaptionStream.CaptionStream,
  Cea608Stream: CaptionStream.Cea608Stream,
  MetadataStream: require(11)
};

// re-export every stream type constant (H264_STREAM_TYPE, etc.) on the
// module object as well
for (var type in StreamTypes) {
  if (StreamTypes.hasOwnProperty(type)) {
    m2ts[type] = StreamTypes[type];
  }
}

module.exports = m2ts;
  3146.  
  3147. },{"11":11,"12":12,"13":13,"17":17,"9":9}],11:[function(require,module,exports){
  3148. /**
  3149. * mux.js
  3150. *
  3151. * Copyright (c) Brightcove
  3152. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  3153. *
  3154. * Accepts program elementary stream (PES) data events and parses out
  3155. * ID3 metadata from them, if present.
  3156. * @see http://id3.org/id3v2.3.0
  3157. */
  3158. 'use strict';
var
  Stream = require(17),
  StreamTypes = require(12),
  // return a percent-encoded representation of the specified byte range
  // @see http://en.wikipedia.org/wiki/Percent-encoding
  percentEncode = function(bytes, start, end) {
    var i, result = '';
    for (i = start; i < end; i++) {
      result += '%' + ('00' + bytes[i].toString(16)).slice(-2);
    }
    return result;
  },
  // return the string representation of the specified byte range,
  // interpreted as UTf-8.
  parseUtf8 = function(bytes, start, end) {
    return decodeURIComponent(percentEncode(bytes, start, end));
  },
  // return the string representation of the specified byte range,
  // interpreted as ISO-8859-1.
  parseIso88591 = function(bytes, start, end) {
    return unescape(percentEncode(bytes, start, end)); // jshint ignore:line
  },
  // decode a 28-bit ID3 "syncsafe" integer: four bytes concatenated
  // 7 bits at a time (the high bit of each byte is dropped)
  parseSyncSafeInteger = function(data) {
    return (data[0] << 21) |
      (data[1] << 14) |
      (data[2] << 7) |
      (data[3]);
  },
  // per-frame-id parsers that decorate an ID3 frame object with
  // decoded fields
  tagParsers = {
    // user-defined text information frame
    TXXX: function(tag) {
      var i;
      if (tag.data[0] !== 3) {
        // ignore frames with unrecognized character encodings
        // (3 is the ID3v2 encoding byte for UTF-8)
        return;
      }

      for (i = 1; i < tag.data.length; i++) {
        if (tag.data[i] === 0) {
          // parse the text fields
          tag.description = parseUtf8(tag.data, 1, i);
          // do not include the null terminator in the tag value
          tag.value = parseUtf8(tag.data, i + 1, tag.data.length).replace(/\0*$/, '');
          break;
        }
      }
      // NOTE(review): if no null terminator was found above, tag.value
      // is undefined and this clobbers tag.data — confirm intended
      tag.data = tag.value;
    },
    // user-defined URL link frame
    WXXX: function(tag) {
      var i;
      if (tag.data[0] !== 3) {
        // ignore frames with unrecognized character encodings
        return;
      }

      for (i = 1; i < tag.data.length; i++) {
        if (tag.data[i] === 0) {
          // parse the description and URL fields
          tag.description = parseUtf8(tag.data, 1, i);
          tag.url = parseUtf8(tag.data, i + 1, tag.data.length);
          break;
        }
      }
    },
    // private frame: an owner identifier string, a null terminator,
    // then opaque binary data
    PRIV: function(tag) {
      var i;

      for (i = 0; i < tag.data.length; i++) {
        if (tag.data[i] === 0) {
          // parse the description and URL fields
          tag.owner = parseIso88591(tag.data, 0, i);
          break;
        }
      }
      // `i` is left at the null terminator (or at data.length if none
      // was found), so the private payload starts one byte later
      tag.privateData = tag.data.subarray(i + 1);
      tag.data = tag.privateData;
    }
  },
  MetadataStream;
  3237.  
/**
 * Buffers timed-metadata PES fragments until a complete ID3 tag is
 * available, then parses its frames and emits the tag as a 'data'
 * event. A special Apple PRIV frame additionally produces a
 * 'timestamp' event used to anchor raw AAC timing.
 * @param options {object} optional; supports `debug` (log skipped
 * packets) and `descriptor` (MP2T program-level descriptor bytes used
 * to build the in-band metadata dispatch type).
 */
MetadataStream = function(options) {
  var
    settings = {
      debug: !!(options && options.debug),

      // the bytes of the program-level descriptor field in MP2T
      // see ISO/IEC 13818-1:2013 (E), section 2.6 "Program and
      // program element descriptors"
      descriptor: options && options.descriptor
    },
    // the total size in bytes of the ID3 tag being parsed
    tagSize = 0,
    // tag data that is not complete enough to be parsed
    buffer = [],
    // the total number of bytes currently in the buffer
    bufferSize = 0,
    i;

  MetadataStream.prototype.init.call(this);

  // calculate the text track in-band metadata track dispatch type
  // https://html.spec.whatwg.org/multipage/embedded-content.html#steps-to-expose-a-media-resource-specific-text-track
  this.dispatchType = StreamTypes.METADATA_STREAM_TYPE.toString(16);
  if (settings.descriptor) {
    for (i = 0; i < settings.descriptor.length; i++) {
      this.dispatchType += ('00' + settings.descriptor[i].toString(16)).slice(-2);
    }
  }

  // Accept one timed-metadata PES fragment; parses and emits once a
  // whole ID3 tag has been buffered.
  this.push = function(chunk) {
    var tag, frameStart, frameSize, frame, i, frameHeader;
    if (chunk.type !== 'timed-metadata') {
      return;
    }

    // if data_alignment_indicator is set in the PES header,
    // we must have the start of a new ID3 tag. Assume anything
    // remaining in the buffer was malformed and throw it out
    if (chunk.dataAlignmentIndicator) {
      bufferSize = 0;
      buffer.length = 0;
    }

    // ignore events that don't look like ID3 data
    if (buffer.length === 0 &&
        (chunk.data.length < 10 ||
          chunk.data[0] !== 'I'.charCodeAt(0) ||
          chunk.data[1] !== 'D'.charCodeAt(0) ||
          chunk.data[2] !== '3'.charCodeAt(0))) {
      if (settings.debug) {
        // eslint-disable-next-line no-console
        console.log('Skipping unrecognized metadata packet');
      }
      return;
    }

    // add this chunk to the data we've collected so far

    buffer.push(chunk);
    bufferSize += chunk.data.byteLength;

    // grab the size of the entire frame from the ID3 header
    if (buffer.length === 1) {
      // the frame size is transmitted as a 28-bit integer in the
      // last four bytes of the ID3 header.
      // The most significant bit of each byte is dropped and the
      // results concatenated to recover the actual value.
      tagSize = parseSyncSafeInteger(chunk.data.subarray(6, 10));

      // ID3 reports the tag size excluding the header but it's more
      // convenient for our comparisons to include it
      tagSize += 10;
    }

    // if the entire frame has not arrived, wait for more data
    if (bufferSize < tagSize) {
      return;
    }

    // collect the entire frame so it can be parsed
    tag = {
      data: new Uint8Array(tagSize),
      frames: [],
      pts: buffer[0].pts,
      dts: buffer[0].dts
    };
    // copy buffered fragments into the contiguous tag buffer,
    // consuming them from the front of the queue
    for (i = 0; i < tagSize;) {
      tag.data.set(buffer[0].data.subarray(0, tagSize - i), i);
      i += buffer[0].data.byteLength;
      bufferSize -= buffer[0].data.byteLength;
      buffer.shift();
    }

    // find the start of the first frame and the end of the tag
    frameStart = 10;
    // bit 0x40 of byte 5 signals an ID3v2 extended header
    if (tag.data[5] & 0x40) {
      // advance the frame start past the extended header
      frameStart += 4; // header size field
      frameStart += parseSyncSafeInteger(tag.data.subarray(10, 14));

      // clip any padding off the end
      tagSize -= parseSyncSafeInteger(tag.data.subarray(16, 20));
    }

    // parse one or more ID3 frames
    // http://id3.org/id3v2.3.0#ID3v2_frame_overview
    do {
      // determine the number of bytes in this frame
      frameSize = parseSyncSafeInteger(tag.data.subarray(frameStart + 4, frameStart + 8));
      if (frameSize < 1) {
        // eslint-disable-next-line no-console
        return console.log('Malformed ID3 frame encountered. Skipping metadata parsing.');
      }
      // the frame id is the four ASCII characters at the frame start
      frameHeader = String.fromCharCode(tag.data[frameStart],
        tag.data[frameStart + 1],
        tag.data[frameStart + 2],
        tag.data[frameStart + 3]);


      frame = {
        id: frameHeader,
        data: tag.data.subarray(frameStart + 10, frameStart + frameSize + 10)
      };
      frame.key = frame.id;
      if (tagParsers[frame.id]) {
        tagParsers[frame.id](frame);

        // handle the special PRIV frame used to indicate the start
        // time for raw AAC data
        if (frame.owner === 'com.apple.streaming.transportStreamTimestamp') {
          var
            d = frame.data,
            // reconstruct the 33-bit timestamp: the top 31 bits via
            // bitwise ops, then multiply/add for the final 2 bits
            size = ((d[3] & 0x01) << 30) |
              (d[4] << 22) |
              (d[5] << 14) |
              (d[6] << 6) |
              (d[7] >>> 2);

          size *= 4;
          size += d[7] & 0x03;
          frame.timeStamp = size;
          // in raw AAC, all subsequent data will be timestamped based
          // on the value of this frame
          // we couldn't have known the appropriate pts and dts before
          // parsing this ID3 tag so set those values now
          if (tag.pts === undefined && tag.dts === undefined) {
            tag.pts = frame.timeStamp;
            tag.dts = frame.timeStamp;
          }
          this.trigger('timestamp', frame);
        }
      }
      tag.frames.push(frame);

      frameStart += 10; // advance past the frame header
      frameStart += frameSize; // advance past the frame body
    } while (frameStart < tagSize);
    this.trigger('data', tag);
  };
};
MetadataStream.prototype = new Stream();

module.exports = MetadataStream;
  3401.  
  3402. },{"12":12,"17":17}],12:[function(require,module,exports){
  3403. /**
  3404. * mux.js
  3405. *
  3406. * Copyright (c) Brightcove
  3407. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  3408. */
  3409. 'use strict';
  3410.  
// MPEG-2 transport stream stream_type values recognized by this library
module.exports = {
  H264_STREAM_TYPE: 0x1B, // AVC/H.264 video
  ADTS_STREAM_TYPE: 0x0F, // AAC audio in ADTS framing
  METADATA_STREAM_TYPE: 0x15 // PES-carried ID3 timed metadata
};
  3416.  
  3417. },{}],13:[function(require,module,exports){
  3418. /**
  3419. * mux.js
  3420. *
  3421. * Copyright (c) Brightcove
  3422. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  3423. *
  3424. * Accepts program elementary stream (PES) data events and corrects
  3425. * decode and presentation time stamps to account for a rollover
  3426. * of the 33 bit value.
  3427. */
  3428.  
  3429. 'use strict';
  3430.  
var Stream = require(17);

// MP2T timestamps are 33-bit; 2^33 is the value at which they roll over
var MAX_TS = 8589934592;

// 2^32: a jump larger than this is treated as a rollover, not a seek
var RO_THRESH = 4294967296;
  3436.  
  3437. var handleRollover = function(value, reference) {
  3438. var direction = 1;
  3439.  
  3440. if (value > reference) {
  3441. // If the current timestamp value is greater than our reference timestamp and we detect a
  3442. // timestamp rollover, this means the roll over is happening in the opposite direction.
  3443. // Example scenario: Enter a long stream/video just after a rollover occurred. The reference
  3444. // point will be set to a small number, e.g. 1. The user then seeks backwards over the
  3445. // rollover point. In loading this segment, the timestamp values will be very large,
  3446. // e.g. 2^33 - 1. Since this comes before the data we loaded previously, we want to adjust
  3447. // the time stamp to be `value - 2^33`.
  3448. direction = -1;
  3449. }
  3450.  
  3451. // Note: A seek forwards or back that is greater than the RO_THRESH (2^32, ~13 hours) will
  3452. // cause an incorrect adjustment.
  3453. while (Math.abs(reference - value) > RO_THRESH) {
  3454. value += (direction * MAX_TS);
  3455. }
  3456.  
  3457. return value;
  3458. };
  3459.  
  3460. var TimestampRolloverStream = function(type) {
  3461. var lastDTS, referenceDTS;
  3462.  
  3463. TimestampRolloverStream.prototype.init.call(this);
  3464.  
  3465. this.type_ = type;
  3466.  
  3467. this.push = function(data) {
  3468. if (data.type !== this.type_) {
  3469. return;
  3470. }
  3471.  
  3472. if (referenceDTS === undefined) {
  3473. referenceDTS = data.dts;
  3474. }
  3475.  
  3476. data.dts = handleRollover(data.dts, referenceDTS);
  3477. data.pts = handleRollover(data.pts, referenceDTS);
  3478.  
  3479. lastDTS = data.dts;
  3480.  
  3481. this.trigger('data', data);
  3482. };
  3483.  
  3484. this.flush = function() {
  3485. referenceDTS = lastDTS;
  3486. this.trigger('done');
  3487. };
  3488.  
  3489. this.endTimeline = function() {
  3490. this.flush();
  3491. this.trigger('endedtimeline');
  3492. };
  3493.  
  3494. this.discontinuity = function() {
  3495. referenceDTS = void 0;
  3496. lastDTS = void 0;
  3497. };
  3498.  
  3499. this.reset = function() {
  3500. this.discontinuity();
  3501. this.trigger('reset');
  3502. };
  3503. };
  3504.  
  3505. TimestampRolloverStream.prototype = new Stream();
  3506.  
  3507. module.exports = {
  3508. TimestampRolloverStream: TimestampRolloverStream,
  3509. handleRollover: handleRollover
  3510. };
  3511.  
  3512. },{"17":17}],14:[function(require,module,exports){
  3513. /**
  3514. * mux.js
  3515. *
  3516. * Copyright (c) Brightcove
  3517. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  3518. *
  3519. * Reads in-band caption information from a video elementary
  3520. * stream. Captions must follow the CEA-708 standard for injection
  3521. * into an MPEG-2 transport streams.
  3522. * @see https://en.wikipedia.org/wiki/CEA-708
  3523. * @see https://www.gpo.gov/fdsys/pkg/CFR-2007-title47-vol1/pdf/CFR-2007-title47-vol1-sec15-119.pdf
  3524. */
  3525.  
  3526. 'use strict';
  3527.  
  3528. // Supplemental enhancement information (SEI) NAL units have a
  3529. // payload type field to indicate how they are to be
  3530. // interpreted. CEAS-708 caption content is always transmitted with
  3531. // payload type 0x04.
var USER_DATA_REGISTERED_ITU_T_T35 = 4, // SEI payload_type for registered user data
  RBSP_TRAILING_BITS = 128; // 0x80: stop-bit pattern that ends the sei_rbsp
  3534.  
  3535. /**
  3536. * Parse a supplemental enhancement information (SEI) NAL unit.
  3537. * Stops parsing once a message of type ITU T T35 has been found.
  3538. *
  3539. * @param bytes {Uint8Array} the bytes of a SEI NAL unit
  3540. * @return {object} the parsed SEI payload
  3541. * @see Rec. ITU-T H.264, 7.3.2.3.1
  3542. */
  3543. var parseSei = function(bytes) {
  3544. var
  3545. i = 0,
  3546. result = {
  3547. payloadType: -1,
  3548. payloadSize: 0
  3549. },
  3550. payloadType = 0,
  3551. payloadSize = 0;
  3552.  
  3553. // go through the sei_rbsp parsing each each individual sei_message
  3554. while (i < bytes.byteLength) {
  3555. // stop once we have hit the end of the sei_rbsp
  3556. if (bytes[i] === RBSP_TRAILING_BITS) {
  3557. break;
  3558. }
  3559.  
  3560. // Parse payload type
  3561. while (bytes[i] === 0xFF) {
  3562. payloadType += 255;
  3563. i++;
  3564. }
  3565. payloadType += bytes[i++];
  3566.  
  3567. // Parse payload size
  3568. while (bytes[i] === 0xFF) {
  3569. payloadSize += 255;
  3570. i++;
  3571. }
  3572. payloadSize += bytes[i++];
  3573.  
  3574. // this sei_message is a 608/708 caption so save it and break
  3575. // there can only ever be one caption message in a frame's sei
  3576. if (!result.payload && payloadType === USER_DATA_REGISTERED_ITU_T_T35) {
  3577. result.payloadType = payloadType;
  3578. result.payloadSize = payloadSize;
  3579. result.payload = bytes.subarray(i, i + payloadSize);
  3580. break;
  3581. }
  3582.  
  3583. // skip the payload and parse the next message
  3584. i += payloadSize;
  3585. payloadType = 0;
  3586. payloadSize = 0;
  3587. }
  3588.  
  3589. return result;
  3590. };
  3591.  
  3592. // see ANSI/SCTE 128-1 (2013), section 8.1
  3593. var parseUserData = function(sei) {
  3594. // itu_t_t35_contry_code must be 181 (United States) for
  3595. // captions
  3596. if (sei.payload[0] !== 181) {
  3597. return null;
  3598. }
  3599.  
  3600. // itu_t_t35_provider_code should be 49 (ATSC) for captions
  3601. if (((sei.payload[1] << 8) | sei.payload[2]) !== 49) {
  3602. return null;
  3603. }
  3604.  
  3605. // the user_identifier should be "GA94" to indicate ATSC1 data
  3606. if (String.fromCharCode(sei.payload[3],
  3607. sei.payload[4],
  3608. sei.payload[5],
  3609. sei.payload[6]) !== 'GA94') {
  3610. return null;
  3611. }
  3612.  
  3613. // finally, user_data_type_code should be 0x03 for caption data
  3614. if (sei.payload[7] !== 0x03) {
  3615. return null;
  3616. }
  3617.  
  3618. // return the user_data_type_structure and strip the trailing
  3619. // marker bits
  3620. return sei.payload.subarray(8, sei.payload.length - 1);
  3621. };
  3622.  
  3623. // see CEA-708-D, section 4.4
  3624. var parseCaptionPackets = function(pts, userData) {
  3625. var results = [], i, count, offset, data;
  3626.  
  3627. // if this is just filler, return immediately
  3628. if (!(userData[0] & 0x40)) {
  3629. return results;
  3630. }
  3631.  
  3632. // parse out the cc_data_1 and cc_data_2 fields
  3633. count = userData[0] & 0x1f;
  3634. for (i = 0; i < count; i++) {
  3635. offset = i * 3;
  3636. data = {
  3637. type: userData[offset + 2] & 0x03,
  3638. pts: pts
  3639. };
  3640.  
  3641. // capture cc data when cc_valid is 1
  3642. if (userData[offset + 2] & 0x04) {
  3643. data.ccData = (userData[offset + 3] << 8) | userData[offset + 4];
  3644. results.push(data);
  3645. }
  3646. }
  3647. return results;
  3648. };
  3649.  
  3650. var discardEmulationPreventionBytes = function(data) {
  3651. var
  3652. length = data.byteLength,
  3653. emulationPreventionBytesPositions = [],
  3654. i = 1,
  3655. newLength, newData;
  3656.  
  3657. // Find all `Emulation Prevention Bytes`
  3658. while (i < length - 2) {
  3659. if (data[i] === 0 && data[i + 1] === 0 && data[i + 2] === 0x03) {
  3660. emulationPreventionBytesPositions.push(i + 2);
  3661. i += 2;
  3662. } else {
  3663. i++;
  3664. }
  3665. }
  3666.  
  3667. // If no Emulation Prevention Bytes were found just return the original
  3668. // array
  3669. if (emulationPreventionBytesPositions.length === 0) {
  3670. return data;
  3671. }
  3672.  
  3673. // Create a new array to hold the NAL unit data
  3674. newLength = length - emulationPreventionBytesPositions.length;
  3675. newData = new Uint8Array(newLength);
  3676. var sourceIndex = 0;
  3677.  
  3678. for (i = 0; i < newLength; sourceIndex++, i++) {
  3679. if (sourceIndex === emulationPreventionBytesPositions[0]) {
  3680. // Skip this byte
  3681. sourceIndex++;
  3682. // Remove this position index
  3683. emulationPreventionBytesPositions.shift();
  3684. }
  3685. newData[i] = data[sourceIndex];
  3686. }
  3687.  
  3688. return newData;
  3689. };
  3690.  
// exports
// NOTE(review): parseSei, parseUserData and USER_DATA_REGISTERED_ITU_T_T35
// are defined earlier in this module, above this excerpt.
module.exports = {
  parseSei: parseSei,
  parseUserData: parseUserData,
  parseCaptionPackets: parseCaptionPackets,
  discardEmulationPreventionBytes: discardEmulationPreventionBytes,
  USER_DATA_REGISTERED_ITU_T_T35: USER_DATA_REGISTERED_ITU_T_T35
};
  3699.  
  3700. },{}],15:[function(require,module,exports){
  3701. /**
  3702. * mux.js
  3703. *
  3704. * Copyright (c) Brightcove
  3705. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  3706. */
  3707. var
  3708. ONE_SECOND_IN_TS = 90000, // 90kHz clock
  3709. secondsToVideoTs,
  3710. secondsToAudioTs,
  3711. videoTsToSeconds,
  3712. audioTsToSeconds,
  3713. audioTsToVideoTs,
  3714. videoTsToAudioTs,
  3715. metadataTsToSeconds;
  3716.  
  3717. secondsToVideoTs = function(seconds) {
  3718. return seconds * ONE_SECOND_IN_TS;
  3719. };
  3720.  
  3721. secondsToAudioTs = function(seconds, sampleRate) {
  3722. return seconds * sampleRate;
  3723. };
  3724.  
  3725. videoTsToSeconds = function(timestamp) {
  3726. return timestamp / ONE_SECOND_IN_TS;
  3727. };
  3728.  
  3729. audioTsToSeconds = function(timestamp, sampleRate) {
  3730. return timestamp / sampleRate;
  3731. };
  3732.  
  3733. audioTsToVideoTs = function(timestamp, sampleRate) {
  3734. return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
  3735. };
  3736.  
  3737. videoTsToAudioTs = function(timestamp, sampleRate) {
  3738. return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
  3739. };
  3740.  
  3741. /**
  3742. * Adjust ID3 tag or caption timing information by the timeline pts values
  3743. * (if keepOriginalTimestamps is false) and convert to seconds
  3744. */
  3745. metadataTsToSeconds = function(timestamp, timelineStartPts, keepOriginalTimestamps) {
  3746. return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
  3747. };
  3748.  
// public API: the 90kHz clock constant plus all conversion helpers above
module.exports = {
  ONE_SECOND_IN_TS: ONE_SECOND_IN_TS,
  secondsToVideoTs: secondsToVideoTs,
  secondsToAudioTs: secondsToAudioTs,
  videoTsToSeconds: videoTsToSeconds,
  audioTsToSeconds: audioTsToSeconds,
  audioTsToVideoTs: audioTsToVideoTs,
  videoTsToAudioTs: videoTsToAudioTs,
  metadataTsToSeconds: metadataTsToSeconds
};
  3759.  
  3760. },{}],16:[function(require,module,exports){
  3761. /**
  3762. * mux.js
  3763. *
  3764. * Copyright (c) Brightcove
  3765. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  3766. */
  3767. 'use strict';
  3768.  
var ExpGolomb;

/**
 * Parser for exponential Golomb codes, a variable-bitwidth number encoding
 * scheme used by h264.
 *
 * Bits are consumed MSB-first. Up to 32 bits at a time are staged in
 * `workingWord`; `loadWord` refills it from `workingData` as needed.
 * State is shared mutable across all reader methods, so call order matters.
 */
ExpGolomb = function(workingData) {
  var
    // the number of bytes left to examine in workingData
    workingBytesAvailable = workingData.byteLength,

    // the current word being examined
    workingWord = 0, // :uint

    // the number of bits left to examine in the current word
    workingBitsAvailable = 0; // :uint;

  // ():uint
  // Bit count represented by the bytes not yet loaded into the working word
  // (excludes any bits still pending inside workingWord).
  this.length = function() {
    return (8 * workingBytesAvailable);
  };

  // ():uint
  // Total unread bits: pending word bits plus unloaded byte bits.
  this.bitsAvailable = function() {
    return (8 * workingBytesAvailable) + workingBitsAvailable;
  };

  // ():void
  // Refill workingWord with the next (up to) 4 bytes of workingData,
  // interpreted big-endian; throws once the input is exhausted.
  this.loadWord = function() {
    var
      position = workingData.byteLength - workingBytesAvailable,
      workingBytes = new Uint8Array(4),
      availableBytes = Math.min(4, workingBytesAvailable);

    if (availableBytes === 0) {
      throw new Error('no bytes available');
    }

    // any short tail is zero-padded by the fresh Uint8Array(4)
    workingBytes.set(workingData.subarray(position,
                                          position + availableBytes));
    workingWord = new DataView(workingBytes.buffer).getUint32(0);

    // track the amount of workingData that has been processed
    workingBitsAvailable = availableBytes * 8;
    workingBytesAvailable -= availableBytes;
  };

  // (count:int):void
  // Discard `count` bits, refilling the working word when it runs dry.
  this.skipBits = function(count) {
    var skipBytes; // :int
    if (workingBitsAvailable > count) {
      workingWord <<= count;
      workingBitsAvailable -= count;
    } else {
      // consume the rest of the word, then whole bytes, then the remainder
      count -= workingBitsAvailable;
      skipBytes = Math.floor(count / 8);

      count -= (skipBytes * 8);
      workingBytesAvailable -= skipBytes;

      this.loadWord();

      workingWord <<= count;
      workingBitsAvailable -= count;
    }
  };

  // (size:int):uint
  // Read the next `size` bits as an unsigned integer; recurses once when a
  // read straddles the word boundary.
  this.readBits = function(size) {
    var
      bits = Math.min(workingBitsAvailable, size), // :uint
      valu = workingWord >>> (32 - bits); // :uint
    // if size > 31, handle error
    workingBitsAvailable -= bits;
    if (workingBitsAvailable > 0) {
      workingWord <<= bits;
    } else if (workingBytesAvailable > 0) {
      this.loadWord();
    }

    bits = size - bits;
    if (bits > 0) {
      // not enough bits were staged; read the remainder from the next word
      return valu << bits | this.readBits(bits);
    }
    return valu;
  };

  // ():uint
  // Count and consume leading zero bits, stopping at (but not consuming)
  // the first 1 bit. Recurses across word boundaries.
  this.skipLeadingZeros = function() {
    var leadingZeroCount; // :uint
    for (leadingZeroCount = 0; leadingZeroCount < workingBitsAvailable; ++leadingZeroCount) {
      if ((workingWord & (0x80000000 >>> leadingZeroCount)) !== 0) {
        // the first bit of working word is 1
        workingWord <<= leadingZeroCount;
        workingBitsAvailable -= leadingZeroCount;
        return leadingZeroCount;
      }
    }

    // we exhausted workingWord and still have not found a 1
    this.loadWord();
    return leadingZeroCount + this.skipLeadingZeros();
  };

  // ():void
  // Skip one ue(v) value (leading zeros plus the same number of bits + 1).
  this.skipUnsignedExpGolomb = function() {
    this.skipBits(1 + this.skipLeadingZeros());
  };

  // ():void
  // Skip one se(v) value; encoding length is identical to ue(v).
  this.skipExpGolomb = function() {
    this.skipBits(1 + this.skipLeadingZeros());
  };

  // ():uint
  // Decode an unsigned Exp-Golomb value, ue(v).
  this.readUnsignedExpGolomb = function() {
    var clz = this.skipLeadingZeros(); // :uint
    return this.readBits(clz + 1) - 1;
  };

  // ():int
  // Decode a signed Exp-Golomb value, se(v): odd codes map to positive
  // numbers, even codes to negative.
  this.readExpGolomb = function() {
    var valu = this.readUnsignedExpGolomb(); // :int
    if (0x01 & valu) {
      // the number is odd if the low order bit is set
      return (1 + valu) >>> 1; // add 1 to make it even, and divide by 2
    }
    return -1 * (valu >>> 1); // divide by two then make it negative
  };

  // Some convenience functions
  // :Boolean
  this.readBoolean = function() {
    return this.readBits(1) === 1;
  };

  // ():int
  this.readUnsignedByte = function() {
    return this.readBits(8);
  };

  // prime the working word so the first read has bits available
  this.loadWord();
};

module.exports = ExpGolomb;
  3914.  
  3915. },{}],17:[function(require,module,exports){
  3916. /**
  3917. * mux.js
  3918. *
  3919. * Copyright (c) Brightcove
  3920. * Licensed Apache-2.0 https://github.com/videojs/mux.js/blob/master/LICENSE
  3921. *
  3922. * A lightweight readable stream implemention that handles event dispatching.
  3923. * Objects that inherit from streams should call init in their constructors.
  3924. */
  3925. 'use strict';
  3926.  
  3927. var Stream = function() {
  3928. this.init = function() {
  3929. var listeners = {};
  3930. /**
  3931. * Add a listener for a specified event type.
  3932. * @param type {string} the event name
  3933. * @param listener {function} the callback to be invoked when an event of
  3934. * the specified type occurs
  3935. */
  3936. this.on = function(type, listener) {
  3937. if (!listeners[type]) {
  3938. listeners[type] = [];
  3939. }
  3940. listeners[type] = listeners[type].concat(listener);
  3941. };
  3942. /**
  3943. * Remove a listener for a specified event type.
  3944. * @param type {string} the event name
  3945. * @param listener {function} a function previously registered for this
  3946. * type of event through `on`
  3947. */
  3948. this.off = function(type, listener) {
  3949. var index;
  3950. if (!listeners[type]) {
  3951. return false;
  3952. }
  3953. index = listeners[type].indexOf(listener);
  3954. listeners[type] = listeners[type].slice();
  3955. listeners[type].splice(index, 1);
  3956. return index > -1;
  3957. };
  3958. /**
  3959. * Trigger an event of the specified type on this stream. Any additional
  3960. * arguments to this function are passed as parameters to event listeners.
  3961. * @param type {string} the event name
  3962. */
  3963. this.trigger = function(type) {
  3964. var callbacks, i, length, args;
  3965. callbacks = listeners[type];
  3966. if (!callbacks) {
  3967. return;
  3968. }
  3969. // Slicing the arguments on every invocation of this method
  3970. // can add a significant amount of overhead. Avoid the
  3971. // intermediate object creation for the common case of a
  3972. // single callback argument
  3973. if (arguments.length === 2) {
  3974. length = callbacks.length;
  3975. for (i = 0; i < length; ++i) {
  3976. callbacks[i].call(this, arguments[1]);
  3977. }
  3978. } else {
  3979. args = [];
  3980. i = arguments.length;
  3981. for (i = 1; i < arguments.length; ++i) {
  3982. args.push(arguments[i]);
  3983. }
  3984. length = callbacks.length;
  3985. for (i = 0; i < length; ++i) {
  3986. callbacks[i].apply(this, args);
  3987. }
  3988. }
  3989. };
  3990. /**
  3991. * Destroys the stream and cleans up.
  3992. */
  3993. this.dispose = function() {
  3994. listeners = {};
  3995. };
  3996. };
  3997. };
  3998.  
  3999. /**
  4000. * Forwards all `data` events on this stream to the destination stream. The
  4001. * destination stream should provide a method `push` to receive the data
  4002. * events as they arrive.
  4003. * @param destination {stream} the stream that will receive all `data` events
  4004. * @param autoFlush {boolean} if false, we will not call `flush` on the destination
  4005. * when the current stream emits a 'done' event
  4006. * @see http://nodejs.org/api/stream.html#stream_readable_pipe_destination_options
  4007. */
  4008. Stream.prototype.pipe = function(destination) {
  4009. this.on('data', function(data) {
  4010. destination.push(data);
  4011. });
  4012.  
  4013. this.on('done', function(flushSource) {
  4014. destination.flush(flushSource);
  4015. });
  4016.  
  4017. this.on('partialdone', function(flushSource) {
  4018. destination.partialFlush(flushSource);
  4019. });
  4020.  
  4021. this.on('endedtimeline', function(flushSource) {
  4022. destination.endTimeline(flushSource);
  4023. });
  4024.  
  4025. this.on('reset', function(flushSource) {
  4026. destination.reset(flushSource);
  4027. });
  4028.  
  4029. return destination;
  4030. };
  4031.  
  4032. // Default stream functions that are expected to be overridden to perform
  4033. // actual work. These are provided by the prototype as a sort of no-op
  4034. // implementation so that we don't have to check for their existence in the
  4035. // `pipe` function above.
  4036. Stream.prototype.push = function(data) {
  4037. this.trigger('data', data);
  4038. };
  4039.  
  4040. Stream.prototype.flush = function(flushSource) {
  4041. this.trigger('done', flushSource);
  4042. };
  4043.  
  4044. Stream.prototype.partialFlush = function(flushSource) {
  4045. this.trigger('partialdone', flushSource);
  4046. };
  4047.  
  4048. Stream.prototype.endTimeline = function(flushSource) {
  4049. this.trigger('endedtimeline', flushSource);
  4050. };
  4051.  
  4052. Stream.prototype.reset = function(flushSource) {
  4053. this.trigger('reset', flushSource);
  4054. };
  4055.  
  4056. module.exports = Stream;
  4057.  
  4058. },{}]},{},[6])(6)
  4059. });

QingJ © 2025

镜像随时可能失效,请加Q群300939539或关注我们的公众号极客氢云获取最新地址