Home Reference Source

src/controller/stream-controller.ts

  1. import BaseStreamController, { State } from './base-stream-controller';
  2. import { changeTypeSupported } from '../is-supported';
  3. import type { NetworkComponentAPI } from '../types/component-api';
  4. import { Events } from '../events';
  5. import { BufferHelper } from '../utils/buffer-helper';
  6. import type { FragmentTracker } from './fragment-tracker';
  7. import { FragmentState } from './fragment-tracker';
  8. import type { Level } from '../types/level';
  9. import { PlaylistLevelType } from '../types/loader';
  10. import { ElementaryStreamTypes, Fragment } from '../loader/fragment';
  11. import TransmuxerInterface from '../demux/transmuxer-interface';
  12. import type { TransmuxerResult } from '../types/transmuxer';
  13. import { ChunkMetadata } from '../types/transmuxer';
  14. import GapController from './gap-controller';
  15. import { ErrorDetails } from '../errors';
  16. import type Hls from '../hls';
  17. import type { LevelDetails } from '../loader/level-details';
  18. import type { TrackSet } from '../types/track';
  19. import type { SourceBufferName } from '../types/buffer';
  20. import type {
  21. AudioTrackSwitchedData,
  22. AudioTrackSwitchingData,
  23. BufferCreatedData,
  24. BufferEOSData,
  25. BufferFlushedData,
  26. ErrorData,
  27. FragBufferedData,
  28. FragLoadedData,
  29. FragParsingMetadataData,
  30. FragParsingUserdataData,
  31. LevelLoadedData,
  32. LevelLoadingData,
  33. LevelsUpdatedData,
  34. ManifestParsedData,
  35. MediaAttachedData,
  36. } from '../types/events';
  37.  
// Polling cadence of the controller's state-machine tick loop.
const TICK_INTERVAL = 100; // how often to tick in ms
  39.  
  40. export default class StreamController
  41. extends BaseStreamController
  42. implements NetworkComponentAPI
  43. {
  44. private audioCodecSwap: boolean = false;
  45. private gapController: GapController | null = null;
  46. private level: number = -1;
  47. private _forceStartLoad: boolean = false;
  48. private altAudio: boolean = false;
  49. private audioOnly: boolean = false;
  50. private fragPlaying: Fragment | null = null;
  51. private onvplaying: EventListener | null = null;
  52. private onvseeked: EventListener | null = null;
  53. private fragLastKbps: number = 0;
  54. private couldBacktrack: boolean = false;
  55. private backtrackFragment: Fragment | null = null;
  56. private audioCodecSwitch: boolean = false;
  57. private videoBuffer: any | null = null;
  58.  
// Wires this controller to the hls event bus on construction.
constructor(hls: Hls, fragmentTracker: FragmentTracker) {
  super(hls, fragmentTracker, '[stream-controller]');
  this._registerListeners();
}
  63.  
// Subscribe to every hls event this controller reacts to.
// Any event added here must also be removed in _unregisterListeners.
private _registerListeners() {
  const { hls } = this;
  hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  hls.on(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  hls.on(Events.LEVEL_LOADING, this.onLevelLoading, this);
  hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  hls.on(
    Events.FRAG_LOAD_EMERGENCY_ABORTED,
    this.onFragLoadEmergencyAborted,
    this
  );
  hls.on(Events.ERROR, this.onError, this);
  hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  hls.on(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
  hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  hls.on(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
}
  85.  
  86. protected _unregisterListeners() {
  87. const { hls } = this;
  88. hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  89. hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  90. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  91. hls.off(Events.MANIFEST_PARSED, this.onManifestParsed, this);
  92. hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  93. hls.off(
  94. Events.FRAG_LOAD_EMERGENCY_ABORTED,
  95. this.onFragLoadEmergencyAborted,
  96. this
  97. );
  98. hls.off(Events.ERROR, this.onError, this);
  99. hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  100. hls.off(Events.AUDIO_TRACK_SWITCHED, this.onAudioTrackSwitched, this);
  101. hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
  102. hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  103. hls.off(Events.LEVELS_UPDATED, this.onLevelsUpdated, this);
  104. hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  105. }
  106.  
// Teardown hook: drop event subscriptions and detach from media.
protected onHandlerDestroying() {
  this._unregisterListeners();
  this.onMediaDetaching();
}
  111.  
/**
 * Begin (or resume) main fragment loading.
 * @param startPosition - position to start from; -1 means "use default /
 * last known currentTime". When levels are not parsed yet the request is
 * remembered via _forceStartLoad and the controller stays STOPPED.
 */
public startLoad(startPosition: number): void {
  if (this.levels) {
    const { lastCurrentTime, hls } = this;
    this.stopLoad();
    this.setInterval(TICK_INTERVAL);
    this.level = -1;
    this.fragLoadError = 0;
    if (!this.startFragRequested) {
      // determine load level
      let startLevel = hls.startLevel;
      if (startLevel === -1) {
        if (hls.config.testBandwidth && this.levels.length > 1) {
          // -1 : guess start Level by doing a bitrate test by loading first fragment of lowest quality level
          startLevel = 0;
          this.bitrateTest = true;
        } else {
          startLevel = hls.nextAutoLevel;
        }
      }
      // set new level to playlist loader : this will trigger start level load
      // hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
      this.level = hls.nextLoadLevel = startLevel;
      this.loadedmetadata = false;
    }
    // if startPosition undefined but lastCurrentTime set, set startPosition to last currentTime
    if (lastCurrentTime > 0 && startPosition === -1) {
      this.log(
        `Override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(
          3
        )}`
      );
      startPosition = lastCurrentTime;
    }
    this.state = State.IDLE;
    this.nextLoadPosition =
      this.startPosition =
      this.lastCurrentTime =
        startPosition;
    this.tick();
  } else {
    // Levels not available yet: remember the request so loading can start
    // once the manifest has been parsed.
    this._forceStartLoad = true;
    this.state = State.STOPPED;
  }
}
  156.  
// Stop loading and clear any pending forced start request.
public stopLoad() {
  this._forceStartLoad = false;
  super.stopLoad();
}
  161.  
// State-machine step executed on every tick (see TICK_INTERVAL).
protected doTick() {
  switch (this.state) {
    case State.IDLE:
      this.doTickIdle();
      break;
    case State.WAITING_LEVEL: {
      const { levels, level } = this;
      const details = levels?.[level]?.details;
      // Leave WAITING_LEVEL once the target level's details are usable:
      // VOD details, or a live playlist that this controller has refreshed.
      if (details && (!details.live || this.levelLastLoaded === this.level)) {
        if (this.waitForCdnTuneIn(details)) {
          break;
        }
        this.state = State.IDLE;
        break;
      }
      break;
    }
    case State.FRAG_LOADING_WAITING_RETRY:
      {
        const now = self.performance.now();
        const retryDate = this.retryDate;
        // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
        if (!retryDate || now >= retryDate || this.media?.seeking) {
          this.log('retryDate reached, switch back to IDLE state');
          this.resetStartWhenNotLoaded(this.level);
          this.state = State.IDLE;
        }
      }
      break;
    default:
      break;
  }
  // check buffer
  // check/update current fragment
  this.onTickEnd();
}
  198.  
// Post-tick work: buffer health check and FRAG_CHANGED detection.
protected onTickEnd() {
  super.onTickEnd();
  this.checkBuffer();
  this.checkFragmentChanged();
}
  204.  
// IDLE-state step: decide whether (and which) main fragment to load next.
private doTickIdle() {
  const { hls, levelLastLoaded, levels, media } = this;
  const { config, nextLoadLevel: level } = hls;

  // if start level not parsed yet OR
  // if video not attached AND start fragment already requested OR start frag prefetch not enabled
  // exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment
  if (
    levelLastLoaded === null ||
    (!media && (this.startFragRequested || !config.startFragPrefetch))
  ) {
    return;
  }

  // If the "main" level is audio-only but we are loading an alternate track in the same group, do not load anything
  if (this.altAudio && this.audioOnly) {
    return;
  }

  if (!levels || !levels[level]) {
    return;
  }

  const levelInfo = levels[level];

  // if buffer length is less than maxBufLen try to load a new fragment
  // set next load level : this will trigger a playlist load if needed
  this.level = hls.nextLoadLevel = level;

  const levelDetails = levelInfo.details;
  // if level info not retrieved yet, switch state and wait for level retrieval
  // if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load
  // a useless and outdated fragment (that might even introduce load error if it is already out of the live playlist)
  if (
    !levelDetails ||
    this.state === State.WAITING_LEVEL ||
    (levelDetails.live && this.levelLastLoaded !== level)
  ) {
    this.state = State.WAITING_LEVEL;
    return;
  }

  const bufferInfo = this.getMainFwdBufferInfo();
  if (bufferInfo === null) {
    return;
  }
  const bufferLen = bufferInfo.len;

  // compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
  const maxBufLen = this.getMaxBufferLength(levelInfo.maxBitrate);

  // Stay idle if we are still with buffer margins
  if (bufferLen >= maxBufLen) {
    return;
  }

  // End of stream reached: notify the buffer controller and stop loading.
  if (this._streamEnded(bufferInfo, levelDetails)) {
    const data: BufferEOSData = {};
    if (this.altAudio) {
      data.type = 'video';
    }

    this.hls.trigger(Events.BUFFER_EOS, data);
    this.state = State.ENDED;
    return;
  }

  // Drop the remembered backtrack fragment once the buffer end has passed it.
  if (
    this.backtrackFragment &&
    this.backtrackFragment.start > bufferInfo.end
  ) {
    this.backtrackFragment = null;
  }
  const targetBufferTime = this.backtrackFragment
    ? this.backtrackFragment.start
    : bufferInfo.end;
  let frag = this.getNextFragment(targetBufferTime, levelDetails);
  // Avoid backtracking by loading an earlier segment in streams with segments that do not start with a key frame (flagged by `couldBacktrack`)
  if (
    this.couldBacktrack &&
    !this.fragPrevious &&
    frag &&
    frag.sn !== 'initSegment' &&
    this.fragmentTracker.getState(frag) !== FragmentState.OK
  ) {
    const backtrackSn = (this.backtrackFragment ?? frag).sn as number;
    const fragIdx = backtrackSn - levelDetails.startSN;
    const backtrackFrag = levelDetails.fragments[fragIdx - 1];
    if (backtrackFrag && frag.cc === backtrackFrag.cc) {
      frag = backtrackFrag;
      this.fragmentTracker.removeFragment(backtrackFrag);
    }
  } else if (this.backtrackFragment && bufferInfo.len) {
    this.backtrackFragment = null;
  }
  // Avoid loop loading by using nextLoadPosition set for backtracking
  if (
    frag &&
    this.fragmentTracker.getState(frag) === FragmentState.OK &&
    this.nextLoadPosition > targetBufferTime
  ) {
    // Cleanup the fragment tracker before trying to find the next unbuffered fragment
    const type =
      this.audioOnly && !this.altAudio
        ? ElementaryStreamTypes.AUDIO
        : ElementaryStreamTypes.VIDEO;
    if (media) {
      this.afterBufferFlushed(media, type, PlaylistLevelType.MAIN);
    }
    frag = this.getNextFragment(this.nextLoadPosition, levelDetails);
  }
  if (!frag) {
    return;
  }
  // Fetch the init segment first when it has not been loaded yet.
  if (frag.initSegment && !frag.initSegment.data && !this.bitrateTest) {
    frag = frag.initSegment;
  }

  // We want to load the key if we're dealing with an identity key, because we will decrypt
  // this content using the key we fetch. Other keys will be handled by the DRM CDM via EME.
  if (frag.decryptdata?.keyFormat === 'identity' && !frag.decryptdata?.key) {
    this.loadKey(frag, levelDetails);
  } else {
    this.loadFragment(frag, levelDetails, targetBufferTime);
  }
}
  331.  
/**
 * Dispatch a fragment load according to its tracker state: init segments
 * and bitrate-test fragments take dedicated paths; NOT_LOADED fragments go
 * to the base loader; APPENDING fragments trigger buffer-size reduction.
 */
protected loadFragment(
  frag: Fragment,
  levelDetails: LevelDetails,
  targetBufferTime: number
) {
  // Check if fragment is not loaded
  const fragState = this.fragmentTracker.getState(frag);
  this.fragCurrent = frag;
  if (fragState === FragmentState.NOT_LOADED) {
    if (frag.sn === 'initSegment') {
      this._loadInitSegment(frag);
    } else if (this.bitrateTest) {
      this.log(
        `Fragment ${frag.sn} of level ${frag.level} is being downloaded to test bitrate and will not be buffered`
      );
      this._loadBitrateTestFrag(frag);
    } else {
      this.startFragRequested = true;
      super.loadFragment(frag, levelDetails, targetBufferTime);
    }
  } else if (fragState === FragmentState.APPENDING) {
    // Lower the buffer size and try again
    if (this.reduceMaxBufferLength(frag.duration)) {
      this.fragmentTracker.removeFragment(frag);
    }
  } else if (this.media?.buffered.length === 0) {
    // Stop gap for bad tracker / buffer flush behavior
    this.fragmentTracker.removeAllFragments();
  }
}
  362.  
  363. private getAppendedFrag(position): Fragment | null {
  364. const fragOrPart = this.fragmentTracker.getAppendedFrag(
  365. position,
  366. PlaylistLevelType.MAIN
  367. );
  368. if (fragOrPart && 'fragment' in fragOrPart) {
  369. return fragOrPart.fragment;
  370. }
  371. return fragOrPart;
  372. }
  373.  
  374. private getBufferedFrag(position) {
  375. return this.fragmentTracker.getBufferedFrag(
  376. position,
  377. PlaylistLevelType.MAIN
  378. );
  379. }
  380.  
  381. private followingBufferedFrag(frag: Fragment | null) {
  382. if (frag) {
  383. // try to get range of next fragment (500ms after this range)
  384. return this.getBufferedFrag(frag.end + 0.5);
  385. }
  386. return null;
  387. }
  388.  
/*
on immediate level switch :
- pause playback if playing
- cancel any pending load request
- and trigger a buffer flush
*/
public immediateLevelSwitch() {
  this.abortCurrentFrag();
  // Flush the entire main buffer so the new level replaces everything.
  this.flushMainBuffer(0, Number.POSITIVE_INFINITY);
}
  399.  
/**
 * try to switch ASAP without breaking video playback:
 * in order to ensure smooth but quick level switching,
 * we need to find the next flushable buffer range
 * we should take into account new segment fetch time
 */
public nextLevelSwitch() {
  const { levels, media } = this;
  // ensure that media is defined and that metadata are available (to retrieve currentTime)
  if (media?.readyState) {
    let fetchdelay;
    const fragPlayingCurrent = this.getAppendedFrag(media.currentTime);
    if (fragPlayingCurrent && fragPlayingCurrent.start > 1) {
      // flush buffer preceding current fragment (flush until current fragment start offset)
      // minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ...
      this.flushMainBuffer(0, fragPlayingCurrent.start - 1);
    }
    if (!media.paused && levels) {
      // add a safety delay of 1s
      const nextLevelId = this.hls.nextLoadLevel;
      const nextLevel = levels[nextLevelId];
      const fragLastKbps = this.fragLastKbps;
      // Estimate fetch time of the replacement fragment from the last
      // measured throughput and the next level's bitrate.
      if (fragLastKbps && this.fragCurrent) {
        fetchdelay =
          (this.fragCurrent.duration * nextLevel.maxBitrate) /
          (1000 * fragLastKbps) +
          1;
      } else {
        fetchdelay = 0;
      }
    } else {
      fetchdelay = 0;
    }
    // this.log('fetchdelay:'+fetchdelay);
    // find buffer range that will be reached once new fragment will be fetched
    const bufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
    if (bufferedFrag) {
      // we can flush buffer range following this one without stalling playback
      const nextBufferedFrag = this.followingBufferedFrag(bufferedFrag);
      if (nextBufferedFrag) {
        // if we are here, we can also cancel any loading/demuxing in progress, as they are useless
        this.abortCurrentFrag();
        // start flush position is in next buffered frag. Leave some padding for non-independent segments and smoother playback.
        // NOTE(review): a maxStartPTS of exactly 0 falls through to .start
        // here (truthiness check) — confirm this is intended.
        const maxStart = nextBufferedFrag.maxStartPTS
          ? nextBufferedFrag.maxStartPTS
          : nextBufferedFrag.start;
        const fragDuration = nextBufferedFrag.duration;
        const startPts = Math.max(
          bufferedFrag.end,
          maxStart +
            Math.min(
              Math.max(
                fragDuration - this.config.maxFragLookUpTolerance,
                fragDuration * 0.5
              ),
              fragDuration * 0.75
            )
        );
        this.flushMainBuffer(startPts, Number.POSITIVE_INFINITY);
      }
    }
  }
}
  463.  
  464. private abortCurrentFrag() {
  465. const fragCurrent = this.fragCurrent;
  466. this.fragCurrent = null;
  467. this.backtrackFragment = null;
  468. if (fragCurrent?.loader) {
  469. fragCurrent.loader.abort();
  470. }
  471. switch (this.state) {
  472. case State.KEY_LOADING:
  473. case State.FRAG_LOADING:
  474. case State.FRAG_LOADING_WAITING_RETRY:
  475. case State.PARSING:
  476. case State.PARSED:
  477. this.state = State.IDLE;
  478. break;
  479. }
  480. this.nextLoadPosition = this.getLoadPosition();
  481. }
  482.  
  483. protected flushMainBuffer(startOffset: number, endOffset: number) {
  484. super.flushMainBuffer(
  485. startOffset,
  486. endOffset,
  487. this.altAudio ? 'video' : null
  488. );
  489. }
  490.  
// Media element attached: hook 'playing'/'seeked' listeners (bound
// handlers are kept so they can be removed in onMediaDetaching) and
// create the gap controller for stall recovery.
protected onMediaAttached(
  event: Events.MEDIA_ATTACHED,
  data: MediaAttachedData
) {
  super.onMediaAttached(event, data);
  const media = data.media;
  this.onvplaying = this.onMediaPlaying.bind(this);
  this.onvseeked = this.onMediaSeeked.bind(this);
  media.addEventListener('playing', this.onvplaying as EventListener);
  media.addEventListener('seeked', this.onvseeked as EventListener);
  this.gapController = new GapController(
    this.config,
    media,
    this.fragmentTracker,
    this.hls
  );
}
  508.  
// Media element detaching: remove media listeners, drop buffer/fragment
// references and destroy the gap controller before the base teardown.
protected onMediaDetaching() {
  const { media } = this;
  if (media && this.onvplaying && this.onvseeked) {
    media.removeEventListener('playing', this.onvplaying);
    media.removeEventListener('seeked', this.onvseeked);
    this.onvplaying = this.onvseeked = null;
    this.videoBuffer = null;
  }
  this.fragPlaying = null;
  if (this.gapController) {
    this.gapController.destroy();
    this.gapController = null;
  }
  super.onMediaDetaching();
}
  524.  
// Media 'playing' event handler.
private onMediaPlaying() {
  // tick to speed up FRAG_CHANGED triggering
  this.tick();
}
  529.  
  530. private onMediaSeeked() {
  531. const media = this.media;
  532. const currentTime = media ? media.currentTime : null;
  533. if (Number.isFinite(currentTime)) {
  534. this.log(`Media seeked to ${(currentTime as number).toFixed(3)}`);
  535. }
  536.  
  537. // tick to speed up FRAG_CHANGED triggering
  538. this.tick();
  539. }
  540.  
// New manifest is loading: reset buffers and all per-stream state so the
// previous presentation cannot leak into the new one.
private onManifestLoading() {
  // reset buffer on manifest loading
  this.log('Trigger BUFFER_RESET');
  this.hls.trigger(Events.BUFFER_RESET, undefined);
  this.fragmentTracker.removeAllFragments();
  this.couldBacktrack = false;
  this.startPosition = this.lastCurrentTime = 0;
  this.fragPlaying = null;
  this.backtrackFragment = null;
}
  551.  
  552. private onManifestParsed(
  553. event: Events.MANIFEST_PARSED,
  554. data: ManifestParsedData
  555. ) {
  556. let aac = false;
  557. let heaac = false;
  558. let codec;
  559. data.levels.forEach((level) => {
  560. // detect if we have different kind of audio codecs used amongst playlists
  561. codec = level.audioCodec;
  562. if (codec) {
  563. if (codec.indexOf('mp4a.40.2') !== -1) {
  564. aac = true;
  565. }
  566.  
  567. if (codec.indexOf('mp4a.40.5') !== -1) {
  568. heaac = true;
  569. }
  570. }
  571. });
  572. this.audioCodecSwitch = aac && heaac && !changeTypeSupported();
  573. if (this.audioCodecSwitch) {
  574. this.log(
  575. 'Both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC'
  576. );
  577. }
  578.  
  579. this.levels = data.levels;
  580. this.startFragRequested = false;
  581. }
  582.  
// A level playlist started loading: if its details are not usable yet
// (missing, stale live playlist, or CDN tune-in pending), park the state
// machine in WAITING_LEVEL until LEVEL_LOADED.
private onLevelLoading(event: Events.LEVEL_LOADING, data: LevelLoadingData) {
  const { levels } = this;
  if (!levels || this.state !== State.IDLE) {
    return;
  }
  const level = levels[data.level];
  if (
    !level.details ||
    (level.details.live && this.levelLastLoaded !== data.level) ||
    this.waitForCdnTuneIn(level.details)
  ) {
    this.state = State.WAITING_LEVEL;
  }
}
  597.  
// A level playlist finished loading: abort a mismatched in-flight fragment,
// align live playlists, store the new details, and resume fragment loading.
private onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
  const { levels } = this;
  const newLevelId = data.level;
  const newDetails = data.details;
  const duration = newDetails.totalduration;

  if (!levels) {
    this.warn(`Levels were reset while loading level ${newLevelId}`);
    return;
  }
  this.log(
    `Level ${newLevelId} loaded [${newDetails.startSN},${newDetails.endSN}], cc [${newDetails.startCC}, ${newDetails.endCC}] duration:${duration}`
  );

  // If a fragment of a different level is still loading, abort it so the
  // freshly loaded level can take over.
  const fragCurrent = this.fragCurrent;
  if (
    fragCurrent &&
    (this.state === State.FRAG_LOADING ||
      this.state === State.FRAG_LOADING_WAITING_RETRY)
  ) {
    if (fragCurrent.level !== data.level && fragCurrent.loader) {
      this.state = State.IDLE;
      this.backtrackFragment = null;
      fragCurrent.loader.abort();
    }
  }

  const curLevel = levels[newLevelId];
  let sliding = 0;
  if (newDetails.live || curLevel.details?.live) {
    // An empty delta playlist update is treated as failed; keep the old
    // details and wait for the next refresh.
    if (!newDetails.fragments[0]) {
      newDetails.deltaUpdateFailed = true;
    }
    if (newDetails.deltaUpdateFailed) {
      return;
    }
    sliding = this.alignPlaylists(newDetails, curLevel.details);
  }
  // override level info
  curLevel.details = newDetails;
  this.levelLastLoaded = newLevelId;

  this.hls.trigger(Events.LEVEL_UPDATED, {
    details: newDetails,
    level: newLevelId,
  });

  // only switch back to IDLE state if we were waiting for level to start downloading a new fragment
  if (this.state === State.WAITING_LEVEL) {
    if (this.waitForCdnTuneIn(newDetails)) {
      // Wait for Low-Latency CDN Tune-in
      return;
    }
    this.state = State.IDLE;
  }

  if (!this.startFragRequested) {
    this.setStartPosition(newDetails, sliding);
  } else if (newDetails.live) {
    this.synchronizeToLiveEdge(newDetails);
  }

  // trigger handler right now
  this.tick();
}
  663.  
// Fragment payload chunk received: lazily create the transmuxer and push
// the chunk for demux/remux into ISO-BMFF segments.
protected _handleFragmentLoadProgress(data: FragLoadedData) {
  const { frag, part, payload } = data;
  const { levels } = this;
  if (!levels) {
    this.warn(
      `Levels were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`
    );
    return;
  }
  const currentLevel = levels[frag.level];
  const details = currentLevel.details as LevelDetails;
  if (!details) {
    this.warn(
      `Dropping fragment ${frag.sn} of level ${frag.level} after level details were reset`
    );
    return;
  }
  const videoCodec = currentLevel.videoCodec;

  // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
  const accurateTimeOffset = details.PTSKnown || !details.live;
  const initSegmentData = frag.initSegment?.data;
  const audioCodec = this._getAudioCodec(currentLevel);

  // transmux the MPEG-TS data to ISO-BMFF segments
  // this.log(`Transmuxing ${frag.sn} of [${details.startSN} ,${details.endSN}],level ${frag.level}, cc ${frag.cc}`);
  const transmuxer = (this.transmuxer =
    this.transmuxer ||
    new TransmuxerInterface(
      this.hls,
      PlaylistLevelType.MAIN,
      this._handleTransmuxComplete.bind(this),
      this._handleTransmuxerFlush.bind(this)
    ));
  // partIndex of -1 marks a whole-fragment (non-partial) push.
  const partIndex = part ? part.index : -1;
  const partial = partIndex !== -1;
  const chunkMeta = new ChunkMetadata(
    frag.level,
    frag.sn as number,
    frag.stats.chunkCount,
    payload.byteLength,
    partIndex,
    partial
  );
  const initPTS = this.initPTS[frag.cc];

  transmuxer.push(
    payload,
    initSegmentData,
    audioCodec,
    videoCodec,
    frag,
    part,
    details.totalduration,
    accurateTimeOffset,
    chunkMeta,
    initPTS
  );
}
  723.  
// Audio track switch requested. Switching back to main (muxed) audio means
// this controller must re-drive scheduling off media.buffered, reset the
// transmuxer, and flush alt audio; switching TO alt audio is handled by the
// audio stream controller and finalized in onAudioTrackSwitched.
private onAudioTrackSwitching(
  event: Events.AUDIO_TRACK_SWITCHING,
  data: AudioTrackSwitchingData
) {
  // if any URL found on new audio track, it is an alternate audio track
  const fromAltAudio = this.altAudio;
  const altAudio = !!data.url;
  const trackId = data.id;
  // if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered
  // don't do anything if we switch to alt audio: audio stream controller is handling it.
  // we will just have to change buffer scheduling on audioTrackSwitched
  if (!altAudio) {
    if (this.mediaBuffer !== this.media) {
      this.log(
        'Switching on main audio, use media.buffered to schedule main fragment loading'
      );
      this.mediaBuffer = this.media;
      const fragCurrent = this.fragCurrent;
      // we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
      if (fragCurrent?.loader) {
        this.log('Switching to main audio track, cancel main fragment load');
        fragCurrent.loader.abort();
      }
      // destroy transmuxer to force init segment generation (following audio switch)
      this.resetTransmuxer();
      // switch to IDLE state to load new fragment
      this.resetLoadingState();
    } else if (this.audioOnly) {
      // Reset audio transmuxer so when switching back to main audio we're not still appending where we left off
      this.resetTransmuxer();
    }
    const hls = this.hls;
    // If switching from alt to main audio, flush all audio and trigger track switched
    if (fromAltAudio) {
      hls.trigger(Events.BUFFER_FLUSHING, {
        startOffset: 0,
        endOffset: Number.POSITIVE_INFINITY,
        type: 'audio',
      });
    }
    hls.trigger(Events.AUDIO_TRACK_SWITCHED, {
      id: trackId,
    });
  }
}
  769.  
  770. private onAudioTrackSwitched(
  771. event: Events.AUDIO_TRACK_SWITCHED,
  772. data: AudioTrackSwitchedData
  773. ) {
  774. const trackId = data.id;
  775. const altAudio = !!this.hls.audioTracks[trackId].url;
  776. if (altAudio) {
  777. const videoBuffer = this.videoBuffer;
  778. // if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
  779. if (videoBuffer && this.mediaBuffer !== videoBuffer) {
  780. this.log(
  781. 'Switching on alternate audio, use video.buffered to schedule main fragment loading'
  782. );
  783. this.mediaBuffer = videoBuffer;
  784. }
  785. }
  786. this.altAudio = altAudio;
  787. this.tick();
  788. }
  789.  
  790. private onBufferCreated(
  791. event: Events.BUFFER_CREATED,
  792. data: BufferCreatedData
  793. ) {
  794. const tracks = data.tracks;
  795. let mediaTrack;
  796. let name;
  797. let alternate = false;
  798. for (const type in tracks) {
  799. const track = tracks[type];
  800. if (track.id === 'main') {
  801. name = type;
  802. mediaTrack = track;
  803. // keep video source buffer reference
  804. if (type === 'video') {
  805. const videoTrack = tracks[type];
  806. if (videoTrack) {
  807. this.videoBuffer = videoTrack.buffer;
  808. }
  809. }
  810. } else {
  811. alternate = true;
  812. }
  813. }
  814. if (alternate && mediaTrack) {
  815. this.log(
  816. `Alternate track found, use ${name}.buffered to schedule main fragment loading`
  817. );
  818. this.mediaBuffer = mediaTrack.buffer;
  819. } else {
  820. this.mediaBuffer = this.media;
  821. }
  822. }
  823.  
// A main fragment finished buffering: record throughput (kbps) for fetch
// delay estimates, remember the fragment, and complete the buffer cycle.
private onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
  const { frag, part } = data;
  if (frag && frag.type !== PlaylistLevelType.MAIN) {
    return;
  }
  if (this.fragContextChanged(frag)) {
    // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
    // Avoid setting state back to IDLE, since that will interfere with a level switch
    this.warn(
      `Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${
        frag.level
      } finished buffering, but was aborted. state: ${this.state}`
    );
    if (this.state === State.PARSED) {
      this.state = State.IDLE;
    }
    return;
  }
  const stats = part ? part.stats : frag.stats;
  // total bytes over load-to-buffered wall time, in kilobits per second
  this.fragLastKbps = Math.round(
    (8 * stats.total) / (stats.buffering.end - stats.loading.first)
  );
  if (frag.sn !== 'initSegment') {
    this.fragPrevious = frag;
  }
  this.fragBufferedComplete(frag, part);
}
  851.  
// Central error handler: fragment/key load errors go to the shared retry
// path; level load errors adjust the state machine; BUFFER_FULL triggers
// buffer-length reduction or a full flush.
private onError(event: Events.ERROR, data: ErrorData) {
  switch (data.details) {
    case ErrorDetails.FRAG_LOAD_ERROR:
    case ErrorDetails.FRAG_LOAD_TIMEOUT:
    case ErrorDetails.KEY_LOAD_ERROR:
    case ErrorDetails.KEY_LOAD_TIMEOUT:
      this.onFragmentOrKeyLoadError(PlaylistLevelType.MAIN, data);
      break;
    case ErrorDetails.LEVEL_LOAD_ERROR:
    case ErrorDetails.LEVEL_LOAD_TIMEOUT:
      if (this.state !== State.ERROR) {
        if (data.fatal) {
          // if fatal error, stop processing
          this.warn(`${data.details}`);
          this.state = State.ERROR;
        } else {
          // in case of non fatal error while loading level, if level controller is not retrying to load level , switch back to IDLE
          if (!data.levelRetry && this.state === State.WAITING_LEVEL) {
            this.state = State.IDLE;
          }
        }
      }
      break;
    case ErrorDetails.BUFFER_FULL_ERROR:
      // if in appending state
      if (
        data.parent === 'main' &&
        (this.state === State.PARSING || this.state === State.PARSED)
      ) {
        let flushBuffer = true;
        const bufferedInfo = this.getFwdBufferInfo(
          this.media,
          PlaylistLevelType.MAIN
        );
        // 0.5 : tolerance needed as some browsers stalls playback before reaching buffered end
        // reduce max buf len if current position is buffered
        if (bufferedInfo && bufferedInfo.len > 0.5) {
          flushBuffer = !this.reduceMaxBufferLength(bufferedInfo.len);
        }
        if (flushBuffer) {
          // current position is not buffered, but browser is still complaining about buffer full error
          // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
          // in that case flush the whole buffer to recover
          this.warn(
            'buffer full error also media.currentTime is not buffered, flush main'
          );
          // flush main buffer
          this.immediateLevelSwitch();
        }
        this.resetLoadingState();
      }
      break;
    default:
      break;
  }
}
  908.  
  909. // Checks the health of the buffer and attempts to resolve playback stalls.
  910. private checkBuffer() {
  911. const { media, gapController } = this;
  912. if (!media || !gapController || !media.readyState) {
  913. // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
  914. return;
  915. }
  916.  
  917. if (this.loadedmetadata || !BufferHelper.getBuffered(media).length) {
  918. // Resolve gaps using the main buffer, whose ranges are the intersections of the A/V sourcebuffers
  919. const activeFrag = this.state !== State.IDLE ? this.fragCurrent : null;
  920. gapController.poll(this.lastCurrentTime, activeFrag);
  921. }
  922.  
  923. this.lastCurrentTime = media.currentTime;
  924. }
  925.  
  926. private onFragLoadEmergencyAborted() {
  927. this.state = State.IDLE;
  928. // if loadedmetadata is not set, it means that we are emergency switch down on first frag
  929. // in that case, reset startFragRequested flag
  930. if (!this.loadedmetadata) {
  931. this.startFragRequested = false;
  932. this.nextLoadPosition = this.startPosition;
  933. }
  934. this.tickImmediate();
  935. }
  936.  
  937. private onBufferFlushed(
  938. event: Events.BUFFER_FLUSHED,
  939. { type }: BufferFlushedData
  940. ) {
  941. if (
  942. type !== ElementaryStreamTypes.AUDIO ||
  943. (this.audioOnly && !this.altAudio)
  944. ) {
  945. const media =
  946. (type === ElementaryStreamTypes.VIDEO
  947. ? this.videoBuffer
  948. : this.mediaBuffer) || this.media;
  949. this.afterBufferFlushed(media, type, PlaylistLevelType.MAIN);
  950. }
  951. }
  952.  
  953. private onLevelsUpdated(
  954. event: Events.LEVELS_UPDATED,
  955. data: LevelsUpdatedData
  956. ) {
  957. this.levels = data.levels;
  958. }
  959.  
  960. public swapAudioCodec() {
  961. this.audioCodecSwap = !this.audioCodecSwap;
  962. }
  963.  
  964. /**
  965. * Seeks to the set startPosition if not equal to the mediaElement's current time.
  966. */
  967. protected seekToStartPos() {
  968. const { media } = this;
  969. if (!media) {
  970. return;
  971. }
  972. const currentTime = media.currentTime;
  973. let startPosition = this.startPosition;
  974. // only adjust currentTime if different from startPosition or if startPosition not buffered
  975. // at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
  976. if (startPosition >= 0 && currentTime < startPosition) {
  977. if (media.seeking) {
  978. this.log(
  979. `could not seek to ${startPosition}, already seeking at ${currentTime}`
  980. );
  981. return;
  982. }
  983. const buffered = BufferHelper.getBuffered(media);
  984. const bufferStart = buffered.length ? buffered.start(0) : 0;
  985. const delta = bufferStart - startPosition;
  986. if (
  987. delta > 0 &&
  988. (delta < this.config.maxBufferHole ||
  989. delta < this.config.maxFragLookUpTolerance)
  990. ) {
  991. this.log(`adjusting start position by ${delta} to match buffer start`);
  992. startPosition += delta;
  993. this.startPosition = startPosition;
  994. }
  995. this.log(
  996. `seek to target start position ${startPosition} from current time ${currentTime}`
  997. );
  998. media.currentTime = startPosition;
  999. }
  1000. }
  1001.  
  1002. private _getAudioCodec(currentLevel) {
  1003. let audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
  1004. if (this.audioCodecSwap && audioCodec) {
  1005. this.log('Swapping audio codec');
  1006. if (audioCodec.indexOf('mp4a.40.5') !== -1) {
  1007. audioCodec = 'mp4a.40.2';
  1008. } else {
  1009. audioCodec = 'mp4a.40.5';
  1010. }
  1011. }
  1012.  
  1013. return audioCodec;
  1014. }
  1015.  
  1016. private _loadBitrateTestFrag(frag: Fragment) {
  1017. frag.bitrateTest = true;
  1018. this._doFragLoad(frag).then((data) => {
  1019. const { hls } = this;
  1020. if (!data || hls.nextLoadLevel || this.fragContextChanged(frag)) {
  1021. return;
  1022. }
  1023. this.fragLoadError = 0;
  1024. this.state = State.IDLE;
  1025. this.startFragRequested = false;
  1026. this.bitrateTest = false;
  1027. const stats = frag.stats;
  1028. // Bitrate tests fragments are neither parsed nor buffered
  1029. stats.parsing.start =
  1030. stats.parsing.end =
  1031. stats.buffering.start =
  1032. stats.buffering.end =
  1033. self.performance.now();
  1034. hls.trigger(Events.FRAG_LOADED, data as FragLoadedData);
  1035. frag.bitrateTest = false;
  1036. });
  1037. }
  1038.  
  /**
   * Handles one transmuxed chunk: buffers the init segment, video and audio
   * elementary streams, and forwards ID3 metadata / user-data samples.
   * Aborts (without buffering) when the loading context changed while the
   * chunk was being transmuxed, and may backtrack when the chunk is not
   * independently decodable.
   */
  private _handleTransmuxComplete(transmuxResult: TransmuxerResult) {
    const id = 'main';
    const { hls } = this;
    const { remuxResult, chunkMeta } = transmuxResult;

    // Re-resolve frag/part/level from the chunk metadata; null means the
    // context was torn down (e.g. level switch) while transmuxing
    const context = this.getCurrentContext(chunkMeta);
    if (!context) {
      this.warn(
        `The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`
      );
      this.resetStartWhenNotLoaded(chunkMeta.level);
      return;
    }
    const { frag, part, level } = context;
    const { video, text, id3, initSegment } = remuxResult;
    const { details } = level;
    // The audio-stream-controller handles audio buffering if Hls.js is playing an alternate audio track
    const audio = this.altAudio ? undefined : remuxResult.audio;

    // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
    // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
    if (this.fragContextChanged(frag)) {
      return;
    }

    this.state = State.PARSING;

    if (initSegment) {
      if (initSegment.tracks) {
        this._bufferInitSegment(level, initSegment.tracks, frag, chunkMeta);
        hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
          frag,
          id,
          tracks: initSegment.tracks,
        });
      }

      // This would be nice if Number.isFinite acted as a typeguard, but it doesn't. See: https://github.com/Microsoft/TypeScript/issues/10038
      const initPTS = initSegment.initPTS as number;
      const timescale = initSegment.timescale as number;
      if (Number.isFinite(initPTS)) {
        // Record the initial PTS per discontinuity counter for timeline alignment
        this.initPTS[frag.cc] = initPTS;
        hls.trigger(Events.INIT_PTS_FOUND, { frag, id, initPTS, timescale });
      }
    }

    // Avoid buffering if backtracking this fragment
    if (video && remuxResult.independent !== false) {
      if (details) {
        const { startPTS, endPTS, startDTS, endDTS } = video;
        if (part) {
          part.elementaryStreams[video.type] = {
            startPTS,
            endPTS,
            startDTS,
            endDTS,
          };
        } else {
          if (video.firstKeyFrame && video.independent) {
            this.couldBacktrack = true;
          }
          if (video.dropped && video.independent) {
            // Backtrack if dropped frames create a gap after currentTime

            const bufferInfo = this.getMainFwdBufferInfo();
            const targetBufferTime =
              (bufferInfo ? bufferInfo.end : this.getLoadPosition()) +
              this.config.maxBufferHole;
            const startTime = video.firstKeyFramePTS
              ? video.firstKeyFramePTS
              : startPTS;
            if (targetBufferTime < startTime - this.config.maxBufferHole) {
              this.backtrack(frag);
              return;
            }
            // Set video stream start to fragment start so that truncated samples do not distort the timeline, and mark it partial
            frag.setElementaryStreamInfo(
              video.type as ElementaryStreamTypes,
              frag.start,
              endPTS,
              frag.start,
              endDTS,
              true
            );
          }
        }
        frag.setElementaryStreamInfo(
          video.type as ElementaryStreamTypes,
          startPTS,
          endPTS,
          startDTS,
          endDTS
        );
        // Keep the backtrack reference pointed at the fragment being buffered
        if (this.backtrackFragment) {
          this.backtrackFragment = frag;
        }
        this.bufferFragmentData(video, frag, part, chunkMeta);
      }
    } else if (remuxResult.independent === false) {
      // Chunk is not independently decodable: rewind to find a keyframe
      this.backtrack(frag);
      return;
    }

    if (audio) {
      const { startPTS, endPTS, startDTS, endDTS } = audio;
      if (part) {
        part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
          startPTS,
          endPTS,
          startDTS,
          endDTS,
        };
      }
      frag.setElementaryStreamInfo(
        ElementaryStreamTypes.AUDIO,
        startPTS,
        endPTS,
        startDTS,
        endDTS
      );
      this.bufferFragmentData(audio, frag, part, chunkMeta);
    }

    // Forward timed metadata (ID3) and user data (e.g. captions) to listeners
    if (details && id3?.samples?.length) {
      const emittedID3: FragParsingMetadataData = {
        id,
        frag,
        details,
        samples: id3.samples,
      };
      hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
    }
    if (details && text) {
      const emittedText: FragParsingUserdataData = {
        id,
        frag,
        details,
        samples: text.samples,
      };
      hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
    }
  }
  1181.  
  /**
   * Announces parsed track codecs via BUFFER_CODECS and appends each track's
   * init segment via BUFFER_APPENDING. Applies audio codec overrides (codec
   * switch and an Android HE-AAC workaround) before handing the tracks to the
   * buffer controller.
   */
  private _bufferInitSegment(
    currentLevel: Level,
    tracks: TrackSet,
    frag: Fragment,
    chunkMeta: ChunkMetadata
  ) {
    // Only buffer init segments while parsing the current fragment
    if (this.state !== State.PARSING) {
      return;
    }

    this.audioOnly = !!tracks.audio && !tracks.video;

    // if audio track is expected to come from audio stream controller, discard any coming from main
    if (this.altAudio && !this.audioOnly) {
      delete tracks.audio;
    }
    // include levelCodec in audio and video tracks
    const { audio, video, audiovideo } = tracks;
    if (audio) {
      let audioCodec = currentLevel.audioCodec;
      const ua = navigator.userAgent.toLowerCase();
      if (this.audioCodecSwitch) {
        if (audioCodec) {
          // Swap 'mp4a.40.5' (HE-AAC) with 'mp4a.40.2' (AAC-LC) and vice versa
          if (audioCodec.indexOf('mp4a.40.5') !== -1) {
            audioCodec = 'mp4a.40.2';
          } else {
            audioCodec = 'mp4a.40.5';
          }
        }
        // In the case that AAC and HE-AAC audio codecs are signalled in manifest,
        // force HE-AAC, as it seems that most browsers prefers it.
        // don't force HE-AAC if mono stream, or in Firefox
        if (audio.metadata.channelCount !== 1 && ua.indexOf('firefox') === -1) {
          audioCodec = 'mp4a.40.5';
        }
      }
      // HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise
      if (ua.indexOf('android') !== -1 && audio.container !== 'audio/mpeg') {
        // Exclude mpeg audio
        audioCodec = 'mp4a.40.2';
        this.log(`Android: force audio codec to ${audioCodec}`);
      }
      if (currentLevel.audioCodec && currentLevel.audioCodec !== audioCodec) {
        this.log(
          `Swapping manifest audio codec "${currentLevel.audioCodec}" for "${audioCodec}"`
        );
      }
      audio.levelCodec = audioCodec;
      audio.id = 'main';
      this.log(
        `Init audio buffer, container:${
          audio.container
        }, codecs[selected/level/parsed]=[${audioCodec || ''}/${
          currentLevel.audioCodec || ''
        }/${audio.codec}]`
      );
    }
    if (video) {
      video.levelCodec = currentLevel.videoCodec;
      video.id = 'main';
      this.log(
        `Init video buffer, container:${
          video.container
        }, codecs[level/parsed]=[${currentLevel.videoCodec || ''}/${
          video.codec
        }]`
      );
    }
    if (audiovideo) {
      this.log(
        `Init audiovideo buffer, container:${
          audiovideo.container
        }, codecs[level/parsed]=[${currentLevel.attrs.CODECS || ''}/${
          audiovideo.codec
        }]`
      );
    }
    this.hls.trigger(Events.BUFFER_CODECS, tracks);
    // loop through tracks that are going to be provided to bufferController
    Object.keys(tracks).forEach((trackName) => {
      const track = tracks[trackName];
      const initSegment = track.initSegment;
      if (initSegment?.byteLength) {
        this.hls.trigger(Events.BUFFER_APPENDING, {
          type: trackName as SourceBufferName,
          data: initSegment,
          frag,
          part: null,
          chunkMeta,
          parent: frag.type,
        });
      }
    });
    // trigger handler right now
    this.tick();
  }
  1278.  
  1279. private getMainFwdBufferInfo() {
  1280. return this.getFwdBufferInfo(
  1281. this.mediaBuffer ? this.mediaBuffer : this.media,
  1282. PlaylistLevelType.MAIN
  1283. );
  1284. }
  1285.  
  /**
   * Rewinds loading to the given fragment so that fragment selection can
   * search backwards for a keyframe, discarding transmuxer state and any
   * buffer beyond the fragment's start.
   */
  private backtrack(frag: Fragment) {
    this.couldBacktrack = true;
    // Causes findFragments to backtrack through fragments to find the keyframe
    this.backtrackFragment = frag;
    // Discard in-flight transmuxer output and flush buffered data past the fragment
    this.resetTransmuxer();
    this.flushBufferGap(frag);
    this.fragmentTracker.removeFragment(frag);
    // Restart loading from the fragment's start position
    this.fragPrevious = null;
    this.nextLoadPosition = frag.start;
    this.state = State.IDLE;
  }
  1297.  
  1298. private checkFragmentChanged() {
  1299. const video = this.media;
  1300. let fragPlayingCurrent: Fragment | null = null;
  1301. if (video && video.readyState > 1 && video.seeking === false) {
  1302. const currentTime = video.currentTime;
  1303. /* if video element is in seeked state, currentTime can only increase.
  1304. (assuming that playback rate is positive ...)
  1305. As sometimes currentTime jumps back to zero after a
  1306. media decode error, check this, to avoid seeking back to
  1307. wrong position after a media decode error
  1308. */
  1309.  
  1310. if (BufferHelper.isBuffered(video, currentTime)) {
  1311. fragPlayingCurrent = this.getAppendedFrag(currentTime);
  1312. } else if (BufferHelper.isBuffered(video, currentTime + 0.1)) {
  1313. /* ensure that FRAG_CHANGED event is triggered at startup,
  1314. when first video frame is displayed and playback is paused.
  1315. add a tolerance of 100ms, in case current position is not buffered,
  1316. check if current pos+100ms is buffered and use that buffer range
  1317. for FRAG_CHANGED event reporting */
  1318. fragPlayingCurrent = this.getAppendedFrag(currentTime + 0.1);
  1319. }
  1320. if (fragPlayingCurrent) {
  1321. this.backtrackFragment = null;
  1322. const fragPlaying = this.fragPlaying;
  1323. const fragCurrentLevel = fragPlayingCurrent.level;
  1324. if (
  1325. !fragPlaying ||
  1326. fragPlayingCurrent.sn !== fragPlaying.sn ||
  1327. fragPlaying.level !== fragCurrentLevel ||
  1328. fragPlayingCurrent.urlId !== fragPlaying.urlId
  1329. ) {
  1330. this.hls.trigger(Events.FRAG_CHANGED, { frag: fragPlayingCurrent });
  1331. if (!fragPlaying || fragPlaying.level !== fragCurrentLevel) {
  1332. this.hls.trigger(Events.LEVEL_SWITCHED, {
  1333. level: fragCurrentLevel,
  1334. });
  1335. }
  1336. this.fragPlaying = fragPlayingCurrent;
  1337. }
  1338. }
  1339. }
  1340. }
  1341.  
  /** Level index of the next buffered fragment, or -1 when unknown. */
  get nextLevel(): number {
    const frag = this.nextBufferedFrag;
    return frag ? frag.level : -1;
  }
  1349.  
  1350. get currentFrag(): Fragment | null {
  1351. const media = this.media;
  1352. if (media) {
  1353. return this.fragPlaying || this.getAppendedFrag(media.currentTime);
  1354. }
  1355. return null;
  1356. }
  1357.  
  1358. get currentProgramDateTime(): Date | null {
  1359. const media = this.media;
  1360. if (media) {
  1361. const currentTime = media.currentTime;
  1362. const frag = this.currentFrag;
  1363. if (
  1364. frag &&
  1365. Number.isFinite(currentTime) &&
  1366. Number.isFinite(frag.programDateTime)
  1367. ) {
  1368. const epocMs =
  1369. (frag.programDateTime as number) + (currentTime - frag.start) * 1000;
  1370. return new Date(epocMs);
  1371. }
  1372. }
  1373. return null;
  1374. }
  1375.  
  /** Level index of the fragment at the playhead, or -1 when unknown. */
  get currentLevel(): number {
    const frag = this.currentFrag;
    return frag ? frag.level : -1;
  }
  1383.  
  1384. get nextBufferedFrag() {
  1385. const frag = this.currentFrag;
  1386. if (frag) {
  1387. return this.followingBufferedFrag(frag);
  1388. }
  1389. return null;
  1390. }
  1391.  
  // Read-only accessor for the internal `_forceStartLoad` flag
  // NOTE(review): presumably set when startLoad() is called before media is
  // attached, so loading resumes on attach — confirm against the setter.
  get forceStartLoad() {
    return this._forceStartLoad;
  }
  1395. }