Home Reference Source

src/controller/audio-stream-controller.ts

  1. import BaseStreamController, { State } from './base-stream-controller';
  2. import { Events } from '../events';
  3. import { Bufferable, BufferHelper } from '../utils/buffer-helper';
  4. import { FragmentState } from './fragment-tracker';
  5. import { Level } from '../types/level';
  6. import { PlaylistLevelType } from '../types/loader';
  7. import { Fragment, ElementaryStreamTypes, Part } from '../loader/fragment';
  8. import ChunkCache from '../demux/chunk-cache';
  9. import TransmuxerInterface from '../demux/transmuxer-interface';
  10. import { ChunkMetadata } from '../types/transmuxer';
  11. import { fragmentWithinToleranceTest } from './fragment-finders';
  12. import { alignMediaPlaylistByPDT } from '../utils/discontinuities';
  13. import { ErrorDetails, ErrorTypes } from '../errors';
  14. import type { NetworkComponentAPI } from '../types/component-api';
  15. import type Hls from '../hls';
  16. import type { FragmentTracker } from './fragment-tracker';
  17. import type KeyLoader from '../loader/key-loader';
  18. import type { TransmuxerResult } from '../types/transmuxer';
  19. import type { LevelDetails } from '../loader/level-details';
  20. import type { TrackSet } from '../types/track';
  21. import type {
  22. BufferCreatedData,
  23. AudioTracksUpdatedData,
  24. AudioTrackSwitchingData,
  25. LevelLoadedData,
  26. TrackLoadedData,
  27. BufferAppendingData,
  28. BufferFlushedData,
  29. InitPTSFoundData,
  30. FragLoadedData,
  31. FragParsingMetadataData,
  32. FragParsingUserdataData,
  33. FragBufferedData,
  34. ErrorData,
  35. } from '../types/events';
  36.  
const TICK_INTERVAL = 100; // how often to tick in ms

// An audio fragment parked until the matching video initPTS is known.
type WaitingForPTSData = {
  frag: Fragment; // the audio fragment awaiting transmuxing
  part: Part | null; // low-latency part being loaded, if any
  cache: ChunkCache; // accumulates loaded payload chunks while waiting
  complete: boolean; // true once the fragment load finished while waiting
};
  45.  
/**
 * Stream controller for alternate audio renditions. Loads audio playlist
 * fragments, feeds them to an audio transmuxer, and coordinates with the
 * main stream controller via events (INIT_PTS_FOUND, LEVEL_LOADED,
 * AUDIO_TRACK_* and BUFFER_* events registered below).
 */
class AudioStreamController
  extends BaseStreamController
  implements NetworkComponentAPI
{
  // Buffer holding the video track; used to gauge main-buffer length
  private videoBuffer: Bufferable | null = null;
  // Discontinuity counter (cc) of the last video initPTS received from main
  private videoTrackCC: number = -1;
  // videoTrackCC captured when a fragment was parked waiting for initPTS
  private waitingVideoCC: number = -1;
  // true while switching to an alternate audio track (forces re-buffering)
  private audioSwitch: boolean = false;
  // index of the current audio track in this.levels
  private trackId: number = -1;
  // fragment parked until video initPTS is known (see WaitingForPTSData)
  private waitingData: WaitingForPTSData | null = null;
  // details of the main (video) playlist, set on LEVEL_LOADED
  private mainDetails: LevelDetails | null = null;
  // set after the audio buffer was flushed; triggers bookkeeping in doTickIdle
  private bufferFlushed: boolean = false;
  // AUDIO_TRACK_LOADED data received before mainDetails; replayed on LEVEL_LOADED
  private cachedTrackLoadedData: TrackLoadedData | null = null;

  constructor(
    hls: Hls,
    fragmentTracker: FragmentTracker,
    keyLoader: KeyLoader
  ) {
    super(hls, fragmentTracker, keyLoader, '[audio-stream-controller]');
    this._registerListeners();
  }
  68.  
  // Tear down event subscriptions and drop the main playlist reference
  protected onHandlerDestroying() {
    this._unregisterListeners();
    this.mainDetails = null;
  }
  73.  
  74. private _registerListeners() {
  75. const { hls } = this;
  76. hls.on(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  77. hls.on(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  78. hls.on(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  79. hls.on(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  80. hls.on(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
  81. hls.on(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  82. hls.on(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
  83. hls.on(Events.ERROR, this.onError, this);
  84. hls.on(Events.BUFFER_RESET, this.onBufferReset, this);
  85. hls.on(Events.BUFFER_CREATED, this.onBufferCreated, this);
  86. hls.on(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  87. hls.on(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
  88. hls.on(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  89. }
  90.  
  91. private _unregisterListeners() {
  92. const { hls } = this;
  93. hls.off(Events.MEDIA_ATTACHED, this.onMediaAttached, this);
  94. hls.off(Events.MEDIA_DETACHING, this.onMediaDetaching, this);
  95. hls.off(Events.MANIFEST_LOADING, this.onManifestLoading, this);
  96. hls.off(Events.LEVEL_LOADED, this.onLevelLoaded, this);
  97. hls.off(Events.AUDIO_TRACKS_UPDATED, this.onAudioTracksUpdated, this);
  98. hls.off(Events.AUDIO_TRACK_SWITCHING, this.onAudioTrackSwitching, this);
  99. hls.off(Events.AUDIO_TRACK_LOADED, this.onAudioTrackLoaded, this);
  100. hls.off(Events.ERROR, this.onError, this);
  101. hls.off(Events.BUFFER_RESET, this.onBufferReset, this);
  102. hls.off(Events.BUFFER_CREATED, this.onBufferCreated, this);
  103. hls.off(Events.BUFFER_FLUSHED, this.onBufferFlushed, this);
  104. hls.off(Events.INIT_PTS_FOUND, this.onInitPtsFound, this);
  105. hls.off(Events.FRAG_BUFFERED, this.onFragBuffered, this);
  106. }
  107.  
  // INIT_PTS_FOUND is triggered when the video track parsed in the stream-controller has a new PTS value
  onInitPtsFound(
    event: Events.INIT_PTS_FOUND,
    { frag, id, initPTS }: InitPTSFoundData
  ) {
    // Only the main (video) track's initPTS is relevant here
    if (id !== 'main') {
      return;
    }
    // Always record the newest initPTS per cc — it can change on a level switch
    const cc = frag.cc;
    this.initPTS[cc] = initPTS;
    this.log(`InitPTS for cc: ${cc} found from main: ${initPTS}`);
    this.videoTrackCC = cc;
    // If a fragment is parked waiting for this PTS, tick immediately to unblock transmuxing
    if (this.state === State.WAITING_INIT_PTS) {
      this.tick();
    }
  }
  126.  
  /**
   * Begins (or resumes) fragment loading from the given start position.
   * With no tracks yet, only records the position and stops.
   */
  startLoad(startPosition: number) {
    if (!this.levels) {
      this.startPosition = startPosition;
      this.state = State.STOPPED;
      return;
    }
    const resumeTime = this.lastCurrentTime;
    this.stopLoad();
    this.setInterval(TICK_INTERVAL);
    this.fragLoadError = 0;
    if (startPosition === -1 && resumeTime > 0) {
      // Resume from where playback last was rather than the default position
      this.log(
        `Override startPosition with lastCurrentTime @${resumeTime.toFixed(3)}`
      );
      startPosition = resumeTime;
      this.state = State.IDLE;
    } else {
      this.loadedmetadata = false;
      this.state = State.WAITING_TRACK;
    }
    this.nextLoadPosition = this.startPosition = this.lastCurrentTime =
      startPosition;

    this.tick();
  }
  156.  
  /**
   * Per-tick state machine driver. Dispatches on the controller state and
   * always finishes with onTickEnd() to record the playback position.
   */
  doTick() {
    switch (this.state) {
      case State.IDLE:
        this.doTickIdle();
        break;
      case State.WAITING_TRACK: {
        // Wait until the selected audio track has playlist details loaded
        const { levels, trackId } = this;
        const details = levels?.[trackId]?.details;
        if (details) {
          if (this.waitForCdnTuneIn(details)) {
            break;
          }
          this.state = State.WAITING_INIT_PTS;
        }
        break;
      }
      case State.FRAG_LOADING_WAITING_RETRY: {
        const now = performance.now();
        const retryDate = this.retryDate;
        // if current time is gt than retryDate, or if media seeking let's switch to IDLE state to retry loading
        if (!retryDate || now >= retryDate || this.media?.seeking) {
          this.log('RetryDate reached, switch back to IDLE state');
          this.resetStartWhenNotLoaded(this.trackId);
          this.state = State.IDLE;
        }
        break;
      }
      case State.WAITING_INIT_PTS: {
        // Ensure we don't get stuck in the WAITING_INIT_PTS state if the waiting frag CC doesn't match any initPTS
        const waitingData = this.waitingData;
        if (waitingData) {
          const { frag, part, cache, complete } = waitingData;
          if (this.initPTS[frag.cc] !== undefined) {
            // initPTS arrived: replay the cached payload through the load pipeline
            this.waitingData = null;
            this.waitingVideoCC = -1;
            this.state = State.FRAG_LOADING;
            const payload = cache.flush();
            const data: FragLoadedData = {
              frag,
              part,
              payload,
              networkDetails: null,
            };
            this._handleFragmentLoadProgress(data);
            if (complete) {
              super._handleFragmentLoadComplete(data);
            }
          } else if (this.videoTrackCC !== this.waitingVideoCC) {
            // Drop waiting fragment if videoTrackCC has changed since waitingFragment was set and initPTS was not found
            this.log(
              `Waiting fragment cc (${frag.cc}) cancelled because video is at cc ${this.videoTrackCC}`
            );
            this.clearWaitingFragment();
          } else {
            // Drop waiting fragment if an earlier fragment is needed
            const pos = this.getLoadPosition();
            const bufferInfo = BufferHelper.bufferInfo(
              this.mediaBuffer,
              pos,
              this.config.maxBufferHole
            );
            const waitingFragmentAtPosition = fragmentWithinToleranceTest(
              bufferInfo.end,
              this.config.maxFragLookUpTolerance,
              frag
            );
            if (waitingFragmentAtPosition < 0) {
              this.log(
                `Waiting fragment cc (${frag.cc}) @ ${frag.start} cancelled because another fragment at ${bufferInfo.end} is needed`
              );
              this.clearWaitingFragment();
            }
          }
        } else {
          this.state = State.IDLE;
        }
      }
    }

    this.onTickEnd();
  }
  238.  
  /** Discards a fragment parked for video initPTS and returns to IDLE. */
  clearWaitingFragment() {
    const waitingData = this.waitingData;
    if (!waitingData) {
      return;
    }
    this.fragmentTracker.removeFragment(waitingData.frag);
    this.waitingData = null;
    this.waitingVideoCC = -1;
    this.state = State.IDLE;
  }
  248.  
  // Also drop any fragment parked for initPTS before the base reset logic runs
  protected resetLoadingState() {
    this.clearWaitingFragment();
    super.resetLoadingState();
  }
  253.  
  254. protected onTickEnd() {
  255. const { media } = this;
  256. if (!media || !media.readyState) {
  257. // Exit early if we don't have media or if the media hasn't buffered anything yet (readyState 0)
  258. return;
  259. }
  260.  
  261. this.lastCurrentTime = media.currentTime;
  262. }
  263.  
  /**
   * IDLE-state work: decide whether an audio fragment should be loaded now,
   * pick the target buffer time, and kick off loadFragment(). Bails out early
   * when there is no track, no media, not enough playlist info, or the
   * forward buffer is already full.
   */
  private doTickIdle() {
    const { hls, levels, media, trackId } = this;
    const config = hls.config;

    if (!levels || !levels[trackId]) {
      return;
    }

    // if video not attached AND
    // start fragment already requested OR start frag prefetch not enabled
    // exit loop
    // => if media not attached but start frag prefetch is enabled and start frag not requested yet, we will not exit loop
    if (!media && (this.startFragRequested || !config.startFragPrefetch)) {
      return;
    }

    const levelInfo = levels[trackId];

    // Without fresh playlist details (or while CDN tune-in applies), go back to waiting
    const trackDetails = levelInfo.details;
    if (
      !trackDetails ||
      (trackDetails.live && this.levelLastLoaded !== trackId) ||
      this.waitForCdnTuneIn(trackDetails)
    ) {
      this.state = State.WAITING_TRACK;
      return;
    }

    const bufferable = this.mediaBuffer ? this.mediaBuffer : this.media;
    if (this.bufferFlushed && bufferable) {
      // Refresh fragment-tracker bookkeeping after a buffer flush
      this.bufferFlushed = false;
      this.afterBufferFlushed(
        bufferable,
        ElementaryStreamTypes.AUDIO,
        PlaylistLevelType.AUDIO
      );
    }

    const bufferInfo = this.getFwdBufferInfo(
      bufferable,
      PlaylistLevelType.AUDIO
    );
    if (bufferInfo === null) {
      return;
    }
    const audioSwitch = this.audioSwitch;

    // Signal end-of-stream for audio once the last fragment is buffered (not during a switch)
    if (!audioSwitch && this._streamEnded(bufferInfo, trackDetails)) {
      hls.trigger(Events.BUFFER_EOS, { type: 'audio' });
      this.state = State.ENDED;
      return;
    }

    const mainBufferInfo = this.getFwdBufferInfo(
      this.videoBuffer ? this.videoBuffer : this.media,
      PlaylistLevelType.MAIN
    );
    const bufferLen = bufferInfo.len;
    const maxBufLen = this.getMaxBufferLength(mainBufferInfo?.len);

    // if buffer length is less than maxBufLen try to load a new fragment
    if (bufferLen >= maxBufLen && !audioSwitch) {
      return;
    }
    const fragments = trackDetails.fragments;
    const start = fragments[0].start;
    let targetBufferTime = bufferInfo.end;

    if (audioSwitch && media) {
      const pos = this.getLoadPosition();
      targetBufferTime = pos;
      // if currentTime (pos) is less than alt audio playlist start time, it means that alt audio is ahead of currentTime
      if (trackDetails.PTSKnown && pos < start) {
        // if everything is buffered from pos to start or if audio buffer upfront, let's seek to start
        if (bufferInfo.end > start || bufferInfo.nextStart) {
          this.log(
            'Alt audio track ahead of main track, seek to start of alt audio track'
          );
          media.currentTime = start + 0.05;
        }
      }
    }

    // buffer audio up to one target duration ahead of main buffer
    if (
      mainBufferInfo &&
      targetBufferTime > mainBufferInfo.end + trackDetails.targetduration
    ) {
      return;
    }
    // wait for main buffer after buffing some audio
    if ((!mainBufferInfo || !mainBufferInfo.len) && bufferInfo.len) {
      return;
    }

    const frag = this.getNextFragment(targetBufferTime, trackDetails);
    if (!frag) {
      // Nothing to load; re-run buffered-range bookkeeping on the next idle tick
      this.bufferFlushed = true;
      return;
    }

    this.loadFragment(frag, trackDetails, targetBufferTime);
  }
  367.  
  368. protected getMaxBufferLength(mainBufferLength?: number): number {
  369. const maxConfigBuffer = super.getMaxBufferLength();
  370. if (!mainBufferLength) {
  371. return maxConfigBuffer;
  372. }
  373. return Math.max(maxConfigBuffer, mainBufferLength);
  374. }
  375.  
  // Drop the video buffer reference before the base class tears down media state
  onMediaDetaching() {
    this.videoBuffer = null;
    super.onMediaDetaching();
  }
  380.  
  /** Rebuilds this.levels from the updated audio track list. */
  onAudioTracksUpdated(
    event: Events.AUDIO_TRACKS_UPDATED,
    { audioTracks }: AudioTracksUpdatedData
  ) {
    // A new track set invalidates any in-flight transmuxer state
    this.resetTransmuxer();
    const levels: Level[] = [];
    for (const mediaPlaylist of audioTracks) {
      levels.push(new Level(mediaPlaylist));
    }
    this.levels = levels;
  }
  388.  
  /**
   * Handles a track switch request: aborts in-flight loads, then either
   * spins up loading for an alternate track or stops (muxed/main audio is
   * handled by the stream-controller).
   */
  onAudioTrackSwitching(
    event: Events.AUDIO_TRACK_SWITCHING,
    data: AudioTrackSwitchingData
  ) {
    // if any URL found on new audio track, it is an alternate audio track
    const altAudio = !!data.url;
    this.trackId = data.id;

    const fragCurrent = this.fragCurrent;
    if (fragCurrent) {
      fragCurrent.abortRequests();
    }
    this.fragCurrent = null;
    this.clearWaitingFragment();

    if (altAudio) {
      // Switching to an alternate track: ensure the tick timer is running
      this.setInterval(TICK_INTERVAL);
      this.audioSwitch = true;
      this.state = State.IDLE;
    } else {
      // Switching back to main audio: the transmuxer is no longer needed
      this.resetTransmuxer();
      this.state = State.STOPPED;
    }
    this.tick();
  }
  421.  
  /** Resets per-manifest state ahead of a new manifest load. */
  onManifestLoading() {
    this.fragmentTracker.removeAllFragments();
    this.mainDetails = null;
    this.bufferFlushed = false;
    this.startPosition = 0;
    this.lastCurrentTime = 0;
  }
  428.  
  /**
   * Records the main playlist details and replays any AUDIO_TRACK_LOADED
   * event that arrived before they were available.
   */
  onLevelLoaded(event: Events.LEVEL_LOADED, data: LevelLoadedData) {
    this.mainDetails = data.details;
    const cached = this.cachedTrackLoadedData;
    if (cached !== null) {
      this.hls.trigger(Events.AUDIO_TRACK_LOADED, cached);
      this.cachedTrackLoadedData = null;
    }
  }
  436.  
  /**
   * Applies a freshly loaded audio playlist: aligns live playlists with the
   * main rendition, stores the details on the track, computes the start
   * position, and resumes loading if we were waiting on this track.
   */
  onAudioTrackLoaded(event: Events.AUDIO_TRACK_LOADED, data: TrackLoadedData) {
    // Main details must be known first; park the event and replay it from onLevelLoaded
    if (this.mainDetails == null) {
      this.cachedTrackLoadedData = data;
      return;
    }
    const { levels } = this;
    const { details: newDetails, id: trackId } = data;
    if (!levels) {
      this.warn(`Audio tracks were reset while loading level ${trackId}`);
      return;
    }
    this.log(
      `Track ${trackId} loaded [${newDetails.startSN},${newDetails.endSN}],duration:${newDetails.totalduration}`
    );

    const track = levels[trackId];
    let sliding = 0;
    if (newDetails.live || track.details?.live) {
      const mainDetails = this.mainDetails;
      if (!newDetails.fragments[0]) {
        newDetails.deltaUpdateFailed = true;
      }
      // A failed delta update (or missing main details) cannot be aligned; wait for the next refresh
      if (newDetails.deltaUpdateFailed || !mainDetails) {
        return;
      }
      if (
        !track.details &&
        newDetails.hasProgramDateTime &&
        mainDetails.hasProgramDateTime
      ) {
        // Make sure our audio rendition is aligned with the "main" rendition, using
        // pdt as our reference times.
        alignMediaPlaylistByPDT(newDetails, mainDetails);
        sliding = newDetails.fragments[0].start;
      } else {
        sliding = this.alignPlaylists(newDetails, track.details);
      }
    }
    track.details = newDetails;
    this.levelLastLoaded = trackId;

    // compute start position if we are aligned with the main playlist
    if (!this.startFragRequested && (this.mainDetails || !newDetails.live)) {
      this.setStartPosition(track.details, sliding);
    }
    // only switch back to IDLE state if we were waiting for track to start downloading a new fragment
    if (
      this.state === State.WAITING_TRACK &&
      !this.waitForCdnTuneIn(newDetails)
    ) {
      this.state = State.IDLE;
    }

    // trigger handler right now
    this.tick();
  }
  493.  
  /**
   * Feeds loaded fragment data to the audio transmuxer, or caches it in
   * `waitingData` until the video initPTS for the fragment's discontinuity
   * counter (cc) is known (replayed by doTick's WAITING_INIT_PTS branch).
   */
  _handleFragmentLoadProgress(data: FragLoadedData) {
    const { frag, part, payload } = data;
    const { config, trackId, levels } = this;
    if (!levels) {
      this.warn(
        `Audio tracks were reset while fragment load was in progress. Fragment ${frag.sn} of level ${frag.level} will not be buffered`
      );
      return;
    }

    // Guard explicitly instead of console.assert: console.assert only logs on
    // failure, and the former `as Level`/`as LevelDetails` casts would let
    // execution continue into a TypeError below when track/details are missing.
    const track = levels[trackId];
    if (!track) {
      this.warn('Audio track is undefined on fragment load progress');
      return;
    }
    const details = track.details;
    if (!details) {
      this.warn('Audio track details are undefined on fragment load progress');
      return;
    }
    const audioCodec =
      config.defaultAudioCodec || track.audioCodec || 'mp4a.40.2';

    // Lazily create the transmuxer for the audio playlist type
    let transmuxer = this.transmuxer;
    if (!transmuxer) {
      transmuxer = this.transmuxer = new TransmuxerInterface(
        this.hls,
        PlaylistLevelType.AUDIO,
        this._handleTransmuxComplete.bind(this),
        this._handleTransmuxerFlush.bind(this)
      );
    }

    // Check if we have video initPTS
    // If not we need to wait for it
    const initPTS = this.initPTS[frag.cc];
    const initSegmentData = frag.initSegment?.data;
    if (initPTS !== undefined) {
      // this.log(`Transmuxing ${sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`);
      // time Offset is accurate if level PTS is known, or if playlist is not sliding (not live)
      const accurateTimeOffset = false; // details.PTSKnown || !details.live;
      const partIndex = part ? part.index : -1;
      const partial = partIndex !== -1;
      const chunkMeta = new ChunkMetadata(
        frag.level,
        frag.sn as number,
        frag.stats.chunkCount,
        payload.byteLength,
        partIndex,
        partial
      );
      transmuxer.push(
        payload,
        initSegmentData,
        audioCodec,
        '',
        frag,
        part,
        details.totalduration,
        accurateTimeOffset,
        chunkMeta,
        initPTS
      );
    } else {
      this.log(
        `Unknown video PTS for cc ${frag.cc}, waiting for video PTS before demuxing audio frag ${frag.sn} of [${details.startSN} ,${details.endSN}],track ${trackId}`
      );
      // Cache the payload until the matching video initPTS arrives
      const { cache } = (this.waitingData = this.waitingData || {
        frag,
        part,
        cache: new ChunkCache(),
        complete: false,
      });
      cache.push(new Uint8Array(payload));
      this.waitingVideoCC = this.videoTrackCC;
      this.state = State.WAITING_INIT_PTS;
    }
  }
  569.  
  570. protected _handleFragmentLoadComplete(fragLoadedData: FragLoadedData) {
  571. if (this.waitingData) {
  572. this.waitingData.complete = true;
  573. return;
  574. }
  575. super._handleFragmentLoadComplete(fragLoadedData);
  576. }
  577.  
  onBufferReset(/* event: Events.BUFFER_RESET */) {
    // Drop SourceBuffer references; they are re-established on BUFFER_CREATED
    this.mediaBuffer = null;
    this.videoBuffer = null;
    this.loadedmetadata = false;
  }
  583.  
  /** Captures the audio and video SourceBuffer references once created. */
  onBufferCreated(event: Events.BUFFER_CREATED, data: BufferCreatedData) {
    const { audio, video } = data.tracks;
    if (audio) {
      this.mediaBuffer = audio.buffer || null;
    }
    if (video) {
      this.videoBuffer = video.buffer || null;
    }
  }
  593.  
  /**
   * Finalizes buffered audio fragments. MAIN-type fragments are only used to
   * flip `loadedmetadata` once media has buffered data; a completed audio
   * fragment concludes any pending track switch.
   */
  onFragBuffered(event: Events.FRAG_BUFFERED, data: FragBufferedData) {
    const { frag, part } = data;
    if (frag.type !== PlaylistLevelType.AUDIO) {
      if (!this.loadedmetadata && frag.type === PlaylistLevelType.MAIN) {
        if ((this.videoBuffer || this.media)?.buffered.length) {
          this.loadedmetadata = true;
        }
      }
      return;
    }
    if (this.fragContextChanged(frag)) {
      // If a level switch was requested while a fragment was buffering, it will emit the FRAG_BUFFERED event upon completion
      // Avoid setting state back to IDLE or concluding the audio switch; otherwise, the switched-to track will not buffer
      this.warn(
        `Fragment ${frag.sn}${part ? ' p: ' + part.index : ''} of level ${
          frag.level
        } finished buffering, but was aborted. state: ${
          this.state
        }, audioSwitch: ${this.audioSwitch}`
      );
      return;
    }
    if (frag.sn !== 'initSegment') {
      this.fragPrevious = frag;
      // First media fragment of the new track buffered: the switch is done
      if (this.audioSwitch) {
        this.audioSwitch = false;
        this.hls.trigger(Events.AUDIO_TRACK_SWITCHED, { id: this.trackId });
      }
    }
    this.fragBufferedComplete(frag, part);
  }
  625.  
  /**
   * Routes ERROR events relevant to the audio playlist type: fragment/key
   * load failures, audio-track playlist failures, and audio buffer-full
   * recovery (reduce buffer length or flush).
   */
  private onError(event: Events.ERROR, data: ErrorData) {
    if (data.type === ErrorTypes.KEY_SYSTEM_ERROR) {
      this.onFragmentOrKeyLoadError(PlaylistLevelType.AUDIO, data);
      return;
    }
    switch (data.details) {
      case ErrorDetails.FRAG_LOAD_ERROR:
      case ErrorDetails.FRAG_LOAD_TIMEOUT:
      case ErrorDetails.FRAG_PARSING_ERROR:
      case ErrorDetails.KEY_LOAD_ERROR:
      case ErrorDetails.KEY_LOAD_TIMEOUT:
        // TODO: Skip fragments that do not belong to this.fragCurrent audio-group id
        this.onFragmentOrKeyLoadError(PlaylistLevelType.AUDIO, data);
        break;
      case ErrorDetails.AUDIO_TRACK_LOAD_ERROR:
      case ErrorDetails.AUDIO_TRACK_LOAD_TIMEOUT:
        // when in ERROR state, don't switch back to IDLE state in case a non-fatal error is received
        if (this.state !== State.ERROR && this.state !== State.STOPPED) {
          // if fatal error, stop processing, otherwise move to IDLE to retry loading
          this.state = data.fatal ? State.ERROR : State.IDLE;
          this.warn(
            `${data.details} while loading frag, switching to ${this.state} state`
          );
        }
        break;
      case ErrorDetails.BUFFER_FULL_ERROR:
        // if in appending state
        if (
          data.parent === 'audio' &&
          (this.state === State.PARSING || this.state === State.PARSED)
        ) {
          let flushBuffer = true;
          const bufferedInfo = this.getFwdBufferInfo(
            this.mediaBuffer,
            PlaylistLevelType.AUDIO
          );
          // 0.5 : tolerance needed as some browsers stalls playback before reaching buffered end
          // reduce max buf len if current position is buffered
          if (bufferedInfo && bufferedInfo.len > 0.5) {
            flushBuffer = !this.reduceMaxBufferLength(bufferedInfo.len);
          }
          if (flushBuffer) {
            // current position is not buffered, but browser is still complaining about buffer full error
            // this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
            // in that case flush the whole audio buffer to recover
            this.warn(
              'Buffer full error also media.currentTime is not buffered, flush audio buffer'
            );
            this.fragCurrent = null;
            super.flushMainBuffer(0, Number.POSITIVE_INFINITY, 'audio');
          }
          this.resetLoadingState();
        }
        break;
      default:
        break;
    }
  }
  684.  
  685. private onBufferFlushed(
  686. event: Events.BUFFER_FLUSHED,
  687. { type }: BufferFlushedData
  688. ) {
  689. if (type === ElementaryStreamTypes.AUDIO) {
  690. this.bufferFlushed = true;
  691. if (this.state === State.ENDED) {
  692. this.state = State.IDLE;
  693. }
  694. }
  695. }
  696.  
  /**
   * Consumes a transmuxer result: buffers the init segment and audio data,
   * records elementary-stream timing on the fragment/part, and forwards
   * parsed ID3/text samples via events.
   */
  private _handleTransmuxComplete(transmuxResult: TransmuxerResult) {
    const id = 'audio';
    const { hls } = this;
    const { remuxResult, chunkMeta } = transmuxResult;

    const context = this.getCurrentContext(chunkMeta);
    if (!context) {
      this.warn(
        `The loading context changed while buffering fragment ${chunkMeta.sn} of level ${chunkMeta.level}. This chunk will not be buffered.`
      );
      this.resetStartWhenNotLoaded(chunkMeta.level);
      return;
    }
    const {
      frag,
      part,
      level: { details },
    } = context;
    const { audio, text, id3, initSegment } = remuxResult;

    // Check if the current fragment has been aborted. We check this by first seeing if we're still playing the current level.
    // If we are, subsequently check if the currently loading fragment (fragCurrent) has changed.
    if (this.fragContextChanged(frag) || !details) {
      return;
    }

    this.state = State.PARSING;
    if (this.audioSwitch && audio) {
      this.completeAudioSwitch();
    }

    if (initSegment?.tracks) {
      this._bufferInitSegment(initSegment.tracks, frag, chunkMeta);
      hls.trigger(Events.FRAG_PARSING_INIT_SEGMENT, {
        frag,
        id,
        tracks: initSegment.tracks,
      });
      // Only flush audio from old audio tracks when PTS is known on new audio track
    }
    if (audio) {
      // Record parsed timing on the part (if low-latency) and the fragment
      const { startPTS, endPTS, startDTS, endDTS } = audio;
      if (part) {
        part.elementaryStreams[ElementaryStreamTypes.AUDIO] = {
          startPTS,
          endPTS,
          startDTS,
          endDTS,
        };
      }
      frag.setElementaryStreamInfo(
        ElementaryStreamTypes.AUDIO,
        startPTS,
        endPTS,
        startDTS,
        endDTS
      );
      this.bufferFragmentData(audio, frag, part, chunkMeta);
    }

    if (id3?.samples?.length) {
      const emittedID3: FragParsingMetadataData = Object.assign(
        {
          id,
          frag,
          details,
        },
        id3
      );
      hls.trigger(Events.FRAG_PARSING_METADATA, emittedID3);
    }
    if (text) {
      const emittedText: FragParsingUserdataData = Object.assign(
        {
          id,
          frag,
          details,
        },
        text
      );
      hls.trigger(Events.FRAG_PARSING_USERDATA, emittedText);
    }
  }
  780.  
  781. private _bufferInitSegment(
  782. tracks: TrackSet,
  783. frag: Fragment,
  784. chunkMeta: ChunkMetadata
  785. ) {
  786. if (this.state !== State.PARSING) {
  787. return;
  788. }
  789. // delete any video track found on audio transmuxer
  790. if (tracks.video) {
  791. delete tracks.video;
  792. }
  793.  
  794. // include levelCodec in audio and video tracks
  795. const track = tracks.audio;
  796. if (!track) {
  797. return;
  798. }
  799.  
  800. track.levelCodec = track.codec;
  801. track.id = 'audio';
  802. this.log(
  803. `Init audio buffer, container:${track.container}, codecs[parsed]=[${track.codec}]`
  804. );
  805. this.hls.trigger(Events.BUFFER_CODECS, tracks);
  806. const initSegment = track.initSegment;
  807. if (initSegment?.byteLength) {
  808. const segment: BufferAppendingData = {
  809. type: 'audio',
  810. frag,
  811. part: null,
  812. chunkMeta,
  813. parent: frag.type,
  814. data: initSegment,
  815. };
  816. this.hls.trigger(Events.BUFFER_APPENDING, segment);
  817. }
  818. // trigger handler right now
  819. this.tick();
  820. }
  821.  
  /**
   * Loads an audio fragment unless the tracker says it is already fully
   * loaded. Live fragments with an unknown video initPTS for their cc are
   * parked in WAITING_INIT_PTS instead of being loaded.
   */
  protected loadFragment(
    frag: Fragment,
    trackDetails: LevelDetails,
    targetBufferTime: number
  ) {
    // only load if fragment is not loaded or if in audio switch
    const fragState = this.fragmentTracker.getState(frag);
    this.fragCurrent = frag;

    // we force a frag loading in audio switch as fragment tracker might not have evicted previous frags in case of quick audio switch
    if (
      this.audioSwitch ||
      fragState === FragmentState.NOT_LOADED ||
      fragState === FragmentState.PARTIAL
    ) {
      if (frag.sn === 'initSegment') {
        this._loadInitSegment(frag, trackDetails);
      } else if (trackDetails.live && !Number.isFinite(this.initPTS[frag.cc])) {
        this.log(
          `Waiting for video PTS in continuity counter ${frag.cc} of live stream before loading audio fragment ${frag.sn} of level ${this.trackId}`
        );
        this.state = State.WAITING_INIT_PTS;
      } else {
        this.startFragRequested = true;
        super.loadFragment(frag, trackDetails, targetBufferTime);
      }
    }
  }
  850.  
  851. private completeAudioSwitch() {
  852. const { hls, media, trackId } = this;
  853. if (media) {
  854. this.log('Switching audio track : flushing all audio');
  855. super.flushMainBuffer(0, Number.POSITIVE_INFINITY, 'audio');
  856. }
  857. this.audioSwitch = false;
  858. hls.trigger(Events.AUDIO_TRACK_SWITCHED, { id: trackId });
  859. }
  860. }
  861. export default AudioStreamController;