/*
* Stream Controller
*/
import BinarySearch from '../utils/binary-search';
import BufferHelper from '../helper/buffer-helper';
import Demuxer from '../demux/demuxer';
import Event from '../events';
import EventHandler from '../event-handler';
import * as LevelHelper from '../helper/level-helper';
import TimeRanges from '../utils/timeRanges';
import {ErrorTypes, ErrorDetails} from '../errors';
import {logger} from '../utils/logger';
import { alignDiscontinuities } from '../utils/discontinuities';
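// Finite set of states the stream controller can be in; every transition is
// logged and broadcast via STREAM_STATE_TRANSITION (see the `state` setter below).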
const State = {
STOPPED : 'STOPPED',
IDLE : 'IDLE',
KEY_LOADING : 'KEY_LOADING',
FRAG_LOADING : 'FRAG_LOADING',
FRAG_LOADING_WAITING_RETRY : 'FRAG_LOADING_WAITING_RETRY',
WAITING_LEVEL : 'WAITING_LEVEL',
PARSING : 'PARSING',
PARSED : 'PARSED',
BUFFER_FLUSHING : 'BUFFER_FLUSHING',
ENDED : 'ENDED',
ERROR : 'ERROR'
};
class StreamController extends EventHandler {
constructor(hls) {
super(hls,
Event.MEDIA_ATTACHED,
Event.MEDIA_DETACHING,
Event.MANIFEST_LOADING,
Event.MANIFEST_PARSED,
Event.LEVEL_LOADED,
Event.KEY_LOADED,
Event.FRAG_LOADED,
Event.FRAG_LOAD_EMERGENCY_ABORTED,
Event.FRAG_PARSING_INIT_SEGMENT,
Event.FRAG_PARSING_DATA,
Event.FRAG_PARSED,
Event.ERROR,
Event.AUDIO_TRACK_SWITCHING,
Event.AUDIO_TRACK_SWITCHED,
Event.BUFFER_CREATED,
Event.BUFFER_APPENDED,
Event.BUFFER_FLUSHED);
this.config = hls.config;
this.audioCodecSwap = false;
this.ticks = 0;
this._state = State.STOPPED;
this.ontick = this.tick.bind(this);
}
destroy() {
this.stopLoad();
if (this.timer) {
clearInterval(this.timer);
this.timer = null;
}
EventHandler.prototype.destroy.call(this);
this.state = State.STOPPED;
}
startLoad(startPosition) {
if (this.levels) {
let lastCurrentTime = this.lastCurrentTime, hls = this.hls;
this.stopLoad();
if (!this.timer) {
this.timer = setInterval(this.ontick, 100);
}
this.level = -1;
this.fragLoadError = 0;
if (!this.startFragRequested) {
// determine load level
let startLevel = hls.startLevel;
if (startLevel === -1) {
// -1 : guess start Level by doing a bitrate test by loading first fragment of lowest quality level
startLevel = 0;
this.bitrateTest = true;
}
// set new level to playlist loader : this will trigger start level load
// hls.nextLoadLevel remains until it is set to a new value or until a new frag is successfully loaded
this.level = hls.nextLoadLevel = startLevel;
this.loadedmetadata = false;
}
// if startPosition not set (-1) but lastCurrentTime is set, use lastCurrentTime as startPosition
if (lastCurrentTime > 0 && startPosition === -1) {
logger.log(`override startPosition with lastCurrentTime @${lastCurrentTime.toFixed(3)}`);
startPosition = lastCurrentTime;
}
this.state = State.IDLE;
this.nextLoadPosition = this.startPosition = this.lastCurrentTime = startPosition;
this.tick();
} else {
this.forceStartLoad = true;
this.state = State.STOPPED;
}
}
stopLoad() {
var frag = this.fragCurrent;
if (frag) {
if (frag.loader) {
frag.loader.abort();
}
this.fragCurrent = null;
}
this.fragPrevious = null;
if (this.demuxer) {
this.demuxer.destroy();
this.demuxer = null;
}
this.state = State.STOPPED;
this.forceStartLoad = false;
}
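// tick()/doTick() use a simple re-entrancy guard: `ticks` counts nested calls,
// doTick() only runs for the outermost call, and a re-entrant call is deferred
// to the next task via setTimeout.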
tick() {
this.ticks++;
if (this.ticks === 1) {
this.doTick();
if (this.ticks > 1) {
// use the bound handler so `this` is preserved when the deferred tick fires
setTimeout(this.ontick, 1);
}
this.ticks = 0;
}
}
doTick() {
switch(this.state) {
case State.ERROR:
//don't do anything in error state to avoid breaking further ...
break;
case State.BUFFER_FLUSHING:
// in buffer flushing state, reset fragLoadError counter
this.fragLoadError = 0;
break;
case State.IDLE:
this._doTickIdle();
break;
case State.WAITING_LEVEL:
var level = this.levels[this.level];
// check if playlist is already loaded
if (level && level.details) {
this.state = State.IDLE;
}
break;
case State.FRAG_LOADING_WAITING_RETRY:
var now = performance.now();
var retryDate = this.retryDate;
// if current time is greater than retryDate, or if media is seeking, switch back to IDLE state to retry loading
if(!retryDate || (now >= retryDate) || (this.media && this.media.seeking)) {
logger.log(`mediaController: retryDate reached, switch back to IDLE state`);
this.state = State.IDLE;
}
break;
case State.STOPPED:
case State.FRAG_LOADING:
case State.PARSING:
case State.PARSED:
case State.ENDED:
break;
default:
break;
}
// check buffer
this._checkBuffer();
// check/update current fragment
this._checkFragmentChanged();
}
// Ironically the "idle" state is the on we do the most logic in it seems ....
// NOTE: Maybe we could rather schedule a check for buffer length after half of the currently
// played segment, or on pause/play/seek instead of naively checking every 100ms?
_doTickIdle() {
const hls = this.hls,
config = hls.config,
media = this.media;
// if start level not parsed yet OR
// if video not attached AND start fragment already requested OR start frag prefetch disabled
// exit loop, as we either need more info (level not parsed) or we need media to be attached to load new fragment
if (this.levelLastLoaded === undefined || (
!media && (this.startFragRequested || !config.startFragPrefetch))) {
return;
}
// if we have not yet loaded any fragment, start loading from start position
let pos;
if (this.loadedmetadata) {
pos = media.currentTime;
} else {
pos = this.nextLoadPosition;
}
// determine next load level
let level = hls.nextLoadLevel,
levelInfo = this.levels[level];
if (!levelInfo) {
return;
}
let levelBitrate = levelInfo.bitrate,
maxBufLen;
// compute max Buffer Length that we could get from this load level, based on level bitrate. don't buffer more than 60 MB and more than 30s
if (levelBitrate) {
maxBufLen = Math.max(8 * config.maxBufferSize / levelBitrate, config.maxBufferLength);
} else {
maxBufLen = config.maxBufferLength;
}
maxBufLen = Math.min(maxBufLen, config.maxMaxBufferLength);
// determine next candidate fragment to be loaded, based on current position and end of buffer position
// ensure up to `config.maxMaxBufferLength` of buffer upfront
const bufferInfo = BufferHelper.bufferInfo(this.mediaBuffer ? this.mediaBuffer : media, pos, config.maxBufferHole),
bufferLen = bufferInfo.len;
// Stay idle if we are still with buffer margins
if (bufferLen >= maxBufLen) {
return;
}
// if buffer length is less than maxBufLen try to load a new fragment ...
logger.trace(`buffer length of ${bufferLen.toFixed(3)} is below max of ${maxBufLen.toFixed(3)}. checking for more payload ...`);
// set next load level : this will trigger a playlist load if needed
this.level = hls.nextLoadLevel = level;
const levelDetails = levelInfo.details;
// if level info not retrieved yet, switch state and wait for level retrieval
// if live playlist, ensure that new playlist has been refreshed to avoid loading/try to load
// a useless and outdated fragment (that might even introduce load error if it is already out of the live playlist)
if (levelDetails === undefined || levelDetails.live === true && this.levelLastLoaded !== level) {
this.state = State.WAITING_LEVEL;
return;
}
// we just got done loading the final fragment and there is no other buffered range after ...
// rationale is that in case there are any buffered ranges after, it means that there are unbuffered portions in between
// so we should not switch to ENDED in that case, to be able to buffer them
// don't switch to ENDED if we need to backtrack last fragment
let fragPrevious = this.fragPrevious;
if (!levelDetails.live && fragPrevious && !fragPrevious.backtracked && fragPrevious.sn === levelDetails.endSN && !bufferInfo.nextStart) {
// fragPrevious is last fragment. retrieve level duration using last frag start offset + duration
// real duration might be lower than initial duration if there are drifts between real frag duration and playlist signaling
const duration = Math.min(media.duration,fragPrevious.start + fragPrevious.duration);
// if everything (almost) til the end is buffered, let's signal eos
// we don't compare exactly media.duration === bufferInfo.end as there could be some subtle media duration difference (audio/video offsets...)
// tolerate up to one frag duration to cope with these cases.
// also cope with almost zero last frag duration (max last frag duration with 200ms) refer to https://github.com/video-dev/hls.js/pull/657
if (duration - Math.max(bufferInfo.end,fragPrevious.start) <= Math.max(0.2,fragPrevious.duration)) {
// Finalize the media stream
let data = {};
if (this.altAudio) {
data.type = 'video';
}
this.hls.trigger(Event.BUFFER_EOS,data);
this.state = State.ENDED;
return;
}
}
// if we have the levelDetails for the selected variant, let's continue enriching our stream (load keys/fragments or trigger EOS, etc..)
this._fetchPayloadOrEos(pos, bufferInfo, levelDetails);
}
_fetchPayloadOrEos(pos, bufferInfo, levelDetails) {
const fragPrevious = this.fragPrevious,
level = this.level,
fragments = levelDetails.fragments,
fragLen = fragments.length;
// empty playlist
if (fragLen === 0) {
return;
}
// find fragment index, contiguous with end of buffer position
let start = fragments[0].start,
end = fragments[fragLen-1].start + fragments[fragLen-1].duration,
bufferEnd = bufferInfo.end,
frag;
if (levelDetails.initSegment && !levelDetails.initSegment.data) {
frag = levelDetails.initSegment;
} else {
// in case of live playlist we need to ensure that requested position is not located before playlist start
if (levelDetails.live) {
let initialLiveManifestSize = this.config.initialLiveManifestSize;
if(fragLen < initialLiveManifestSize){
logger.warn(`Can not start playback of a level, reason: not enough fragments ${fragLen} < ${initialLiveManifestSize}`);
return;
}
frag = this._ensureFragmentAtLivePoint(levelDetails, bufferEnd, start, end, fragPrevious, fragments, fragLen);
// if it explicitly returns null don't load any fragment and exit function now
if (frag === null) {
return;
}
} else {
// VoD playlist: if bufferEnd before start of playlist, load first fragment
if (bufferEnd < start) {
frag = fragments[0];
}
}
}
if (!frag) {
frag = this._findFragment(start, fragPrevious, fragLen, fragments, bufferEnd, end, levelDetails);
}
if(frag) {
this._loadFragmentOrKey(frag, level, levelDetails, pos, bufferEnd);
}
return;
}
_ensureFragmentAtLivePoint(levelDetails, bufferEnd, start, end, fragPrevious, fragments, fragLen) {
const config = this.hls.config, media = this.media;
let frag;
// check if requested position is within seekable boundaries :
//logger.log(`start/pos/bufEnd/seeking:${start.toFixed(3)}/${pos.toFixed(3)}/${bufferEnd.toFixed(3)}/${this.media.seeking}`);
let maxLatency = config.liveMaxLatencyDuration !== undefined ? config.liveMaxLatencyDuration : config.liveMaxLatencyDurationCount*levelDetails.targetduration;
if (bufferEnd < Math.max(start-config.maxFragLookUpTolerance, end - maxLatency)) {
let liveSyncPosition = this.liveSyncPosition = this.computeLivePosition(start, levelDetails);
logger.log(`buffer end: ${bufferEnd.toFixed(3)} is located too far from the end of live sliding playlist, reset currentTime to : ${liveSyncPosition.toFixed(3)}`);
bufferEnd = liveSyncPosition;
if (media && media.readyState && media.duration > liveSyncPosition) {
media.currentTime = liveSyncPosition;
}
this.nextLoadPosition = liveSyncPosition;
}
// if end of buffer greater than live edge, don't load any fragment
// this could happen if live playlist intermittently slides in the past.
// level 1 loaded [182580161,182580167]
// level 1 loaded [182580162,182580169]
// Loading 182580168 of [182580162 ,182580169],level 1 ..
// Loading 182580169 of [182580162 ,182580169],level 1 ..
// level 1 loaded [182580162,182580168] <============= here we should have bufferEnd > end. in that case break to avoid reloading 182580168
// level 1 loaded [182580164,182580171]
//
// don't return null in case media not loaded yet (readystate === 0)
if (levelDetails.PTSKnown && bufferEnd > end && media && media.readyState) {
return null;
}
if (this.startFragRequested && !levelDetails.PTSKnown) {
/* we are switching level on live playlist, but we don't have any PTS info for that quality level ...
try to load frag matching with next SN.
even if SN are not synchronized between playlists, loading this frag will help us
compute playlist sliding and find the right one after in case it was not the right consecutive one */
if (fragPrevious) {
const targetSN = fragPrevious.sn + 1;
if (targetSN >= levelDetails.startSN && targetSN <= levelDetails.endSN) {
const fragNext = fragments[targetSN - levelDetails.startSN];
if (fragPrevious.cc === fragNext.cc) {
frag = fragNext;
logger.log(`live playlist, switching playlist, load frag with next SN: ${frag.sn}`);
}
}
// next frag SN not available (or not with same continuity counter)
// look for a frag sharing the same CC
if (!frag) {
frag = BinarySearch.search(fragments, function(frag) {
return fragPrevious.cc - frag.cc;
});
if (frag) {
logger.log(`live playlist, switching playlist, load frag with same CC: ${frag.sn}`);
}
}
}
if (!frag) {
/* we have no idea about which fragment should be loaded.
so let's load mid fragment. it will help computing playlist sliding and find the right one
*/
frag = fragments[Math.min(fragLen - 1, Math.round(fragLen / 2))];
logger.log(`live playlist, switching playlist, unknown, load middle frag : ${frag.sn}`);
}
}
return frag;
}
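// select the fragment to load next: normally the one overlapping bufferEnd
// (within maxFragLookUpTolerance), with special handling when the previous
// fragment was just loaded (skip to the next SN) or needs backtracking because
// it did not start with a keyframe.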
_findFragment(start, fragPrevious, fragLen, fragments, bufferEnd, end, levelDetails) {
const config = this.hls.config;
let frag;
let foundFrag;
let maxFragLookUpTolerance = config.maxFragLookUpTolerance;
const fragNext = fragPrevious ? fragments[fragPrevious.sn - fragments[0].sn + 1] : undefined;
let fragmentWithinToleranceTest = (candidate) => {
// offset should be within fragment boundary - config.maxFragLookUpTolerance
// this is to cope with situations like
// bufferEnd = 9.991
// frag[0] : [0,10]
// frag[1] : [10,20]
// bufferEnd is within frag[0] range ... although what we are expecting is to return frag[1] here
// frag start frag start+duration
// |-----------------------------|
// <---> <--->
// ...--------><-----------------------------><---------....
// previous frag matching fragment next frag
// return -1 return 0 return 1
//logger.log(`level/sn/start/end/bufEnd:${level}/${candidate.sn}/${candidate.start}/${(candidate.start+candidate.duration)}/${bufferEnd}`);
// Set the lookup tolerance to be small enough to detect the current segment - ensures we don't skip over very small segments
let candidateLookupTolerance = Math.min(maxFragLookUpTolerance, candidate.duration + (candidate.deltaPTS ? candidate.deltaPTS : 0));
if (candidate.start + candidate.duration - candidateLookupTolerance <= bufferEnd) {
return 1;
} // if maxFragLookUpTolerance will have negative value then don't return -1 for first element
else if (candidate.start - candidateLookupTolerance > bufferEnd && candidate.start) {
return -1;
}
return 0;
};
if (bufferEnd < end) {
if (bufferEnd > end - maxFragLookUpTolerance) {
maxFragLookUpTolerance = 0;
}
// Prefer the next fragment if it's within tolerance
if (fragNext && !fragmentWithinToleranceTest(fragNext)) {
foundFrag = fragNext;
} else {
foundFrag = BinarySearch.search(fragments, fragmentWithinToleranceTest);
}
} else {
// reach end of playlist
foundFrag = fragments[fragLen-1];
}
if (foundFrag) {
frag = foundFrag;
const curSNIdx = frag.sn - levelDetails.startSN;
const sameLevel = fragPrevious && frag.level === fragPrevious.level;
const prevFrag = fragments[curSNIdx - 1];
const nextFrag = fragments[curSNIdx + 1];
//logger.log('find SN matching with pos:' + bufferEnd + ':' + frag.sn);
if (fragPrevious && frag.sn === fragPrevious.sn) {
if (sameLevel && !frag.backtracked) {
if (frag.sn < levelDetails.endSN) {
let deltaPTS = fragPrevious.deltaPTS;
// if there is a significant delta between audio and video, larger than max allowed hole,
// and if previous remuxed fragment did not start with a keyframe. (fragPrevious.dropped)
// let's try to load previous fragment again to get last keyframe
// then we will reload again current fragment (that way we should be able to fill the buffer hole ...)
if (deltaPTS && deltaPTS > config.maxBufferHole && fragPrevious.dropped && curSNIdx) {
frag = prevFrag;
logger.warn(`SN just loaded, with large PTS gap between audio and video, maybe frag is not starting with a keyframe ? load previous one to try to overcome this`);
// decrement previous frag load counter to avoid frag loop loading error when next fragment will get reloaded
fragPrevious.loadCounter--;
} else {
frag = nextFrag;
logger.log(`SN just loaded, load next one: ${frag.sn}`);
}
} else {
frag = null;
}
} else if (frag.backtracked) {
// Only backtrack a max of 1 consecutive fragment to prevent sliding back too far when little or no frags start with keyframes
if (nextFrag && nextFrag.backtracked) {
logger.warn(`Already backtracked from fragment ${nextFrag.sn}, will not backtrack to fragment ${frag.sn}. Loading fragment ${nextFrag.sn}`);
frag = nextFrag;
} else {
// If a fragment has dropped frames and it's in a same level/sequence, load the previous fragment to try and find the keyframe
// Reset the dropped count now since it won't be reset until we parse the fragment again, which prevents infinite backtracking on the same segment
logger.warn('Loaded fragment with dropped frames, backtracking 1 segment to find a keyframe');
frag.dropped = 0;
if (prevFrag) {
if (prevFrag.loadCounter) {
prevFrag.loadCounter--;
}
frag = prevFrag;
frag.backtracked = true;
} else if (curSNIdx) {
// can't backtrack on very first fragment
frag = null;
}
}
}
}
}
return frag;
}
_loadFragmentOrKey(frag, level, levelDetails, pos, bufferEnd) {
const hls = this.hls,
config = hls.config;
//logger.log('loading frag ' + i +',pos/bufEnd:' + pos.toFixed(3) + '/' + bufferEnd.toFixed(3));
if ((frag.decryptdata && frag.decryptdata.uri != null) && (frag.decryptdata.key == null)) {
logger.log(`Loading key for ${frag.sn} of [${levelDetails.startSN} ,${levelDetails.endSN}],level ${level}`);
this.state = State.KEY_LOADING;
hls.trigger(Event.KEY_LOADING, {frag: frag});
} else {
logger.log(`Loading ${frag.sn} of [${levelDetails.startSN} ,${levelDetails.endSN}],level ${level}, currentTime:${pos.toFixed(3)},bufferEnd:${bufferEnd.toFixed(3)}`);
// ensure that we are not reloading the same fragments in loop ...
if (this.fragLoadIdx !== undefined) {
this.fragLoadIdx++;
} else {
this.fragLoadIdx = 0;
}
if (frag.loadCounter) {
frag.loadCounter++;
let maxThreshold = config.fragLoadingLoopThreshold;
// if this frag has already been loaded 3 times, and if it has been reloaded recently
if (frag.loadCounter > maxThreshold && (Math.abs(this.fragLoadIdx - frag.loadIdx) < maxThreshold)) {
hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.FRAG_LOOP_LOADING_ERROR, fatal: false, frag: frag});
return;
}
} else {
frag.loadCounter = 1;
}
frag.loadIdx = this.fragLoadIdx;
frag.autoLevel = hls.autoLevelEnabled;
frag.bitrateTest = this.bitrateTest;
this.fragCurrent = frag;
this.startFragRequested = true;
// Don't update nextLoadPosition for fragments which are not buffered
if (!isNaN(frag.sn) && !frag.bitrateTest) {
this.nextLoadPosition = frag.start + frag.duration;
}
hls.trigger(Event.FRAG_LOADING, {frag: frag});
// lazy demuxer init, as this could take some time ... do it during frag loading
if (!this.demuxer) {
this.demuxer = new Demuxer(hls,'main');
}
this.state = State.FRAG_LOADING;
return;
}
}
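// state setter: logs every transition and exposes it to the app through the
// STREAM_STATE_TRANSITION event.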
set state(nextState) {
if (this.state !== nextState) {
const previousState = this.state;
this._state = nextState;
logger.log(`main stream:${previousState}->${nextState}`);
this.hls.trigger(Event.STREAM_STATE_TRANSITION, {previousState, nextState});
}
}
get state() {
return this._state;
}
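// look up, via binary search, the buffered fragment whose [startPTS, endPTS] range contains `position`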
getBufferedFrag(position) {
return BinarySearch.search(this._bufferedFrags, function(frag) {
if (position < frag.startPTS) {
return -1;
} else if (position > frag.endPTS) {
return 1;
}
return 0;
});
}
get currentLevel() {
let media = this.media;
if (media) {
const frag = this.getBufferedFrag(media.currentTime);
if (frag) {
return frag.level;
}
}
return -1;
}
get nextBufferedFrag() {
let media = this.media;
if (media) {
// first get end range of current fragment
return this.followingBufferedFrag(this.getBufferedFrag(media.currentTime));
} else {
return null;
}
}
followingBufferedFrag(frag) {
if (frag) {
// try to get range of next fragment (500ms after this range)
return this.getBufferedFrag(frag.endPTS + 0.5);
}
return null;
}
get nextLevel() {
const frag = this.nextBufferedFrag;
if (frag) {
return frag.level;
} else {
return -1;
}
}
_checkFragmentChanged() {
var fragPlayingCurrent, currentTime, video = this.media;
if (video && video.readyState && video.seeking === false) {
currentTime = video.currentTime;
/* if video element is in seeked state, currentTime can only increase.
(assuming that playback rate is positive ...)
As sometimes currentTime jumps back to zero after a
media decode error, check this, to avoid seeking back to
wrong position after a media decode error
*/
if(currentTime > video.playbackRate*this.lastCurrentTime) {
this.lastCurrentTime = currentTime;
}
if (BufferHelper.isBuffered(video,currentTime)) {
fragPlayingCurrent = this.getBufferedFrag(currentTime);
} else if (BufferHelper.isBuffered(video,currentTime + 0.1)) {
/* ensure that FRAG_CHANGED event is triggered at startup,
when first video frame is displayed and playback is paused.
add a tolerance of 100ms, in case current position is not buffered,
check if current pos+100ms is buffered and use that buffer range
for FRAG_CHANGED event reporting */
fragPlayingCurrent = this.getBufferedFrag(currentTime + 0.1);
}
if (fragPlayingCurrent) {
var fragPlaying = fragPlayingCurrent;
if (fragPlaying !== this.fragPlaying) {
this.hls.trigger(Event.FRAG_CHANGED, {frag: fragPlaying});
const fragPlayingLevel = fragPlaying.level;
if (!this.fragPlaying || this.fragPlaying.level !== fragPlayingLevel) {
this.hls.trigger(Event.LEVEL_SWITCHED, {level: fragPlayingLevel});
}
this.fragPlaying = fragPlaying;
}
}
}
}
/*
on immediate level switch :
- pause playback if playing
- cancel any pending load request
- and trigger a buffer flush
*/
immediateLevelSwitch() {
logger.log('immediateLevelSwitch');
if (!this.immediateSwitch) {
this.immediateSwitch = true;
let media = this.media, previouslyPaused;
if (media) {
previouslyPaused = media.paused;
media.pause();
} else {
// don't restart playback after instant level switch in case media not attached
previouslyPaused = true;
}
this.previouslyPaused = previouslyPaused;
}
var fragCurrent = this.fragCurrent;
if (fragCurrent && fragCurrent.loader) {
fragCurrent.loader.abort();
}
this.fragCurrent = null;
// increase fragment load Index to avoid frag loop loading error after buffer flush
if (this.fragLoadIdx !== undefined) {
this.fragLoadIdx += 2 * this.config.fragLoadingLoopThreshold;
}
// flush everything
this.flushMainBuffer(0,Number.POSITIVE_INFINITY);
}
/*
on immediate level switch end, after new fragment has been buffered :
- nudge video decoder by slightly adjusting video currentTime (if currentTime buffered)
- resume the playback if needed
*/
immediateLevelSwitchEnd() {
let media = this.media;
if (media && media.buffered.length) {
this.immediateSwitch = false;
if(BufferHelper.isBuffered(media,media.currentTime)) {
// only nudge if currentTime is buffered
media.currentTime -= 0.0001;
}
if (!this.previouslyPaused) {
media.play();
}
}
}
nextLevelSwitch() {
/* try to switch ASAP without breaking video playback :
in order to ensure smooth but quick level switching,
we need to find the next flushable buffer range
we should take into account new segment fetch time
*/
let media = this.media;
// ensure that media is defined and that metadata are available (to retrieve currentTime)
if (media && media.readyState) {
let fetchdelay, fragPlayingCurrent, nextBufferedFrag;
if (this.fragLoadIdx !== undefined) {
// increase fragment load Index to avoid frag loop loading error after buffer flush
this.fragLoadIdx += 2 * this.config.fragLoadingLoopThreshold;
}
fragPlayingCurrent = this.getBufferedFrag(media.currentTime);
if (fragPlayingCurrent && fragPlayingCurrent.startPTS > 1) {
// flush buffer preceding current fragment (flush until current fragment start offset)
// minus 1s to avoid video freezing, that could happen if we flush keyframe of current video ...
this.flushMainBuffer(0,fragPlayingCurrent.startPTS - 1);
}
if (!media.paused) {
// add a safety delay of 1s
var nextLevelId = this.hls.nextLoadLevel,nextLevel = this.levels[nextLevelId], fragLastKbps = this.fragLastKbps;
if (fragLastKbps && this.fragCurrent) {
fetchdelay = this.fragCurrent.duration * nextLevel.bitrate / (1000 * fragLastKbps) + 1;
} else {
fetchdelay = 0;
}
} else {
fetchdelay = 0;
}
//logger.log('fetchdelay:'+fetchdelay);
// find buffer range that will be reached once new fragment will be fetched
nextBufferedFrag = this.getBufferedFrag(media.currentTime + fetchdelay);
if (nextBufferedFrag) {
// we can flush buffer range following this one without stalling playback
nextBufferedFrag = this.followingBufferedFrag(nextBufferedFrag);
if (nextBufferedFrag) {
// if we are here, we can also cancel any loading/demuxing in progress, as they are useless
var fragCurrent = this.fragCurrent;
if (fragCurrent && fragCurrent.loader) {
fragCurrent.loader.abort();
}
this.fragCurrent = null;
// start flush position is the start PTS of next buffered frag.
// we use frag.maxStartPTS which is max(audio startPTS, video startPTS).
// in case there is a small PTS Delta between audio and video, using maxStartPTS avoids flushing last samples from current fragment
this.flushMainBuffer(nextBufferedFrag.maxStartPTS , Number.POSITIVE_INFINITY);
}
}
}
}
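// ask the buffer controller to flush the main buffer over [startOffset, endOffset];
// when alternate audio is active, only the video buffer is flushed.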
flushMainBuffer(startOffset,endOffset) {
this.state = State.BUFFER_FLUSHING;
let flushScope = {startOffset: startOffset, endOffset: endOffset};
// if alternate audio tracks are used, only flush video, otherwise flush everything
if (this.altAudio) {
flushScope.type = 'video';
}
this.hls.trigger(Event.BUFFER_FLUSHING, flushScope);
}
onMediaAttached(data) {
var media = this.media = this.mediaBuffer = data.media;
this.onvseeking = this.onMediaSeeking.bind(this);
this.onvseeked = this.onMediaSeeked.bind(this);
this.onvended = this.onMediaEnded.bind(this);
media.addEventListener('seeking', this.onvseeking);
media.addEventListener('seeked', this.onvseeked);
media.addEventListener('ended', this.onvended);
let config = this.config;
if(this.levels && config.autoStartLoad) {
this.hls.startLoad(config.startPosition);
}
}
onMediaDetaching() {
var media = this.media;
if (media && media.ended) {
logger.log('MSE detaching and video ended, reset startPosition');
this.startPosition = this.lastCurrentTime = 0;
}
// reset fragment loading counter on MSE detaching to avoid reporting FRAG_LOOP_LOADING_ERROR after error recovery
var levels = this.levels;
if (levels) {
// reset fragment load counter
levels.forEach(level => {
if(level.details) {
level.details.fragments.forEach(fragment => {
fragment.loadCounter = undefined;
fragment.backtracked = undefined;
});
}
});
}
// remove video listeners
if (media) {
media.removeEventListener('seeking', this.onvseeking);
media.removeEventListener('seeked', this.onvseeked);
media.removeEventListener('ended', this.onvended);
this.onvseeking = this.onvseeked = this.onvended = null;
}
this.media = this.mediaBuffer = null;
this.loadedmetadata = false;
this.stopLoad();
}
onMediaSeeking() {
let media = this.media, currentTime = media ? media.currentTime : undefined, config = this.config;
if (!isNaN(currentTime)) {
logger.log(`media seeking to ${currentTime.toFixed(3)}`);
}
let mediaBuffer = this.mediaBuffer ? this.mediaBuffer : media;
let bufferInfo = BufferHelper.bufferInfo(mediaBuffer,currentTime,this.config.maxBufferHole);
if (this.state === State.FRAG_LOADING) {
let fragCurrent = this.fragCurrent;
// check if we are seeking to a unbuffered area AND if frag loading is in progress
if (bufferInfo.len === 0 && fragCurrent) {
let tolerance = config.maxFragLookUpTolerance,
fragStartOffset = fragCurrent.start - tolerance,
fragEndOffset = fragCurrent.start + fragCurrent.duration + tolerance;
// check if we seek position will be out of currently loaded frag range : if out cancel frag load, if in, don't do anything
if (currentTime < fragStartOffset || currentTime > fragEndOffset) {
if (fragCurrent.loader) {
logger.log('seeking outside of buffer while fragment load in progress, cancel fragment load');
fragCurrent.loader.abort();
}
this.fragCurrent = null;
this.fragPrevious = null;
// switch to IDLE state to load new fragment
this.state = State.IDLE;
} else {
logger.log('seeking outside of buffer but within currently loaded fragment range');
}
}
} else if (this.state === State.ENDED) {
// if seeking to unbuffered area, clean up fragPrevious
if (bufferInfo.len === 0) {
this.fragPrevious = null;
}
// switch to IDLE state to check for potential new fragment
this.state = State.IDLE;
}
if (media) {
this.lastCurrentTime = currentTime;
}
// avoid reporting fragment loop loading error in case user is seeking several times on same position
if (this.state !== State.FRAG_LOADING && this.fragLoadIdx !== undefined) {
this.fragLoadIdx += 2 * config.fragLoadingLoopThreshold;
}
// in case seeking occurs although no media buffered, adjust startPosition and nextLoadPosition to seek target
if(!this.loadedmetadata) {
this.nextLoadPosition = this.startPosition = currentTime;
}
// tick to speed up processing
this.tick();
}
onMediaSeeked() {
const media = this.media, currentTime = media ? media.currentTime : undefined;
if (!isNaN(currentTime)) {
logger.log(`media seeked to ${currentTime.toFixed(3)}`);
}
// tick to speed up FRAGMENT_PLAYING triggering
this.tick();
}
onMediaEnded() {
logger.log('media ended');
// reset startPosition and lastCurrentTime to restart playback @ stream beginning
this.startPosition = this.lastCurrentTime = 0;
}
onManifestLoading() {
// reset buffer on manifest loading
logger.log('trigger BUFFER_RESET');
this.hls.trigger(Event.BUFFER_RESET);
this._bufferedFrags = [];
this.stalled = false;
this.startPosition = this.lastCurrentTime = 0;
}
onManifestParsed(data) {
var aac = false, heaac = false, codec;
data.levels.forEach(level => {
// detect if we have different kind of audio codecs used amongst playlists
codec = level.audioCodec;
if (codec) {
if (codec.indexOf('mp4a.40.2') !== -1) {
aac = true;
}
if (codec.indexOf('mp4a.40.5') !== -1) {
heaac = true;
}
}
});
this.audioCodecSwitch = (aac && heaac);
if (this.audioCodecSwitch) {
logger.log('both AAC/HE-AAC audio found in levels; declaring level codec as HE-AAC');
}
this.levels = data.levels;
this.startFragRequested = false;
let config = this.config;
if (config.autoStartLoad || this.forceStartLoad) {
this.hls.startLoad(config.startPosition);
}
}
onLevelLoaded(data) {
const newDetails = data.details;
const newLevelId = data.level;
const lastLevel = this.levels[this.levelLastLoaded];
const curLevel = this.levels[newLevelId];
const duration = newDetails.totalduration;
let sliding = 0;
logger.log(`level ${newLevelId} loaded [${newDetails.startSN},${newDetails.endSN}],duration:${duration}`);
if (newDetails.live) {
var curDetails = curLevel.details;
if (curDetails && newDetails.fragments.length > 0) {
// we already have details for that level, merge them
LevelHelper.mergeDetails(curDetails,newDetails);
sliding = newDetails.fragments[0].start;
this.liveSyncPosition = this.computeLivePosition(sliding, curDetails);
if (newDetails.PTSKnown && !isNaN(sliding)) {
logger.log(`live playlist sliding:${sliding.toFixed(3)}`);
} else {
logger.log('live playlist - outdated PTS, unknown sliding');
alignDiscontinuities(this.fragPrevious, lastLevel, newDetails);
}
} else {
logger.log('live playlist - first load, unknown sliding');
newDetails.PTSKnown = false;
alignDiscontinuities(this.fragPrevious, lastLevel, newDetails);
}
} else {
newDetails.PTSKnown = false;
}
// override level info
curLevel.details = newDetails;
this.levelLastLoaded = newLevelId;
this.hls.trigger(Event.LEVEL_UPDATED, { details: newDetails, level: newLevelId });
if (this.startFragRequested === false) {
// compute start position if set to -1. use it straight away if value is defined
if (this.startPosition === -1 || this.lastCurrentTime === -1) {
// first, check if start time offset has been set in playlist, if yes, use this value
let startTimeOffset = newDetails.startTimeOffset;
if(!isNaN(startTimeOffset)) {
if (startTimeOffset < 0) {
logger.log(`negative start time offset ${startTimeOffset}, count from end of last fragment`);
startTimeOffset = sliding + duration + startTimeOffset;
}
logger.log(`start time offset found in playlist, adjust startPosition to ${startTimeOffset}`);
this.startPosition = startTimeOffset;
} else {
// if live playlist, set start position to be fragment N-this.config.liveSyncDurationCount (usually 3)
if (newDetails.live) {
this.startPosition = this.computeLivePosition(sliding, newDetails);
logger.log(`configure startPosition to ${this.startPosition}`);
} else {
this.startPosition = 0;
}
}
this.lastCurrentTime = this.startPosition;
}
this.nextLoadPosition = this.startPosition;
}
// only switch back to IDLE state if we were waiting for level to start downloading a new fragment
if (this.state === State.WAITING_LEVEL) {
this.state = State.IDLE;
}
//trigger handler right now
this.tick();
}
onKeyLoaded() {
if (this.state === State.KEY_LOADING) {
this.state = State.IDLE;
this.tick();
}
}
onFragLoaded(data) {
var fragCurrent = this.fragCurrent,
fragLoaded = data.frag;
if (this.state === State.FRAG_LOADING &&
fragCurrent &&
fragLoaded.type === 'main' &&
fragLoaded.level === fragCurrent.level &&
fragLoaded.sn === fragCurrent.sn) {
let stats = data.stats,
currentLevel = this.levels[fragCurrent.level],
details = currentLevel.details;
logger.log(`Loaded ${fragCurrent.sn} of [${details.startSN} ,${details.endSN}],level ${fragCurrent.level}`);
// reset frag bitrate test in any case after frag loaded event
this.bitrateTest = false;
this.stats = stats;
// if this frag was loaded to perform a bitrate test AND if hls.nextLoadLevel is greater than 0
// then this means that we should be able to load a fragment at a higher quality level
if (fragLoaded.bitrateTest === true && this.hls.nextLoadLevel) {
// switch back to IDLE state ... we just loaded a fragment to determine adequate start bitrate and initialize autoswitch algo
this.state = State.IDLE;
this.startFragRequested = false;
stats.tparsed = stats.tbuffered = performance.now();
this.hls.trigger(Event.FRAG_BUFFERED, {stats: stats, frag: fragCurrent, id : 'main'});
this.tick();
} else if (fragLoaded.sn === 'initSegment') {
this.state = State.IDLE;
stats.tparsed = stats.tbuffered = performance.now();
details.initSegment.data = data.payload;
this.hls.trigger(Event.FRAG_BUFFERED, {stats: stats, frag: fragCurrent, id : 'main'});
this.tick();
} else {
this.state = State.PARSING;
// transmux the MPEG-TS data to ISO-BMFF segments
let duration = details.totalduration,
level = fragCurrent.level,
sn = fragCurrent.sn,
audioCodec = this.config.defaultAudioCodec || currentLevel.audioCodec;
if(this.audioCodecSwap) {
logger.log('swapping playlist audio codec');
if(audioCodec === undefined) {
audioCodec = this.lastAudioCodec;
}
if(audioCodec) {
if(audioCodec.indexOf('mp4a.40.5') !==-1) {
audioCodec = 'mp4a.40.2';
} else {
audioCodec = 'mp4a.40.5';
}
}
}
this.pendingBuffering = true;
this.appended = false;
logger.log(`Parsing ${sn} of [${details.startSN} ,${details.endSN}],level ${level}, cc ${fragCurrent.cc}`);
let demuxer = this.demuxer;
if (!demuxer) {
demuxer = this.demuxer = new Demuxer(this.hls,'main');
}
// time Offset is accurate if level PTS is known, or if playlist is not sliding (not live) and if media is not seeking (this is to overcome potential timestamp drifts between playlists and fragments)
let media = this.media;
let mediaSeeking = media && media.seeking;
let accurateTimeOffset = !mediaSeeking && (details.PTSKnown || !details.live);
let initSegmentData = details.initSegment ? details.initSegment.data : [];
demuxer.push(data.payload, initSegmentData,audioCodec, currentLevel.videoCodec, fragCurrent, duration, accurateTimeOffset,undefined);
}
}
this.fragLoadError = 0;
}
onFragParsingInitSegment(data) {
const fragCurrent = this.fragCurrent;
const fragNew = data.frag;
if (fragCurrent &&
data.id === 'main' &&
fragNew.sn === fragCurrent.sn &&
fragNew.level === fragCurrent.level &&
this.state === State.PARSING) {
var tracks = data.tracks, trackName, track;
// if audio track is expected to come from audio stream controller, discard any coming from main
if (tracks.audio && this.altAudio) {
delete tracks.audio;
}
// include levelCodec in audio and video tracks
track = tracks.audio;
if(track) {
var audioCodec = this.levels[this.level].audioCodec,
ua = navigator.userAgent.toLowerCase();
if(audioCodec && this.audioCodecSwap) {
logger.log('swapping playlist audio codec');
if(audioCodec.indexOf('mp4a.40.5') !==-1) {
audioCodec = 'mp4a.40.2';
} else {
audioCodec = 'mp4a.40.5';
}
}
// in case AAC and HE-AAC audio codecs are signalled in manifest
// force HE-AAC, as it seems that most browsers prefer it that way,
// except for mono streams OR on FF
// these conditions might need to be reviewed ...
if (this.audioCodecSwitch) {
// don't force HE-AAC if mono stream
if(track.metadata.channelCount !== 1 &&
// don't force HE-AAC if firefox
ua.indexOf('firefox') === -1) {
audioCodec = 'mp4a.40.5';
}
}
// HE-AAC is broken on Android, always signal audio codec as AAC even if variant manifest states otherwise
if(ua.indexOf('android') !== -1 && track.container !== 'audio/mpeg') { // Exclude mpeg audio
audioCodec = 'mp4a.40.2';
logger.log(`Android: force audio codec to ${audioCodec}`);
}
track.levelCodec = audioCodec;
track.id = data.id;
}
track = tracks.video;
if(track) {
track.levelCodec = this.levels[this.level].videoCodec;
track.id = data.id;
}
this.hls.trigger(Event.BUFFER_CODECS,tracks);
// loop through tracks that are going to be provided to bufferController
for (trackName in tracks) {
track = tracks[trackName];
logger.log(`main track:${trackName},container:${track.container},codecs[level/parsed]=[${track.levelCodec}/${track.codec}]`);
var initSegment = track.initSegment;
if (initSegment) {
this.appended = true;
// arm pending Buffering flag before appending a segment
this.pendingBuffering = true;
this.hls.trigger(Event.BUFFER_APPENDING, {type: trackName, data: initSegment, parent : 'main', content : 'initSegment'});
}
}
//trigger handler right now
this.tick();
}
}
onFragParsingData(data) {
const fragCurrent = this.fragCurrent;
const fragNew = data.frag;
if (fragCurrent &&
data.id === 'main' &&
fragNew.sn === fragCurrent.sn &&
fragNew.level === fragCurrent.level &&
!(data.type === 'audio' && this.altAudio) && // filter out main audio if audio track is loaded through audio stream controller
this.state === State.PARSING) {
var level = this.levels[this.level],
frag = fragCurrent;
if (isNaN(data.endPTS)) {
data.endPTS = data.startPTS + fragCurrent.duration;
data.endDTS = data.startDTS + fragCurrent.duration;
}
logger.log(`Parsed ${data.type},PTS:[${data.startPTS.toFixed(3)},${data.endPTS.toFixed(3)}],DTS:[${data.startDTS.toFixed(3)}/${data.endDTS.toFixed(3)}],nb:${data.nb},dropped:${data.dropped || 0}`);
// Detect gaps in a fragment and try to fix them by finding a keyframe in the previous fragment (see _findFragment)
if(data.type === 'video') {
frag.dropped = data.dropped;
if (frag.dropped) {
if (!frag.backtracked) {
const levelDetails = level.details;
if (levelDetails && frag.sn === levelDetails.startSN) {
logger.warn('missing video frame(s) on first frag, appending with gap');
} else {
logger.warn('missing video frame(s), backtracking fragment');
// Return back to the IDLE state without appending to buffer
// Causes _findFragment to backtrack a segment and find the keyframe
// Audio fragments arriving before video set nextLoadPosition, causing _findFragment to skip the backtracked fragment
frag.backtracked = true;
this.nextLoadPosition = data.startPTS;
this.state = State.IDLE;
this.fragPrevious = frag;
this.tick();
return;
}
} else {
logger.warn('Already backtracked on this fragment, appending with the gap');
}
} else {
// Only reset the backtracked flag if we've loaded the frag without any dropped frames
frag.backtracked = false;
}
}
var drift = LevelHelper.updateFragPTSDTS(level.details,frag,data.startPTS,data.endPTS,data.startDTS,data.endDTS),
hls = this.hls;
hls.trigger(Event.LEVEL_PTS_UPDATED, {details: level.details, level: this.level, drift: drift, type: data.type, start: data.startPTS, end: data.endPTS});
// has remuxer dropped video frames located before first keyframe ?
[data.data1, data.data2].forEach(buffer => {
// only append in PARSING state (rationale is that an appending error could happen synchronously on first segment appending)
// in that case it is useless to append following segments
if (buffer && buffer.length && this.state === State.PARSING) {
this.appended = true;
// arm pending Buffering flag before appending a segment
this.pendingBuffering = true;
hls.trigger(Event.BUFFER_APPENDING, {type: data.type, data: buffer, parent : 'main',content : 'data'});
}
});
//trigger handler right now
this.tick();
}
}
onFragParsed(data) {
const fragCurrent = this.fragCurrent;
const fragNew = data.frag;
if (fragCurrent &&
data.id === 'main' &&
fragNew.sn === fragCurrent.sn &&
fragNew.level === fragCurrent.level &&
this.state === State.PARSING) {
this.stats.tparsed = performance.now();
this.state = State.PARSED;
this._checkAppendedParsed();
}
}
onAudioTrackSwitching(data) {
// if any URL found on new audio track, it is an alternate audio track
var altAudio = !!data.url,
trackId = data.id;
// if we switch on main audio, ensure that main fragment scheduling is synced with media.buffered
// don't do anything if we switch to alt audio: audio stream controller is handling it.
// we will just have to change buffer scheduling on audioTrackSwitched
if (!altAudio) {
if (this.mediaBuffer !== this.media) {
logger.log(`switching on main audio, use media.buffered to schedule main fragment loading`);
this.mediaBuffer = this.media;
let fragCurrent = this.fragCurrent;
// we need to refill audio buffer from main: cancel any frag loading to speed up audio switch
if (fragCurrent.loader) {
logger.log('switching to main audio track, cancel main fragment load');
fragCurrent.loader.abort();
}
this.fragCurrent = null;
this.fragPrevious = null;
// destroy demuxer to force init segment generation (following audio switch)
if (this.demuxer) {
this.demuxer.destroy();
this.demuxer = null;
}
// switch to IDLE state to load new fragment
this.state = State.IDLE;
}
let hls = this.hls;
// switching to main audio, flush all audio and trigger track switched
hls.trigger(Event.BUFFER_FLUSHING, {startOffset: 0 , endOffset: Number.POSITIVE_INFINITY, type : 'audio'});
hls.trigger(Event.AUDIO_TRACK_SWITCHED, {id : trackId});
this.altAudio = false;
}
}
onAudioTrackSwitched(data) {
var trackId = data.id,
altAudio = !!this.hls.audioTracks[trackId].url;
if (altAudio) {
let videoBuffer = this.videoBuffer;
// if we switched on alternate audio, ensure that main fragment scheduling is synced with video sourcebuffer buffered
if (videoBuffer && this.mediaBuffer !== videoBuffer) {
logger.log(`switching on alternate audio, use video.buffered to schedule main fragment loading`);
this.mediaBuffer = videoBuffer;
}
}
this.altAudio = altAudio;
this.tick();
}
onBufferCreated(data) {
let tracks = data.tracks, mediaTrack, name, alternate = false;
for(var type in tracks) {
let track = tracks[type];
if (track.id === 'main') {
name = type;
mediaTrack = track;
// keep video source buffer reference
if (type === 'video') {
this.videoBuffer = tracks[type].buffer;
}
} else {
alternate = true;
}
}
if (alternate && mediaTrack) {
logger.log(`alternate track found, use ${name}.buffered to schedule main fragment loading`);
this.mediaBuffer = mediaTrack.buffer;
} else {
this.mediaBuffer = this.media;
}
}
onBufferAppended(data) {
if (data.parent === 'main') {
const state = this.state;
if (state === State.PARSING || state === State.PARSED) {
// check if all buffers have been appended
this.pendingBuffering = (data.pending > 0);
this._checkAppendedParsed();
}
}
}
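// once the fragment is fully parsed and all pending appends have completed,
// record it in the buffered-fragment list, fire FRAG_BUFFERED and go back to IDLE.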
_checkAppendedParsed() {
//trigger handler right now
if (this.state === State.PARSED && (!this.appended || !this.pendingBuffering)) {
const frag = this.fragCurrent;
if (frag) {
const media = this.mediaBuffer ? this.mediaBuffer : this.media;
logger.log(`main buffered : ${TimeRanges.toString(media.buffered)}`);
// filter fragments potentially evicted from buffer. this is to avoid memleak on live streams
let bufferedFrags = this._bufferedFrags.filter(frag => {return BufferHelper.isBuffered(media,(frag.startPTS + frag.endPTS) / 2);});
// push new range
bufferedFrags.push(frag);
// sort frags, as we use BinarySearch for lookup in getBufferedFrag ...
this._bufferedFrags = bufferedFrags.sort(function(a,b) {return (a.startPTS - b.startPTS);});
this.fragPrevious = frag;
const stats = this.stats;
stats.tbuffered = performance.now();
// we should get rid of this.fragLastKbps
this.fragLastKbps = Math.round(8 * stats.total / (stats.tbuffered - stats.tfirst));
this.hls.trigger(Event.FRAG_BUFFERED, {stats: stats, frag: frag, id : 'main'});
this.state = State.IDLE;
}
this.tick();
}
}
onError(data) {
let frag = data.frag || this.fragCurrent;
// don't handle frag error not related to main fragment
if (frag && frag.type !== 'main') {
return;
}
// 0.5 : tolerance needed as some browsers stall playback before reaching buffered end
let mediaBuffered = !!this.media && BufferHelper.isBuffered(this.media, this.media.currentTime) && BufferHelper.isBuffered(this.media, this.media.currentTime + 0.5);
switch(data.details) {
case ErrorDetails.FRAG_LOAD_ERROR:
case ErrorDetails.FRAG_LOAD_TIMEOUT:
case ErrorDetails.KEY_LOAD_ERROR:
case ErrorDetails.KEY_LOAD_TIMEOUT:
if (!data.fatal) {
// keep retrying until the retry limit is reached
if ((this.fragLoadError + 1) <= this.config.fragLoadingMaxRetry) {
// exponential backoff capped to config.fragLoadingMaxRetryTimeout
let delay = Math.min(Math.pow(2, this.fragLoadError) * this.config.fragLoadingRetryDelay, this.config.fragLoadingMaxRetryTimeout);
// reset load counter to avoid frag loop loading error
frag.loadCounter = 0;
logger.warn(`mediaController: frag loading failed, retry in ${delay} ms`);
this.retryDate = performance.now() + delay;
// retry loading state
// if loadedmetadata is not set, it means that we are doing an emergency switch-down on the first frag
// in that case, reset startFragRequested flag
if (!this.loadedmetadata) {
this.startFragRequested = false;
this.nextLoadPosition = this.startPosition;
}
this.fragLoadError++;
this.state = State.FRAG_LOADING_WAITING_RETRY;
} else {
logger.error(`mediaController: ${data.details} reaches max retry, redispatch as fatal ...`);
// switch error to fatal
data.fatal = true;
this.state = State.ERROR;
}
}
break;
case ErrorDetails.FRAG_LOOP_LOADING_ERROR:
if(!data.fatal) {
// if buffer is not empty
if (mediaBuffered) {
// try to reduce max buffer length : rationale is that we could get
// frag loop loading error because of buffer eviction
this._reduceMaxBufferLength(frag.duration);
this.state = State.IDLE;
} else {
// buffer empty. report as fatal if in manual mode or if lowest level.
// level controller takes care of emergency switch down logic
if (!frag.autoLevel || frag.level === 0) {
// switch error to fatal
data.fatal = true;
this.state = State.ERROR;
}
}
}
break;
case ErrorDetails.LEVEL_LOAD_ERROR:
case ErrorDetails.LEVEL_LOAD_TIMEOUT:
if(this.state !== State.ERROR) {
if (data.fatal) {
// if fatal error, stop processing
this.state = State.ERROR;
logger.warn(`streamController: ${data.details},switch to ${this.state} state ...`);
} else {
// in case of non fatal error while loading level, if level controller is not retrying to load level , switch back to IDLE
if (!data.levelRetry && this.state === State.WAITING_LEVEL) {
this.state = State.IDLE;
}
}
}
break;
case ErrorDetails.BUFFER_FULL_ERROR:
// if in appending state
if (data.parent === 'main' && (this.state === State.PARSING || this.state === State.PARSED)) {
// reduce max buf len if current position is buffered
if (mediaBuffered) {
this._reduceMaxBufferLength(this.config.maxBufferLength);
this.state = State.IDLE;
} else {
// current position is not buffered, but browser is still complaining about buffer full error
// this happens on IE/Edge, refer to https://github.com/video-dev/hls.js/pull/708
// in that case flush the whole buffer to recover
logger.warn('buffer full error also media.currentTime is not buffered, flush everything');
this.fragCurrent = null;
// flush everything
this.flushMainBuffer(0,Number.POSITIVE_INFINITY);
}
}
break;
default:
break;
}
}
_reduceMaxBufferLength(minLength) {
let config = this.config;
if (config.maxMaxBufferLength >= minLength) {
// reduce max buffer length as it might be too high. we do this to avoid loop flushing ...
config.maxMaxBufferLength/=2;
logger.warn(`main:reduce max buffer length to ${config.maxMaxBufferLength}s`);
if (this.fragLoadIdx !== undefined) {
// increase fragment load Index to avoid frag loop loading error after buffer flush
this.fragLoadIdx += 2 * config.fragLoadingLoopThreshold;
}
}
}
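// per-tick media sanity checks: seek to the start position once the first range
// is buffered, finish immediate level switches, and run the stall/nudge watchdog.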
_checkBuffer() {
var media = this.media,
config = this.config;
// if ready state different from HAVE_NOTHING (numeric value 0), we are allowed to seek
if(media && media.readyState) {
let currentTime = media.currentTime,
mediaBuffer = this.mediaBuffer ? this.mediaBuffer : media,
buffered = mediaBuffer.buffered;
// adjust currentTime to start position on loaded metadata
if(!this.loadedmetadata && buffered.length) {
this.loadedmetadata = true;
// only adjust currentTime if different from startPosition or if startPosition not buffered
// at that stage, there should be only one buffered range, as we reach that code after first fragment has been buffered
let startPosition = media.seeking ? currentTime : this.startPosition,
startPositionBuffered = BufferHelper.isBuffered(mediaBuffer,startPosition),
firstbufferedPosition = buffered.start(0),
startNotBufferedButClose = !startPositionBuffered && (Math.abs(startPosition-firstbufferedPosition) < config.maxSeekHole);
// if currentTime not matching with expected startPosition or startPosition not buffered but close to first buffered
if (currentTime !== startPosition || startNotBufferedButClose) {
logger.log(`target start position:${startPosition}`);
// if startPosition not buffered, let's seek to buffered.start(0)
if(startNotBufferedButClose) {
startPosition = firstbufferedPosition;
logger.log(`target start position not buffered, seek to buffered.start(0) ${startPosition}`);
}
logger.log(`adjust currentTime from ${currentTime} to ${startPosition}`);
media.currentTime = startPosition;
}
} else if (this.immediateSwitch) {
this.immediateLevelSwitchEnd();
} else {
let bufferInfo = BufferHelper.bufferInfo(media,currentTime,0),
expectedPlaying = !(media.paused || // not playing when media is paused
media.ended || // not playing when media is ended
media.buffered.length === 0), // not playing if nothing buffered
jumpThreshold = 0.5, // tolerance needed as some browsers stall playback before reaching buffered range end
playheadMoving = currentTime !== this.lastCurrentTime;
if (playheadMoving) {
// playhead moving, but was previously stalled => now not stuck anymore
if (this.stallReported) {
logger.warn(`playback not stuck anymore @${currentTime}, after ${Math.round(performance.now()-this.stalled)}ms`);
this.stallReported = false;
}
this.stalled = undefined;
this.nudgeRetry = 0;
} else {
// playhead not moving
if(expectedPlaying) {
// playhead not moving BUT media expected to play
const tnow = performance.now();
const hls = this.hls;
if(!this.stalled) {
// stall just detected, store current time
this.stalled = tnow;
this.stallReported = false;
} else {
// playback already stalled, check stalling duration
// if stalling for more than a given threshold, let's try to recover
const stalledDuration = tnow - this.stalled;
const bufferLen = bufferInfo.len;
let nudgeRetry = this.nudgeRetry || 0;
// have we reached stall deadline ?
if (bufferLen <= jumpThreshold && stalledDuration > config.lowBufferWatchdogPeriod * 1000) {
// report stalled error once
if (!this.stallReported) {
this.stallReported = true;
logger.warn(`playback stalling in low buffer @${currentTime}`);
hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_STALLED_ERROR, fatal: false, buffer : bufferLen});
}
// if buffer len is below threshold, try to jump to start of next buffer range if close
// no buffer available @ currentTime, check if next buffer is close (within a config.maxSeekHole second range)
var nextBufferStart = bufferInfo.nextStart, delta = nextBufferStart-currentTime;
if(nextBufferStart &&
(delta < config.maxSeekHole) &&
(delta > 0)) {
this.nudgeRetry = ++nudgeRetry;
const nudgeOffset = nudgeRetry * config.nudgeOffset;
// next buffer is close ! adjust currentTime to nextBufferStart
// this will ensure effective video decoding
logger.log(`adjust currentTime from ${media.currentTime} to next buffered @ ${nextBufferStart} + nudge ${nudgeOffset}`);
media.currentTime = nextBufferStart + nudgeOffset;
// reset stalled so to rearm watchdog timer
this.stalled = undefined;
hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_SEEK_OVER_HOLE, fatal: false, hole : nextBufferStart + nudgeOffset - currentTime});
}
} else if (bufferLen > jumpThreshold && stalledDuration > config.highBufferWatchdogPeriod * 1000) {
// report stalled error once
if (!this.stallReported) {
this.stallReported = true;
logger.warn(`playback stalling in high buffer @${currentTime}`);
hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_STALLED_ERROR, fatal: false, buffer : bufferLen});
}
// reset stalled so to rearm watchdog timer
this.stalled = undefined;
this.nudgeRetry = ++nudgeRetry;
if (nudgeRetry < config.nudgeMaxRetry) {
const currentTime = media.currentTime;
const targetTime = currentTime + nudgeRetry * config.nudgeOffset;
logger.log(`adjust currentTime from ${currentTime} to ${targetTime}`);
// playback stalled in buffered area ... let's nudge currentTime to try to overcome this
media.currentTime = targetTime;
hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_NUDGE_ON_STALL, fatal: false});
} else {
logger.error(`still stuck in high buffer @${currentTime} after ${config.nudgeMaxRetry}, raise fatal error`);
hls.trigger(Event.ERROR, {type: ErrorTypes.MEDIA_ERROR, details: ErrorDetails.BUFFER_STALLED_ERROR, fatal: true});
}
}
}
}
}
}
}
}
onFragLoadEmergencyAborted() {
this.state = State.IDLE;
// if loadedmetadata is not set, it means that we are doing an emergency switch-down on the first frag
// in that case, reset startFragRequested flag
if(!this.loadedmetadata) {
this.startFragRequested = false;
this.nextLoadPosition = this.startPosition;
}
this.tick();
}
onBufferFlushed() {
/* after successful buffer flushing, filter flushed fragments from bufferedFrags
use mediaBuffered instead of media (so that we will check against video.buffered ranges in case of alt audio track)
*/
const media = this.mediaBuffer ? this.mediaBuffer : this.media;
this._bufferedFrags = this._bufferedFrags.filter(frag => {return BufferHelper.isBuffered(media,(frag.startPTS + frag.endPTS) / 2);});
if (this.fragLoadIdx !== undefined) {
// increase fragment load Index to avoid frag loop loading error after buffer flush
this.fragLoadIdx += 2 * this.config.fragLoadingLoopThreshold;
}
// move to IDLE once flush complete. this should trigger new fragment loading
this.state = State.IDLE;
// reset reference to frag
this.fragPrevious = null;
}
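// toggle the AAC/HE-AAC codec swap applied when transmuxing (see onFragLoaded / onFragParsingInitSegment).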
swapAudioCodec() {
this.audioCodecSwap = !this.audioCodecSwap;
}
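// live sync position = playlist start (sliding) + max(0, total duration - target latency),
// where target latency is liveSyncDuration, or liveSyncDurationCount * target duration.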
computeLivePosition(sliding, levelDetails) {
let targetLatency = this.config.liveSyncDuration !== undefined ? this.config.liveSyncDuration : this.config.liveSyncDurationCount * levelDetails.targetduration;
return sliding + Math.max(0, levelDetails.totalduration - targetLatency);
}
get liveSyncPosition() {
return this._liveSyncPosition;
}
set liveSyncPosition(value) {
this._liveSyncPosition = value;
}
}
export default StreamController;