// SPDX-FileCopyrightText: 2023 The Pion community <https://pion.ly>
// SPDX-License-Identifier: MIT

package webrtc

import (
	"encoding/json"
	"fmt"
	"sync"
	"time"

	"github.com/pion/ice/v4"
)
// A Stats object contains a set of statistics copied out of a monitored component
// of the WebRTC stack at a specific time.
type Stats interface {
statsMarker()
}
// UnmarshalStatsJSON unmarshals a Stats object from JSON.
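//
// A minimal usage sketch (the JSON payload below is illustrative only):
//
//	raw := []byte(`{"type":"codec","id":"RTCCodec_0","timestamp":0,"payloadType":111,"mimeType":"audio/opus","clockRate":48000}`)
//	stat, err := webrtc.UnmarshalStatsJSON(raw)
//	if err != nil {
//		// handle the error
//	}
//	if codec, ok := stat.(webrtc.CodecStats); ok {
//		fmt.Println(codec.MimeType, codec.ClockRate)
//	}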
func UnmarshalStatsJSON(b []byte) (Stats, error) { //nolint:cyclop
type typeJSON struct {
Type StatsType `json:"type"`
}
typeHolder := typeJSON{}
err := json.Unmarshal(b, &typeHolder)
if err != nil {
return nil, fmt.Errorf("unmarshal json type: %w", err)
}
switch typeHolder.Type {
case StatsTypeCodec:
return unmarshalCodecStats(b)
case StatsTypeInboundRTP:
return unmarshalInboundRTPStreamStats(b)
case StatsTypeOutboundRTP:
return unmarshalOutboundRTPStreamStats(b)
case StatsTypeRemoteInboundRTP:
return unmarshalRemoteInboundRTPStreamStats(b)
case StatsTypeRemoteOutboundRTP:
return unmarshalRemoteOutboundRTPStreamStats(b)
case StatsTypeCSRC:
return unmarshalCSRCStats(b)
case StatsTypeMediaSource:
return unmarshalMediaSourceStats(b)
case StatsTypeMediaPlayout:
return unmarshalMediaPlayoutStats(b)
case StatsTypePeerConnection:
return unmarshalPeerConnectionStats(b)
case StatsTypeDataChannel:
return unmarshalDataChannelStats(b)
case StatsTypeStream:
return unmarshalStreamStats(b)
case StatsTypeTrack:
return unmarshalTrackStats(b)
case StatsTypeSender:
return unmarshalSenderStats(b)
case StatsTypeReceiver:
return unmarshalReceiverStats(b)
case StatsTypeTransport:
return unmarshalTransportStats(b)
case StatsTypeCandidatePair:
return unmarshalICECandidatePairStats(b)
case StatsTypeLocalCandidate, StatsTypeRemoteCandidate:
return unmarshalICECandidateStats(b)
case StatsTypeCertificate:
return unmarshalCertificateStats(b)
case StatsTypeSCTPTransport:
return unmarshalSCTPTransportStats(b)
default:
return nil, fmt.Errorf("type: %w", ErrUnknownType)
}
}
// StatsType indicates the type of the object that a Stats object represents.
type StatsType string
const (
// StatsTypeCodec is used by CodecStats.
StatsTypeCodec StatsType = "codec"
// StatsTypeInboundRTP is used by InboundRTPStreamStats.
StatsTypeInboundRTP StatsType = "inbound-rtp"
// StatsTypeOutboundRTP is used by OutboundRTPStreamStats.
StatsTypeOutboundRTP StatsType = "outbound-rtp"
// StatsTypeRemoteInboundRTP is used by RemoteInboundRTPStreamStats.
StatsTypeRemoteInboundRTP StatsType = "remote-inbound-rtp"
// StatsTypeRemoteOutboundRTP is used by RemoteOutboundRTPStreamStats.
StatsTypeRemoteOutboundRTP StatsType = "remote-outbound-rtp"
// StatsTypeCSRC is used by RTPContributingSourceStats.
StatsTypeCSRC StatsType = "csrc"
// StatsTypeMediaSource is used by AudioSourceStats or VideoSourceStats depending on kind.
StatsTypeMediaSource StatsType = "media-source"
// StatsTypeMediaPlayout is used by AudioPlayoutStats.
StatsTypeMediaPlayout StatsType = "media-playout"
// StatsTypePeerConnection is used by PeerConnectionStats.
StatsTypePeerConnection StatsType = "peer-connection"
// StatsTypeDataChannel is used by DataChannelStats.
StatsTypeDataChannel StatsType = "data-channel"
// StatsTypeStream is used by MediaStreamStats.
StatsTypeStream StatsType = "stream"
// StatsTypeTrack is used by SenderVideoTrackAttachmentStats and SenderAudioTrackAttachmentStats depending on kind.
StatsTypeTrack StatsType = "track"
// StatsTypeSender is used by the AudioSenderStats or VideoSenderStats depending on kind.
StatsTypeSender StatsType = "sender"
// StatsTypeReceiver is used by the AudioReceiverStats or VideoReceiverStats depending on kind.
StatsTypeReceiver StatsType = "receiver"
// StatsTypeTransport is used by TransportStats.
StatsTypeTransport StatsType = "transport"
// StatsTypeCandidatePair is used by ICECandidatePairStats.
StatsTypeCandidatePair StatsType = "candidate-pair"
// StatsTypeLocalCandidate is used by ICECandidateStats for the local candidate.
StatsTypeLocalCandidate StatsType = "local-candidate"
// StatsTypeRemoteCandidate is used by ICECandidateStats for the remote candidate.
StatsTypeRemoteCandidate StatsType = "remote-candidate"
// StatsTypeCertificate is used by CertificateStats.
StatsTypeCertificate StatsType = "certificate"
// StatsTypeSCTPTransport is used by SCTPTransportStats.
StatsTypeSCTPTransport StatsType = "sctp-transport"
)
// MediaKind indicates the kind of media (audio or video).
type MediaKind string
const (
// MediaKindAudio indicates this is audio stats.
MediaKindAudio MediaKind = "audio"
// MediaKindVideo indicates this is video stats.
MediaKindVideo MediaKind = "video"
)
// StatsTimestamp is a timestamp represented by the floating point number of
// milliseconds since the epoch.
type StatsTimestamp float64
// Time returns the time.Time represented by this timestamp.
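//
// For example (illustrative):
//
//	ts := webrtc.StatsTimestamp(1e12) // milliseconds since the Unix epoch
//	t := ts.Time()                    // the same instant as a time.Time in UTC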
func (s StatsTimestamp) Time() time.Time {
millis := float64(s)
nanos := int64(millis * float64(time.Millisecond))
return time.Unix(0, nanos).UTC()
}
func statsTimestampFrom(t time.Time) StatsTimestamp {
return StatsTimestamp(t.UnixNano() / int64(time.Millisecond))
}
func statsTimestampNow() StatsTimestamp {
return statsTimestampFrom(time.Now())
}
// StatsReport collects Stats objects indexed by their ID.
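//
// A report is typically obtained from PeerConnection.GetStats and inspected
// with a type switch, for example (illustrative sketch):
//
//	for _, s := range peerConnection.GetStats() {
//		switch stat := s.(type) {
//		case webrtc.InboundRTPStreamStats:
//			fmt.Println("inbound", stat.SSRC, stat.PacketsReceived)
//		case webrtc.OutboundRTPStreamStats:
//			fmt.Println("outbound", stat.SSRC, stat.BytesSent)
//		}
//	}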
type StatsReport map[string]Stats
type statsReportCollector struct {
collectingGroup sync.WaitGroup
report StatsReport
mux sync.Mutex
}
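// newStatsReportCollector returns an empty collector. Each call to Collecting
// must be balanced by exactly one Collect (or Done) call before Ready returns,
// for example (an internal usage sketch):
//
//	collector := newStatsReportCollector()
//	collector.Collecting()
//	go func() {
//		// gather the stats for one component, then:
//		collector.Collect("some-id", stats)
//	}()
//	report := collector.Ready() // blocks until every pending collection finishes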
func newStatsReportCollector() *statsReportCollector {
return &statsReportCollector{report: make(StatsReport)}
}
func (src *statsReportCollector) Collecting() {
src.collectingGroup.Add(1)
}
func (src *statsReportCollector) Collect(id string, stats Stats) {
src.mux.Lock()
defer src.mux.Unlock()
src.report[id] = stats
src.collectingGroup.Done()
}
func (src *statsReportCollector) Done() {
src.collectingGroup.Done()
}
func (src *statsReportCollector) Ready() StatsReport {
src.collectingGroup.Wait()
src.mux.Lock()
defer src.mux.Unlock()
return src.report
}
// CodecType specifies whether a CodecStats object represents a media format
// that is being encoded or decoded.
type CodecType string
const (
// CodecTypeEncode means the attached CodecStats represents a media format that
// is being encoded, or that the implementation is prepared to encode.
CodecTypeEncode CodecType = "encode"
// CodecTypeDecode means the attached CodecStats represents a media format
// that the implementation is prepared to decode.
CodecTypeDecode CodecType = "decode"
)
// CodecStats contains statistics for a codec that is currently being used by RTP streams
// being sent or received by this PeerConnection object.
type CodecStats struct {
// Timestamp is the timestamp associated with this object.
Timestamp StatsTimestamp `json:"timestamp"`
// Type is the object's StatsType
Type StatsType `json:"type"`
// ID is a unique id that is associated with the component inspected to produce
// this Stats object. Two Stats objects will have the same ID if they were produced
// by inspecting the same underlying object.
ID string `json:"id"`
// PayloadType as used in RTP encoding or decoding
PayloadType PayloadType `json:"payloadType"`
// CodecType of this CodecStats
CodecType CodecType `json:"codecType"`
// TransportID is the unique identifier of the transport on which this codec is
// being used, which can be used to look up the corresponding TransportStats object.
TransportID string `json:"transportId"`
// MimeType is the codec MIME media type/subtype. e.g., video/vp8 or equivalent.
MimeType string `json:"mimeType"`
// ClockRate represents the media sampling rate.
ClockRate uint32 `json:"clockRate"`
// Channels is 2 for stereo, missing for most other cases.
Channels uint8 `json:"channels"`
// SDPFmtpLine is the a=fmtp line in the SDP corresponding to the codec,
// i.e., after the colon following the PT.
SDPFmtpLine string `json:"sdpFmtpLine"`
// Implementation identifies the implementation used. This is useful for diagnosing
// interoperability issues.
Implementation string `json:"implementation"`
}
func (s CodecStats) statsMarker() {}
func unmarshalCodecStats(b []byte) (CodecStats, error) {
var codecStats CodecStats
err := json.Unmarshal(b, &codecStats)
if err != nil {
return CodecStats{}, fmt.Errorf("unmarshal codec stats: %w", err)
}
return codecStats, nil
}
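// Stats objects cross-reference one another through their ID fields. For
// example, the transport a codec is used on can be resolved from a
// StatsReport (illustrative sketch; report and codecStat are assumed to come
// from PeerConnection.GetStats):
//
//	if transport, ok := report[codecStat.TransportID].(webrtc.TransportStats); ok {
//		fmt.Printf("%+v\n", transport)
//	}
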
// InboundRTPStreamStats contains statistics for an inbound RTP stream that is
// currently received with this PeerConnection object.
type InboundRTPStreamStats struct {
// Mid represents the mid value of the RTPTransceiver owning this stream, if that value is not
// null. Otherwise, this member is not present.
Mid string `json:"mid"`
// Timestamp is the timestamp associated with this object.
Timestamp StatsTimestamp `json:"timestamp"`
// Type is the object's StatsType
Type StatsType `json:"type"`
// ID is a unique id that is associated with the component inspected to produce
// this Stats object. Two Stats objects will have the same ID if they were produced
// by inspecting the same underlying object.
ID string `json:"id"`
// SSRC is the 32-bit unsigned integer value used to identify the source of the
// stream of RTP packets that this stats object concerns.
SSRC SSRC `json:"ssrc"`
// Kind is either "audio" or "video"
Kind string `json:"kind"`
// TransportID is a unique identifier that is associated to the object that was inspected
// to produce the TransportStats associated with this RTP stream.
TransportID string `json:"transportId"`
// CodecID is a unique identifier that is associated to the object that was inspected
// to produce the CodecStats associated with this RTP stream.
CodecID string `json:"codecId"`
// FIRCount counts the total number of Full Intra Request (FIR) packets received
// by the sender. This metric is only valid for video and is sent by receiver.
FIRCount uint32 `json:"firCount"`
// PLICount counts the total number of Picture Loss Indication (PLI) packets
// received by the sender. This metric is only valid for video and is sent by receiver.
PLICount uint32 `json:"pliCount"`
// TotalProcessingDelay is the sum of the time, in seconds, each audio sample or video frame
// takes from the time the first RTP packet is received (reception timestamp) to the time
// the corresponding sample or frame is decoded (decoded timestamp). At this point the audio
// sample or video frame is ready for playout by the MediaStreamTrack. Typically ready for
// playout here means after the audio sample or video frame is fully decoded by the decoder.
TotalProcessingDelay float64 `json:"totalProcessingDelay"`
// NACKCount counts the total number of Negative ACKnowledgement (NACK) packets
// received by the sender and is sent by receiver.
NACKCount uint32 `json:"nackCount"`
// JitterBufferDelay is the sum of the time, in seconds, each audio sample or a video frame
// takes from the time the first packet is received by the jitter buffer (ingest timestamp)
// to the time it exits the jitter buffer (emit timestamp). The average jitter buffer delay
// can be calculated by dividing the JitterBufferDelay with the JitterBufferEmittedCount.
JitterBufferDelay float64 `json:"jitterBufferDelay"`
// JitterBufferTargetDelay is increased by the target jitter buffer delay every time a sample is emitted
// by the jitter buffer. The added target is the target delay, in seconds, at the time that
// the sample was emitted from the jitter buffer. To get the average target delay,
// divide by JitterBufferEmittedCount
JitterBufferTargetDelay float64 `json:"jitterBufferTargetDelay"`
// JitterBufferEmittedCount is the total number of audio samples or video frames that
// have come out of the jitter buffer (increasing jitterBufferDelay).
JitterBufferEmittedCount uint64 `json:"jitterBufferEmittedCount"`
// JitterBufferMinimumDelay works the same way as jitterBufferTargetDelay, except that
// it is not affected by external mechanisms that increase the jitter buffer target delay,
// such as jitterBufferTarget, AV sync, or any other mechanisms. This metric is purely
// based on the network characteristics such as jitter and packet loss, and can be seen
// as the minimum obtainable jitter buffer delay if no external factors would affect it.
// The metric is updated every time JitterBufferEmittedCount is updated.
JitterBufferMinimumDelay float64 `json:"jitterBufferMinimumDelay"`
// TotalSamplesReceived is the total number of samples that have been received on
// this RTP stream. This includes concealedSamples. Does not exist for video.
TotalSamplesReceived uint64 `json:"totalSamplesReceived"`
// ConcealedSamples is the total number of samples that are concealed samples.
// A concealed sample is a sample that was replaced with synthesized samples generated
// locally before being played out. Examples of samples that have to be concealed are
// samples from lost packets (reported in packetsLost) or samples from packets that
// arrive too late to be played out (reported in packetsDiscarded). Does not exist for video.
ConcealedSamples uint64 `json:"concealedSamples"`
// SilentConcealedSamples is the total number of concealed samples inserted that
// are "silent". Playing out silent samples results in silence or comfort noise.
// This is a subset of concealedSamples. Does not exist for video.
SilentConcealedSamples uint64 `json:"silentConcealedSamples"`
// ConcealmentEvents increases every time a concealed sample is synthesized after
// a non-concealed sample. That is, multiple consecutive concealed samples will increase
// the concealedSamples count multiple times but count as a single concealment event.
// Does not exist for video.
ConcealmentEvents uint64 `json:"concealmentEvents"`
// InsertedSamplesForDeceleration is increased by the difference between the number of
// samples received and the number of samples played out when playout is slowed down.
// If playout is slowed down by inserting samples, this will be the number of inserted samples.
// Does not exist for video.
InsertedSamplesForDeceleration uint64 `json:"insertedSamplesForDeceleration"`
// RemovedSamplesForAcceleration is increased by the difference between the number of
// samples received and the number of samples played out when playout is sped up. If speedup
// is achieved by removing samples, this will be the count of samples removed.
// Does not exist for video.
RemovedSamplesForAcceleration uint64 `json:"removedSamplesForAcceleration"`
// AudioLevel represents the audio level of the receiving track.
//
// The value is between 0..1 (linear), where 1.0 represents 0 dBov,
// 0 represents silence, and 0.5 represents approximately 6 dBSPL change in
// the sound pressure level from 0 dBov. Does not exist for video.
AudioLevel float64 `json:"audioLevel"`
// TotalAudioEnergy represents the audio energy of the receiving track. It is calculated
// by duration * Math.pow(energy/maxEnergy, 2) for each audio sample received (and thus
// counted by TotalSamplesReceived). Does not exist for video.
TotalAudioEnergy float64 `json:"totalAudioEnergy"`
// TotalSamplesDuration represents the total duration in seconds of all samples that have been
// received (and thus counted by TotalSamplesReceived). Can be used with totalAudioEnergy to
// compute an average audio level over different intervals. Does not exist for video.
TotalSamplesDuration float64 `json:"totalSamplesDuration"`
// SLICount counts the total number of Slice Loss Indication (SLI) packets received
// by the sender. This metric is only valid for video and is sent by receiver.
SLICount uint32 `json:"sliCount"`
// QPSum is the sum of the QP values of frames passed. The count of frames is
// in FramesDecoded for inbound stream stats, and in FramesEncoded for outbound stream stats.
QPSum uint64 `json:"qpSum"`
// TotalDecodeTime is the total number of seconds that have been spent decoding the FramesDecoded
// frames of this stream. The average decode time can be calculated by dividing this value
// with FramesDecoded. The time it takes to decode one frame is the time passed between
// feeding the decoder a frame and the decoder returning decoded data for that frame.
TotalDecodeTime float64 `json:"totalDecodeTime"`
// TotalInterFrameDelay is the sum of the interframe delays in seconds between consecutively
// rendered frames, recorded just after a frame has been rendered. The interframe delay variance
// can be calculated from TotalInterFrameDelay, TotalSquaredInterFrameDelay, and FramesRendered according
// to the formula: (TotalSquaredInterFrameDelay - TotalInterFrameDelay^2 / FramesRendered) / FramesRendered
// (a worked sketch follows unmarshalInboundRTPStreamStats below). Does not exist for audio.
TotalInterFrameDelay float64 `json:"totalInterFrameDelay"`
// TotalSquaredInterFrameDelay is the sum of the squared interframe delays in seconds
// between consecutively rendered frames, recorded just after a frame has been rendered.
// See TotalInterFrameDelay for details on how to calculate the interframe delay variance.
// Does not exist for audio.
TotalSquaredInterFrameDelay float64 `json:"totalSquaredInterFrameDelay"`
// PacketsReceived is the total number of RTP packets received for this SSRC.
PacketsReceived uint32 `json:"packetsReceived"`
// PacketsLost is the total number of RTP packets lost for this SSRC. Note that
// because of how this is estimated, it can be negative if more packets are received than sent.
PacketsLost int32 `json:"packetsLost"`
// Jitter is the packet jitter measured in seconds for this SSRC
Jitter float64 `json:"jitter"`
// PacketsDiscarded is the cumulative number of RTP packets discarded by the jitter
// buffer due to late or early-arrival, i.e., these packets are not played out.
// RTP packets discarded due to packet duplication are not reported in this metric.
PacketsDiscarded uint32 `json:"packetsDiscarded"`
// PacketsRepaired is the cumulative number of lost RTP packets repaired after applying
// an error-resilience mechanism. It is measured for the primary source RTP packets
// and only counted for RTP packets that have no further chance of repair.
PacketsRepaired uint32 `json:"packetsRepaired"`
// BurstPacketsLost is the cumulative number of RTP packets lost during loss bursts.
BurstPacketsLost uint32 `json:"burstPacketsLost"`
// BurstPacketsDiscarded is the cumulative number of RTP packets discarded during discard bursts.
BurstPacketsDiscarded uint32 `json:"burstPacketsDiscarded"`
// BurstLossCount is the cumulative number of bursts of lost RTP packets.
BurstLossCount uint32 `json:"burstLossCount"`
// BurstDiscardCount is the cumulative number of bursts of discarded RTP packets.
BurstDiscardCount uint32 `json:"burstDiscardCount"`
// BurstLossRate is the fraction of RTP packets lost during bursts to the
// total number of RTP packets expected in the bursts.
BurstLossRate float64 `json:"burstLossRate"`
// BurstDiscardRate is the fraction of RTP packets discarded during bursts to
// the total number of RTP packets expected in bursts.
BurstDiscardRate float64 `json:"burstDiscardRate"`
// GapLossRate is the fraction of RTP packets lost during the gap periods.
GapLossRate float64 `json:"gapLossRate"`
// GapDiscardRate is the fraction of RTP packets discarded during the gap periods.
GapDiscardRate float64 `json:"gapDiscardRate"`
// TrackID is the identifier of the stats object representing the receiving track,
// a ReceiverAudioTrackAttachmentStats or ReceiverVideoTrackAttachmentStats.
TrackID string `json:"trackId"`
// ReceiverID is the stats ID used to look up the AudioReceiverStats or VideoReceiverStats
// object receiving this stream.
ReceiverID string `json:"receiverId"`
// RemoteID is used for looking up the remote RemoteOutboundRTPStreamStats object
// for the same SSRC.
RemoteID string `json:"remoteId"`
// FramesDecoded represents the total number of frames correctly decoded for this SSRC,
// i.e., frames that would be displayed if no frames are dropped. Only valid for video.
FramesDecoded uint32 `json:"framesDecoded"`
// KeyFramesDecoded represents the total number of key frames, such as key frames in
// VP8 [RFC6386] or IDR-frames in H.264 [RFC6184], successfully decoded for this RTP
// media stream. This is a subset of FramesDecoded. FramesDecoded - KeyFramesDecoded
// gives you the number of delta frames decoded. Does not exist for audio.
KeyFramesDecoded uint32 `json:"keyFramesDecoded"`
// FramesRendered represents the total number of frames that have been rendered.
// It is incremented just after a frame has been rendered. Does not exist for audio.
FramesRendered uint32 `json:"framesRendered"`
// FramesDropped is the total number of frames dropped prior to decode or dropped
// because the frame missed its display deadline for this receiver's track.
// The measurement begins when the receiver is created and is a cumulative metric
// as defined in Appendix A (g) of [RFC7004]. Does not exist for audio.
FramesDropped uint32 `json:"framesDropped"`
// FrameWidth represents the width of the last decoded frame. Before the first
// frame is decoded this member does not exist. Does not exist for audio.
FrameWidth uint32 `json:"frameWidth"`
// FrameHeight represents the height of the last decoded frame. Before the first
// frame is decoded this member does not exist. Does not exist for audio.
FrameHeight uint32 `json:"frameHeight"`
// LastPacketReceivedTimestamp represents the timestamp at which the last packet was
// received for this SSRC. This differs from Timestamp, which represents the time
// at which the statistics were generated by the local endpoint.
LastPacketReceivedTimestamp StatsTimestamp `json:"lastPacketReceivedTimestamp"`
// HeaderBytesReceived is the total number of RTP header and padding bytes received for this SSRC.
// This includes retransmissions. This does not include the size of transport layer headers such
// as IP or UDP. headerBytesReceived + bytesReceived equals the number of bytes received as
// payload over the transport.
HeaderBytesReceived uint64 `json:"headerBytesReceived"`
// AverageRTCPInterval is the average RTCP interval between two consecutive compound RTCP packets.
// This is calculated by the sending endpoint when sending compound RTCP reports.
// Compound packets must contain at least a RTCP RR or SR packet and an SDES packet
// with the CNAME item.
AverageRTCPInterval float64 `json:"averageRtcpInterval"`
// FECPacketsReceived is the total number of RTP FEC packets received for this SSRC.
// This counter can also be incremented when receiving FEC packets in-band with media packets (e.g., with Opus).
FECPacketsReceived uint32 `json:"fecPacketsReceived"`
// FECPacketsDiscarded is the total number of RTP FEC packets received for this SSRC where the
// error correction payload was discarded by the application. This may happen
// 1. if all the source packets protected by the FEC packet were received or already
// recovered by a separate FEC packet, or
// 2. if the FEC packet arrived late, i.e., outside the recovery window, and the
// lost RTP packets have already been skipped during playout.
// This is a subset of FECPacketsReceived.
FECPacketsDiscarded uint64 `json:"fecPacketsDiscarded"`
// BytesReceived is the total number of bytes received for this SSRC.
BytesReceived uint64 `json:"bytesReceived"`
// FramesReceived represents the total number of complete frames received on this RTP stream.
// This metric is incremented when the complete frame is received. Does not exist for audio.
FramesReceived uint32 `json:"framesReceived"`
// PacketsFailedDecryption is the cumulative number of RTP packets that failed
// to be decrypted. These packets are not counted by PacketsDiscarded.
PacketsFailedDecryption uint32 `json:"packetsFailedDecryption"`
// PacketsDuplicated is the cumulative number of packets discarded because they
// are duplicated. Duplicate packets are not counted in PacketsDiscarded.
//
// Duplicated packets have the same RTP sequence number and content as a previously
// received packet. If multiple duplicates of a packet are received, all of them are counted.
// An improved estimate of lost packets can be calculated by adding PacketsDuplicated to PacketsLost.
PacketsDuplicated uint32 `json:"packetsDuplicated"`
// PerDSCPPacketsReceived is the total number of packets received for this SSRC,
// per Differentiated Services code point (DSCP) [RFC2474]. DSCPs are identified
// as decimal integers in string form. Note that due to network remapping and bleaching,
// these numbers are not expected to match the numbers seen on sending. Not all
// OSes make this information available.
PerDSCPPacketsReceived map[string]uint32 `json:"perDscpPacketsReceived"`
// DecoderImplementation identifies the decoder implementation used. This is useful for diagnosing interoperability issues.
// Does not exist for audio.
DecoderImplementation string `json:"decoderImplementation"`
// PauseCount is the total number of video pauses experienced by this receiver.
// Video is considered to be paused if the time passed since the last rendered frame exceeds 5 seconds.
// PauseCount is incremented when a frame is rendered after such a pause. Does not exist for audio.
PauseCount uint32 `json:"pauseCount"`
// TotalPausesDuration is the total duration of pauses (for definition of pause see PauseCount), in seconds.
// Does not exist for audio.
TotalPausesDuration float64 `json:"totalPausesDuration"`
// FreezeCount is the total number of video freezes experienced by this receiver.
// It is a freeze if the frame duration, which is the time interval between two consecutively rendered frames,
// equals or exceeds Max(3 * avg_frame_duration_ms, avg_frame_duration_ms + 150),
// where avg_frame_duration_ms is the linear average of the durations of the last 30 rendered frames.
// Does not exist for audio.
FreezeCount uint32 `json:"freezeCount"`
// TotalFreezesDuration is the total duration of rendered frames which are considered as frozen
// (for definition of freeze see freezeCount), in seconds. Does not exist for audio.
TotalFreezesDuration float64 `json:"totalFreezesDuration"`
// PowerEfficientDecoder indicates whether the decoder currently used is considered power efficient
// by the user agent. Does not exist for audio.
PowerEfficientDecoder bool `json:"powerEfficientDecoder"`
}
func (s InboundRTPStreamStats) statsMarker() {}
func unmarshalInboundRTPStreamStats(b []byte) (InboundRTPStreamStats, error) {
var inboundRTPStreamStats InboundRTPStreamStats
err := json.Unmarshal(b, &inboundRTPStreamStats)
if err != nil {
return InboundRTPStreamStats{}, fmt.Errorf("unmarshal inbound rtp stream stats: %w", err)
}
return inboundRTPStreamStats, nil
}
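// The cumulative counters documented on InboundRTPStreamStats are usually turned
// into averages by the caller. A worked sketch, assuming an inbound stat taken
// from a StatsReport (illustrative, not part of the API):
//
//	averageJitterBufferDelay := 0.0
//	if inbound.JitterBufferEmittedCount > 0 {
//		averageJitterBufferDelay = inbound.JitterBufferDelay / float64(inbound.JitterBufferEmittedCount)
//	}
//
//	// Interframe delay variance, per the formula documented on TotalInterFrameDelay.
//	interFrameDelayVariance := 0.0
//	if n := float64(inbound.FramesRendered); n > 0 {
//		interFrameDelayVariance = (inbound.TotalSquaredInterFrameDelay - inbound.TotalInterFrameDelay*inbound.TotalInterFrameDelay/n) / n
//	}
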
// QualityLimitationReason lists the reason for limiting the resolution and/or framerate.
// Only valid for video.
type QualityLimitationReason string
const (
// QualityLimitationReasonNone means the resolution and/or framerate is not limited.
QualityLimitationReasonNone QualityLimitationReason = "none"
// QualityLimitationReasonCPU means the resolution and/or framerate is primarily limited due to CPU load.
QualityLimitationReasonCPU QualityLimitationReason = "cpu"
// QualityLimitationReasonBandwidth means the resolution and/or framerate is primarily limited
// due to congestion cues during bandwidth estimation.
// Typically, congestion control algorithms use inter-arrival time, round-trip time,
// packet loss, or other congestion cues to perform bandwidth estimation.
QualityLimitationReasonBandwidth QualityLimitationReason = "bandwidth"
// QualityLimitationReasonOther means the resolution and/or framerate is primarily limited
// for a reason other than the above.
QualityLimitationReasonOther QualityLimitationReason = "other"
)
// OutboundRTPStreamStats contains statistics for an outbound RTP stream that is
// currently sent with this PeerConnection object.
type OutboundRTPStreamStats struct {
// Mid represents the mid value of the RTPTransceiver owning this stream, if that value is not
// null. Otherwise, this member is not present.
Mid string `json:"mid"`
// Rid only exists if a rid has been set for this RTP stream.
// Must not exist for audio.
Rid string `json:"rid"`
// MediaSourceID is the identifier of the stats object representing the track currently
// attached to the sender of this stream, an RTCMediaSourceStats.
MediaSourceID string `json:"mediaSourceId"`
// Timestamp is the timestamp associated with this object.
Timestamp StatsTimestamp `json:"timestamp"`
// Type is the object's StatsType
Type StatsType `json:"type"`
// ID is a unique id that is associated with the component inspected to produce
// this Stats object. Two Stats objects will have the same ID if they were produced
// by inspecting the same underlying object.
ID string `json:"id"`
// SSRC is the 32-bit unsigned integer value used to identify the source of the
// stream of RTP packets that this stats object concerns.
SSRC SSRC `json:"ssrc"`
// Kind is either "audio" or "video"
Kind string `json:"kind"`
// TransportID is a unique identifier that is associated to the object that was inspected
// to produce the TransportStats associated with this RTP stream.
TransportID string `json:"transportId"`
// CodecID is a unique identifier that is associated to the object that was inspected
// to produce the CodecStats associated with this RTP stream.
CodecID string `json:"codecId"`
// HeaderBytesSent is the total number of RTP header and padding bytes sent for this SSRC. This does not
// include the size of transport layer headers such as IP or UDP.
// HeaderBytesSent + BytesSent equals the number of bytes sent as payload over the transport.
HeaderBytesSent uint64 `json:"headerBytesSent"`
// RetransmittedPacketsSent is the total number of packets that were retransmitted for this SSRC.
// This is a subset of packetsSent. If RTX is not negotiated, retransmitted packets are sent
// over this ssrc. If RTX was negotiated, retransmitted packets are sent over a separate SSRC
// but are still accounted for here.
RetransmittedPacketsSent uint64 `json:"retransmittedPacketsSent"`
// RetransmittedBytesSent is the total number of bytes that were retransmitted for this SSRC,
// only including payload bytes. This is a subset of bytesSent. If RTX is not negotiated,
// retransmitted bytes are sent over this ssrc. If RTX was negotiated, retransmitted bytes
// are sent over a separate SSRC but are still accounted for here.
RetransmittedBytesSent uint64 `json:"retransmittedBytesSent"`
// FIRCount counts the total number of Full Intra Request (FIR) packets received
// by the sender. This metric is only valid for video and is sent by receiver.
FIRCount uint32 `json:"firCount"`
// PLICount counts the total number of Picture Loss Indication (PLI) packets
// received by the sender. This metric is only valid for video and is sent by receiver.
PLICount uint32 `json:"pliCount"`
// NACKCount counts the total number of Negative ACKnowledgement (NACK) packets
// received by the sender and is sent by receiver.
NACKCount uint32 `json:"nackCount"`
// SLICount counts the total number of Slice Loss Indication (SLI) packets received
// by the sender. This metric is only valid for video and is sent by receiver.
SLICount uint32 `json:"sliCount"`
// QPSum is the sum of the QP values of frames passed. The count of frames is
// in FramesDecoded for inbound stream stats, and in FramesEncoded for outbound stream stats.
QPSum uint64 `json:"qpSum"`
// PacketsSent is the total number of RTP packets sent for this SSRC.
PacketsSent uint32 `json:"packetsSent"`
// PacketsDiscardedOnSend is the total number of RTP packets for this SSRC that
// have been discarded due to socket errors, i.e. a socket error occurred when handing
// the packets to the socket. This might happen due to various reasons, including
// full buffer or no available memory.
PacketsDiscardedOnSend uint32 `json:"packetsDiscardedOnSend"`
// FECPacketsSent is the total number of RTP FEC packets sent for this SSRC.
// This counter can also be incremented when sending FEC packets in-band with
// media packets (e.g., with Opus).
FECPacketsSent uint32 `json:"fecPacketsSent"`
// BytesSent is the total number of bytes sent for this SSRC.
BytesSent uint64 `json:"bytesSent"`
// BytesDiscardedOnSend is the total number of bytes for this SSRC that have
// been discarded due to socket errors, i.e. a socket error occurred when handing
// the packets containing the bytes to the socket. This might happen due to various
// reasons, including full buffer or no available memory.
BytesDiscardedOnSend uint64 `json:"bytesDiscardedOnSend"`
// TrackID is the identifier of the stats object representing the current track
// attachment to the sender of this stream, a SenderAudioTrackAttachmentStats
// or SenderVideoTrackAttachmentStats.
TrackID string `json:"trackId"`
// SenderID is the stats ID used to look up the AudioSenderStats or VideoSenderStats
// object sending this stream.
SenderID string `json:"senderId"`
// RemoteID is used for looking up the remote RemoteInboundRTPStreamStats object
// for the same SSRC.
RemoteID string `json:"remoteId"`
// LastPacketSentTimestamp represents the timestamp at which the last packet was
// sent for this SSRC. This differs from timestamp, which represents the time at
// which the statistics were generated by the local endpoint.
LastPacketSentTimestamp StatsTimestamp `json:"lastPacketSentTimestamp"`
// TargetBitrate is the current target bitrate configured for this particular SSRC
// and is the Transport Independent Application Specific (TIAS) bitrate [RFC3890].
// Typically, the target bitrate is a configuration parameter provided to the codec's
// encoder and does not count the size of the IP or other transport layers like TCP or UDP.
// It is measured in bits per second and the bitrate is calculated over a 1 second window.
TargetBitrate float64 `json:"targetBitrate"`
// TotalEncodedBytesTarget is increased by the target frame size in bytes every time
// a frame has been encoded. The actual frame size may be bigger or smaller than this number.
// This value goes up every time framesEncoded goes up.
TotalEncodedBytesTarget uint64 `json:"totalEncodedBytesTarget"`
// FrameWidth represents the width of the last encoded frame. The resolution of the
// encoded frame may be lower than the media source. Before the first frame is encoded
// this member does not exist. Does not exist for audio.
FrameWidth uint32 `json:"frameWidth"`
// FrameHeight represents the height of the last encoded frame. The resolution of the
// encoded frame may be lower than the media source. Before the first frame is encoded
// this member does not exist. Does not exist for audio.
FrameHeight uint32 `json:"frameHeight"`
// FramesPerSecond is the number of encoded frames during the last second. This may be
// lower than the media source frame rate. Does not exist for audio.
FramesPerSecond float64 `json:"framesPerSecond"`
// FramesSent represents the total number of frames sent on this RTP stream. Does not exist for audio.
FramesSent uint32 `json:"framesSent"`
// HugeFramesSent represents the total number of huge frames sent by this RTP stream.
// Huge frames, by definition, are frames that have an encoded size at least 2.5 times
// the average size of the frames. The average size of the frames is defined as the
// target bitrate per second divided by the target FPS at the time the frame was encoded.
// These are usually frames that are complex to encode, with a lot of changes in the picture.
// This can be used to estimate, e.g., slide changes in the streamed presentation.
// Does not exist for audio.
HugeFramesSent uint32 `json:"hugeFramesSent"`
// FramesEncoded represents the total number of frames successfully encoded for this RTP media stream.
// Only valid for video.
FramesEncoded uint32 `json:"framesEncoded"`
// KeyFramesEncoded represents the total number of key frames, such as key frames in VP8 [RFC6386] or
// IDR-frames in H.264 [RFC6184], successfully encoded for this RTP media stream. This is a subset of
// FramesEncoded. FramesEncoded - KeyFramesEncoded gives you the number of delta frames encoded.
// Does not exist for audio.
KeyFramesEncoded uint32 `json:"keyFramesEncoded"`
// TotalEncodeTime is the total number of seconds that have been spent encoding the
// framesEncoded frames of this stream. The average encode time can be calculated by
// dividing this value with FramesEncoded. The time it takes to encode one frame is the
// time passed between feeding the encoder a frame and the encoder returning encoded data
// for that frame. This does not include any additional time it may take to packetize the resulting data.
TotalEncodeTime float64 `json:"totalEncodeTime"`
// TotalPacketSendDelay is the total number of seconds that packets have spent buffered
// locally before being transmitted onto the network. The time is measured from when
// a packet is emitted from the RTP packetizer until it is handed over to the OS network socket.
// This measurement is added to totalPacketSendDelay when packetsSent is incremented.
TotalPacketSendDelay float64 `json:"totalPacketSendDelay"`
// AverageRTCPInterval is the average RTCP interval between two consecutive compound RTCP
// packets. This is calculated by the sending endpoint when sending compound RTCP reports.
// Compound packets must contain at least a RTCP RR or SR packet and an SDES packet with the CNAME item.
AverageRTCPInterval float64 `json:"averageRtcpInterval"`
// QualityLimitationReason is the current reason for limiting the resolution and/or framerate,
// or "none" if not limited. Only valid for video.
QualityLimitationReason QualityLimitationReason `json:"qualityLimitationReason"`
// QualityLimitationDurations is a record of the total time, in seconds, that this
// stream has spent in each quality limitation state. The record includes a mapping
// for all QualityLimitationReason types, including "none". Only valid for video.
QualityLimitationDurations map[string]float64 `json:"qualityLimitationDurations"`
// QualityLimitationResolutionChanges is the number of times that the resolution has changed
// because we are quality limited (qualityLimitationReason has a value other than "none").
// The counter is initially zero and increases when the resolution goes up or down.
// For example, if a 720p track is sent as 480p for some time and then recovers to 720p,
// qualityLimitationResolutionChanges will have the value 2. Does not exist for audio.
QualityLimitationResolutionChanges uint32 `json:"qualityLimitationResolutionChanges"`
// PerDSCPPacketsSent is the total number of packets sent for this SSRC, per DSCP.
// DSCPs are identified as decimal integers in string form.
PerDSCPPacketsSent map[string]uint32 `json:"perDscpPacketsSent"`
// Active indicates whether this RTP stream is configured to be sent or disabled. Note that an
// active stream may nevertheless not be sending, e.g. when limited by network conditions.
Active bool `json:"active"`
// EncoderImplementation identifies the encoder implementation used. This is useful for diagnosing interoperability issues.
// Does not exist for audio.
EncoderImplementation string `json:"encoderImplementation"`
// PowerEfficientEncoder indicates whether the encoder currently used is considered power efficient
// by the user agent. Does not exist for audio.
PowerEfficientEncoder bool `json:"powerEfficientEncoder"`
// ScalabilityMode identifies the layering mode used for video encoding. Does not exist for audio.
ScalabilityMode string `json:"scalabilityMode"`
}
func (s OutboundRTPStreamStats) statsMarker() {}
func unmarshalOutboundRTPStreamStats(b []byte) (OutboundRTPStreamStats, error) {
var outboundRTPStreamStats OutboundRTPStreamStats
err := json.Unmarshal(b, &outboundRTPStreamStats)
if err != nil {
return OutboundRTPStreamStats{}, fmt.Errorf("unmarshal outbound rtp stream stats: %w", err)
}
return outboundRTPStreamStats, nil
}
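// Similar derived values are common for OutboundRTPStreamStats, for example the
// average encode time and the share of retransmitted packets (illustrative
// sketch, not part of the API):
//
//	averageEncodeTime := 0.0
//	if outbound.FramesEncoded > 0 {
//		averageEncodeTime = outbound.TotalEncodeTime / float64(outbound.FramesEncoded)
//	}
//
//	retransmissionRatio := 0.0
//	if outbound.PacketsSent > 0 {
//		retransmissionRatio = float64(outbound.RetransmittedPacketsSent) / float64(outbound.PacketsSent)
//	}
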
// RemoteInboundRTPStreamStats contains statistics for the remote endpoint's inbound
// RTP stream corresponding to an outbound stream that is currently sent with this
// PeerConnection object. It is measured at the remote endpoint and reported in an RTCP
// Receiver Report (RR) or RTCP Extended Report (XR).
type RemoteInboundRTPStreamStats struct {
// Timestamp is the timestamp associated with this object.
Timestamp StatsTimestamp `json:"timestamp"`
// Type is the object's StatsType
Type StatsType `json:"type"`
// ID is a unique id that is associated with the component inspected to produce
// this Stats object. Two Stats objects will have the same ID if they were produced
// by inspecting the same underlying object.
ID string `json:"id"`
// SSRC is the 32-bit unsigned integer value used to identify the source of the
// stream of RTP packets that this stats object concerns.
SSRC SSRC `json:"ssrc"`
// Kind is either "audio" or "video"
Kind string `json:"kind"`
// TransportID is a unique identifier that is associated to the object that was inspected
// to produce the TransportStats associated with this RTP stream.
TransportID string `json:"transportId"`
// CodecID is a unique identifier that is associated to the object that was inspected
// to produce the CodecStats associated with this RTP stream.
CodecID string `json:"codecId"`
// FIRCount counts the total number of Full Intra Request (FIR) packets received
// by the sender. This metric is only valid for video and is sent by receiver.
FIRCount uint32 `json:"firCount"`
// PLICount counts the total number of Picture Loss Indication (PLI) packets
// received by the sender. This metric is only valid for video and is sent by receiver.
PLICount uint32 `json:"pliCount"`
// NACKCount counts the total number of Negative ACKnowledgement (NACK) packets
// received by the sender and is sent by receiver.
NACKCount uint32 `json:"nackCount"`
// SLICount counts the total number of Slice Loss Indication (SLI) packets received
// by the sender. This metric is only valid for video and is sent by receiver.
SLICount uint32 `json:"sliCount"`
// QPSum is the sum of the QP values of frames passed. The count of frames is
// in FramesDecoded for inbound stream stats, and in FramesEncoded for outbound stream stats.
QPSum uint64 `json:"qpSum"`
// PacketsReceived is the total number of RTP packets received for this SSRC.
PacketsReceived uint32 `json:"packetsReceived"`
// PacketsLost is the total number of RTP packets lost for this SSRC. Note that
// because of how this is estimated, it can be negative if more packets are received than sent.
PacketsLost int32 `json:"packetsLost"`
// Jitter is the packet jitter measured in seconds for this SSRC
Jitter float64 `json:"jitter"`
// PacketsDiscarded is the cumulative number of RTP packets discarded by the jitter
// buffer due to late or early-arrival, i.e., these packets are not played out.
// RTP packets discarded due to packet duplication are not reported in this metric.
PacketsDiscarded uint32 `json:"packetsDiscarded"`
// PacketsRepaired is the cumulative number of lost RTP packets repaired after applying
// an error-resilience mechanism. It is measured for the primary source RTP packets
// and only counted for RTP packets that have no further chance of repair.
PacketsRepaired uint32 `json:"packetsRepaired"`
// BurstPacketsLost is the cumulative number of RTP packets lost during loss bursts.
BurstPacketsLost uint32 `json:"burstPacketsLost"`
// BurstPacketsDiscarded is the cumulative number of RTP packets discarded during discard bursts.
BurstPacketsDiscarded uint32 `json:"burstPacketsDiscarded"`
// BurstLossCount is the cumulative number of bursts of lost RTP packets.
BurstLossCount uint32 `json:"burstLossCount"`
// BurstDiscardCount is the cumulative number of bursts of discarded RTP packets.
BurstDiscardCount uint32 `json:"burstDiscardCount"`
// BurstLossRate is the fraction of RTP packets lost during bursts to the
// total number of RTP packets expected in the bursts.
BurstLossRate float64 `json:"burstLossRate"`
// BurstDiscardRate is the fraction of RTP packets discarded during bursts to
// the total number of RTP packets expected in bursts.
BurstDiscardRate float64 `json:"burstDiscardRate"`
// GapLossRate is the fraction of RTP packets lost during the gap periods.
GapLossRate float64 `json:"gapLossRate"`
// GapDiscardRate is the fraction of RTP packets discarded during the gap periods.
GapDiscardRate float64 `json:"gapDiscardRate"`
// LocalID is used for looking up the local OutboundRTPStreamStats object for the same SSRC.
LocalID string `json:"localId"`
// RoundTripTime is the estimated round trip time for this SSRC based on the
// RTCP timestamps in the RTCP Receiver Report (RR) and measured in seconds.
RoundTripTime float64 `json:"roundTripTime"`
// TotalRoundTripTime represents the cumulative sum of all round trip time measurements
// in seconds since the beginning of the session. The individual round trip time is calculated
// based on the RTCP timestamps in the RTCP Receiver Report (RR) [RFC3550], hence requires
// a DLSR value other than 0. The average round trip time can be computed from
// TotalRoundTripTime by dividing it by RoundTripTimeMeasurements.
TotalRoundTripTime float64 `json:"totalRoundTripTime"`
// FractionLost is the fraction of packet loss reported for this SSRC.
FractionLost float64 `json:"fractionLost"`
// RoundTripTimeMeasurements represents the total number of RTCP RR blocks received for this SSRC
// that contain a valid round trip time. This counter will not increment if the RoundTripTime cannot
// be calculated because no RTCP Receiver Report with a DLSR value other than 0 has been received.
RoundTripTimeMeasurements uint64 `json:"roundTripTimeMeasurements"`
}
func (s RemoteInboundRTPStreamStats) statsMarker() {}
func unmarshalRemoteInboundRTPStreamStats(b []byte) (RemoteInboundRTPStreamStats, error) {
var remoteInboundRTPStreamStats RemoteInboundRTPStreamStats
err := json.Unmarshal(b, &remoteInboundRTPStreamStats)
if err != nil {
return RemoteInboundRTPStreamStats{}, fmt.Errorf("unmarshal remote inbound rtp stream stats: %w", err)
}
return remoteInboundRTPStreamStats, nil
}
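// The average remote round trip time can be derived from the two cumulative
// fields above, as described on TotalRoundTripTime (illustrative sketch):
//
//	averageRTT := 0.0
//	if remote.RoundTripTimeMeasurements > 0 {
//		averageRTT = remote.TotalRoundTripTime / float64(remote.RoundTripTimeMeasurements)
//	}
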