import { JingleSession } from "../../libs/strophe/strophe.jingle.session";
import { WebrtcConfigAndUtils } from "./webrtcConfigAndUtils";

/**
 * Per-SSRC statistics entry: one RTP stream (inbound or outbound) of a
 * session, together with the derived quality measures computed on it.
 */
export interface WebrtcStatisticsSsrc {
    /** Report type of the underlying WebRTC stats entry. */
    type: string;
    /** Stream direction ("inbound" / "outbound"). */
    direction: string;
    /** Media kind ("audio" / "video"). */
    kind: string;
    /** SSRC identifier of the RTP stream. */
    ssrc: string;
    /** Timestamp at which measurements for this stream started. */
    startedAt: string;
    /** Elapsed duration of the measurements. */
    duration?: number;
    /** Number of measures taken so far. */
    measures?: number;
    /** Number of measures at the previous iteration. */
    lastMeasures?: number;
    /** Codec name in use for this stream. */
    codec?: string;
    /** Originator of the stream. */
    originator?: string;
    /** Computed quality rules for this stream. */
    rules?: any;
    /** Audio-level distribution statistics. */
    audioLevel?: AudioLevelStats;
    /** MOS distribution statistics. */
    mos?: MosLevelStats;
    /** Identifier of the associated codec report. */
    codecId?: string;
    /** Raw audio statistics. */
    audioStats?: any;
    /** Raw packet statistics. */
    packetStats?: any;
    /** Jitter values, in milliseconds. */
    jitterMs?: any;
    /** Identifier of the associated media-source report. */
    mediaSourceId?: string;
    /** Packets lost since the last measurement. */
    deltaPacketsLost?: number;
    /** Total packets lost since the beginning of the stream. */
    totalPacketsLost?: number;
    /** Packets handled since the last measurement. */
    deltaPackets?: number;
}

/**
 * Distribution (in percent) of the measured audio levels across the
 * muted / quiet / background / active / loud buckets.
 */
export interface AudioLevelStats {
    percentMuted: number;
    percentQuiet: number;
    percentBackground: number;
    percentActive: number;
    percentLoud: number;
}

/**
 * Distribution (in percent) of the measured MOS values across the
 * quality buckets, plus the overall mean.
 */
export interface MosLevelStats {
    percentVeryHigh: number;
    percentHigh: number;
    percentMedium: number;
    percentLow: number;
    percentMediocre: number;
    mean: number;
}

/**
 * Network-related statistics of the peer connection.
 */
export interface WebrtcStatisticsNetwork {
    /**
     * Type of network interface used. Can be "wifi", "vpn", "ethernet",
     * "unknown" or "cellular".
     * From `networkType` in the local-candidate report.
     */
    networkType?: string;
    /**
     * Route used by the peer connection.
     * Format: [type] (protocol) <-> [type] (protocol).
     * Computed from the local-candidate and remote-candidate reports
     * referenced by the candidate pair in use (either
     * `selectedCandidatePairId` in the transport report, or the
     * candidate-pair report with `selected` = true).
     */
    candidatePair?: string;
}

/**
 * Statistics related to the outbound audio RTP stream (outflow).
 */
export interface WebrtcStatisticsOutboundAudio {
    /**
     * Total packets sent since the beginning of this RTP stream.
     * From `packetsSent` in the outbound-rtp report.
     */
    totalPacketsSent?: number;
    /**
     * Total bytes sent since the beginning of this RTP stream.
     * From `bytesSent` in the outbound-rtp report.
     */
    totalBytesSent?: number;
    /** Audio KB sent between the current and the previous iteration. */
    deltaAudioKBSent?: number;
    /**
     * Current encoder target, in bits per second, for this RTP stream.
     * From `targetBitrate` in the outbound-rtp report.
     */
    targetBitrate?: number;
    /** Audio level of the media source detected from the input device. */
    audioInputLevel?: number;
    /**
     * Media identification-tag negotiated for this RTP stream, if any.
     * From `mid` in the inbound-rtp or outbound-rtp report.
     * NOTE(review): the spec's `mid` is a string; declared as number here —
     * kept as-is for compatibility, verify against producers.
     */
    mid?: number;
    /**
     * Name of the codec used (e.g. "opus"). Computed from `mimeType`
     * (with the "audio/" prefix removed) in the codec report referenced by
     * `codecId` of this RTP stream.
     */
    codec?: string;
    /**
     * Total audio energy sent since the beginning of this RTP stream.
     * NOTE(review): declared as string although the underlying stat is
     * numeric — kept as-is for compatibility.
     */
    totalAudioEnergy?: string;
}

/**
 * Statistics related to the inbound audio RTP stream (inflow).
 */
export interface WebrtcStatisticsInboundAudio {
    /**
     * Total packets received since the beginning of this RTP stream.
     * From `packetsReceived` in the inbound-rtp report.
     */
    totalPacketsReceived?: number;
    /**
     * Total packets discarded since the beginning of this RTP stream.
     * From `packetsDiscarded` in the inbound-rtp report.
     */
    totalPacketsReceivedDiscarded?: number;
    /**
     * Total packets lost since the beginning of this RTP stream.
     * From `packetsLost` in the inbound-rtp report.
     */
    totalPacketsLost?: number;
    /**
     * Total bytes received since the beginning of this RTP stream.
     * From `bytesReceived` in the inbound-rtp report.
     */
    totalBytesReceived?: number;
    /**
     * Packets received since the last measurement.
     * Computed from `packetsReceived` in the inbound-rtp report.
     */
    deltaPacketsReceived?: number;
    /** KB received for this RTP stream since the last measurement. */
    deltaKBReceived?: number;
    /**
     * Mean round-trip time during the last interval. Computed from
     * `totalRoundTripTime` and `roundTripTimeMeasurements` in the
     * remote-outbound-rtp report.
     */
    roundTripTimeMs?: number;
    /**
     * Mean jitter-buffer delay during the last interval. Computed from
     * `jitterBufferDelay` and `jitterBufferEmittedCount` in the
     * inbound-rtp report.
     */
    jitterBufferMs?: number;
    /**
     * Jitter value for the current measurement.
     * From `jitter` in the inbound-rtp report.
     */
    jitterMs?: number;
    /**
     * Mean MOS for the last interval. Computed from the codec used, the
     * computed absolute delay and the packets lost.
     */
    mos?: number;
    /**
     * Name of the inbound codec used (e.g. "opus"). Computed from
     * `mimeType` (with the "audio/" prefix removed) in the codec report
     * referenced by `codecId` of this RTP stream.
     */
    codec?: string;
    /**
     * Media identification-tag negotiated for this RTP stream, if any.
     * From `mid` in the inbound-rtp or outbound-rtp report.
     */
    mid?: number;
    /** Mean MOS value for the current session. */
    meanMOS?: number;
    /** Audio level of the media source seen from the output device. */
    audioOutputLevel?: number;
    /**
     * Absolute delay, computed as:
     *   absoluteDelay = 20 + RTT/2 + jitterBufferDelay
     * where 20 ms is the (fixed) packetization time, RTT is the round-trip
     * time in milliseconds and jitterBufferDelay is in milliseconds.
     */
    absDelayMs?: number;
    /**
     * Current quality of the incoming audio, derived from the MOS value;
     * 0 = bad, higher values = better.
     * NOTE(review): the original doc listed values 0, 1, 2 but also called
     * "3" good — confirm the actual range against the producer.
     */
    networkQualityValue?: number;
    /** SSRC identifier. */
    ssrc?: string;
    /**
     * Total audio energy received since the beginning of this RTP stream.
     * NOTE(review): declared as string although the underlying stat is
     * numeric — kept as-is for compatibility.
     */
    totalAudioEnergy?: string;
    /**
     * Mean difference between the processing time and the buffer time
     * during the last interval.
     */
    processingMs?: number;
    /** Mean playout delay during the last interval. */
    playoutDelayMs?: number;
    /**
     * Counter. Total FEC packets used since the beginning of this RTP
     * stream. Computed as (fecPacketsReceived - fecPacketsDiscarded)
     * from the inbound-rtp report.
     */
    totalFecPacketsUsed?: number;
    /**
     * Counter. Total packets pulled out of the jitter buffer since the
     * beginning of this RTP stream. From `jitterBufferEmittedCount` in
     * the inbound-rtp report.
     */
    totalJitterBufferEmittedCount?: number;
    /**
     * Counter. Total concealed samples since the beginning of this RTP
     * stream. From `concealedSamples` in the inbound-rtp report.
     */
    totalConcealedSamples?: number;
    /**
     * Counter. Total samples received since the beginning of this RTP
     * stream. From `samplesReceived` in the inbound-rtp report.
     */
    totalSamplesReceived?: number;
    /**
     * Gauge (0..1). Average loudness for the last interval. Computed as
     * Math.sqrt(totalAudioEnergy / totalSamplesDuration) from the
     * inbound-rtp report, with totalSamplesDuration in seconds.
     */
    rms?: number;
    /**
     * Counter. Total packets emitted for playout since the beginning of
     * this RTP stream. From `totalSamplesCount` in the media-playout
     * report.
     */
    totalSamplesCount?: number;
    /**
     * Gauge (0..100%). Percentage of synthesized samples played during the
     * last interval of this RTP stream. Computed from
     * `synthesizedSamplesDuration` and `totalSamplesDuration` in the
     * media-playout report.
     */
    synthesizedSamplesPlayed?: number;
}

/**
 * Audio statistics of the session: outbound (outflow) and inbound
 * (inflow) RTP streams.
 */
export interface WebrtcStatisticsAudio {
    /** Statistics of the outbound-rtp stream (outflow). */
    sent?: WebrtcStatisticsOutboundAudio;
    /** Statistics of the inbound-rtp stream (inflow). */
    received?: WebrtcStatisticsInboundAudio;
}

/**
 * OUTBOUND video statistics of the session.
 * NOTE: screen sharing uses the same interface — in WebRTC terms it is a
 * VIDEO stream.
 */
export interface WebrtcStatisticsOutboundVideo {
    /**
     * Total packets sent since the beginning of this RTP stream.
     * From `packetsSent` in the outbound-rtp report.
     */
    totalPacketsSent?: number;
    /**
     * Total bytes sent since the beginning of this RTP stream.
     * From `bytesSent` in the outbound-rtp report.
     */
    totalBytesSent?: number;
    /**
     * Total time spent encoding the media since the beginning of this RTP
     * stream. From `totalEncodeTime` in the outbound-rtp report.
     */
    totalEncodeTimeSeconds?: number;
    /**
     * Total video frames sent since the beginning of this RTP stream.
     * From `framesSent` in the outbound-rtp report.
     */
    totalFramesSent?: number;
    /**
     * Frames successfully encoded since the beginning of this RTP stream.
     * From `framesEncoded` in the outbound-rtp report.
     */
    totalFramesEncoded?: number;
    /**
     * Key frames successfully encoded since the beginning of this RTP
     * stream. From `keyFramesEncoded` in the outbound-rtp report.
     */
    totalKeyFramesEncoded?: number;
    /**
     * Picture Loss Indication (PLI) requests sent (receiver) or received
     * (sender) since the beginning of this RTP stream.
     * From `pliCount` in the outbound-rtp report.
     */
    totalPliCount?: number;
    /**
     * Negative Acknowledgment (NACK) requests sent since the beginning of
     * this RTP stream. From `nackCount` in the outbound-rtp report.
     */
    totalNackCount?: number;
    /**
     * Current encoder target, in bits per second, for this RTP stream.
     * From `targetBitrate` in the outbound-rtp report.
     */
    targetBitrate?: number;
    /**
     * Horizontal pixels in a video frame of this RTP stream.
     * From `frameWidth` in the inbound-rtp or outbound-rtp report.
     */
    frameWidth?: number;
    /**
     * Vertical pixels in a video frame of this RTP stream.
     * From `frameHeight` in the inbound-rtp or outbound-rtp report.
     */
    frameHeight?: number;
    /** Video frames sent per second at a given time. */
    framesPerSecond?: number;
    /**
     * Current reason for limiting the resolution and/or framerate.
     * Acceptable values: "bandwidth", "cpu", "other", "none".
     */
    qualityLimitationReason?: string;
    /** Details of all quality-limitation durations during the session. */
    qualityLimitationDurations?: any;
    /**
     * Media identification-tag negotiated for this RTP stream, if any.
     * From `mid` in the inbound-rtp or outbound-rtp report.
     */
    mid?: number;
    /**
     * Rate of retransmitted packets sent per second. Useful for evaluating
     * connection stability: it indicates how many packets had to be resent
     * to maintain data integrity.
     */
    retransmittedPacketsSent?: number;
    /**
     * Average Quantization Parameter (QP) used for encoding, normalized by
     * the number of frames encoded. QP controls the compression level:
     * lower values (~10-20) mean higher quality (more bits per frame),
     * higher values (~30-50) mean stronger compression and lower quality.
     */
    qpValue?: number;
    /**
     * Name of the codec used (e.g. "vp8"). Computed from `mimeType` in the
     * codec report referenced by `codecId` of this RTP stream.
     */
    codec?: string;
}

/**
 * INBOUND video statistics of the session.
 * NOTE: screen sharing uses the same interface — in WebRTC terms it is a
 * VIDEO stream.
 */
export interface WebrtcStatisticsInboundVideo {
    /**
     * Total packets received since the beginning of this RTP stream.
     * From `packetsReceived` in the inbound-rtp report.
     */
    totalPacketsReceived?: number;
    /**
     * Total packets discarded since the beginning of this RTP stream.
     * From `packetsDiscarded` in the inbound-rtp report.
     */
    totalPacketsReceivedDiscarded?: number;
    /**
     * Total packets lost since the beginning of this RTP stream.
     * From `packetsLost` in the inbound-rtp report.
     */
    totalPacketsLost?: number;
    /**
     * Total bytes received since the beginning of this RTP stream.
     * From `bytesReceived` in the inbound-rtp report.
     */
    totalBytesReceived?: number;
    /** KB received for this RTP stream since the last measurement. */
    deltaKBReceived?: number;
    /**
     * Total time spent decoding the media since the beginning of this RTP
     * stream. From `totalDecodeTime` in the inbound-rtp report.
     */
    totalDecodeTimeSeconds?: number;
    /**
     * Total video frames received since the beginning of this RTP stream.
     * From `framesReceived` in the inbound-rtp report.
     */
    totalFramesReceived?: number;
    /**
     * Frames successfully decoded since the beginning of this RTP stream.
     * From `framesDecoded` in the inbound-rtp report.
     */
    totalFramesDecoded?: number;
    /**
     * Key frames successfully decoded since the beginning of this RTP
     * stream. From `keyFramesDecoded` in the inbound-rtp report.
     */
    totalKeyFramesDecoded?: number;
    /**
     * Frames received but not displayed or processed (e.g. due to delays
     * or quality degradation) since the beginning of this RTP stream.
     * From `framesDropped` in the inbound-rtp report.
     */
    totalFramesDropped?: number;
    /**
     * Video freezes experienced by the receiver since the beginning of
     * this RTP stream. From `freezeCount` in the inbound-rtp report.
     */
    totalFreezeCount?: number;
    /**
     * Picture Loss Indication (PLI) requests sent (receiver) or received
     * (sender) since the beginning of this RTP stream.
     */
    totalPliCount?: number;
    /**
     * Negative Acknowledgment (NACK) requests sent since the beginning of
     * this RTP stream. From `nackCount` in the outbound-rtp report.
     */
    totalNackCount?: number;
    /**
     * Horizontal pixels in a video frame of this RTP stream.
     * From `frameWidth` in the inbound-rtp or outbound-rtp report.
     */
    frameWidth?: number;
    /**
     * Vertical pixels in a video frame of this RTP stream.
     * From `frameHeight` in the inbound-rtp or outbound-rtp report.
     */
    frameHeight?: number;
    /** Video frames received per second at a given time. */
    framesPerSecond?: number;
    /**
     * Media identification-tag negotiated for this RTP stream, if any.
     * From `mid` in the inbound-rtp or outbound-rtp report.
     */
    mid?: number;
    /**
     * Number of times media playback has been paused. A pause can be
     * caused by network conditions, user actions, or system-level issues
     * such as performance bottlenecks.
     */
    pauseCount?: number;
    /**
     * Name of the codec used (e.g. "vp8"). Computed from `mimeType` in the
     * codec report referenced by `codecId` of this RTP stream.
     */
    codec?: string;
}

/**
 * Video statistics of the session (inbound/outbound).
 * NOTE: screen sharing uses the same interface — in WebRTC terms it is a
 * VIDEO stream.
 */
export interface WebrtcStatisticsVideo {
    /**
     * Statistics of the outbound-rtp stream (outflow).
     * NB: with SIMULCAST (Bubble Conferences only), three substreams are
     * sent to the distant party: "l", "m", "h" (low, medium, high).
     */
    sent?: WebrtcStatisticsOutboundVideo | {
        l: WebrtcStatisticsOutboundVideo;
        m: WebrtcStatisticsOutboundVideo;
        h: WebrtcStatisticsOutboundVideo;
    };
    /** Statistics of the inbound-rtp stream (inflow). */
    received?: WebrtcStatisticsInboundVideo;
}

/**
 * The type of the session.
 */
export declare enum WebrtcStatisticsType {
    CONFERENCE = "Conference",
    WEBINAR = "Webinar",
    P2P = "P2P"
}

/**
 * Current media type of the session, based on the kinds of media streams
 * it contains.
 */
export declare enum WebrtcStatisticsMediaType {
    /** Audio only. */
    AUDIO = "AUDIO",
    /** BUNDLE (audio + video). */
    BUNDLE = "BUNDLE",
    /** Video only. */
    VIDEO = "VIDEO",
    /** Screen sharing only. */
    SHARING = "SHARING"
}

/**
 * Type of the session in terms of media direction: publisher only,
 * receiver only, or both.
 */
export declare enum WebrtcStatisticsSessionType {
    /** Subscriber and publisher (bidirectional). */
    PUBSUB = "PUBSUB",
    /** Subscriber (receiver) of the media. */
    SUBSCRIBER = "SUBSCRIBER",
    /** Publisher of the media. */
    PUBLISHER = "PUBLISHER"
}

/**
 * Aggregated statistics payload for one session at one iteration.
 */
export interface WebrtcStatisticsData {
    /** Current iteration number of the statistics collection. */
    iteration?: number;
    /** Duration of the interval between two statistics collections. */
    intervalDuration?: number;
    /** Type of the session these statistics relate to. */
    type?: WebrtcStatisticsType;
    /** Set when the session is part of a bubble conference. */
    conferenceId?: string;
    /** Set when the session is part of a webinar. */
    webinarId?: string;
    /** Media type of the session. */
    mediaType?: WebrtcStatisticsMediaType;
    /** Session type (publisher / subscriber / both). */
    sessionType?: WebrtcStatisticsSessionType;
    /** Audio data, if available. */
    audio?: WebrtcStatisticsAudio;
    /** Video data, if available. */
    video?: WebrtcStatisticsVideo;
    /** Screen-sharing data, if available. */
    sharing?: WebrtcStatisticsVideo;
    /** Network data, if available. */
    network?: WebrtcStatisticsNetwork;
    /** For web conferences: the remote publisher id. */
    publisherId?: string;
    /** For web conferences: the remote publisher name (anonymized). */
    publisherName?: string;
}

/**
 * Collects WebRTC statistics for a Jingle session and derives quality
 * measures (jitter, RTT, packet loss, MOS, audio levels, ...) from them.
 */
export declare class WebrtcSessionStatsHandler {
    private webrtcConfigAndUtils;
    private contactService;
    private logger;
    private settingsService;
    private xmppService;
    constructor(webrtcConfigAndUtils: WebrtcConfigAndUtils);
    private metricsValues;
    /** Stores the metrics thresholds/values used by the rule computations. */
    setWebrtcMetricsValues(metrics: any): void;
    /** Collects the statistics of the given session at the given interval. */
    getStatsForSession(session: JingleSession, interval?: number, webinarId?: string): any;
    private getStatsFromData;
    /**
     * Helper to ease the calculation of "ranges".
     *
     * Ranges details: the `ranges` property stores the list of all mediocre
     * periods. A period is a list of successive values that have
     * incremented the counter. Each period is a JSON structure with:
     * - start: timestamp of the first element of the period (timestamp of
     *   the report);
     * - durationPercent: duration of the successive measures that
     *   increased the counter, as a percentage of the call duration;
     * - durationInSeconds (alternative or additional property): the same
     *   duration, in seconds.
     * Update rules:
     * - no current period and a new value increments the counter ->
     *   create a new period (timestamp of the new value,
     *   durationInSeconds = interval);
     * - current period exists and a new value increments the counter ->
     *   increment the duration (durationInSeconds + interval);
     * - current period exists and a new value does not increment the
     *   counter -> store the current period and reset it;
     * - no current period and a new value does not increment the counter
     *   -> do nothing.
     */
    private prepareRangeForSsrc;
    private calculateRulesForSsrc;
    /**
     * High Jitter (inbound / outbound audio SSRC).
     * When jitter exceeds the threshold it often leads to noticeably
     * degraded call quality — choppy audio, or speech that seems to speed
     * up or slow down from the user's perspective.
     * For every jitter value, count how many are:
     * - Excellent: JITTER < SETTINGS_JITTER_FAIR
     * - Fair: SETTINGS_JITTER_FAIR < JITTER <= SETTINGS_JITTER_BAD
     * - Bad: SETTINGS_JITTER_BAD < JITTER <= SETTINGS_JITTER_MEDIOCRE
     * - Mediocre: JITTER > SETTINGS_JITTER_MEDIOCRE
     */
    private calculateHighJitterForSsrc;
    /**
     * High RTT. When the RTT exceeds the threshold it often leads to
     * noticeable latency and degraded call quality. Mediocre periods are
     * tracked as ranges (see prepareRangeForSsrc for the range rules).
     */
    private calculateRangeRttForSsrc;
    /**
     * High packet loss. When packet loss surpasses the threshold it often
     * causes noticeable audio degradation — choppy or distorted sound as
     * missing audio frames are concealed or replaced with synthetic sound.
     * For every packet-loss value (mean during the interval, as computed
     * for the MOS), count how many are:
     * - Excellent: packetLoss = 0
     * - Fair: 0 < packetLoss <= SETTINGS_PACKETLOSS_BAD
     * - Bad: SETTINGS_PACKETLOSS_BAD < packetLoss <= SETTINGS_PACKETLOSS_MEDIOCRE
     * - Mediocre: packetLoss > SETTINGS_PACKETLOSS_MEDIOCRE
     * Mediocre periods are tracked as ranges (see prepareRangeForSsrc).
     */
    private calculateHighPacketLossForSsrc;
    /**
     * High dejitter (bufferbloat). Audio packets are temporarily stored in
     * a jitter buffer, where they are collected, reordered and assembled
     * into continuous audio samples; the buffer compensates for variations
     * in network latency. Sometimes packets stay a long time in the buffer
     * even though the network seems stable — this is called bufferbloat or
     * high dejitter, and can explain choppy or interrupted audio playback.
     * Mediocre periods are tracked as ranges (see prepareRangeForSsrc).
     */
    private calculateRangeHighDejitterForSsrc;
    private calculateMosValuesForSsrc;
    private calculateLateAudioValuesForSSRC;
    private calculateHighSendAudioDelay;
    private calculateHighConcealmentValuesForSSRC;
    private calculateNoPackets;
    private calculateHighSpeakerDelayForSSRC;
    private calculateNoAudio;
    private calculateAudioLevelValuesForSSRC;
}
//# sourceMappingURL=webrtcSessionStatsHandler.d.ts.map