diff --git a/README.md b/README.md
index 6ae9b61805..3f4dc86149 100644
--- a/README.md
+++ b/README.md
@@ -468,6 +468,32 @@ To disable threadpool, run `cmake .. -DENABLE_KVS_THREADPOOL=OFF`
 
 ### Thread stack sizes
 The default thread stack size for the KVS WebRTC SDK is 64 kb. Notable stack sizes that may need to be changed for your specific application will be the ConnectionListener Receiver thread and the media sender threads. Please modify the stack sizes for these media dependent threads to be suitable for the media your application is processing.
+### Set up TWCC
+TWCC (Transport Wide Congestion Control) is a mechanism in WebRTC designed to enhance the performance and reliability of real-time communication over the internet. TWCC addresses the challenges of network congestion by providing detailed feedback on the transport of packets across the network, enabling adaptive bitrate control and optimization of
+media streams in real time. This feedback mechanism is crucial for maintaining high-quality audio and video communication, as it allows senders to adjust their transmission strategies based on comprehensive information about the packet loss, delay, and jitter experienced across the entire transport path.
+The importance of TWCC in WebRTC lies in its ability to ensure efficient use of available network bandwidth while minimizing the negative impact of network congestion. By monitoring the delivery of packets across the network, TWCC helps identify bottlenecks and adjust the media transmission rates accordingly.
+This dynamic approach to congestion control is essential for preventing degradation in call quality, such as pixelation, stuttering, or drops in audio and video streams, especially in environments with fluctuating network conditions. To learn more about TWCC, refer to the [RFC draft](https://datatracker.ietf.org/doc/html/draft-holmer-rmcat-transport-wide-cc-extensions-01).
+
+To enable TWCC in the SDK, two things need to be set up:
+
+1. Set `disableSenderSideBandwidthEstimation` to FALSE. In our samples, the value is set using the `disableTwcc` flag in `pSampleConfiguration`:
+
+```c
+pSampleConfiguration->disableTwcc = TRUE;  // to disable TWCC
+pSampleConfiguration->disableTwcc = FALSE; // to enable TWCC
+configuration.kvsRtcConfiguration.disableSenderSideBandwidthEstimation = pSampleConfiguration->disableTwcc;
+```
+
+2. Set the callback that contains the business logic to modify the bitrate based on packet loss information. The callback is registered using `peerConnectionOnSenderBandwidthEstimation()`:
+
+```c
+CHK_STATUS(peerConnectionOnSenderBandwidthEstimation(pSampleStreamingSession->pPeerConnection, (UINT64) pSampleStreamingSession,
+                                                     sampleSenderBandwidthEstimationHandler));
+```
+
+By default, the SDK enables the TWCC listener. The samples include a reference implementation that integrates TWCC into the GStreamer pipeline via the `sampleSenderBandwidthEstimationHandler` callback. For more details, refer to that callback.
+
+
 ### Setting ICE related timeouts
 
 There are some default timeout values set for different steps in ICE in the [KvsRtcConfiguration](https://awslabs.github.io/amazon-kinesis-video-streams-webrtc-sdk-c/structKvsRtcConfiguration.html). These are configurable in the application. While the defaults are generous, there could be applications that might need more flexibility to improve chances of connection establishment because of poor network.
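The callback registered in step 2 is where the application's rate-adaptation logic lives. As a rough illustration, here is a minimal sketch of such a handler, assuming a hypothetical application-side context (`MyTwccCtx`, `myApplyVideoBitrate`, and `myTwccBandwidthHandler` are illustrative names, not SDK APIs); the SDK's own reference implementation is `sampleSenderBandwidthEstimationHandler` in `samples/Common.c`, shown in the diff below.

```c
#include <com/amazonaws/kinesis/video/webrtcclient/Include.h>

// Hypothetical application-side state; the SDK sample keeps the equivalent in pSampleStreamingSession->twccMetadata.
typedef struct {
    UINT64 targetVideoBitrateKbps;
} MyTwccCtx;

// Placeholder for handing the new target bitrate to the encoder (application specific).
static VOID myApplyVideoBitrate(UINT64 bitrateKbps)
{
    DLOGI("New target video bitrate: %llu kbps", (unsigned long long) bitrateKbps);
}

// Sketch of a TWCC callback: derive the loss percentage from the packet counts reported by the SDK,
// probe upward on low loss, and back off proportionally on high loss.
VOID myTwccBandwidthHandler(UINT64 customData, UINT32 txBytes, UINT32 rxBytes, UINT32 txPacketsCnt, UINT32 rxPacketsCnt, UINT64 duration)
{
    MyTwccCtx* pCtx = (MyTwccCtx*) customData;
    DOUBLE percentLost = (txPacketsCnt > 0) ? ((DOUBLE) (txPacketsCnt - rxPacketsCnt) * 100.0 / (DOUBLE) txPacketsCnt) : 0.0;

    (VOID)(txBytes);
    (VOID)(rxBytes);
    (VOID)(duration);

    if (pCtx == NULL) {
        return;
    }

    if (percentLost <= 5.0) {
        // Little or no loss over this feedback interval: probe upward by 5 percent.
        pCtx->targetVideoBitrateKbps = (UINT64) (pCtx->targetVideoBitrateKbps * 1.05);
    } else {
        // Significant loss: reduce the bitrate in proportion to the loss percentage.
        pCtx->targetVideoBitrateKbps = (UINT64) (pCtx->targetVideoBitrateKbps * (1.0 - percentLost / 100.0));
    }

    myApplyVideoBitrate(pCtx->targetVideoBitrateKbps);
}
```

Such a handler would be registered exactly as in step 2, passing the context as the user data, for example `CHK_STATUS(peerConnectionOnSenderBandwidthEstimation(pPeerConnection, (UINT64) &myCtx, myTwccBandwidthHandler));`. Keeping the callback itself cheap is a good idea; the sample below follows that pattern by storing the new bitrates in the session's `twccMetadata`.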
diff --git a/samples/Common.c b/samples/Common.c
index 85753ae3f6..d74a5b90ba 100644
--- a/samples/Common.c
+++ b/samples/Common.c
@@ -604,8 +604,10 @@ STATUS createSampleStreamingSession(PSampleConfiguration pSampleConfiguration, P
     CHK_STATUS(transceiverOnBandwidthEstimation(pSampleStreamingSession->pAudioRtcRtpTransceiver, (UINT64) pSampleStreamingSession,
                                                 sampleBandwidthEstimationHandler));
     // twcc bandwidth estimation
-    CHK_STATUS(peerConnectionOnSenderBandwidthEstimation(pSampleStreamingSession->pPeerConnection, (UINT64) pSampleStreamingSession,
-                                                         sampleSenderBandwidthEstimationHandler));
+    if (!pSampleConfiguration->disableTwcc) {
+        CHK_STATUS(peerConnectionOnSenderBandwidthEstimation(pSampleStreamingSession->pPeerConnection, (UINT64) pSampleStreamingSession,
+                                                             sampleSenderBandwidthEstimationHandler));
+    }
 
     pSampleStreamingSession->startUpLatency = 0;
 CleanUp:
@@ -712,6 +714,9 @@ VOID sampleBandwidthEstimationHandler(UINT64 customData, DOUBLE maximumBitrate)
     DLOGV("received bitrate suggestion: %f", maximumBitrate);
 }
 
+// Sample callback for TWCC. Average packet loss is calculated with an EMA (exponential moving average). If the average packet loss is <= 5%,
+// the current bitrate is increased by 5%. If it is more than 5%, the current bitrate
+// is reduced by the average packet loss percentage. Bitrate updates are allowed at most once per second and are capped at the configured limits.
 VOID sampleSenderBandwidthEstimationHandler(UINT64 customData, UINT32 txBytes, UINT32 rxBytes, UINT32 txPacketsCnt, UINT32 rxPacketsCnt,
                                             UINT64 duration)
 {
@@ -744,18 +749,18 @@ VOID sampleSenderBandwidthEstimationHandler(UINT64 customData, UINT32 txBytes, U
 
     if (pSampleStreamingSession->twccMetadata.averagePacketLoss <= 5) {
         // increase encoder bitrate by 5 percent with a cap at MAX_BITRATE
-        videoBitrate = (UINT64) MIN(videoBitrate * 1.05f, MAX_VIDEO_BITRATE_KBPS);
+        videoBitrate = (UINT64) MIN(videoBitrate * 1.05, MAX_VIDEO_BITRATE_KBPS);
     } else {
         // decrease encoder bitrate by average packet loss percent, with a cap at MIN_BITRATE
-        videoBitrate = (UINT64) MAX(videoBitrate * (1.0f - pSampleStreamingSession->twccMetadata.averagePacketLoss / 100.0f), MIN_VIDEO_BITRATE_KBPS);
+        videoBitrate = (UINT64) MAX(videoBitrate * (1.0 - pSampleStreamingSession->twccMetadata.averagePacketLoss / 100.0), MIN_VIDEO_BITRATE_KBPS);
     }
 
     if (pSampleStreamingSession->twccMetadata.averagePacketLoss <= 5) {
         // increase encoder bitrate by 5 percent with a cap at MAX_BITRATE
-        audioBitrate = (UINT64) MIN(audioBitrate * 1.05f, MAX_AUDIO_BITRATE_BPS);
+        audioBitrate = (UINT64) MIN(audioBitrate * 1.05, MAX_AUDIO_BITRATE_BPS);
     } else {
         // decrease encoder bitrate by average packet loss percent, with a cap at MIN_BITRATE
-        audioBitrate = (UINT64) MAX(audioBitrate * (1.0f - pSampleStreamingSession->twccMetadata.averagePacketLoss / 100.0f), MIN_AUDIO_BITRATE_BPS);
+        audioBitrate = (UINT64) MAX(audioBitrate * (1.0 - pSampleStreamingSession->twccMetadata.averagePacketLoss / 100.0), MIN_AUDIO_BITRATE_BPS);
     }
 
     // Update the session with the new bitrate and adjustment time
@@ -765,9 +770,9 @@ VOID sampleSenderBandwidthEstimationHandler(UINT64 customData, UINT32 txBytes, U
 
     pSampleStreamingSession->twccMetadata.lastAdjustmentTimeMs = currentTimeMs;
 
-    DLOGV("Adjustment made: average packet loss = %.2f%%, timediff: %llu ms", pSampleStreamingSession->twccMetadata.averagePacketLoss,
+    DLOGI("Adjustment made: average packet loss = %.2f%%, timediff: %llu ms", pSampleStreamingSession->twccMetadata.averagePacketLoss,
           ADJUSTMENT_INTERVAL_SECONDS, timeDiff);
-    DLOGV("Suggested video bitrate %u kbps, suggested audio bitrate: %u bps, sent: %u bytes %u packets received: %u bytes %u packets in %lu msec",
+    DLOGI("Suggested video bitrate %u kbps, suggested audio bitrate: %u bps, sent: %u bytes %u packets received: %u bytes %u packets in %lu msec",
          videoBitrate, audioBitrate, txBytes, txPacketsCnt, rxBytes, rxPacketsCnt, duration / 10000ULL);
 }
 
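The README change above notes that the sample integrates TWCC with the GStreamer pipeline through `sampleSenderBandwidthEstimationHandler`. As a minimal sketch of the encoder-side half of that loop, assuming a hypothetical `x264enc` element named `sampleVideoEncoder` and a hypothetical helper (the property name and unit are encoder specific; `x264enc` takes `bitrate` in kbit/s), the suggested bitrate could be applied like this:

```c
#include <gst/gst.h>

// Push a suggested video bitrate into a running GStreamer pipeline.
// Assumes the encoder was created as "... ! x264enc name=sampleVideoEncoder ! ..." (hypothetical).
static void applySuggestedVideoBitrate(GstElement* pipeline, guint bitrateKbps)
{
    GstElement* encoder = gst_bin_get_by_name(GST_BIN(pipeline), "sampleVideoEncoder");
    if (encoder != NULL) {
        // x264enc exposes "bitrate" in kbit/s; other encoders may use bits per second or a different property name.
        g_object_set(G_OBJECT(encoder), "bitrate", bitrateKbps, NULL);
        gst_object_unref(encoder);
    }
}
```

This mirrors the adjustment the sample handler computes above; only the hand-off to the encoder is application specific.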