diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ed6c1e5ddc..c817eb214d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -552,6 +552,7 @@ jobs: - name: Install dependencies shell: powershell run: | + choco install pkgconfiglite choco install gstreamer --version=1.16.3 choco install gstreamer-devel --version=1.16.3 curl.exe -o C:\tools\pthreads-w32-2-9-1-release.zip ftp://sourceware.org/pub/pthreads-win32/pthreads-w32-2-9-1-release.zip
diff --git a/samples/Common.c b/samples/Common.c index d2a581a410..fda0983ca8 100644 --- a/samples/Common.c +++ b/samples/Common.c @@ -897,6 +897,7 @@ STATUS createSampleConfiguration(PCHAR channelName, SIGNALING_CHANNEL_ROLE_TYPE pSampleConfiguration->trickleIce = trickleIce; pSampleConfiguration->useTurn = useTurn; pSampleConfiguration->enableSendingMetricsToViewerViaDc = FALSE; + pSampleConfiguration->receiveAudioVideoSource = NULL; pSampleConfiguration->channelInfo.version = CHANNEL_INFO_CURRENT_VERSION; pSampleConfiguration->channelInfo.pChannelName = channelName;
diff --git a/samples/GstAudioVideoReceiver.c b/samples/GstAudioVideoReceiver.c index 626a963eb3..2bfca35027 100644 --- a/samples/GstAudioVideoReceiver.c +++ b/samples/GstAudioVideoReceiver.c @@ -5,7 +5,6 @@ static UINT64 presentationTsIncrement = 0; static BOOL eos = FALSE; -static RTC_CODEC audioCodec = RTC_CODEC_OPUS; // This function is a callback for the transceiver for every single video frame it receives // It writes these frames to a buffer and pushes it to the `appsrcVideo` element of the @@ -68,13 +67,7 @@ VOID onGstAudioFrameReady(UINT64 customData, PFrame pFrame) GST_BUFFER_DTS(buffer) = presentationTsIncrement; GST_BUFFER_PTS(buffer) = presentationTsIncrement; - - if (audioCodec == RTC_CODEC_AAC) { - GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale(pFrame->size, GST_SECOND, DEFAULT_AUDIO_AAC_BYTE_RATE); - } else { - GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale(pFrame->size, GST_SECOND, DEFAULT_AUDIO_OPUS_BYTE_RATE); - } - // TODO: check for other codecs once the pipelines are added + GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale(pFrame->size, GST_SECOND, DEFAULT_AUDIO_OPUS_BYTE_RATE); if (gst_buffer_fill(buffer, 0, pFrame->frameData, pFrame->size) != pFrame->size) { DLOGE("Buffer fill did not complete correctly"); diff --git a/samples/kvsWebrtcClientMasterGstSample.c b/samples/kvsWebrtcClientMasterGstSample.c index e63a6e563d..2febda0c3d 100644 --- a/samples/kvsWebrtcClientMasterGstSample.c +++ b/samples/kvsWebrtcClientMasterGstSample.c @@ -509,8 +509,7 @@ INT32 main(INT32 argc, CHAR* argv[]) pSampleConfiguration->videoSource = sendGstreamerAudioVideo; pSampleConfiguration->mediaType = SAMPLE_STREAMING_VIDEO_ONLY; - pSampleConfiguration->audioCodec = audioCodec; - pSampleConfiguration->videoCodec = videoCodec; + #ifdef ENABLE_DATA_CHANNEL pSampleConfiguration->onDataChannel = onDataChannel; #endif