diff --git a/lib/ActiveSpeakerDetector.js b/lib/ActiveSpeakerDetector.js
index e2d8d88d..baaba419 100644
--- a/lib/ActiveSpeakerDetector.js
+++ b/lib/ActiveSpeakerDetector.js
@@ -109,7 +109,7 @@ class ActiveSpeakerDetector extends Emitter
 		this.ids.set(track,id);
 		this.tracks.set(id,track);
 		//Start listening to it
-		this.detector.AddIncomingSourceGroup(source[SharedPointer.Pointer], id);
+		this.detector.AddIncomingSourceGroup(SharedPointer.getPointer(source), id);
 		//Singal track is attached
 		track.attached();
 
@@ -142,7 +142,7 @@ class ActiveSpeakerDetector extends Emitter
 			throw new Error("Could not find sourc for track");
 
 		//Stop listening to it
-		this.detector.RemoveIncomingSourceGroup(source[SharedPointer.Pointer]);
+		this.detector.RemoveIncomingSourceGroup(SharedPointer.getPointer(source));
 		//Singal track is detached
 		track.detached();
diff --git a/lib/ActiveSpeakerMultiplexer.js b/lib/ActiveSpeakerMultiplexer.js
index 9d87bd6c..f477d5fd 100644
--- a/lib/ActiveSpeakerMultiplexer.js
+++ b/lib/ActiveSpeakerMultiplexer.js
@@ -50,7 +50,7 @@ class ActiveSpeakerMultiplexer extends Emitter
 		) => {
 			//Get speaker track
 			const incomingStreamTrack = this.speakers.get(speakerId);
-			const outgoingStreamTrack = this.multiplex.get(multiplexeId).outgoingTrack;
+			const outgoingStreamTrack = this.multiplex.get(multiplexeId)?.outgoingTrack;
 			//Prevent race conditions
 			if (incomingStreamTrack && outgoingStreamTrack)
 				//Emit event
@@ -58,7 +58,7 @@ class ActiveSpeakerMultiplexer extends Emitter
 		};
 		this.onactivespeakerremoved = (/** @type {number} */ multiplexeId) => {
 			//Get multiplexed track
-			const outgoingStreamTrack = this.multiplex.get(multiplexeId).outgoingTrack;
+			const outgoingStreamTrack = this.multiplex.get(multiplexeId)?.outgoingTrack;
 			//Prevent race condition
 			if (outgoingStreamTrack)
 				//Emit event
@@ -123,15 +123,6 @@ class ActiveSpeakerMultiplexer extends Emitter
 		}
 	}
 
-	/**
-	 * Set minimum period between active speaker changes
-	 * @param {Number} minChangePeriod
-	 */
-	setMinChangePeriod(minChangePeriod)
-	{
-		this.multiplexer.SetMinChangePeriod(minChangePeriod);
-	}
-
 	/**
	 * Maximux activity score accumulated by an speaker
	 * @param {Number} maxAcummulatedScore
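// Illustrative sketch, not part of the upstream patch: the `?.` added above guards the
// case the patch's own comments call a race condition, where the native callback fires
// after the multiplexed entry was already removed from the map. Reduced standalone
// example of the same pattern (all names here are hypothetical):
const multiplex = new Map();
multiplex.set(1, { outgoingTrack: { id: "video-1" } });

function onActiveSpeakerRemoved(multiplexId) {
	// Optional chaining yields undefined instead of throwing when the entry is gone
	const outgoingTrack = multiplex.get(multiplexId)?.outgoingTrack;
	if (outgoingTrack)
		console.log("deactivated", outgoingTrack.id);
}

onActiveSpeakerRemoved(1); // logs "deactivated video-1"
multiplex.delete(1);
onActiveSpeakerRemoved(1); // no-op instead of a TypeError on undefined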
diff --git a/lib/EmulatedTransport.js b/lib/EmulatedTransport.js
index c2ff3c78..d55708f5 100644
--- a/lib/EmulatedTransport.js
+++ b/lib/EmulatedTransport.js
@@ -92,7 +92,7 @@ class EmulatedTransport extends Emitter
 	/**
	 * Starts playback
	 * @param {Object} params
-	 * @param {Object} params.start - Set start time
+	 * @param {number} params.start - Set start time
	 */
 	play(params)
 	{
diff --git a/lib/Endpoint.js b/lib/Endpoint.js
index d8d6e11d..e1420d42 100644
--- a/lib/Endpoint.js
+++ b/lib/Endpoint.js
@@ -149,7 +149,7 @@ class Endpoint extends Emitter
 		this.mirrored = {
 			streams	: /** @type {WeakMap} */ (new WeakMap()),
 			tracks	: /** @type {WeakMap} */ (new WeakMap()),
-			mirrors	: /** @type {Set} */ (new Set()),
+			mirrors	: /** @type {Set} */ (new Set()),
 		};
 	}
@@ -355,7 +355,7 @@ class Endpoint extends Emitter
 	mirrorIncomingStream(incomingStream)
 	{
 		//Get mirrored track
-		let mirroredStream = this.mirrored.streams.get(incomingStream);
+		let mirroredStream = /** @type {IncomingStream} */ (this.mirrored.streams.get(incomingStream));
 
 		//If not mirrored yet
 		if (!mirroredStream)
@@ -373,7 +373,7 @@ class Endpoint extends Emitter
 				//Create mirror track
 				const mirroredStreamTrack = this.mirrorIncomingStreamTrack(incomingStreamTrack);
 				//Add to mirrored stream
-				mirroredStream.addTrack(mirroredStreamTrack);
+				mirroredStream.addTrack(/** @type {any} */ (mirroredStreamTrack));
 			}
 
 			//Listen for new tacks
@@ -381,7 +381,7 @@ class Endpoint extends Emitter
 				//Create mirror track
 				const mirroredStreamTrack = this.mirrorIncomingStreamTrack(incomingStreamTrack);
 				//Add to mirrored stream
-				mirroredStream.addTrack(mirroredStreamTrack);
+				mirroredStream.addTrack(/** @type {any} */ (mirroredStreamTrack));
 			});
 
 			// Listen for track removal
@@ -392,7 +392,7 @@ class Endpoint extends Emitter
 			// Listen for track removal
 			mirroredStream.on("trackremoved", (incomingStream, incomingStreamTrack) => {
 				this.mirrored.tracks.delete(incomingStreamTrack);
-				this.mirrored.mirrors.delete(incomingStreamTrack);
+				this.mirrored.mirrors.delete(/** @type {any} */ (incomingStreamTrack));
 			});
 
@@ -427,7 +427,7 @@ class Endpoint extends Emitter
 	mirrorIncomingStreamTrack(incomingStreamTrack)
 	{
 		//Get mirrored track
-		let mirroredStreamTrack = this.mirrored.tracks.get(incomingStreamTrack);
+		let mirroredStreamTrack = /** @type {IncomingStreamTrackMirrored} */ (this.mirrored.tracks.get(incomingStreamTrack));
 
 		//If not mirrored yet
 		if (!mirroredStreamTrack)
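// Illustrative sketch, not part of the upstream patch: the untyped WeakMaps above return
// loosely typed values under checked JS, so the mirror helpers cast once at the lookup
// site and keep the rest of the method typed. Reduced standalone version of the pattern
// (the MirroredStream typedef and getMirror name are hypothetical):
// @ts-check

/** @typedef {{ id: string }} MirroredStream */

const mirroredStreams = new WeakMap();

/**
 * @param {object} stream
 * @returns {MirroredStream | undefined}
 */
function getMirror(stream) {
	// Single cast at the boundary, instead of sprinkling casts at every use site
	return /** @type {MirroredStream | undefined} */ (mirroredStreams.get(stream));
}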
diff --git a/lib/IncomingStream.js b/lib/IncomingStream.js
index 2586dae4..a478271e 100644
--- a/lib/IncomingStream.js
+++ b/lib/IncomingStream.js
@@ -37,8 +37,8 @@ class IncomingStream extends Emitter
	 * private constructor
	 */
 	constructor(
-		/** @type {Native.DTLSICETransport} */ transport,
-		/** @type {SharedPointer.Proxy} */ receiver,
+		/** @type {Native.DTLSICETransport | null} */ transport,
+		/** @type {SharedPointer.Proxy | null} */ receiver,
 		/** @type {SemanticSDP.StreamInfoLike} */ info)
 	{
 		//Init emitter
@@ -324,10 +324,14 @@ class IncomingStream extends Emitter
	 */
 	createTrack(trackInfo)
 	{
-		//Check it is associated to a track
-		if (!this.transport)
+		const { transport, receiver } = this;
+		//Check it is associated to a transport
+		if (!transport)
 			//Launch exception
 			throw new Error("Transport is not set");
+		//Check it is associated
+		if (!receiver)
+			throw new Error("Receiver is not set");
 		//Check we are not duplicating tracks
 		if (this.tracks.has(trackInfo.getId()))
 			//Launch exception
@@ -354,7 +358,7 @@ class IncomingStream extends Emitter
 			for (const encoding of alternative)
 			{
 				//Create single incoming source
-				const source = SharedPointer(new Native.RTPIncomingSourceGroupShared(type,this.transport.GetTimeService()));
+				const source = SharedPointer(new Native.RTPIncomingSourceGroupShared(type,transport.GetTimeService()));
 
 				//Set mid
 				const mid = trackInfo.getMediaId();
@@ -371,10 +375,11 @@ class IncomingStream extends Emitter
 				const params = encoding.getParams();
 
 				//If it has ssrc
-				if (params && params.has("ssrc"))
+				const ssrc = params?.get("ssrc");
+				if (ssrc !== undefined)
 				{
 					//Get it
-					source.media.ssrc = parseInt(params.get("ssrc"));
+					source.media.ssrc = parseInt(ssrc);
 
 					//Check ssrc groups
 					const groups = trackInfo.getSourceGroups();
@@ -397,7 +402,7 @@ class IncomingStream extends Emitter
 				}
 			}
 			//Add it to transport
-			if (!this.transport.AddIncomingSourceGroup(source))
+			if (!transport.AddIncomingSourceGroup(source))
 				//Launch exception
 				throw new Error("Could not add incoming source group to native transport");
 			//Append to sources
@@ -418,7 +423,7 @@ for (let i=0; i{
 			//For each source
 			for (let id of Object.keys(sources))
 				//Remove source group
-				this.transport.RemoveIncomingSourceGroup(sources[id]);
+				transport.RemoveIncomingSourceGroup(sources[id]);
 		});
 
 		//Add track
@@ -552,7 +557,6 @@ class IncomingStream extends Emitter
 		super.stop();
 
 		//Remove transport reference, so destructor is called on GC
-		//@ts-expect-error
 		this.transport = null;
 	}
 }
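// Illustrative sketch, not part of the upstream patch: copying `this.transport` and
// `this.receiver` into locals before the null checks keeps the non-null narrowing stable
// for the rest of createTrack(), since narrowing on a `this` property is easily lost
// across intervening calls. Hypothetical reduced example of both that pattern and the
// `params?.get("ssrc")` read-once style used above:
class ExampleStream {
	/** @param {{ GetTimeService(): object } | null} transport */
	constructor(transport) { this.transport = transport; }

	createTrack(/** @type {Map | undefined} */ params) {
		const { transport } = this;
		if (!transport)
			throw new Error("Transport is not set");
		// From here on `transport` is known to be non-null
		const timeService = transport.GetTimeService();
		// Optional ssrc parameter: read it once, then test the value
		const ssrc = params?.get("ssrc");
		return ssrc !== undefined
			? { timeService, ssrc: parseInt(ssrc) }
			: { timeService };
	}
}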
diff --git a/lib/IncomingStreamTrack.js b/lib/IncomingStreamTrack.js
index e2df01bd..65f24da4 100644
--- a/lib/IncomingStreamTrack.js
+++ b/lib/IncomingStreamTrack.js
@@ -4,13 +4,6 @@ const Emitter = require("medooze-event-emitter");
 const LayerInfo = require("./LayerInfo");
 const SemanticSDP = require("semantic-sdp");
 const {
-	SDPInfo,
-	Setup,
-	MediaInfo,
-	CandidateInfo,
-	DTLSInfo,
-	ICEInfo,
-	StreamInfo,
 	TrackInfo,
 	TrackEncodingInfo,
 	SourceGroupInfo,
@@ -22,8 +15,8 @@ const {
 * @property {number} simulcastIdx
 * @property {number} spatialLayerId Spatial layer id
 * @property {number} temporalLayerId Temporatl layer id
- * @property {number} [totalBytes] total rtp received bytes for this layer
- * @property {number} [numPackets] number of rtp packets received for this layer
+ * @property {number} totalBytes total rtp received bytes for this layer
+ * @property {number} numPackets number of rtp packets received for this layer
 * @property {number} bitrate average media bitrate received during last second for this layer
 * @property {number} totalBitrate average total bitrate received during last second for this layer
 * @property {number} [width] video width
@@ -36,8 +29,8 @@ const {
 * @typedef {Object} MediaStats stats for each media stream
- * @property {number} [lostPackets] total lost packkets
- * @property {number} [lostPacketsDelta] total lost/out of order packets during last second
+ * @property {number} lostPackets total lost packets
+ * @property {number} lostPacketsDelta total lost/out of order packets during last second
 * @property {number} [lostPacketsMaxGap] max total consecutive packets lost during last second
 * @property {number} [lostPacketsGapCount] number of packet loss bursts during last second
 * @property {number} [dropPackets] droppted packets by media server
@@ -82,14 +75,11 @@ const {
 * @property {number} timestamp When this stats was generated (in order to save workload, stats are cached for 200ms)
 * @property {PacketWaitTime} waitTime
 * @property {MediaStats} media Stats for the media stream
- * @property {MediaStats} [rtx] Stats for the rtx retransmission stream
+ * @property {MediaStats} rtx Stats for the rtx retransmission stream
 *
- * @property {number} [rtt] Round Trip Time in ms
+ * @property {number} rtt Round Trip Time in ms
 * @property {number} bitrate Bitrate for media stream only in bps
- * @property {number} total Accumulated bitrate for media and rtx streams in bps (Deprecated)
- * @property {number} totalBitrate average total bitrate received during last second for this layer
- * @property {number} totalBytes total rtp received bytes for this layer
- * @property {number} [remb] Estimated available bitrate for receiving (only available if not using transport wide cc)
+ * @property {number} remb Estimated available bitrate for receiving (only available if not using transport wide cc)
 * @property {number} simulcastIdx Simulcast layer index based on bitrate received (-1 if it is inactive).
 * @property {number} [lostPacketsRatio] Lost packets ratio
 * @property {number} [width] video width
@@ -98,7 +88,7 @@ const {
 * @property {number} [targetWidth] signaled target width on the VideoLayersAllocation header
 * @property {number} [targetHeight] signaled target height on the VideoLayersAllocation header
 * @property {number} [targetFps] signaled target fps on the VideoLayersAllocation header
- * @property {string} [codec] Name of the codec last in use
+ * @property {string} codec Name of the codec last in use
 *
 * Info accumulated for `media` and `rtx` streams:
 *
@@ -106,8 +96,12 @@ const {
 * @property {number} numFrames
 * @property {number} numFramesDelta
 * @property {number} numPackets
 * @property {number} numPacketsDelta
- * @property {number} [lostPackets]
- * @property {number} [lostPacketsDelta]
+ * @property {number} lostPackets
+ * @property {number} lostPacketsDelta
+ *
+ * @property {number} total Accumulated bitrate for media and rtx streams in bps (Deprecated)
+ * @property {number} totalBitrate average total bitrate received during last second for this layer
+ * @property {number} totalBytes total rtp received bytes for this layer
 */
 
 /** @typedef {{ [encodingId: string]: EncodingStats }} TrackStats providing the info for each source */
@@ -126,6 +120,8 @@ const {
 * @property {number} simulcastIdx
 * @property {number} bitrate
 * @property {number} totalBitrate average bitrate (media + overhead) received during last second in bps
+ * @property {number} totalBytes total rtp received bytes for this layer
+ * @property {number} numPackets number of rtp packets received for this layer
 * @property {LayerStats[]} layers
 * @property {number} [width]
 * @property {number} [height]
@@ -184,13 +180,13 @@ function getEncodingStats(/** @type {Encoding} */ encoding)
 		encodingStats.height = mediaStats.height;
 	}
 	//Add optional attributes
-	if (mediaStats.targetBitrate>0)
+	if (mediaStats.targetBitrate)
 		encodingStats.targetBitrate = mediaStats.targetBitrate;
-	if (mediaStats.targetWidth>0)
+	if (mediaStats.targetWidth)
 		encodingStats.targetWidth = mediaStats.targetWidth;
-	if (mediaStats.targetHeight>0)
+	if (mediaStats.targetHeight)
 		encodingStats.targetHeight = mediaStats.targetHeight;
-	if (mediaStats.targetFps>0)
+	if (mediaStats.targetFps)
 		encodingStats.targetFps = mediaStats.targetFps;
 
 	//Done
@@ -279,10 +275,10 @@ function getStatsFromIncomingSource(/** @type {Native.RTPIncomingSource} */ source
 			if (layer.targetFps>0)
 				curated.targetFps = layer.targetFps;
 			//TODO: add width/height to svc layers in c++
-			if (layer.width>0)
-				curated.width = layer.width;
-			if (layer.height>0)
-				curated.height = layer.height;
+			//@ts-expect-error (remove me once added in c++)
+			if (layer.width>0) curated.width = layer.width;
+			//@ts-expect-error (remove me once added in c++)
+			if (layer.height>0) curated.height = layer.height;
 
 			//Push layyer stats
 			individual.push(curated);
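// Illustrative sketch, not part of the upstream patch: the target* attributes above are
// only copied onto the stats object when they actually carry a value, so consumers can
// test plain presence. A truthiness test also skips undefined and NaN, which a `>0`
// comparison handles less gracefully under the stricter typedefs. Hypothetical reduced
// helper showing the copy-if-set pattern:
function curateTargets(mediaStats) {
	const out = {};
	if (mediaStats.targetBitrate)
		out.targetBitrate = mediaStats.targetBitrate;
	if (mediaStats.targetWidth)
		out.targetWidth = mediaStats.targetWidth;
	if (mediaStats.targetHeight)
		out.targetHeight = mediaStats.targetHeight;
	if (mediaStats.targetFps)
		out.targetFps = mediaStats.targetFps;
	return out;
}

console.log(curateTargets({ targetBitrate: 512000 })); // { targetBitrate: 512000 }
console.log(curateTargets({}));                        // {}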
@@ -409,19 +405,21 @@ function getActiveLayersFromStats(/** @type {TrackStats} */ stats)
 		const encoding = {
 			id		: id,
 			simulcastIdx	: stats[id].simulcastIdx,
+			totalBytes	: stats[id].totalBytes,
+			numPackets	: stats[id].numPackets,
 			bitrate		: stats[id].bitrate,
 			totalBitrate	: stats[id].totalBitrate,
 			layers		: []
 		};
 
 		//Add optional attributes
-		if (stats[id].media.targetBitrate>0)
+		if (stats[id].media.targetBitrate)
 			encoding.targetBitrate = stats[id].media.targetBitrate;
-		if (stats[id].media.targetWidth>0)
+		if (stats[id].media.targetWidth)
 			encoding.targetWidth = stats[id].media.targetWidth;
-		if (stats[id].media.targetHeight>0)
+		if (stats[id].media.targetHeight)
 			encoding.targetHeight = stats[id].media.targetHeight;
-		if (stats[id].media.targetFps>0)
+		if (stats[id].media.targetFps)
 			encoding.targetFps = stats[id].media.targetFps;
 
 		//Check if we have width and height
@@ -439,11 +437,13 @@ function getActiveLayersFromStats(/** @type {TrackStats} */ stats)
 		for (const layer of layers)
 		{
-			//Append to encoding
-			encoding.layers.push({
+			/** @type {LayerStats} */
+			const layerStats = {
 				simulcastIdx	: layer.simulcastIdx,
 				spatialLayerId	: layer.spatialLayerId,
 				temporalLayerId	: layer.temporalLayerId,
+				totalBytes	: layer.totalBytes,
+				numPackets	: layer.numPackets,
 				bitrate		: layer.bitrate,
 				totalBitrate	: layer.totalBitrate,
 				targetBitrate	: layer.targetBitrate,
@@ -452,23 +452,12 @@ function getActiveLayersFromStats(/** @type {TrackStats} */ stats)
 				targetFps	: layer.targetFps,
 				width		: layer.width,
 				height		: layer.height,
-			});
-
+			};
+
+			//Append to encoding
+			encoding.layers.push(layerStats);
 			//Append to all layer list
-			all.push({
-				encodingId	: id,
-				simulcastIdx	: layer.simulcastIdx,
-				spatialLayerId	: layer.spatialLayerId,
-				temporalLayerId	: layer.temporalLayerId,
-				bitrate		: layer.bitrate,
-				totalBitrate	: layer.totalBitrate,
-				targetBitrate	: layer.targetBitrate,
-				targetWidth	: layer.targetWidth,
-				targetHeight	: layer.targetHeight,
-				targetFps	: layer.targetFps,
-				width		: layer.width,
-				height		: layer.height,
-			});
+			all.push({ encodingId: id, ...layerStats });
 		}
 
 	//Check if the encoding had svc layers
@@ -482,6 +471,8 @@ function getActiveLayersFromStats(/** @type {TrackStats} */ stats)
 				simulcastIdx	: encoding.simulcastIdx,
 				spatialLayerId	: LayerInfo.MaxLayerId,
 				temporalLayerId	: LayerInfo.MaxLayerId,
+				totalBytes	: encoding.totalBytes,
+				numPackets	: encoding.numPackets,
 				bitrate		: encoding.bitrate,
 				totalBitrate	: encoding.totalBitrate,
 				targetBitrate	: encoding.targetBitrate,
@@ -490,7 +481,6 @@ function getActiveLayersFromStats(/** @type {TrackStats} */ stats)
 				targetFps	: encoding.targetFps,
 				width		: encoding.width,
 				height		: encoding.height,
-
 			});
 
 			//Add to encoding list
@@ -531,7 +521,7 @@ class IncomingStreamTrack extends Emitter
	 * private constructor
	 */
 	constructor(
-		/** @type {"audio" | "video"} */ media,
+		/** @type {SemanticSDP.TrackType} */ media,
 		/** @type {string} */ id,
 		/** @type {string} */ mediaId,
 		/** @type {Native.TimeService} */ timeService,
@@ -585,7 +575,8 @@ class IncomingStreamTrack extends Emitter
 		//If multiple encodings
 		if (this.depacketizer)
 			//Add the source depacketizer producer
-			this.depacketizer.AttachTo(encoding.depacketizer.toMediaFrameProducer());
+			(/** @type {SharedPointer.Proxy} */ (this.depacketizer))
+				.AttachTo(encoding.depacketizer.toMediaFrameProducer());
 
 		//Add ssrcs to track info
 		source.media && source.media.ssrc && this.trackInfo.addSSRC(source.media.ssrc);
@@ -614,6 +605,7 @@ class IncomingStreamTrack extends Emitter
 		//If there is no depacketizer
 		if (!this.depacketizer)
 			//This is the single depaquetizer, so reause it
+			/** @type {SharedPointer.Proxy} */
 			this.depacketizer = this.getDefaultEncoding().depacketizer;
 	}
@@ -730,7 +722,7 @@ class IncomingStreamTrack extends Emitter
 	}
 	/**
	 * Return ssrcs associated to this track
-	 * @returns {{ [encodingId: string]: import("./Transport").SSRCs }}
+	 * @returns {{ [encodingId: string]: { media: number, rtx: number } }}
	 */
 	getSSRCs()
 	{
@@ -749,7 +741,7 @@ class IncomingStreamTrack extends Emitter
 	/**
	 * Get track media type
-	 * @returns {"audio"|"video"}
+	 * @returns {SemanticSDP.TrackType}
	 */
 	getMedia()
 	{
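// Illustrative sketch, not part of the upstream patch: building the layer stats object
// once and spreading it into the flat `all` list replaces the duplicated field-by-field
// copy removed above. Hypothetical reduced example of the same spread pattern:
const layerStats = {
	simulcastIdx	: 1,
	spatialLayerId	: 0,
	temporalLayerId	: 2,
	bitrate		: 250000,
	totalBitrate	: 260000,
};

const encodingLayers = [];
const all = [];

encodingLayers.push(layerStats);
// Same data, plus the encoding id, without re-listing every property
all.push({ encodingId: "hi", ...layerStats });

console.log(all[0].encodingId, all[0].bitrate); // "hi" 250000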
@@ -946,7 +938,8 @@ class IncomingStreamTrack extends Emitter
 			//If multiple encodings
 			if (this.depacketizer != encoding.depacketizer)
 				//Remove frame listener
-				encoding.depacketizer.RemoveMediaListener(this.depacketizer.toMediaFrameListener());
+				(/** @type {SharedPointer.Proxy} */ (this.depacketizer))
+					.Detach(encoding.depacketizer.toMediaFrameProducer());
 			//Stop the depacketizer
 			encoding.depacketizer.Stop();
 			//Stop source
diff --git a/lib/IncomingStreamTrackMirrored.js b/lib/IncomingStreamTrackMirrored.js
index 77982b41..09ce83cc 100644
--- a/lib/IncomingStreamTrackMirrored.js
+++ b/lib/IncomingStreamTrackMirrored.js
@@ -192,7 +192,7 @@ class IncomingStreamTrackMirrored extends Emitter
 	/**
	 * Get track media type
-	 * @returns {"audio"|"video"}
+	 * @returns {SemanticSDP.TrackType}
	 */
 	getMedia()
 	{
diff --git a/lib/IncomingStreamTrackSimulcastAdapter.js b/lib/IncomingStreamTrackSimulcastAdapter.js
index 24656d2c..e3a40977 100644
--- a/lib/IncomingStreamTrackSimulcastAdapter.js
+++ b/lib/IncomingStreamTrackSimulcastAdapter.js
@@ -205,7 +205,7 @@ class IncomingStreamTrackSimulcastAdapter extends Emitter
 		for (const [id,stat] of Object.entries(trackStats))
 		{
 			//Get the mirrored encoding for the id
-			const encoding = encodings.get(id);
+			const encoding = /** @type {Encoding} */ (encodings.get(id));
 			//Add stat with mirrored id
 			stats[encoding.id] = stat;
 		}
@@ -235,7 +235,7 @@ class IncomingStreamTrackSimulcastAdapter extends Emitter
 		for (const [id,stat] of Object.entries(trackStats))
 		{
 			//Get the mirrored encoding for the id
-			const encoding = encodings.get(id);
+			const encoding = /** @type {Encoding} */ (encodings.get(id));
 			//Add stat with mirrored id
 			stats[encoding.id] = stat;
 		}
@@ -297,11 +297,11 @@ class IncomingStreamTrackSimulcastAdapter extends Emitter
 	/**
	 * Return ssrcs associated to this track
-	 * @returns {Object}
+	 * @returns {{ [encodingId: string]: import("./Transport").SSRCs }}
	 */
 	getSSRCs()
 	{
-		const ssrcs = /** @type {{ [encodingId: string]: { media: number, rtx: number } }} */ ({});
+		const ssrcs = /** @type {{ [encodingId: string]: import("./Transport").SSRCs }} */ ({});
 
 		//For each track
 		for (const [track,encodings] of this.encodingPerTrack)
@@ -310,12 +310,12 @@ class IncomingStreamTrackSimulcastAdapter extends Emitter
 			const trackSSRCs = track.getSSRCs();
 
 			//for all layers
-			for (const [id,ssrcs] of Object.entries(trackSSRCs))
+			for (const [id,encodingSSRCs] of Object.entries(trackSSRCs))
 			{
 				//Get the mirrored encoding for the id
-				const encoding = encodings.get(id);
+				const encoding = /** @type {Encoding} */ (encodings.get(id));
 				//Add stat with mirrored id
-				ssrcs[encoding.id] = ssrcs;
+				ssrcs[encoding.id] = encodingSSRCs;
 			}
 		}
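// Illustrative sketch, not part of the upstream patch: before the rename above, the
// destructured loop variable was also called `ssrcs`, shadowing the accumulator, so the
// assignment wrote the loop entry onto itself and the returned object was never filled
// correctly. Hypothetical reduced version with the fix applied (names invented here):
function mergeSSRCs(trackSSRCs, idMap) {
	const ssrcs = {};
	for (const [id, encodingSSRCs] of Object.entries(trackSSRCs)) {
		// With the shadowed name this line effectively assigned the inner object to
		// one of its own properties instead of populating the accumulator
		const mapped = idMap[id] ?? id;
		ssrcs[mapped] = encodingSSRCs;
	}
	return ssrcs;
}

console.log(mergeSSRCs({ "0": { media: 1234, rtx: 1235 } }, { "0": "mirror:0" }));
// -> { "mirror:0": { media: 1234, rtx: 1235 } }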
diff --git a/lib/MediaServer.js b/lib/MediaServer.js
index cec81604..f224e1a9 100644
--- a/lib/MediaServer.js
+++ b/lib/MediaServer.js
@@ -35,7 +35,7 @@ const defaultSeed = new LFSR(8, 92914);
 const endpoints = new Set();
 
 //Replace default seeed
-LFSR.prototype._defaultSeed = function(n) {
+LFSR.prototype._defaultSeed = function(/** @type {number} */ n) {
 	if (!n) throw new Error('n is required');
 	return defaultSeed.seq(n);
 };
@@ -178,7 +178,7 @@ MediaServer.enableUltraDebug = function(flag)
 MediaServer.createEndpoint = function(ip, params)
 {
 	//Cretate new rtp endpoint
-	const endpoint = new Endpoint(ip, Number.isInteger(params?.packetPoolSize) ? params.packetPoolSize : 0);
+	const endpoint = new Endpoint(ip, Number.isInteger(params?.packetPoolSize) ? params?.packetPoolSize : 0);
 
 	//Add to endpoint set
 	endpoints.add(endpoint);
@@ -340,7 +340,7 @@ MediaServer.createIncomingStreamSimulcastAdapter = function(
 	//Create track
 	const incomingStreamTrack = MediaServer.createIncomingStreamTrackSimulcastAdapter(trackId, mediaId);
 	//Add track to stream
-	incomingStream.addTrack(incomingStreamTrack);
+	incomingStream.addTrack(/** @type {any} */ (incomingStreamTrack));
 	//Done
 	return incomingStream;
 }
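// Illustrative sketch, not part of the upstream patch: Number.isInteger() rejects
// undefined, NaN and fractional values in a single test, so a missing or malformed
// packetPoolSize falls back to 0. Hypothetical reduced version of that check:
function resolvePacketPoolSize(params) {
	return Number.isInteger(params?.packetPoolSize) ? params.packetPoolSize : 0;
}

console.log(resolvePacketPoolSize({ packetPoolSize: 512 }));   // 512
console.log(resolvePacketPoolSize({ packetPoolSize: "512" })); // 0
console.log(resolvePacketPoolSize(undefined));                 // 0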
diff --git a/lib/OutgoingStream.js b/lib/OutgoingStream.js
index 26343bdb..9bf7b399 100644
--- a/lib/OutgoingStream.js
+++ b/lib/OutgoingStream.js
@@ -17,7 +17,7 @@ const {
 	TrackInfo,
 } = require("semantic-sdp");
 
-/** @typedef {import("./Transport").CreateStreamTrackOptions & { media: "audio" | "video" }} CreateTrackOptions */
+/** @typedef {import("./Transport").CreateStreamTrackOptions & { media: SemanticSDP.TrackType }} CreateTrackOptions */
 
 /**
 * @typedef {Object} OutgoingStreamEvents
 *
@@ -215,7 +215,7 @@ class OutgoingStream extends Emitter
 	/**
	 * Get all the tracks
-	 * @param {"audio" | "video"} [type] - The media type (Optional)
+	 * @param {SemanticSDP.TrackType} [type] - The media type (Optional)
	 * @returns {Array} - Array of tracks
	 */
 	getTracks(type)
@@ -301,7 +301,7 @@ class OutgoingStream extends Emitter
 	/**
	 * Create new track from a TrackInfo object and add it to this stream
-	 * @param {CreateTrackOptions | TrackInfo | SemanticSDP.MediaType} params Params plain object, StreamInfo object or media type
+	 * @param {CreateTrackOptions | TrackInfo | SemanticSDP.TrackType} params Params plain object, StreamInfo object or media type
	 * @returns The new outgoing stream
	 */
 	createTrack(params)
@@ -311,7 +311,7 @@ class OutgoingStream extends Emitter
 		//Get media type
 		//@ts-expect-error
-		const media = /** @type {SemanticSDP.MediaType} */ (params.constructor.name === "TrackInfo" ? params.getMedia() : params && params.media ? params.media : params);
+		const media = /** @type {SemanticSDP.TrackType} */ (params.constructor.name === "TrackInfo" ? params.getMedia() : params && params.media ? params.media : params);
 		//Is it audio or video
 		const type = /** @type {Native.MediaFrameType} */ (media==="audio" ? 0 : 1);
diff --git a/lib/OutgoingStreamTrack.js b/lib/OutgoingStreamTrack.js
index 04f6aad2..4a6521b6 100644
--- a/lib/OutgoingStreamTrack.js
+++ b/lib/OutgoingStreamTrack.js
@@ -128,7 +128,7 @@ class OutgoingStreamTrack extends Emitter
	 * @hideconstructor
	 */
 	constructor(
-		/** @type {"audio" | "video"} */ media,
+		/** @type {SemanticSDP.TrackType} */ media,
 		/** @type {string} */ id,
 		/** @type {string} */ mediaId,
 		/** @type {SharedPointer.Proxy} */ sender,
diff --git a/lib/Recorder.js b/lib/Recorder.js
index 9f33f2a8..5d4b8afb 100644
--- a/lib/Recorder.js
+++ b/lib/Recorder.js
@@ -98,7 +98,7 @@ class Recorder extends Emitter
 	/**
	 * Get recording filename
-	 * @returns {Date}
+	 * @returns {Date | undefined}
	 */
 	getStartTime()
 	{
diff --git a/lib/SDPManagerUnified.js b/lib/SDPManagerUnified.js
index 94847cdd..f389ae43 100644
--- a/lib/SDPManagerUnified.js
+++ b/lib/SDPManagerUnified.js
@@ -21,7 +21,7 @@ const {
 /**
 * @typedef {Object} Transceiver
 * @property {String} mid
- * @property {SemanticSDP.MediaType} media
+ * @property {SemanticSDP.TrackType} media
 * @property {TransceiverLocal} local
 * @property {TransceiverRemote} remote
 */
diff --git a/lib/SharedPointer.js b/lib/SharedPointer.js
index 09420544..6c07a4c4 100644
--- a/lib/SharedPointer.js
+++ b/lib/SharedPointer.js
@@ -78,6 +78,17 @@ function SharedPointer(
 SharedPointer.Target = Symbol("target");
 SharedPointer.Pointer = Symbol("pointer");
 
+/**
+ * @template T
+ * @param {Proxy} ptr
+ * @returns {T}
+ */
+SharedPointer.getPointer = function (ptr)
+{
+	return ptr[SharedPointer.Pointer];
+}
+
+
 SharedPointer.wrapNativeModule = function (module)
 {
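// Illustrative sketch, not part of the upstream patch: getPointer() wraps the symbol
// lookup that call sites previously spelled as `obj[SharedPointer.Pointer]`, which is how
// ActiveSpeakerDetector now obtains the native pointer:
//
//   // before
//   this.detector.AddIncomingSourceGroup(source[SharedPointer.Pointer], id);
//   // after
//   this.detector.AddIncomingSourceGroup(SharedPointer.getPointer(source), id);
//
// Reduced standalone model of the same accessor (names here are invented):
const Pointer = Symbol("pointer");
const getPointer = (obj) => obj[Pointer];

const source = { [Pointer]: { native: true } };
console.log(getPointer(source)); // { native: true }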
diff --git a/lib/StreamerSession.js b/lib/StreamerSession.js
index 60a837c1..0d05bdba 100644
--- a/lib/StreamerSession.js
+++ b/lib/StreamerSession.js
@@ -19,7 +19,7 @@ class StreamerSession extends Emitter
	 * @hideconstructor
	 * private constructor
	 */
-	constructor(media,params)
+	constructor(/** @type {SemanticSDP.MediaInfo} */ media, params)
 	{
 		//Init emitter
 		super();
@@ -84,9 +84,11 @@ class StreamerSession extends Emitter
 		//Stop listeners
 		this.incoming.once("stopped",()=>{
+			//@ts-expect-error
 			this.incoming = null;
 		});
 		this.outgoing.once("stopped",()=>{
+			//@ts-expect-error
 			this.outgoing = null;
 		});
 	}
@@ -103,7 +105,7 @@ class StreamerSession extends Emitter
 	/**Set the rempte rtp/udp ip and port
	 * */
-	setRemote(ip,port)
+	setRemote(/** @type {string} */ ip, /** @type {number} */ port)
 	{
 		//Set them
 		this.session.SetRemotePort(String(ip),parseInt(port));
@@ -143,20 +145,13 @@ class StreamerSession extends Emitter
 		//End
 		this.session.End();
 
-		/**
-		 * StreamerSession stopped event
-		 *
-		 * @name stopped
-		 * @memberof StreamerSession
-		 * @kind event
-		 * @argument {StreamerSession} session
-		 */
 		this.emit("stopped",this);
 
 		//Stop emitter
 		super.stop();
 
 		//Remove transport reference, so destructor is called on GC
+		//@ts-expect-error
 		this.session = null;
 	}
diff --git a/lib/Transponder.js b/lib/Transponder.js
index 1aeefa07..6b2712d4 100644
--- a/lib/Transponder.js
+++ b/lib/Transponder.js
@@ -3,6 +3,7 @@ const Emitter = require("medooze-event-emitter");
 const LayerInfo = require("./LayerInfo");
 const Native = require("./Native");
 const IncomingStreamTrack = require("./IncomingStreamTrack");
+const SemanticSDP = require("semantic-sdp");
 
 /** @typedef {IncomingStreamTrack.LayerStats} LayerStats */
 
 /**
@@ -46,7 +47,7 @@ class Transponder extends Emitter
	 */
 	constructor(
 		/** @type {Native.RTPStreamTransponderFacade} */ transponder,
-		/** @type {"audio"|"video"} */ media)
+		/** @type {SemanticSDP.TrackType} */ media)
 	{
 		//Init emitter
 		super();
diff --git a/lib/Transport.js b/lib/Transport.js
index 91448d48..082738d2 100644
--- a/lib/Transport.js
+++ b/lib/Transport.js
@@ -64,7 +64,7 @@ const noop = function(){};
 
 /**
 * @typedef {Object} SSRCs
- * @property {number} [media] ssrc for the media track
+ * @property {number} media ssrc for the media track
 * @property {number} [fec] ssrc for the fec track (only applicable to video tracks)
 * @property {number} [rtx] ssrc for the rtx track (only applicable to video tracks)
 */
@@ -561,8 +561,10 @@ class Transport extends Emitter
 		if ("relay"===candidate.getType())
 		{
 			//Get relay ip and port
-			ip = candidate.getRelAddr();
-			port = candidate.getRelPort();
+			const [relIp, relPort] = [candidate.getRelAddr(), candidate.getRelPort()];
+			if (!relIp || !relPort)
+				return false;
+			[ip, port] = [relIp, relPort];
 		} else {
 			//Get ip and port
 			ip = candidate.getAddress();
@@ -743,7 +745,7 @@ class Transport extends Emitter
 	/**
	 * Create new outgoing stream in this transport
-	 * @param {"audio" | "video"} media Track media type
+	 * @param {SemanticSDP.TrackType} media Track media type
	 * @param {CreateStreamTrackOptions} [params] Track parameters
	 * @returns {OutgoingStreamTrack} The new outgoing stream track
	 */
@@ -772,8 +774,8 @@ class Transport extends Emitter
 		source.media.ssrc = opts.ssrcs ? opts.ssrcs.media : this.lfsr.seq(31);
 		if (media=="video")
 		{
-			source.rtx.ssrc = opts.ssrcs ? opts.ssrcs.rtx : this.lfsr.seq(31);
-			source.fec.ssrc = opts.ssrcs ? opts.ssrcs.fec : this.lfsr.seq(31);
+			source.rtx.ssrc = opts.ssrcs?.rtx ? opts.ssrcs.rtx : this.lfsr.seq(31);
+			source.fec.ssrc = opts.ssrcs?.fec ? opts.ssrcs.fec : this.lfsr.seq(31);
 		}
 		else
 		{
@@ -898,7 +900,7 @@ class Transport extends Emitter
 	/**
	 * Create new incoming stream in this transport. TODO: Simulcast is still not supported
-	 * @param {"audio" | "video"} media Track media type
+	 * @param {SemanticSDP.TrackType} media Track media type
	 * @param {CreateStreamTrackOptions} [params] Track parameters
	 * @returns {IncomingStreamTrack} The new incoming stream track
	 */
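// Illustrative sketch, not part of the upstream patch: a relay candidate may come in
// without a usable related address or port, so the change above bails out early instead
// of handing undefined values to the native transport. Hypothetical reduced version of
// the same guard (the function name is invented):
function resolveCandidateAddress(candidate) {
	if ("relay" === candidate.getType()) {
		const [relIp, relPort] = [candidate.getRelAddr(), candidate.getRelPort()];
		if (!relIp || !relPort)
			return false;
		return { ip: relIp, port: relPort };
	}
	return { ip: candidate.getAddress(), port: candidate.getPort() };
}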
@@ -921,7 +923,7 @@ class Transport extends Emitter
 		//Set source ssrcs
 		source.media.ssrc = opts.ssrcs ? opts.ssrcs.media : this.lfsr.seq(31);
-		source.rtx.ssrc = opts.ssrcs ? opts.ssrcs.rtx : this.lfsr.seq(31);
+		source.rtx.ssrc = opts.ssrcs?.rtx ? opts.ssrcs.rtx : this.lfsr.seq(31);
 
 		//Add it to transport
 		if (!this.transport.AddIncomingSourceGroup(source))
@@ -963,8 +965,8 @@ class Transport extends Emitter
 	{
 		//Create new incoming stream
 		let outgoingStream = this.createOutgoingStream ({
-			audio: incomingStream.getAudioTracks().length,
-			video: incomingStream.getVideoTracks().length
+			audio: incomingStream.getAudioTracks().map(t => ({ id: t.getId() })),
+			video: incomingStream.getVideoTracks().map(t => ({ id: t.getId() })),
 		});
 
 		//Attach the streams
@@ -1010,7 +1012,7 @@ class Transport extends Emitter
 		this.outgoingStreamTracks.clear();
 
 		//Remove dtls listener
-		this.transport.SetListener(null);
+		this.transport.SetListener(/** @type {any} */ (null));
 
 		//Remove transport/connection from bundle, DO NOT USE them later on
 		this.bundle.RemoveICETransport(this.username);
diff --git a/package.json b/package.json
index 72856eca..63fac515 100644
--- a/package.json
+++ b/package.json
@@ -36,13 +36,14 @@
     "lfsr": "0.0.3",
     "medooze-event-emitter": "^1.0.0",
     "nan": "^2.18.0",
-    "semantic-sdp": "^3.27.1",
+    "semantic-sdp": "^3.28.0",
     "uuid": "^3.3.2"
   },
   "optionalDependencies": {
     "netlink": "^0"
   },
   "devDependencies": {
+    "@types/lfsr": "^0.0.0",
     "@types/node": "^20.8.6",
     "@types/uuid": "^9.0.5",
     "documentation": "13.2.5",
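// Illustrative sketch, not part of the upstream patch: after the change above, publish()
// builds explicit per-track options (one { id } object per incoming track) instead of
// passing plain track counts to createOutgoingStream(). Hypothetical reduced example of
// that mapping with a stubbed incoming stream:
const incomingStream = {
	getAudioTracks: () => [{ getId: () => "audio0" }],
	getVideoTracks: () => [{ getId: () => "video0" }, { getId: () => "video1" }],
};

const createOptions = {
	audio: incomingStream.getAudioTracks().map(t => ({ id: t.getId() })),
	video: incomingStream.getVideoTracks().map(t => ({ id: t.getId() })),
};

console.log(createOptions);
// { audio: [ { id: "audio0" } ], video: [ { id: "video0" }, { id: "video1" } ] }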