Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feat: webrtc streaming rework #837

Merged
merged 1 commit into from
Dec 20, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -16,4 +16,5 @@ src/lib/stores/adminStore.ts
src/lib/stores/chatStore.ts
src/lib/stores/sfxStore.ts
src/lib/stores/funFactStore.ts
/static/sfx/
/static/sfx/
.vscode/launch.json
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "mage-website",
"version": "0.1.12",
"version": "0.1.13",
"license": "GPL-3.0",
"private": true,
"type": "module",
Expand Down
24 changes: 6 additions & 18 deletions src/lib/WHEPClient.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ export default class WHEPClient extends EventTarget {
private peerConnection: RTCPeerConnection
private stream: MediaStream

constructor(private endpoint: string, private videoElement: any, private trackType: string) {
constructor(private endpoint: string, private videoElement: any) {
super()
this.stream = new MediaStream()

Expand All @@ -28,19 +28,14 @@ export default class WHEPClient extends EventTarget {
bundlePolicy: 'max-bundle'
})

const trackOrKind = trackType === 'screen' || trackType === 'webcam' ? 'video' : 'audio'

/** https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection/addTransceiver */
this.peerConnection.addTransceiver(trackOrKind, {
this.peerConnection.addTransceiver('video', {
direction: 'recvonly'
})

//NOTE: used for system audio
if (this.trackType === 'screen') {
this.peerConnection.addTransceiver('audio', {
direction: 'recvonly'
})
}
this.peerConnection.addTransceiver('audio', {
direction: 'recvonly'
})

/**
* When new tracks are received in the connection, store local references,
Expand Down Expand Up @@ -68,20 +63,13 @@ export default class WHEPClient extends EventTarget {
console.log('got unknown track ' + track)
}

if (trackType === 'screen' && track.kind === 'video') {
if (track.kind === 'video') {
if (track.readyState === 'live') {
this.dispatchEvent(new CustomEvent(`isScreenLive`, { detail: true }))
} else {
this.dispatchEvent(new CustomEvent(`isScreenLive`, { detail: false }))
}
}
if (trackType === 'webcam' && track.kind === 'video') {
if (track.readyState === 'live') {
this.dispatchEvent(new CustomEvent(`isWebcamLive`, { detail: true }))
} else {
this.dispatchEvent(new CustomEvent(`isWebcamLive`, { detail: false }))
}
}
}

this.peerConnection.addEventListener('connectionstatechange', () => {
Expand Down
261 changes: 190 additions & 71 deletions src/lib/WHIPClient.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,12 @@ import { getAudioIndicator } from '$lib/utils'
* https://www.ietf.org/archive/id/draft-ietf-wish-whip-01.html
*/
export default class WHIPClient extends EventTarget {
private peerConnection: RTCPeerConnection
public localStream?: MediaStream
public peerConnection: RTCPeerConnection
private localScreenStream?: MediaStream
private localWebcamStream?: MediaStream
private localAudioStream?: MediaStream

constructor(private endpoint: string, private videoElement: any, private trackType: string) {
constructor(private endpoint: string) {
super()
/**
* Create a new WebRTC connection, using public STUN servers with ICE,
Expand All @@ -37,19 +39,6 @@ export default class WHIPClient extends EventTarget {
await negotiateConnectionWithClientOffer(this.peerConnection, this.endpoint)
console.log('Connection negotiation ended')
})

/**
* While the connection is being initialized,
* connect the video stream to the provided <video> element.
*/
this.accessLocalMediaSources(trackType)
.then((stream: any) => {
this.localStream = stream
videoElement.srcObject = stream
})
.catch(() => {
this.disconnectStream()
})
}

/**
Expand All @@ -58,64 +47,96 @@ export default class WHIPClient extends EventTarget {
*
* https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
*/
private async accessLocalMediaSources(trackType: string) {
if (trackType === 'screen') {
return navigator.mediaDevices.getDisplayMedia({ video: true, audio: true }).then((stream) => {
if (!stream.getAudioTracks().length) {
const audioContext = new AudioContext()
const oscillator = audioContext.createOscillator()
const destination = audioContext.createMediaStreamDestination()
oscillator.connect(destination)
oscillator.frequency.setValueAtTime(0, audioContext.currentTime)
oscillator.start()
const audioTrack = destination.stream.getAudioTracks()[0]
audioTrack.enabled = true
// audioTrack.id = 'silent-audio-track'
// audioTrack.label = 'Silent Audio Track'
public async accessLocalScreenMediaSources(canvasElement: HTMLCanvasElement, videoElement: any) {
try {
/**
* While the connection is being initialized,
* connect the video stream to the provided <video> element.
*/
navigator.mediaDevices
.getDisplayMedia({ video: true, audio: true })
.then(async (stream) => {
let audioTrack = stream.getAudioTracks()[0]
if (!audioTrack) {
const audioContext = new AudioContext()
const oscillator = audioContext.createOscillator()
const destination = audioContext.createMediaStreamDestination()
oscillator.connect(destination)
oscillator.frequency.setValueAtTime(0, audioContext.currentTime)
oscillator.start()
audioTrack = destination.stream.getAudioTracks()[0]
audioTrack.enabled = true
// audioTrack.id = 'silent-audio-track'
// audioTrack.label = 'Silent Audio Track'
}
this.peerConnection.addTransceiver(audioTrack, {
direction: 'sendonly'
})
}
stream.getTracks().forEach((track) => {
const transceiver = this.peerConnection.addTransceiver(track, {
/** WHIP is only for sending streaming media */
direction: 'sendonly'
})
if (track.kind == 'video' && transceiver.sender.track) {
transceiver.sender.track.applyConstraints({
width: 1920,
height: 1080
// Add the stream to the canvas and get the canvas stream
const canvasStream = await this.addStreamToCanvas(
stream,
canvasElement,
videoElement,
true
)
// Add the canvas stream's tracks to the peer connection
canvasStream.getTracks().forEach((track) => {
this.peerConnection.addTransceiver(track, {
direction: 'sendonly'
})
})
stream.getVideoTracks()[0].addEventListener('ended', () => this.disconnectStreamScreen())
if (stream.getVideoTracks()[0].readyState === 'live') {
this.dispatchEvent(new CustomEvent(`isScreenLive`, { detail: true }))
}
this.localScreenStream = stream
})
stream.getVideoTracks()[0].addEventListener('ended', () => this.disconnectStream())
if (stream.getVideoTracks()[0].readyState === 'live') {
this.dispatchEvent(new CustomEvent(`isScreenLive`, { detail: true }))
}
return stream
})
} else if (trackType === 'webcam') {
return navigator.mediaDevices.getUserMedia({ video: true, audio: false }).then((stream) => {
stream.getTracks().forEach((track) => {
const transceiver = this.peerConnection.addTransceiver(track, {
/** WHIP is only for sending streaming media */
direction: 'sendonly'
})
if (track.kind == 'video' && transceiver.sender.track) {
transceiver.sender.track.applyConstraints({
width: 1280,
height: 720
.catch(() => {
this.disconnectStreamScreen()
})
} catch (error) {
this.disconnectStreamScreen()
}
}

public async accessLocalWebcamMediaSources(
canvasElement: HTMLCanvasElement,
videoElement: HTMLVideoElement
) {
try {
navigator.mediaDevices
.getUserMedia({ video: true, audio: false })
.then(async (stream) => {
// Add the stream to the canvas and get the canvas stream
const canvasStream = await this.addStreamToCanvas(
stream,
canvasElement,
videoElement,
false
)
// Add the canvas stream's tracks to the peer connection
canvasStream.getTracks().forEach((track) => {
this.peerConnection.addTransceiver(track, {
direction: 'sendonly'
})
})
stream.getVideoTracks()[0].addEventListener('ended', () => this.disconnectStreamWebcam())
if (stream.getVideoTracks()[0].readyState === 'live') {
this.dispatchEvent(new CustomEvent(`isWebcamLive`, { detail: true }))
}
this.localWebcamStream = stream
})
stream.getVideoTracks()[0].addEventListener('ended', () => this.disconnectStream())
if (stream.getVideoTracks()[0].readyState === 'live') {
this.dispatchEvent(new CustomEvent(`isWebcamLive`, { detail: true }))
}
return stream
})
} else if (trackType === 'audio') {
return navigator.mediaDevices
.catch(() => {
this.disconnectStreamWebcam()
})
} catch (error) {
this.disconnectStreamWebcam()
}
}

public async accessLocalAudioMediaSources(audioElement: any) {
try {
navigator.mediaDevices
.getUserMedia({
video: false,
audio: {
Expand All @@ -125,21 +146,81 @@ export default class WHIPClient extends EventTarget {
}
})
.then((stream) => {
stream.getTracks().forEach((track) => {
stream.getAudioTracks().forEach((track) => {
this.peerConnection.addTransceiver(track, {
/** WHIP is only for sending streaming media */
direction: 'sendonly'
})
})
stream.getAudioTracks()[0].addEventListener('ended', () => {
this.disconnectStream()
this.disconnectStreamAudio()
})
getAudioIndicator(stream, this)
return stream
this.localAudioStream = stream
audioElement.srcObject = stream
})
.catch(() => {
this.disconnectStreamAudio()
})
} catch (error) {
this.disconnectStreamAudio()
}
}

private async addStreamToCanvas(
	stream: MediaStream,
	canvasElement: HTMLCanvasElement,
	videoElement: HTMLVideoElement,
	isScreen: boolean
): Promise<MediaStream> {
	/**
	 * Mirror the raw capture stream into the provided <video> element and
	 * continuously paint its frames onto the canvas, then return the canvas's
	 * own capture stream (which is what gets published over WHIP).
	 *
	 * @param stream        raw getDisplayMedia/getUserMedia stream to sample
	 * @param canvasElement target canvas used as the compositing surface
	 * @param videoElement  hidden/preview <video> that plays `stream`
	 * @param isScreen      true for screen share (1080p + webcam overlay), false for webcam (720p)
	 * @returns the canvas `captureStream(30)` MediaStream
	 */
	videoElement.srcObject = stream
	// play() returns a promise; a rejection (e.g. autoplay policy) is non-fatal here.
	void videoElement.play()
	canvasElement.width = 1920
	canvasElement.height = 1080
	const context = canvasElement.getContext('2d')
	let rafId = 0
	let stopped = false
	const drawVideoFrame = () => {
		if (stopped) return
		if (videoElement.readyState === videoElement.HAVE_ENOUGH_DATA) {
			// Full-frame draw happens for both screen and webcam sources.
			context?.drawImage(videoElement, 0, 0, canvasElement.width, canvasElement.height)
			if (isScreen && this.localWebcamStream) {
				// NOTE(review): this draws the SAME screen video again as a
				// picture-in-picture overlay in the bottom-right quarter; a true
				// webcam overlay would need the webcam's own <video> element —
				// confirm intended behavior.
				context?.drawImage(
					videoElement,
					canvasElement.width * 0.75,
					canvasElement.height * 0.75,
					canvasElement.width * 0.25,
					canvasElement.height * 0.25
				)
			}
		}
		rafId = requestAnimationFrame(drawVideoFrame)
	}
	drawVideoFrame()
	// Capture the composited frames from the canvas at 30 fps.
	const canvasStream = canvasElement.captureStream(30)
	// Apply resolution constraints to the outgoing video track.
	const constraints = isScreen ? { width: 1920, height: 1080 } : { width: 1280, height: 720 }
	// applyConstraints is async; a failure is non-fatal for capture.
	void canvasStream.getVideoTracks()[0].applyConstraints(constraints)

	// When the source track ends, stop the draw loop (it would otherwise run
	// for the lifetime of the page) and clear the canvas.
	stream.getVideoTracks()[0].addEventListener('ended', () => {
		stopped = true
		cancelAnimationFrame(rafId)
		context?.clearRect(0, 0, canvasElement.width, canvasElement.height)
	})
	return canvasStream
}

/**
* Terminate the streaming session
* 1. Notify the WHIP server by sending a DELETE request
Expand All @@ -154,9 +235,47 @@ export default class WHIPClient extends EventTarget {
// mode: 'cors'
// })
this.peerConnection.close()
this.localStream?.getTracks().forEach((track) => track.stop())
this.videoElement.srcObject = null
this.dispatchEvent(new CustomEvent(`localStreamStopped-${this.trackType}`))
console.log('Disconnected')
}

public disconnectStreamScreen() {
	// Stop every track of the local screen capture, if one was started.
	const tracks = this.localScreenStream?.getTracks() ?? []
	for (const track of tracks) {
		track.stop()
	}
	// Notify listeners that the screen share has ended.
	this.dispatchEvent(new CustomEvent('localStreamStopped-screen'))
	console.log('Screen stream disconnected')
	// Tear down the whole session once screen, webcam and audio are all stopped.
	if (this.areAllStreamsStopped()) {
		this.disconnectStream()
	}
}

public disconnectStreamWebcam() {
	// Stop every track of the local webcam capture, if one was started.
	const tracks = this.localWebcamStream?.getTracks() ?? []
	for (const track of tracks) {
		track.stop()
	}
	// Notify listeners that the webcam feed has ended.
	this.dispatchEvent(new CustomEvent('localStreamStopped-webcam'))
	console.log('Webcam stream disconnected')
	// Tear down the whole session once screen, webcam and audio are all stopped.
	if (this.areAllStreamsStopped()) {
		this.disconnectStream()
	}
}

public disconnectStreamAudio() {
	// Stop every track of the local microphone capture, if one was started.
	const tracks = this.localAudioStream?.getTracks() ?? []
	for (const track of tracks) {
		track.stop()
	}
	// Notify listeners that the audio feed has ended.
	this.dispatchEvent(new CustomEvent('localStreamStopped-audio'))
	console.log('Audio stream disconnected')
	// Tear down the whole session once screen, webcam and audio are all stopped.
	if (this.areAllStreamsStopped()) {
		this.disconnectStream()
	}
}

private areAllStreamsStopped(): boolean {
	// A stream counts as stopped when it was never started, or when every one
	// of its tracks has reached the 'ended' state.
	const isStopped = (stream?: MediaStream): boolean =>
		!stream || stream.getTracks().every((track) => track.readyState === 'ended')

	return (
		isStopped(this.localScreenStream) &&
		isStopped(this.localWebcamStream) &&
		isStopped(this.localAudioStream)
	)
}
}
Loading