//-----------------------------------------------------------------------------
// Filename: Program.cs
//
// Description: An example WebRTC server application that uses OpenGL to produce
// a video stream for the remote peer. In this case OpenGL is used to process the
// received audio stream from the remote peer and generate a visual representation,
// which is sent back to the remote peer in a video stream.
//
// The high level steps are:
// 1. Establish a WebRTC peer connection with a remote peer with a receive only
// audio stream and a send only video stream.
// 2. Receive audio packets from the remote peer and process them with the OpenGL
// program to generate a visual representation of the received audio samples.
// 3. Send the visual representation back to the remote peer as a video stream.
//
// Author(s):
// Aaron Clauson (aaron@sipsorcery.com)
//
// History:
// 04 Jan 2025 Aaron Clauson Created, Dublin, Ireland.
//
// License:
// BSD 3-Clause "New" or "Revised" License, see included LICENSE.md file.
//-----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Serilog;
using Serilog.Extensions.Logging;
using SIPSorcery.Media;
using SIPSorcery.Net;
using SIPSorceryMedia.Encoders;
using WebSocketSharp.Server;
using AudioScope;
using System.Numerics;
using SIPSorceryMedia.Abstractions;
namespace demo
{
class Program
{
private const int WEBSOCKET_PORT = 8081;
private const int AUDIO_PACKET_DURATION = 20; // 20ms of audio per RTP packet for PCMU & PCMA.
private static Microsoft.Extensions.Logging.ILogger logger = NullLogger.Instance;
private static FormAudioScope _audioScopeForm;
private static RTCPeerConnection _pc;
static void Main()
{
Console.WriteLine("WebRTC OpenGL Demo - Audio Scope");
logger = AddConsoleLogger();
// Spin up a dedicated STA thread to run WinForms.
Thread uiThread = new Thread(() =>
{
// WinForms initialization must be on an STA thread.
Application.EnableVisualStyles();
Application.SetCompatibleTextRenderingDefault(false);
_audioScopeForm = new FormAudioScope(false);
Application.Run(_audioScopeForm);
});
uiThread.SetApartmentState(ApartmentState.STA);
uiThread.IsBackground = true;
uiThread.Start();
// Start web socket.
Console.WriteLine("Starting web socket server...");
var webSocketServer = new WebSocketServer(IPAddress.Any, WEBSOCKET_PORT);
webSocketServer.AddWebSocketService<WebRTCWebSocketPeer>("/", (peer) =>
{
// For the purposes of the demo only one peer connection at a time is managed.
peer.CreatePeerConnection = CreatePeerConnection;
_pc = peer.RTCPeerConnection;
});
webSocketServer.Start();
Console.WriteLine($"Waiting for web socket connections on {webSocketServer.Address}:{webSocketServer.Port}...");
Console.WriteLine("Press ctrl-c to exit.");
// Ctrl-c will gracefully exit the call at any point.
ManualResetEvent exitMre = new ManualResetEvent(false);
Console.CancelKeyPress += delegate (object sender, ConsoleCancelEventArgs e)
{
Console.WriteLine("Exiting...");
e.Cancel = true;
_pc?.Close("User exit");
_audioScopeForm.Invoke(() => _audioScopeForm.Close());
exitMre.Set();
};
// Wait for a signal saying the call failed, was cancelled with ctrl-c or completed.
exitMre.WaitOne();
}
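/// <summary>
/// Creates the WebRTC peer connection for the remote peer. The connection has a send only
/// video track, fed by the rendered audio scope frames, and a receive only audio track for
/// the remote peer's PCMU/PCMA audio.
/// </summary>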
private static Task<RTCPeerConnection> CreatePeerConnection()
{
RTCConfiguration config = new RTCConfiguration
{
//iceServers = new List<RTCIceServer> { new RTCIceServer { urls = STUN_URL } },
//X_BindAddress = IPAddress.Any
};
var pc = new RTCPeerConnection(config);
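// The video encoder end point encodes the raw audio scope frames for transmission and the
// audio encoder is used to decode the incoming PCMU/PCMA RTP payloads.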
var videoEncoderEndPoint = new VideoEncoderEndPoint();
var audioEncoder = new AudioEncoder();
// For the sake of the demo stick to basic audio formats with a predictable sampling rate.
var supportedAudioFormats = new List<AudioFormat>
{
new AudioFormat(SDPWellKnownMediaFormatsEnum.PCMU),
new AudioFormat(SDPWellKnownMediaFormatsEnum.PCMA),
};
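// Video is send only (the rendered audio scope) and audio is receive only, as described in
// the file header.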
MediaStreamTrack videoTrack = new MediaStreamTrack(videoEncoderEndPoint.GetVideoSourceFormats(), MediaStreamStatusEnum.SendOnly);
pc.addTrack(videoTrack);
MediaStreamTrack audioTrack = new MediaStreamTrack(supportedAudioFormats, MediaStreamStatusEnum.RecvOnly);
pc.addTrack(audioTrack);
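// Forward encoded video samples to the peer connection and set the video source format once
// the video format has been negotiated.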
videoEncoderEndPoint.OnVideoSourceEncodedSample += pc.SendVideo;
pc.OnVideoFormatsNegotiated += (formats) => videoEncoderEndPoint.SetVideoSourceFormat(formats.First());
pc.oniceconnectionstatechange += (state) => logger.LogDebug($"ICE connection state change to {state}.");
pc.onconnectionstatechange += (state) => logger.LogDebug($"Peer connection state change to {state}.");
pc.onsignalingstatechange += () =>
{
logger.LogDebug($"Signalling state change to {pc.signalingState}.");
if (pc.signalingState == RTCSignalingState.have_local_offer)
{
logger.LogDebug($"Local SDP offer:\n{pc.localDescription.sdp}");
}
else if (pc.signalingState == RTCSignalingState.stable)
{
logger.LogDebug($"Remote SDP offer:\n{pc.remoteDescription.sdp}");
}
};
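// Each received audio RTP packet is decoded to PCM, normalised to [-1, 1) as complex samples
// for the audio scope, rendered to an RGB frame on the WinForms thread and then handed to the
// video encoder end point.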
pc.OnRtpPacketReceived += (IPEndPoint rep, SDPMediaTypesEnum media, RTPPacket rtpPkt) =>
{
//logger.LogDebug($"RTP {media} pkt received, SSRC {rtpPkt.Header.SyncSource}, payload {rtpPkt.Header.PayloadType}, SeqNum {rtpPkt.Header.SequenceNumber}.");
if (media == SDPMediaTypesEnum.audio)
{
var decodedSample = audioEncoder.DecodeAudio(rtpPkt.Payload, pc.AudioStream.NegotiatedFormat.ToAudioFormat());
var samples = decodedSample
.Select(s => new Complex(s / 32768f, 0f))
.ToArray();
var frame = _audioScopeForm.Invoke(() => _audioScopeForm.ProcessAudioSample(samples));
videoEncoderEndPoint.ExternalVideoSourceRawSample(AUDIO_PACKET_DURATION,
FormAudioScope.AUDIO_SCOPE_WIDTH,
FormAudioScope.AUDIO_SCOPE_HEIGHT,
frame,
VideoPixelFormatsEnum.Rgb);
}
};
return Task.FromResult(pc);
}
/// <summary>
/// Adds a console logger. Can be omitted if internal SIPSorcery debug and warning messages are not required.
/// </summary>
private static Microsoft.Extensions.Logging.ILogger AddConsoleLogger()
{
var seriLogger = new LoggerConfiguration()
.Enrich.FromLogContext()
.MinimumLevel.Is(Serilog.Events.LogEventLevel.Debug)
.WriteTo.Console()
.CreateLogger();
var factory = new SerilogLoggerFactory(seriLogger);
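// Route the SIPSorcery library's internal logging through the Serilog console logger.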
SIPSorcery.LogFactory.Set(factory);
return factory.CreateLogger<Program>();
}
}
}