|
| 1 | +using System; |
| 2 | +using System.Linq; |
| 3 | +using SIPSorcery.Net; |
| 4 | +using SIPSorceryMedia.Abstractions; |
| 5 | +using SIPSorcery.Media; |
| 6 | +using Microsoft.Extensions.Logging; |
| 7 | +using Microsoft.Extensions.Logging.Abstractions; |
| 8 | +using System.Net; |
| 9 | +using System.Threading.Tasks; |
| 10 | +using LanguageExt; |
| 11 | +using LanguageExt.Common; |
| 12 | +using System.Text.Json; |
| 13 | +using System.Text; |
| 14 | + |
| 15 | +namespace demo; |
| 16 | + |
/// <summary>
/// A WebRTC endpoint for the OpenAI realtime API. Establishes a peer connection
/// with an OPUS audio track and an "oai-events" data channel, and exposes events
/// for RTP packets, connection lifecycle and parsed OpenAI server events.
/// </summary>
public class OpenAIRealtimeWebRTCEndPoint : IOpenAIRealtimeWebRTCEndPoint
{
    private const string OPENAI_MODEL = "gpt-4o-realtime-preview-2024-12-17";
    private const string OPENAI_DATACHANNEL_NAME = "oai-events";

    private readonly ILogger _logger = NullLogger.Instance;

    /// <summary>The audio encoder used for the WebRTC audio track (OPUS enabled).</summary>
    public AudioEncoder AudioEncoder { get; }

    /// <summary>The negotiated audio format. Always the OPUS format from the encoder's supported list.</summary>
    public AudioFormat AudioFormat { get; }

    private readonly IOpenAIRealtimeRestClient _openAIRealtimeRestClient;

    private RTCPeerConnection? _rtcPeerConnection = null;

    /// <summary>The underlying peer connection, or null before <see cref="StartConnectAsync"/> is called.</summary>
    public RTCPeerConnection? PeerConnection => _rtcPeerConnection;

    /// <summary>Raised for each RTP packet received from the remote peer.</summary>
    public event Action<IPEndPoint, SDPMediaTypesEnum, RTPPacket>? OnRtpPacketReceived;

    /// <summary>Raised once the data channel opens, i.e. the session is usable.</summary>
    public event Action? OnPeerConnectionConnected;

    /// <summary>Raised when the peer connection closes, fails or disconnects, or the data channel closes.</summary>
    public event Action? OnPeerConnectionClosedOrFailed;

    /// <summary>Raised for each successfully parsed OpenAI server event from the data channel.</summary>
    public event Action<RTCDataChannel, OpenAIServerEventBase>? OnDataChannelMessageReceived;

    public OpenAIRealtimeWebRTCEndPoint(
        ILogger<OpenAIRealtimeWebRTCEndPoint> logger,
        IOpenAIRealtimeRestClient openAIRealtimeRestClient)
    {
        _logger = logger;
        _openAIRealtimeRestClient = openAIRealtimeRestClient;

        AudioEncoder = new AudioEncoder(includeOpus: true);
        AudioFormat = AudioEncoder.SupportedFormats.Single(x => x.FormatName == AudioCodecsEnum.OPUS.ToString());
    }

    /// <summary>
    /// Creates the peer connection, performs the SDP offer/answer exchange with the
    /// OpenAI REST API and applies the answer. Idempotent: a second call while a
    /// connection already exists is a no-op that returns success.
    /// </summary>
    /// <param name="pcConfig">Optional WebRTC peer connection configuration.</param>
    /// <param name="model">Optional model override; defaults to <see cref="OPENAI_MODEL"/>.</param>
    public async Task<Either<Error, Unit>> StartConnectAsync(RTCConfiguration? pcConfig = null, string? model = null)
    {
        if (_rtcPeerConnection != null)
        {
            return Unit.Default;
        }

        _rtcPeerConnection = await CreatePeerConnectionAsync(pcConfig).ConfigureAwait(false);

        var useModel = string.IsNullOrWhiteSpace(model) ? OPENAI_MODEL : model;

        var offer = _rtcPeerConnection.createOffer();
        await _rtcPeerConnection.setLocalDescription(offer).ConfigureAwait(false);

        var sdpAnswerResult = await _openAIRealtimeRestClient.GetSdpAnswerAsync(offer.sdp, useModel).ConfigureAwait(false);

        return sdpAnswerResult.Map(sdpAnswer =>
        {
            var answer = new RTCSessionDescriptionInit
            {
                type = RTCSdpType.answer,
                sdp = sdpAnswer
            };
            _rtcPeerConnection.setRemoteDescription(answer);
            return Unit.Default;
        });
    }

    /// <summary>
    /// Builds the peer connection: adds the send/receive audio track, creates the
    /// events data channel and wires up logging and lifecycle event handlers.
    /// </summary>
    private async Task<RTCPeerConnection> CreatePeerConnectionAsync(RTCConfiguration? pcConfig)
    {
        var pc = new RTCPeerConnection(pcConfig);

        var audioTrack = new MediaStreamTrack(AudioFormat, MediaStreamStatusEnum.SendRecv);
        pc.addTrack(audioTrack);

        // Awaited rather than blocking with .Result. Before the connection is
        // established this completes synchronously, so behavior is unchanged.
        var dataChannel = await pc.createDataChannel(OPENAI_DATACHANNEL_NAME).ConfigureAwait(false);

        pc.OnTimeout += mediaType => _logger.LogDebug("Timeout on media {MediaType}.", mediaType);
        pc.oniceconnectionstatechange += state => _logger.LogDebug("ICE connection state changed to {IceState}.", state);

        pc.onsignalingstatechange += () =>
        {
            if (pc.signalingState == RTCSignalingState.have_local_offer)
            {
                _logger.LogDebug("Local SDP:\n{Sdp}", pc.localDescription.sdp);
            }
            else if (pc.signalingState is RTCSignalingState.have_remote_offer or RTCSignalingState.stable)
            {
                _logger.LogDebug("Remote SDP:\n{Sdp}", pc.remoteDescription?.sdp);
            }
        };

        pc.OnRtpPacketReceived += (ep, mt, rtp) => OnRtpPacketReceived?.Invoke(ep, mt, rtp);

        // Single connection-state handler (previously two separate subscriptions):
        // logs every transition and surfaces terminal states to subscribers.
        pc.onconnectionstatechange += state =>
        {
            _logger.LogDebug("Peer connection connected changed to {State}.", state);

            if (state is RTCPeerConnectionState.closed or
                RTCPeerConnectionState.failed or
                RTCPeerConnectionState.disconnected)
            {
                OnPeerConnectionClosedOrFailed?.Invoke();
            }
        };

        dataChannel.onopen += () => OnPeerConnectionConnected?.Invoke();
        dataChannel.onmessage += OnDataChannelMessage;
        dataChannel.onclose += () => OnPeerConnectionClosedOrFailed?.Invoke();

        return pc;
    }

    /// <summary>
    /// Sends an encoded audio sample to the remote peer. Silently ignored when the
    /// peer connection is not yet connected.
    /// </summary>
    /// <param name="durationRtpUnits">Duration of the sample in RTP timestamp units.</param>
    /// <param name="sample">The encoded audio payload.</param>
    public void SendAudio(uint durationRtpUnits, byte[] sample)
    {
        if (_rtcPeerConnection != null && _rtcPeerConnection.connectionState == RTCPeerConnectionState.connected)
        {
            _rtcPeerConnection.SendAudio(durationRtpUnits, sample);
        }
    }

    /// <summary>
    /// Sends a session.update event to adjust the voice, instructions and model.
    /// </summary>
    /// <param name="voice">The voice the OpenAI model should use.</param>
    /// <param name="instructions">Optional instructions; when omitted a default joke-bot prompt is used.</param>
    /// <param name="model">Optional model override for the session.</param>
    public Either<Error, Unit> SendSessionUpdate(OpenAIVoicesEnum voice, string? instructions = null, string? model = null)
    {
        var sessionUpdate = new OpenAISessionUpdate
        {
            EventID = Guid.NewGuid().ToString(),
            Session = new OpenAISession
            {
                Voice = voice,
                // Default prompt, overridden below when the caller supplies instructions.
                Instructions = "You are a joke bot. Tell a Dad joke every chance you get.",
            }
        };

        if (!string.IsNullOrWhiteSpace(model))
        {
            sessionUpdate.Session.Model = model;
        }

        if (!string.IsNullOrWhiteSpace(instructions))
        {
            sessionUpdate.Session.Instructions = instructions;
        }

        // NOTE: the log description previously said "response create" — copy-paste bug.
        return SendDataChannelJson("session update", sessionUpdate.ToJson());
    }

    /// <summary>
    /// Sends a response.create event asking the model to generate a response.
    /// </summary>
    /// <param name="voice">The voice the OpenAI model should use.</param>
    /// <param name="instructions">The instructions for the response.</param>
    public Either<Error, Unit> SendResponseCreate(OpenAIVoicesEnum voice, string instructions)
    {
        var responseCreate = new OpenAIResponseCreate
        {
            EventID = Guid.NewGuid().ToString(),
            Response = new OpenAIResponseCreateResponse
            {
                Instructions = instructions,
                Voice = voice.ToString()
            }
        };

        return SendDataChannelJson("response create", responseCreate.ToJson());
    }

    /// <summary>
    /// Shared guard + send logic for data channel events: verifies the connection is
    /// established and a data channel exists, then sends the serialized event.
    /// </summary>
    private Either<Error, Unit> SendDataChannelJson(string description, string json)
    {
        if (_rtcPeerConnection == null || _rtcPeerConnection.connectionState != RTCPeerConnectionState.connected)
        {
            return Error.New("Peer connection not established.");
        }

        // FirstOrDefault instead of First: report a failure through the Either
        // contract rather than throwing InvalidOperationException.
        var dc = _rtcPeerConnection.DataChannels.FirstOrDefault();
        if (dc == null)
        {
            return Error.New("Peer connection has no data channel.");
        }

        _logger.LogInformation("Sending {Description} to first call data channel {Label}.", description, dc.label);
        _logger.LogDebug(json);

        dc.send(json);

        return Unit.Default;
    }

    /// <summary>
    /// Event handler for WebRTC data channel messages. Parses the raw payload into a
    /// typed OpenAI server event and raises <see cref="OnDataChannelMessageReceived"/>.
    /// (A previous redundant, unused JsonSerializer.Deserialize pass has been removed.)
    /// </summary>
    private void OnDataChannelMessage(RTCDataChannel dc, DataChannelPayloadProtocols protocol, byte[] data)
    {
        var serverEventModel = OpenAIDataChannelManager.ParseDataChannelMessage(data);
        serverEventModel.IfSome(e => OnDataChannelMessageReceived?.Invoke(dc, e));
    }
}
0 commit comments