/// <summary>
/// Sends a single audio frame as an RTP packet to the remote end point.
/// </summary>
/// <param name="payload">The raw audio payload to place in the RTP packet. Ignored (with a warning) if null or empty.</param>
/// <param name="frameSpacing">The amount to advance the RTP timestamp by for this frame (timestamp units per frame).</param>
/// <param name="payloadType">The RTP payload type ID to set in the packet header.</param>
/// <remarks>
/// No packet is sent if the channel is closed or the RTP socket is in an error state.
/// Send failures are logged and raise <c>OnRTPSocketDisconnected</c> rather than propagating.
/// </remarks>
public void SendAudioFrame(byte[] payload, uint frameSpacing, int payloadType)
{
    try
    {
        if (_isClosed)
        {
            logger.Warn("SendAudioFrame cannot be called on a closed RTP channel.");
        }
        else if (_rtpSocketError != SocketError.Success)
        {
            logger.Warn("SendAudioFrame was called for an RTP socket in an error state of " + _rtpSocketError + ".");
        }
        else if (payload == null || payload.Length == 0)
        {
            // Guard against a null/empty payload; previously a null payload threw inside the
            // try block and was misreported as a socket disconnection by the catch handler.
            logger.Warn("SendAudioFrame was called with an empty payload.");
        }
        else
        {
            // Seed the timestamp from the wall clock on the first frame, then advance it by the
            // frame spacing. uint addition wraps modulo 2^32, which is the RTP timestamp wrap
            // behaviour required by RFC 3550. (The previous "% UInt32.MaxValue" wrapped one
            // short of 2^32, causing an off-by-one at rollover.)
            // NOTE(review): DateTime.Now is local time; NPT is UTC-based — confirm
            // DateTimeToNptTimestamp32 accounts for this (the initial offset is arbitrary per
            // RFC 3550, so this may be benign).
            _timestamp = (_timestamp == 0) ? DateTimeToNptTimestamp32(DateTime.Now) : unchecked(_timestamp + frameSpacing);

            RTPPacket rtpPacket = new RTPPacket(payload.Length);
            rtpPacket.Header.SyncSource = _syncSource;
            rtpPacket.Header.SequenceNumber = _sequenceNumber++;
            rtpPacket.Header.Timestamp = _timestamp;
            // NOTE(review): the marker bit is set on every frame; RFC 3550 conventionally sets
            // it only on the first packet after a silence period — confirm this is intended.
            rtpPacket.Header.MarkerBit = 1;
            rtpPacket.Header.PayloadType = payloadType;

            Buffer.BlockCopy(payload, 0, rtpPacket.Payload, 0, payload.Length);

            byte[] rtpBytes = rtpPacket.GetBytes();
            _rtpSocket.SendTo(rtpBytes, rtpBytes.Length, SocketFlags.None, _remoteEndPoint);
        }
    }
    catch (Exception excp)
    {
        // Suppress errors raised after the channel has been deliberately closed; otherwise
        // log and notify subscribers that the RTP socket has failed.
        if (!_isClosed)
        {
            logger.Warn("Exception RTPChannel.SendAudioFrame attempting to send to the RTP socket at " + _remoteEndPoint + ". " + excp);
            OnRTPSocketDisconnected?.Invoke();
        }
    }
}