/// <summary>
/// Callback invoked for each media frame received on a channel. Copies the
/// unmanaged frame payload, tags it with the gateway's unified stream/frame
/// type and forwards it to the channel's <c>MediaDataSender</c>. Frames of
/// unsupported stream types, and audio frames whose camera mic is off, are
/// discarded.
/// </summary>
/// <param name="pBuf">Pointer to the unmanaged frame payload.</param>
/// <param name="uiBufSize">Payload size in bytes.</param>
/// <param name="pFrameData">Frame metadata (stream type, frame/NALU type).</param>
/// <param name="uiChannel">Channel number used to look up the registered sender.</param>
private void FrameDataCallBackFun(IntPtr pBuf, uint uiBufSize, ref ST_FRAME_DATA pFrameData, uint uiChannel)
{
    NLogEx.LoggerEx logEx = new NLogEx.LoggerEx(log);
    ST_FRAME_DATA frameData = pFrameData;

    // Resolve the sender registered for this channel under the read lock.
    MediaDataSender mediaDataSender = null;
    if (this.handelOperateLock.TryEnterReadLock(CgwConst.ENTER_LOCK_WAIT_TIME))
    {
        try
        {
            // Single lookup instead of ContainsKey + indexer (avoids a double hash lookup).
            this.videoChannelDataSenderDic.TryGetValue(uiChannel, out mediaDataSender);
        }
        finally
        {
            this.handelOperateLock.ExitReadLock();
        }
    }
    if (mediaDataSender == null)
    {
        logEx.Warn("FrameDataCallBackFun mediaDataSender = NULL");
        return;
    }

    // Map the IVS payload type onto the gateway-wide stream type.
    // Supported types break out of the switch; anything else is dropped.
    StreamType streamType;
    switch (frameData.iStreamType)
    {
        // Audio: only G711U and G711A are accepted.
        case (int)IvsStreamType.PAY_LOAD_TYPE_PCMU:
            streamType = StreamType.AUDIO_G711U;
            break;
        case (int)IvsStreamType.PAY_LOAD_TYPE_PCMA:
            streamType = StreamType.AUDIO_G711A;
            break;
        // Video: only the H.264 elementary stream is accepted.
        case (int)IvsStreamType.PAY_LOAD_TYPE_H264:
            streamType = StreamType.VIDEO_H264;
            break;
        default:
            // Unsupported payload type: discard the frame.
            logEx.Warn("FrameDataCallBackFun iStreamType is not right");
            return;
    }

    bool isAudio = streamType == StreamType.AUDIO_G711A || streamType == StreamType.AUDIO_G711U;
    if (isAudio)
    {
        // Audio is only forwarded while the camera's mic is switched on.
        if (this.micOperateLock.TryEnterReadLock(CgwConst.ENTER_LOCK_WAIT_TIME))
        {
            try
            {
                bool micOn;
                // Mic defaults to off: a camera absent from cameraMicStatusDic is
                // treated the same as one whose status is explicitly false.
                if (!this.cameraMicStatusDic.TryGetValue(mediaDataSender.CameraNo, out micOn) || !micOn)
                {
                    return;
                }
            }
            finally
            {
                this.micOperateLock.ExitReadLock();
            }
        }
        // NOTE(review): if the read lock cannot be acquired within the wait time the
        // audio frame is forwarded without a mic check — preserved original behavior.
    }

    try
    {
        MediaData mediaData = new MediaData();
        // Copy the payload out of unmanaged memory.
        byte[] datagram = new byte[uiBufSize];
        Marshal.Copy(pBuf, datagram, 0, (int)uiBufSize);
        if (!isAudio)
        {
            // Prefix video frames with the 4-byte Annex-B start code 0x00000001.
            byte[] newDatagram = new byte[uiBufSize + 4];
            datagram.CopyTo(newDatagram, 4);
            newDatagram[3] = 1;
            mediaData.Data = newDatagram;
            mediaData.Size = (uint)(uiBufSize + 4);
        }
        else
        {
            // Audio passes through unmodified.
            mediaData.Data = datagram;
            mediaData.Size = (uint)(uiBufSize);
        }
        // Raw elementary stream.
        mediaData.DataType = MediaDataType.FRAME_DATA;
        mediaData.StreamType = streamType;
        // Translate the IVS NALU type into the gateway-wide frame type.
        // Enum.GetName returns null for values not defined in IvsH264NaluType;
        // guard against that, because Enum.IsDefined(type, null) throws
        // ArgumentNullException and the frame would be silently dropped by the
        // catch block instead of being forwarded as UNDEFINED.
        string name = Enum.GetName(typeof(IvsH264NaluType), frameData.iFrameDataType);
        if (name != null && Enum.IsDefined(typeof(FrameDataType), name))
        {
            mediaData.FrameType = (FrameDataType)Enum.Parse(typeof(FrameDataType), name);
        }
        else
        {
            mediaData.FrameType = FrameDataType.H264_NALU_TYPE_UNDEFINED;
            logEx.Warn("T28181 FrameDataCallBackFun FrameType is Not Defined, FrameType:{0}", frameData.iFrameDataType);
        }
        // Forward the frame to the registered callback.
        mediaDataSender.SendData(mediaData, this.sender);
    }
    catch (System.Exception ex)
    {
        logEx.Error("FrameDataCallBackFun failed.Execption message:{0}", ex.Message);
    }
}