概述
行业应用多是集成业务,需要集成多种设备多种协议,metaRTC3.0将助力行业应用实现一类库支持WEBRTC/RTMP/RTSP/GB28181/SRT/QUIC等多种协议,降低行业应用中流媒体的复杂性。
下载源码
https://github.com/metartc/metaRTC (metaRTC: 嵌入式版本的webrtc)
https://gitee.com/metartc/metaRTC
源代码
初始化代码
//YangContext *m_context;  // application-wide context, created elsewhere
//Fill in YangStreamConfig before creating the stream handle
YangStreamConfig streamConf;
streamConf.streamOptType=Yang_Stream_Publish;//Yang_Stream_Play
strcpy(streamConf.app,app.c_str());
strcpy(streamConf.serverIp,server.c_str());
streamConf.serverPort=pport;
strcpy(streamConf.stream,stream.c_str());
streamConf.uid=0;//0 singleuser 1 multiuser
memset(streamConf.localIp,0,sizeof(streamConf.localIp));
strcpy(streamConf.localIp,localIp.c_str());
streamConf.localPort=localPort;
.....
//Create the stream-protocol handle (heap-allocated, zeroed)
YangStreamHandle* sh=(YangStreamHandle*)calloc(sizeof(YangStreamHandle),1);
//Transport selection: Yang_Webrtc Yang_Rtmp Yang_Srt
int32_t nettype=Yang_Webrtc;
//YangContext* m_context
//NULL callback: publish side does not receive media frames
yang_create_streamHandle(nettype,sh,0,&m_context->avinfo,&m_context->stream,NULL);
sh->init(sh->context,&streamConf);
数据回调函数
void g_playrecv_receiveAudio(void* user,YangFrame *audioFrame){
if(user==NULL) return;
YangPlayReceive* rtcHandle=(YangPlayReceive*)user;
rtcHandle->receiveAudio(audioFrame);
}
void g_playrecv_receiveVideo(void* user,YangFrame *videoFrame){
if(user==NULL) return;
YangPlayReceive* rtcHandle=(YangPlayReceive*)user;
rtcHandle->receiveVideo(videoFrame);
}
//设置YangReceiveCallback m_recvCallback;
m_recvCallback.receiveAudio=g_playrecv_receiveAudio;
m_recvCallback.receiveVideo=g_playrecv_receiveVideo;
m_recvCallback.context=this;
......
streamConf.streamOptType = Yang_Stream_Play;
YangStreamHandle* sh=(YangStreamHandle*)calloc(sizeof(YangStreamHandle),1);
int32_t nettype=Yang_Webrtc;
yang_create_streamHandle(nettype,sh,m_context->avinfo,&m_context->stream,&m_recvCallback);
删除代码
// Teardown order: disconnect first, then destroy transport state, then free the handle.
if(m_recv) m_recv->disConnectServer(m_recv->context);
yang_destroy_streamHandle(m_recv);
yang_free(m_recv);
扩展协议支持
//既可以用纯C实现也可以用C++实现
//这是srt例子
//实现yang_create_stream_srt和yang_destroy_stream_srt
//实现函数connectServer/disConnectServer 等
void yang_create_stream_srt(YangStreamHandle* handle) {
if(handle==NULL||handle->context==NULL) return;
handle->context->context=calloc(sizeof(YangStreamSrt),1);
YangStreamSrt* srt=(YangStreamSrt*)handle->context->context;
srt->callback.context=handle->context;
srt->callback.on_data_callback=yang_stream_srt_on_data_callback;
srt->bufLen = 0;
srt->buffer = NULL;
srt->bufReceiveLen = 0, srt->bufRemainLen = 0;
srt->srt = NULL;
memset(&srt->audioFrame,0,sizeof(YangFrame));
memset(&srt->videoFrame,0,sizeof(YangFrame));
handle->connectServer = yang_stream_srt_connectServer;
handle->disConnectServer = yang_stream_srt_disConnectServer;
handle->getConnectState = yang_stream_srt_getConnectState;
handle->isconnected = yang_stream_srt_isconnected;
handle->publishAudioData = yang_stream_srt_publishAudioData;
handle->publishVideoData = yang_stream_srt_publishVideoData;
handle->receiveData = yang_stream_srt_receiveData;
handle->reconnect = yang_stream_srt_reconnect;
}
/**
 * Tear down the SRT transport state installed by yang_create_stream_srt.
 *
 * Releases the receive buffer and the YangStreamSrt struct itself, and
 * clears the context pointer so a double destroy is harmless.
 */
void yang_destroy_stream_srt(YangStreamHandle* handle) {
	if(handle==NULL||handle->context==NULL||handle->context->context==NULL) return;
	YangStreamSrt* srt=(YangStreamSrt*)handle->context->context;
	//buffer is released with delete[] — presumably allocated with new[]
	//in receiveData; confirm the allocator matches (mixing with malloc is UB).
	if (srt->buffer) {
		delete[] srt->buffer;
		srt->buffer = NULL;
	}
	//Fix: the srt struct was calloc'd in yang_create_stream_srt but never
	//freed here, leaking it on every destroy.
	free(srt);
	handle->context->context = NULL; //avoid a dangling pointer
}
//修改代码支持新的协议 libmetartc:yangstream/YangStreamHandle.cpp
/**
 * Create a stream handle: run the common initialization, then apply
 * transport-specific wiring for the selected protocol.
 * Extension point: add a case per new transport (see the SRT example).
 */
void yang_create_streamHandle(int32_t transType,YangStreamHandle* streamHandle,int32_t puid,YangAVInfo* avinfo,YangContextStream* stream,YangReceiveCallback* callback) {
	if(streamHandle==NULL) return;
	//Base initialization shared by every transport.
	yang_create_stream(transType,streamHandle,puid,avinfo,stream,callback);
	//Transport-specific overrides.
	switch(transType){
	case Yang_Srt:
		yang_create_stream_srt(streamHandle);
		break;
	default:
		break;
	}
}
/**
 * Destroy a stream handle: release transport-specific state first,
 * then run the common teardown. Safe on a NULL handle.
 */
void yang_destroy_streamHandle(YangStreamHandle* streamHandle) {
	if(streamHandle==NULL) return;
	//Fix: guard context before dereferencing — the sibling create/destroy
	//functions check it, but this one read context->transtype unconditionally.
	if(streamHandle->context&&streamHandle->context->transtype==Yang_Srt){
		yang_destroy_stream_srt(streamHandle);
	}
	yang_destroy_stream(streamHandle);
}