| 平台 | 内核版本 |
| --- | --- |
| RK1108 | Linux 3.1 |
之前的程序不是很清楚,我们再重新看一下:
主函数
文件:main.c
// MiniGUI entry point: brings every subsystem up in dependency order
// (config -> database -> GPIO -> audio mixer -> video -> protocol stack ->
// sensor detector) and finally creates the video UI layer.
// NOTE(review): the call order looks deliberate (e.g. configLoad() before
// everything else, formVideoLayerCreate() last) — do not reorder.
int MiniGUIMain(int argc, const char* argv[])
{
printf("stat--->%s,%s\n",DEVICE_SVERSION,DEVICE_KVERSION); // log software/kernel version pair for diagnostics
configLoad();          // load persisted configuration
sqlInit();             // open/prepare the local database
gpioInit();            // set up GPIO lines
myMixerInit();         // audio mixer
myVideoInit();         // video subsystem (see my_video.c)
protocolInit();        // network/protocol stack
sensorDetectorInit();  // sensors
formVideoLayerCreate();// create the on-screen video layer
return 0;
}
我们分析跟video相关的程序:
文件:my_video.c
// Builds the global MyVideo operations table, clears the call ("talk")
// state, creates the recursive video mutex, initialises the underlying
// media pipeline, creates the call state machine and starts the video
// timer thread.  Must run exactly once at startup, before any video use.
void myVideoInit(void)
{
// Allocate the zero-initialised dispatch table and wire every operation
// to its file-local implementation.
my_video = (MyVideo *) calloc(1,sizeof(MyVideo));
my_video->showLocalVideo = showLocalVideo;
my_video->showPeerVideo = showPeerVideo;
my_video->hideVideo = hideVideo;
my_video->faceRegist = faceRegist;
my_video->faceDelete = faceDelete;
my_video->faceRecognizer = faceRecognizer;
my_video->capture = capture;
my_video->recordStart = recordStart;
my_video->recordStop = recordStop;
my_video->videoCallOut = videoCallOut;
my_video->videoCallOutAll = videoCallOutAll;
my_video->videoCallIn = videoCallIn;
my_video->videoHangup = videoHangup;
my_video->videoAnswer = videoAnswer;
my_video->videoGetCallTime = videoGetCallTime;
my_video->recordWriteCallback = recordWriteCallback;
my_video->delaySleepTime = delaySleepTime;
// Reset call-session state before anything can touch it.
memset(&talk_data,0,sizeof(talk_data));
// Recursive mutex: the same thread may re-lock while already holding it
// (PTHREAD_MUTEX_RECURSIVE_NP is the Linux/NP spelling of RECURSIVE).
pthread_mutexattr_t mutexattr;
pthread_mutexattr_init(&mutexattr);
pthread_mutexattr_settype(&mutexattr, PTHREAD_MUTEX_RECURSIVE_NP);
pthread_mutex_init(&mutex, &mutexattr);
pthread_mutexattr_destroy(&mutexattr);
// Bring up the media pipeline (JPEG, face engine, rk video — see init()).
init();
// Create the call state machine starting in ST_IDLE, dispatching via
// stmHandle with my_video as user context.
stm = stateMachineCreate(ST_IDLE,
state_table,
sizeof (state_table) / sizeof ((state_table) [0]),
0,
stmHandle,
my_video,
&st_debug);
// Periodic video timer (call duration etc. — see threadVideoTimer).
createThread(threadVideoTimer,NULL);
}
首先看下回调函数的定义:
// Operations table for the video subsystem; populated in myVideoInit()
// and used as the single entry point for all video features.
typedef struct _MyVideo {
void (*showLocalVideo)(void); // show the local camera preview
void (*showPeerVideo)(void); // show the remote peer's video
void (*hideVideo)(void); // hide/stop video display
int (*faceRegist)( unsigned char *image_buff,
int w,int h,
char *id,char *nick_name,char *url);// register a face (image + identity)
void (*faceDelete)(char *id); // delete a registered face by id
int (*faceRecognizer)( unsigned char *image_buff,
int w,int h,
int *age,int *sex);// recognize a face; outputs age/sex
void (*capture)(int type,int count,char *nick_name,char *user_id); // still-image capture
void (*recordStart)(int type); // start A/V recording
void (*recordWriteCallback)(char *data,int size); // sink for encoded record data
void (*recordStop)(void); // stop recording
void (*videoCallOut)(char *user_id); // place a call to one user
void (*videoCallOutAll)(void); // ring all users
void (*videoCallIn)(char *user_id); // handle an incoming call
void (*videoAnswer)(int dir,int dev_type); // dir: 0 = answered locally, 1 = answered by the peer
void (*videoHangup)(void); // terminate the call
int (*videoGetCallTime)(void); // elapsed call time
int (*delaySleepTime)(int type); // extend sleep timeout: 0 = short, 1 = long
}MyVideo;
我们重点看下初始化函数 init:
// One-shot init of the media stack: JPEG codec, face-recognition engine
// and — only when built with USE_VIDEO — the Rockchip video pipeline.
static void init(void)
{
jpegIncDecInit(); // JPEG encode/decode support
myFaceInit();     // face-recognition engine
#ifdef USE_VIDEO
rkVideoInit();    // camera/display/encoder pipeline (video_server.cpp)
#endif
}
文件:video_server.cpp
我们重点看下 rkVideoInit:
// Creates the global RKVideo pipeline instance.
//
// BUG FIX: the function is declared to return int but the original body
// fell off the end without a return statement — undefined behaviour for a
// non-void function in C++.  It now reports success to the caller.
// (On allocation failure `new` throws std::bad_alloc rather than
// returning null, so reaching the return means the object exists.)
int rkVideoInit(void)
{
    // createThread(threadVideoInit,NULL);
    rkvideo = new RKVideo();
    return 0;
}
RKVideo分析
文件:video_server.cpp
分析:
首先看一下类:
// Top-level owner of the Rockchip camera pipeline: one capture device
// (cam_dev) feeding optional processing units (display, face, H.264
// encode) that are attached/detached via connect()/disconnect().
class RKVideo {
public:
RKVideo();
~RKVideo();
// Attach a processing unit `next` to capture path `mpath` so it receives
// frame buffers in format frmFmt, using `num` buffers from `allocator`.
int connect(std::shared_ptr<CamHwItf::PathBase> mpath,
std::shared_ptr<StreamPUBase> next,
frm_info_t& frmFmt, const uint32_t num,
std::shared_ptr<RKCameraBufferAllocator> allocator);
// Detach a previously connected processing unit.
void disconnect(std::shared_ptr<CamHwItf::PathBase> mpath,
std::shared_ptr<StreamPUBase> next);
void displayPeer(int w,int h,void* decCallback); // show decoded peer video
void displayLocal(void);                         // show local preview
void displayOff(void);                           // stop all display
void faceOnOff(bool type);                       // toggle face processing
void h264EncOnOff(bool type,int w,int h,EncCallbackFunc encCallback); // toggle H.264 encode
void capture(char *file_name);                   // grab a still to file
void recordStart(EncCallbackFunc recordCallback);
void recordSetStopFunc(RecordStopCallbackFunc recordCallback);
void recordStop(void);
private:
int display_state_; // 0 = off, 1 = local preview, 2 = remote peer video
bool face_state_;    // face processing active
bool h264enc_state_; // H.264 encoding active
struct rk_cams_dev_info cam_info; // camera probe results
std::shared_ptr<RKCameraBufferAllocator> ptr_allocator; // ion buffer allocator
CameraFactory cam_factory;
std::shared_ptr<RKCameraHal> cam_dev;            // capture device HAL
std::shared_ptr<DisplayProcess> display_process; // display PU
std::shared_ptr<FaceProcess> face_process;       // face PU
std::shared_ptr<H264Encoder> encode_process;     // encoder PU
};
然后我们看下实例化:
// Probes attached cameras, builds the HAL for camera 0 (1280x720 @ 25fps,
// 5 buffers from an ion-backed allocator) and creates the display / face /
// H.264-encode processing units.  Bails out early — leaving cam_dev and
// the processing units empty — when no camera is detected.
RKVideo::RKVideo()
{
display_state_ = 0;    // video output off
face_state_ = false;
h264enc_state_ = false;
memset(&cam_info, 0, sizeof(cam_info));
// Enumerate cameras attached to ISP/CIF/USB controllers.
CamHwItf::getCameraInfos(&cam_info);
if (cam_info.num_camers <= 0) {
printf("[rv_video:%s]fail\n",__func__);
// NOTE(review): init_ok stays unset and every member below stays null
// on this path — callers must tolerate a half-constructed RKVideo.
return ;
}
// Pick camera 0 and wrap it in the HAL abstraction.
shared_ptr<CamHwItf> new_dev = cam_factory.GetCamHwItf(&cam_info, 0);
cam_dev = ((shared_ptr<RKCameraHal>)
new RKCameraHal(new_dev, cam_info.cam[0]->index, cam_info.cam[0]->type));
cam_dev->init(1280, 720, 25);
ptr_allocator = shared_ptr<RKCameraBufferAllocator>(new RKCameraBufferAllocator());
cam_dev->start(5, ptr_allocator); // 5 frame buffers in flight
// Processing units; make_shared failure is logged but not fatal here.
display_process = std::make_shared<DisplayProcess>();
if (display_process.get() == nullptr)
std::cout << "[rv_video]DisplayProcess make_shared error" << std::endl;
face_process = std::make_shared<FaceProcess>();
if (face_process.get() == nullptr)
std::cout << "[rv_video]FaceProcess make_shared error" << std::endl;
encode_process = std::make_shared<H264Encoder>();
if (encode_process.get() == nullptr)
std::cout << "[rv_video]H264Encoder make_shared error" << std::endl;
init_ok = 1; // presumably a file-scope "pipeline ready" flag — defined outside this excerpt
}
其中第一步也是判断摄像头个数,然后通过 GetCamHwItf 获取摄像头控制器。
控制器种类:
- CamIsp10DevHwItf(ISP10,RK3288)/CamIsp11DevHwItf(ISP11,RK1108)
- CamCifDevHwItf(CIF 控制器)
- CamUSBDevHwItf(USB Camera)
shared_ptr<CamHwItf> new_dev = cam_factory.GetCamHwItf(&cam_info, 0);
GetCamHwItf 如下获取 Camera 的 Hardware 接口:
文件:md_camera_factory.h
// Factory that hands out the right CamHwItf implementation for a probed
// camera: ISP-attached sensors go through getCamHwItf(), CIF-attached
// sensors get a CamCifDevHwItf bound to their CIF controller.
class CameraFactory final {
public:
    CameraFactory() {}
    virtual ~CameraFactory() {}

    // Returns a hardware interface for cam_info->cam[index], or an empty
    // shared_ptr when the attachment type is neither ISP nor CIF
    // (e.g. USB cameras are not handled by this factory).
    shared_ptr<CamHwItf> GetCamHwItf(struct rk_cams_dev_info* cam_info, const int index)
    {
        shared_ptr<CamHwItf> dev;
        if (cam_info->cam[index]->type == RK_CAM_ATTACHED_TO_ISP)
        {
            dev = getCamHwItf(&cam_info->isp_dev);
        }
        else if (cam_info->cam[index]->type == RK_CAM_ATTACHED_TO_CIF)
        {
            // Look up which CIF controller this sensor hangs off.
            int cif_index = ((struct rk_cif_dev_info*)(cam_info->cam[index]->dev))->cif_index;
            dev = shared_ptr<CamHwItf>(new CamCifDevHwItf(&(cam_info->cif_devs.cif_devs[cif_index])));
        }
        else
        {
            return nullptr; // was NULL; nullptr is the correct null for a shared_ptr return
        }
        return dev;
    }
};
然后对应实例化:
cam_dev = ((shared_ptr<RKCameraHal>)
new RKCameraHal(new_dev, cam_info.cam[0]->index, cam_info.cam[0]->type));
cam_dev->init(1280, 720, 25);
CameraHal分析
首先看下硬件抽象层的类
// Thin hardware-abstraction wrapper around one CamHwItf camera device:
// holds the device, its main path (MP) and the negotiated frame format.
class RKCameraHal {
public:
// Attachment type of the sensor (ISP/CIF/... — set at construction).
int type(void) {
return type_;
}
// Sensor index passed at construction.
int index(void) {
return index_;
}
// Negotiated capture format (valid after init()).
virtual frm_info_t& format(void) {
return format_;
}
RKCameraHal(std::shared_ptr<CamHwItf> dev, int index, int type);
~RKCameraHal();
// Record the desired width/height/fps and push fps to the device.
void init(const uint32_t width, const uint32_t height, const int fps);
// Prepare `num` buffers on the main path and start streaming.
void start(const uint32_t num, std::shared_ptr<RKCameraBufferAllocator> ptr_allocator);
void stop(void);
// Main capture path; notifiers are attached here to receive frames.
std::shared_ptr<CamHwItf::PathBase>& mpath(void) {
return mpath_;
}
std::shared_ptr<CamHwItf> camdev;            // underlying device
std::shared_ptr<CamHwItf::PathBase> mpath_;  // cached main path (MP)
int type_;           // sensor attachment type
int index_;          // sensor index
frm_info_t format_;  // capture format set by init()
};
然后看一下我们的实例化:
// Binds the HAL wrapper to an already-created CamHwItf device, initialises
// the low-level hardware for the given sensor index and caches the main
// path (MP) for later prepare()/start() calls.
//
// FIX: the member-initialiser list is now written in member declaration
// order (camdev, type_, index_).  The original listed index_ before type_,
// which disagrees with the actual initialisation order C++ uses
// (declaration order) and triggers -Wreorder; harmless here only because
// the two initialisers are independent.
RKCameraHal::RKCameraHal(std::shared_ptr<CamHwItf> dev,
                         int index, int type)
    : camdev(dev)
    , type_(type)
    , index_(index)
{
    printf("[RKCameraHal:%s]\n",__func__);
    // initHw() presumably powers up / probes the sensor; failure is only
    // logged — TODO(review): confirm callers can cope with a dead device.
    if (camdev->initHw(index) == false)
        printf("[RKCameraHal:%s] camdev initHw error\n",__func__);
    mpath_ = camdev->getPath(CamHwItf::MP);
}
然后看一下init
// Records the negotiated capture format (NV12, JPEG colour space) in
// format_ and pushes the requested frame rate down to the device.
void RKCameraHal::init(const uint32_t width,
const uint32_t height, const int fps)
{
printf("[RKCameraHal:%s] \n",__func__);
format_.frmSize.width = width;
format_.frmSize.height = height;
format_.frmFmt = HAL_FRMAE_FMT_NV12; // NOTE(review): "FRMAE" misspelling comes from the vendor header
format_.colorSpace = HAL_COLORSPACE_JPEG;
format_.fps = fps;
// fps is handed to the driver as a numerator/denominator pair whose
// ratio denominator/numerator equals the frame rate.
HAL_FPS_INFO_t fps_info;
fps_info.numerator = 1;
fps_info.denominator = fps;
// NOTE(review): assumes setFps() returns true on success, so this log
// fires only on failure — yet the message reads like a success report.
// Confirm the return convention before "fixing" either the condition
// or the text.
if (!camdev->setFps(fps_info))
printf("[RKCameraHal:%s]dev set fps is %.2f\n", __func__,
1.0 * fps_info.denominator / fps_info.numerator);
}
上面初始化完以后:
RKCameraBufferAllocator
ptr_allocator = shared_ptr<RKCameraBufferAllocator>(new RKCameraBufferAllocator());
首先看下类定义:
// Camera buffer allocator backed by the Linux ion memory allocator
// (/dev/ion); hands out CameraBuffer objects for the capture pipeline.
class RKCameraBufferAllocator : public CameraBufferAllocator {
friend class RKCameraBuffer;
public:
RKCameraBufferAllocator(void);
virtual ~RKCameraBufferAllocator(void);
// Allocate one frame buffer of the given pixel format and size on
// behalf of bufOwener.
virtual std::shared_ptr<CameraBuffer> alloc(const char* camPixFmt, unsigned int width, unsigned int height,
unsigned int usage, weak_ptr<ICameraBufferOwener> bufOwener) override;
// Release a buffer previously returned by alloc().
virtual void free(CameraBuffer* buffer) override;
private:
int mIonClient; // fd for /dev/ion; < 0 when open failed
};
实例化为:
// Opens the ion allocator device; on failure flags the allocator as
// unusable via mError (presumably inherited from CameraBufferAllocator —
// declared outside this excerpt).
RKCameraBufferAllocator::RKCameraBufferAllocator(void) {
mIonClient = ion_open(); // fd to /dev/ion, negative on error
if (mIonClient < 0) {
printf("open /dev/ion failed!\n");
mError = true;
}
}
start
void RKCameraHal::start(const uint32_t num,
std::shared_ptr<RKCameraBufferAllocator> ptr_allocator)
{
printf("[RKCameraHal:%s] \n",__func__);
if (mpath()->prepare(format_, num, *ptr_allocator, false, 0) == false) {
printf("[RKCameraHal:%s] mpath prepare failed \n",__func__);
return;
}
if (!mpath()->start()) {
printf("[RKCameraHal:%s] mpath start failed \n",__func__);
return;
}
}
初始化等动作完成以后:
如何拿到数据
StreamPU instance: 继承 StreamPUBase, 将该对象加入到 CamDev instance, 可接收来自 CamDev instance 的帧 Buffer,该对象并非必需,只是为了方便后续的数据流处理而提供。用户也可直接继 NewCameraBufferReadyNotifier类,然后将该类对象指针加入到 CamDev instance 来接收帧 Buffer。
// Attaches processing unit `next` to capture path `mpath`: registers it
// as a buffer notifier, hands it the frame format / buffer count /
// allocator, and starts it.  Returns 0 on success, -1 on failure.
//
// BUG FIX: the original logged "PathBase,PU is NULL" when either pointer
// was empty but then fell through and dereferenced the null pointer
// anyway; it now returns early.  A start() failure now also reports -1
// instead of unconditionally returning 0.
int RKVideo::connect(std::shared_ptr<CamHwItf::PathBase> mpath,
                     std::shared_ptr<StreamPUBase> next,
                     frm_info_t& frmFmt, const uint32_t num,
                     std::shared_ptr<RKCameraBufferAllocator> allocator)
{
    if (!mpath.get() || !next.get()) {
        printf("[rv_video:%s]PathBase,PU is NULL\n",__func__);
        return -1; // was missing: fell through to a null dereference
    }
    mpath->addBufferNotifier(next.get());
    next->prepare(frmFmt, num, allocator);
    if (!next->start()) {
        printf("[rv_video:%s]PathBase,PU start failed!\n",__func__);
        return -1;
    }
    return 0;
}
我们重点看下addBufferNotifier