Capturing, Processing, and Displaying Camera Frames with Qt on Android, Linux, and Windows

1. Operating Systems

Linux: Ubuntu 18.04, 64-bit

Android: Android 8.1 / 9.0

Windows: Windows 10

Qt version: 5.12

2. Requirements

Use Qt's own APIs to grab every camera frame in real time on Linux, Android, and Windows, process it, and then display it.

For example: after capturing a frame, pass it to OpenCV for image recognition, to FFmpeg for MP4 recording, or to an RTSP pipeline for live streaming.

Simply displaying the preview without any processing is easy; the approach described here intercepts the camera's raw frame data, e.g. in YUYV or NV21 format.
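
For instance, once a frame has been converted to a QImage (as in the code further below), it can be handed to OpenCV. The following is only a minimal sketch of that bridge, assuming OpenCV is already linked into the project; the qimageToMat helper name is mine, not part of the original code:

// Sketch only: hand a captured QImage to OpenCV for further processing.
// Assumes OpenCV headers/libs are added to the .pro file.
#include <opencv2/opencv.hpp>
#include <QImage>

cv::Mat qimageToMat(const QImage &frame)
{
    // Normalise to 24-bit RGB so the memory layout is predictable
    QImage rgb = frame.convertToFormat(QImage::Format_RGB888);

    // Wrap the QImage buffer without copying...
    cv::Mat wrapped(rgb.height(), rgb.width(), CV_8UC3,
                    rgb.bits(), static_cast<size_t>(rgb.bytesPerLine()));

    // ...then convert to BGR (OpenCV's native order), which also makes a deep copy
    cv::Mat bgr;
    cv::cvtColor(wrapped, bgr, cv::COLOR_RGB2BGR);
    return bgr;
}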

3. Header Files Needed to Read Camera Data in Qt

Reading camera data with Qt involves the following headers:

#include <QCameraViewfinder>
#include <QCameraImageCapture>
#include <QCameraInfo>
#include <QAbstractVideoSurface>
#include <QVideoProbe>

The .pro project file also needs:

QT += multimedia
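
For reference, a minimal .pro line matching the examples below might look like this (QCameraViewfinder comes from the Qt Multimedia Widgets module, so most setups need multimediawidgets as well; this is an assumption about your project layout, not taken from the original post):

QT += core gui widgets multimedia multimediawidgets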

4. Ways to Grab Camera Frames in Qt

Method 1:

Subclass QAbstractVideoSurface and override its present() function to receive video frames.

Whenever the camera captures a frame, present() is called, so every frame is available inside that function; this makes it well suited to image processing.

In my tests this approach does not work on the Android platform, but it works fine on Windows and Ubuntu.

mainwindow.h code

#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QWidget>
#include <QCameraViewfinder>
#include <QCameraImageCapture>
#include <QCameraInfo>
#include <QMessageBox>
#include <QFile>
#include <QFileDialog>
#include <QNetworkAccessManager>
#include <QNetworkRequest>
#include <QNetworkReply>
#include <QHttpMultiPart>
#include <QUrlQuery>
#include <QJsonParseError>
#include <QJsonDocument>
#include <QJsonObject>
#include <QJsonArray>
#include <QBuffer>
#include <QMainWindow>
#include <QTimer>
#include <QAbstractVideoSurface>
#include <QVideoProbe>
#include <QDebug>

QT_BEGIN_NAMESPACE
namespace Ui { class MainWindow; }
QT_END_NAMESPACE

class MainWindow : public QMainWindow
{
    Q_OBJECT

public:
    MainWindow(QWidget *parent = nullptr);
    ~MainWindow();
    QCamera * camera;
    QTimer *timer;
    QCameraViewfinder * view_finder; //viewfinder
    QList<QCameraInfo> cameras;      //list of cameras available on the system
    QCameraImageCapture* camera_image_capture;
    QImageEncoderSettings iamge_setting;
private slots:
    void on_pushButton_open_camera_clicked();
    void on_pushButton_Camear_up_clicked();
    void Camear_handleFrame(QImage image);
private:
    Ui::MainWindow *ui;
};

class CameraFrameGrabber :public QAbstractVideoSurface
{
    Q_OBJECT
public:
    CameraFrameGrabber(QObject *parent = nullptr);

    QList<QVideoFrame::PixelFormat> supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType = QAbstractVideoBuffer::NoHandle) const override;
    bool present(const QVideoFrame &frame) override;

signals:
    void frameAvailable(QImage frame);

public slots:

};

#endif // MAINWINDOW_H

mainwindow.cpp code
#include "mainwindow.h"
#include "ui_mainwindow.h"

MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    cameras = QCameraInfo::availableCameras();
    if(cameras.count())
    {
       for(int i=0;i<cameras.count();i++)
       {
           ui->comboBox->addItem(tr("%1").arg(i));
       }
    }
    else
    {
       QMessageBox::warning(this, tr("Notice"), "No camera is available on this machine!\n"
                                                "Author: DS小龙哥\n"
                                                "Bug reports: [email protected]");
    }
}

MainWindow::~MainWindow()
{
    delete ui;
}

void MainWindow::Camear_handleFrame(QImage image)
{
    // Take a deep copy: the incoming QImage wraps the mapped video-frame buffer,
    // which is unmapped as soon as present() returns.
    QImage image1=image.copy();
    ui->label_ImageDisplay->setPixmap(QPixmap::fromImage(image1));
}

void MainWindow::on_pushButton_open_camera_clicked()
{
    int i=ui->comboBox->currentText().toInt();
    /* Create the camera object for the selected device */
    camera = new QCamera(cameras.at(i));
    CameraFrameGrabber *_cameraFrameGrabber = new CameraFrameGrabber(this);
    camera->setViewfinder(_cameraFrameGrabber);
    connect(_cameraFrameGrabber, SIGNAL(frameAvailable(QImage)), this, SLOT(Camear_handleFrame(QImage)));
    camera->start();

}

void MainWindow::on_pushButton_Camear_up_clicked()
{

}

CameraFrameGrabber::CameraFrameGrabber(QObject *parent) :
    QAbstractVideoSurface(parent)
{
}

QList<QVideoFrame::PixelFormat> CameraFrameGrabber::supportedPixelFormats(QAbstractVideoBuffer::HandleType handleType) const
{
    if (handleType == QAbstractVideoBuffer::NoHandle)
    {
        // Report the RGB formats this surface accepts
        qDebug()<<"Supported formats:"<< QVideoFrame::Format_RGB32<< QVideoFrame::Format_ARGB32<< QVideoFrame::Format_ARGB32_Premultiplied<< QVideoFrame::Format_RGB565<< QVideoFrame::Format_RGB555;
        return QList<QVideoFrame::PixelFormat>()<< QVideoFrame::Format_RGB32<< QVideoFrame::Format_ARGB32<< QVideoFrame::Format_ARGB32_Premultiplied<< QVideoFrame::Format_RGB565<< QVideoFrame::Format_RGB555;
        /*
        return QList<QVideoFrame::PixelFormat>()
                  <<QVideoFrame::Format_RGB32
                  <<QVideoFrame::Format_YUV420P
                  <<QVideoFrame::Format_YUYV
                  <<QVideoFrame::Format_ARGB32
                  <<QVideoFrame::Format_ARGB32_Premultiplied
                  <<QVideoFrame::Format_RGB565
                  <<QVideoFrame::Format_Jpeg
                  <<QVideoFrame::Format_RGB555;*/
    }
    else
    {
        // Unsupported handle type: return an empty list
        qDebug()<<"Unsupported handle type.";
        return QList<QVideoFrame::PixelFormat>();
    }
}

bool CameraFrameGrabber::present(const QVideoFrame &frame)
{
    if (frame.isValid()) {
        QVideoFrame cloneFrame(frame);
        cloneFrame.map(QAbstractVideoBuffer::ReadOnly);
        //qDebug()<<"width="<<cloneFrame.width();
        //qDebug()<<"height="<<cloneFrame.height();
        //qDebug()<<"pixelFormat="<<cloneFrame.pixelFormat();

        // The QImage wraps the mapped buffer without copying, so the receiving
        // slot must deep-copy it (see Camear_handleFrame) before unmap() is called.
        const QImage image(cloneFrame.bits(),
                           cloneFrame.width(),
                           cloneFrame.height(),
                           cloneFrame.bytesPerLine(),
                           QVideoFrame::imageFormatFromPixelFormat(cloneFrame.pixelFormat()));
        emit frameAvailable(image);
        cloneFrame.unmap();
        return true;
    }
    return false;
}
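
The listing above leaves on_pushButton_Camear_up_clicked() empty and has no stop handling. A minimal sketch of a stop slot is shown below; the button and slot name are hypothetical, not part of the original UI:

// Hypothetical stop slot for method 1; assumes `camera` was created in
// on_pushButton_open_camera_clicked() above.
void MainWindow::on_pushButton_close_camera_clicked()
{
    if (camera) {
        camera->stop();   // present() stops being called after this
        delete camera;    // the frame grabber is parented to `this`, so Qt cleans it up later
        camera = nullptr;
    }
}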

Method 2:

Capture video frames with the QVideoProbe class.

QVideoProbe works on all platforms; the code below has been tested and displays correctly on Android, Ubuntu, and Windows.

On Windows and Ubuntu the code asks the camera for YUYV output; on my Android tablet the camera only delivers NV21, so the code below includes both NV21-to-RGB24 and YUYV-to-RGB24 conversion routines. (On Android, the camera permission must also be declared in AndroidManifest.xml, and storage permission is needed to save snapshots to /sdcard.)

mainwindow.cpp code

#include "mainwindow.h"
#include "ui_mainwindow.h"

//#define ANDROID_DEVICE

#ifdef ANDROID_DEVICE
//Path where captured photos are saved
#define SAVE_FILE_PATH "/sdcard/DCIM/Camera/"
#else
//Path where captured photos are saved
#define SAVE_FILE_PATH "./"
#endif


/*
 * Apply the Qt UI style sheet
*/
void MainWindow::SetStyle(const QString &qssFile) {
    QFile file(qssFile);
    if (file.open(QFile::ReadOnly)) {
        QString qss = QLatin1String(file.readAll());
        qApp->setStyleSheet(qss);
        QString PaletteColor = qss.mid(20,7);
        qApp->setPalette(QPalette(QColor(PaletteColor)));
        file.close();
    }
    else
    {
        qApp->setStyleSheet("");
    }
}

MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
{
    ui->setupUi(this);

    this->SetStyle(":/images/blue.css");     //apply the style sheet
    this->setWindowIcon(QIcon(":/log.ico")); //set the window icon
    this->setWindowTitle("Camera");

    //Enumerate the cameras available on this machine
    cameras = QCameraInfo::availableCameras();
    if(cameras.count())
    {
        for(int i=0;i<cameras.count();i++)
        {
            ui->comboBox->addItem(tr("%1").arg(i));
        }
    }
    else
    {
        QMessageBox::warning(this, tr("Notice"), "No camera is available on this machine!\n"
                                                 "Author: DS小龙哥\n"
                                                 "Bug reports: [email protected]");
    }

    ui->pushButton_stop->setEnabled(false);      //disable the stop button until the camera is opened
    ui->pushButton_Camear_up->setEnabled(false); //disable the snapshot button as well


    //Create the working directory
#ifdef ANDROID_DEVICE
    QDir dir;
    if(!dir.exists("/sdcard/DCIM/Camera/"))
    {
        if(dir.mkpath("/sdcard/DCIM/Camera/"))
        {
            Log_Display("Created /sdcard/DCIM/Camera/ successfully.\n");
        }
        else
        {
            Log_Display("Failed to create /sdcard/DCIM/Camera/.\n");
        }
    }
#endif
}

MainWindow::~MainWindow()
{
    delete ui;
}


void MainWindow::Log_Display(QString text)
{
    ui->plainTextEdit->insertPlainText(text);
}

void MainWindow::on_pushButton_open_camera_clicked()
{
    int i=ui->comboBox->currentText().toInt();
    /* Create the camera object for the selected device */
    camera = new QCamera(cameras.at(i));

    m_pProbe = new QVideoProbe(this);
    if(m_pProbe != nullptr)
    {
        m_pProbe->setSource(camera); // Returns true, hopefully.
        connect(m_pProbe, SIGNAL(videoFrameProbed(QVideoFrame)), this, SLOT(slotOnProbeFrame(QVideoFrame)), Qt::DirectConnection);
    }

    /* Configure the camera capture mode */
    //camera->setCaptureMode(QCamera::CaptureStillImage); //use this on Linux
    camera->setCaptureMode(QCamera::CaptureVideo);        //use this on Android

    /* Start the camera */
    camera->start();

    /* Set the camera's pixel format and resolution */
    QCameraViewfinderSettings settings;
    settings.setPixelFormat(QVideoFrame::Format_YUYV); //pixel format; on Android only NV21 is delivered
    settings.setResolution(QSize(640, 480));           //capture resolution
    camera->setViewfinderSettings(settings);

    //Query the resolutions, frame rates, and formats supported by the camera
    /*
    QList<QCameraViewfinderSettings > ViewSets = camera->supportedViewfinderSettings();
    foreach (QCameraViewfinderSettings ViewSet, ViewSets) {
        //qDebug() << i++ <<" max rate = " << ViewSet.maximumFrameRate() << "min rate = "<< ViewSet.minimumFrameRate() << "resolution "<<ViewSet.resolution();

         Log_Display(tr("Format=%1\n").arg(ViewSet.pixelFormat()));
    }*/

    ui->pushButton_open_camera->setEnabled(false);
    ui->pushButton_stop->setEnabled(true);      //enable the stop button
    ui->pushButton_Camear_up->setEnabled(true); //enable the snapshot button
}

/*
 * Formats reported by my Android camera:
D libandroid_camera_save.so: 32  max rate =  15 min rate =  15 resolution  QSize(640, 480) Format= Format_NV21  QSize(1, 1)
D libandroid_camera_save.so: 33  max rate =  30 min rate =  30 resolution  QSize(640, 480) Format= Format_NV21  QSize(1, 1)
D libandroid_camera_save.so: 34  max rate =  15 min rate =  15 resolution  QSize(640, 480) Format= Format_YV12  QSize(1, 1)
D libandroid_camera_save.so: 35  max rate =  30 min rate =  30 resolution  QSize(640, 480) Format= Format_YV12  QSize(1, 1)
*/
void MainWindow::slotOnProbeFrame(const QVideoFrame &frame)
{
   QVideoFrame cloneFrame(frame);
   cloneFrame.map(QAbstractVideoBuffer::ReadOnly);
   //qDebug()<<"height:"<<cloneFrame.height();
   //qDebug()<<"width:"<<cloneFrame.width();
   //qDebug()<<"bytesPerLine:"<<cloneFrame.bytesPerLine();
   //qDebug()<<"mappedBytes:"<<cloneFrame.mappedBytes();
   qDebug()<<"pixelFormat:"<<cloneFrame.pixelFormat();

   if(cloneFrame.pixelFormat()==QVideoFrame::Format_NV21)
   {
        //Fixed-size buffer: matches the 640x480 resolution requested in the viewfinder settings
        unsigned char rgb_buffer[640*480*3];
        NV21_TO_RGB24(cloneFrame.bits(),rgb_buffer,cloneFrame.width(),cloneFrame.height());
        const QImage image(rgb_buffer,
                           cloneFrame.width(),
                           cloneFrame.height(),
                           QImage::Format_RGB888);
        QPixmap my_pixmap;
        my_pixmap.convertFromImage(image);
        ui->label_ImageDisplay->setPixmap(my_pixmap);
   }
   else if(cloneFrame.pixelFormat()==QVideoFrame::Format_YUYV)
   {
       unsigned char rgb_buffer[640*480*3];
       yuyv_to_rgb(cloneFrame.bits(),rgb_buffer,cloneFrame.width(),cloneFrame.height());
       const QImage image(rgb_buffer,
                          cloneFrame.width(),
                          cloneFrame.height(),
                          QImage::Format_RGB888);
       QPixmap my_pixmap;
       my_pixmap.convertFromImage(image);
       ui->label_ImageDisplay->setPixmap(my_pixmap);
   }
   else
   {
       Log_Display(tr("Pixel format %1 is not supported for conversion yet.\n").arg(cloneFrame.pixelFormat()));
   }
    cloneFrame.unmap();
}

void MainWindow::on_pushButton_Camear_up_clicked()
{
   //Guard against the case where no frame has been displayed yet
   if (!ui->label_ImageDisplay->pixmap())
       return;
   const QPixmap pix=ui->label_ImageDisplay->pixmap()->copy();
   QDateTime dateTime(QDateTime::currentDateTime());
   //File name carries a timestamp, e.g. 2020-03-05-16-25-04
   QString qStr="";
   qStr+=SAVE_FILE_PATH;  //the Android camera folder, or ./ on desktop
   qStr+=dateTime.toString("yyyy-MM-dd-hh-mm-ss");
   qStr+=".jpg";
   pix.save(qStr);
   Log_Display(tr("Photo saved to: %1\n").arg(qStr));
}

void MainWindow::YUV420P_to_RGB24(unsigned char *data, unsigned char *rgb, int width, int height)
{
    int index = 0;
    unsigned char *ybase = data;
    unsigned char *ubase = &data[width * height];
    unsigned char *vbase = &data[width * height * 5 / 4];
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            //Planar layout: Y plane, then U plane, then V plane
            unsigned char Y = ybase[x + y * width];
            unsigned char U = ubase[y / 2 * width / 2 + (x / 2)];
            unsigned char V = vbase[y / 2 * width / 2 + (x / 2)];
            //Clamp to 0..255, otherwise the result can overflow the output byte
            rgb[index++] = qBound(0, int(Y + 1.402 * (V - 128)), 255);                         //R
            rgb[index++] = qBound(0, int(Y - 0.34413 * (U - 128) - 0.71414 * (V - 128)), 255); //G
            rgb[index++] = qBound(0, int(Y + 1.772 * (U - 128)), 255);                         //B
        }
    }
}


/**
 * NV21 belongs to the YUV420SP family (a Y plane followed by an interleaved
 * chroma plane) and is the default preview format of Android cameras.
 * @param yuyv   NV21 source data
 * @param rgb    RGB24 output buffer (width * height * 3 bytes)
 * @param width
 * @param height
 */
void MainWindow::NV21_TO_RGB24(unsigned char *yuyv, unsigned char *rgb, int width, int height)
{
        const int nv_start = width * height ;
        int  index = 0, rgb_index = 0;
        uint8_t y, u, v;
        int r, g, b, nv_index = 0,i, j;

        for(i = 0; i < height; i++){
            for(j = 0; j < width; j ++){
                //nv_index = (rgb_index / 2 - width / 2 * ((i + 1) / 2)) * 2;
                nv_index = i / 2  * width + j - j % 2;

                y = yuyv[rgb_index];
                u = yuyv[nv_start + nv_index ];
                v = yuyv[nv_start + nv_index + 1];

                r = y + (140 * (v-128))/100;  //r
                g = y - (34 * (u-128))/100 - (71 * (v-128))/100; //g
                b = y + (177 * (u-128))/100; //b

                if(r > 255)   r = 255;
                if(g > 255)   g = 255;
                if(b > 255)   b = 255;
                if(r < 0)     r = 0;
                if(g < 0)     g = 0;
                if(b < 0)     b = 0;

                index = rgb_index % width + (height - i - 1) * width;
                //rgb[index * 3+0] = b;
                //rgb[index * 3+1] = g;
                //rgb[index * 3+2] = r;

                //Vertically flipped output (kept for reference):
                //rgb[height * width * 3 - i * width * 3 - 3 * j - 1] = b;
                //rgb[height * width * 3 - i * width * 3 - 3 * j - 2] = g;
                //rgb[height * width * 3 - i * width * 3 - 3 * j - 3] = r;

                //Upright output:
                rgb[i * width * 3 + 3 * j + 0] = b;
                rgb[i * width * 3 + 3 * j + 1] = g;
                rgb[i * width * 3 + 3 * j + 2] = r;

                rgb_index++;
            }
        }
}

void MainWindow::YUV420P_TO_RGB24(unsigned char *yuv420p, unsigned char *rgb24, int width, int height) {
    int index = 0;
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int indexY = y * width + x;
            int indexU = width * height + y / 2 * width / 2 + x / 2;
            int indexV = width * height + width * height / 4 + y / 2 * width / 2 + x / 2;

            unsigned char Y = yuv420p[indexY];
            unsigned char U = yuv420p[indexU];
            unsigned char V = yuv420p[indexV];

            //Clamp to 0..255 to avoid overflowing the output byte
            rgb24[index++] = qBound(0, int(Y + 1.402 * (V - 128)), 255);                         //R
            rgb24[index++] = qBound(0, int(Y - 0.34413 * (U - 128) - 0.71414 * (V - 128)), 255); //G
            rgb24[index++] = qBound(0, int(Y + 1.772 * (U - 128)), 255);                         //B
        }
    }
}

void MainWindow::on_pushButton_stop_clicked()
{
    camera->stop();
    delete camera;
    camera = nullptr;
    delete m_pProbe;
    m_pProbe = nullptr;

    ui->pushButton_open_camera->setEnabled(true);
    ui->pushButton_stop->setEnabled(false);      //disable the stop button again
    ui->pushButton_Camear_up->setEnabled(false); //disable the snapshot button
}

/*
Function: convert YUYV (YUV 4:2:2 packed) data to RGB24
Parameters:
unsigned char *yuv_buffer: YUYV source data
unsigned char *rgb_buffer: converted RGB output buffer
int iWidth,int iHeight   : image width and height
*/
void MainWindow::yuyv_to_rgb(unsigned char *yuv_buffer,unsigned char *rgb_buffer,int iWidth,int iHeight)
{
    int x;
    int z=0;
    unsigned char *ptr = rgb_buffer;
    unsigned char *yuyv= yuv_buffer;
    for (x = 0; x < iWidth*iHeight; x++)
    {
        int r, g, b;
        int y, u, v;

        if (!z)
        y = yuyv[0] << 8;
        else
        y = yuyv[2] << 8;
        u = yuyv[1] - 128;
        v = yuyv[3] - 128;

        r = (y + (359 * v)) >> 8;
        g = (y - (88 * u) - (183 * v)) >> 8;
        b = (y + (454 * u)) >> 8;

        *(ptr++) = (r > 255) ? 255 : ((r < 0) ? 0 : r);
        *(ptr++) = (g > 255) ? 255 : ((g < 0) ? 0 : g);
        *(ptr++) = (b > 255) ? 255 : ((b < 0) ? 0 : b);

        if(z++)
        {
            z = 0;
            yuyv += 4;
        }
    }
}
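
One caveat about slotOnProbeFrame() above: the conversion buffers are fixed 640x480x3 stack arrays, which only match the resolution requested in the viewfinder settings. A sketch of a safer variant of that handler body (my assumption, not part of the original code; it needs #include <vector>) sizes the buffer from the actual frame:

// Sketch: replace the fixed 640*480*3 stack buffer with one sized from the frame.
QVideoFrame cloneFrame(frame);
if (!cloneFrame.map(QAbstractVideoBuffer::ReadOnly))
    return;

std::vector<unsigned char> rgb(size_t(cloneFrame.width()) * cloneFrame.height() * 3);
if (cloneFrame.pixelFormat() == QVideoFrame::Format_NV21)
    NV21_TO_RGB24(cloneFrame.bits(), rgb.data(), cloneFrame.width(), cloneFrame.height());
else if (cloneFrame.pixelFormat() == QVideoFrame::Format_YUYV)
    yuyv_to_rgb(cloneFrame.bits(), rgb.data(), cloneFrame.width(), cloneFrame.height());

// The QImage only wraps the vector, so copy() before the vector goes out of scope
QImage image(rgb.data(), cloneFrame.width(), cloneFrame.height(),
             cloneFrame.width() * 3, QImage::Format_RGB888);
ui->label_ImageDisplay->setPixmap(QPixmap::fromImage(image.copy()));
cloneFrame.unmap();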

mainwindow.h code

#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QWidget>
#include <QCameraViewfinder>
#include <QCameraImageCapture>
#include <QCameraInfo>
#include <QMessageBox>
#include <QFile>
#include <QFileDialog>
#include <QNetworkAccessManager>
#include <QNetworkRequest>
#include <QNetworkReply>
#include <QHttpMultiPart>
#include <QUrlQuery>
#include <QJsonParseError>
#include <QJsonDocument>
#include <QJsonObject>
#include <QJsonArray>
#include <QBuffer>
#include <QMainWindow>
#include <QTimer>
#include <QAbstractVideoSurface>
#include <QVideoProbe>
#include <QDebug>
#include <QDir>
#include <QDateTime>

QT_BEGIN_NAMESPACE
namespace Ui { class MainWindow; }
QT_END_NAMESPACE

class MainWindow : public QMainWindow
{
    Q_OBJECT

public:
    void SetStyle(const QString &qssFile);
    MainWindow(QWidget *parent = nullptr);
    ~MainWindow();
    QCamera * camera;
    QTimer *timer;
    QCameraViewfinder * view_finder; //viewfinder
    QList<QCameraInfo> cameras;      //list of cameras available on the system
    QCameraImageCapture* camera_image_capture;
    QImageEncoderSettings iamge_setting;
    QVideoProbe *m_pProbe;
    void Log_Display(QString text);
    void YUV420P_to_RGB24(unsigned char *data, unsigned char *rgb, int width, int height);
    void NV21_TO_RGB24(unsigned char *data, unsigned char *rgb, int width, int height);
    void YUV420P_TO_RGB24(unsigned char *yuv420p, unsigned char *rgb24, int width, int height);
    void yuyv_to_rgb(unsigned char *yuv_buffer,unsigned char *rgb_buffer,int iWidth,int iHeight);
private slots:
    void slotOnProbeFrame(const QVideoFrame &frame);
    void on_pushButton_open_camera_clicked();
    void on_pushButton_Camear_up_clicked();
    void on_pushButton_stop_clicked();

private:
    Ui::MainWindow *ui;
};


#endif // MAINWINDOW_H

UI layout: (screenshot in the original post)

 

Reposted from blog.csdn.net/xiaolong1126626497/article/details/104811175