当前位置 博文首页 > 草上爬的博客:WebRTC学习之九:摄像头的捕捉和显示
mainwindow.h
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include <QMainWindow>
#include <QDebug>
#include <map>
#include <memory>
#include <string>
#include "webrtc/base/sigslot.h"
#include "webrtc/modules/video_capture/video_capture.h"
#include "webrtc/modules/video_capture/video_capture_factory.h"
#include "webrtc/media/base/videocapturer.h"
#include "webrtc/media/engine/webrtcvideocapturer.h"
#include "webrtc/media/engine/webrtcvideoframe.h"
// Forward declaration of the uic-generated UI class (see ui_mainwindow.h).
namespace Ui {
class MainWindow;
}
// Main window that enumerates local cameras, captures frames via WebRTC's
// cricket::WebRtcVideoCapturer, converts them to RGB and shows them in a QLabel.
// Inherits sigslot::has_slots<> so it can receive WebRTC's sigslot signals
// (SignalFrameCaptured / SignalStateChange) as member-function slots.
class MainWindow : public QMainWindow,public sigslot::has_slots<>
{
Q_OBJECT
public:
explicit MainWindow(QWidget *parent = 0);
~MainWindow();
// sigslot callback: invoked for every captured frame; converts it to ARGB
// and paints it on ui->label. NOTE(review): may be called from the capture
// pipeline — confirm it runs on the GUI thread before touching widgets.
void OnFrameCaptured(cricket::VideoCapturer* capturer, const cricket::CapturedFrame* frame);
// sigslot callback for capture-state transitions (currently a no-op).
void OnStateChange(cricket::VideoCapturer* capturer, cricket::CaptureState state);
private slots:
// Toggles the camera open/closed when the push button is clicked.
void on_pushButtonOpen_clicked();
private:
// Fills deviceNameList/deviceIDList and the device combo box.
void getDeviceList();
private:
Ui::MainWindow *ui;
cricket::WebRtcVideoCapturer *videoCapturer; // owned; created in ctor
cricket::WebRtcVideoFrame *videoFrame;       // owned; scratch frame for conversion
std::unique_ptr<uint8_t[]> videoImage;       // ARGB buffer, width*height*4 bytes
QStringList deviceNameList;                  // human-readable camera names
QStringList deviceIDList;                    // unique device ids, parallel to names
};
#endif // MAINWINDOW_H
mainwindow.cpp
#include "mainwindow.h"
#include "ui_mainwindow.h"
// Construct the window: build the designer UI, create the WebRTC capturer
// and scratch frame, then populate the camera list.
MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
    , videoCapturer(new cricket::WebRtcVideoCapturer())
    , videoFrame(new cricket::WebRtcVideoFrame())
{
    // Instantiate the widgets generated by Qt Designer.
    ui->setupUi(this);
    // Enumerate cameras and fill the combo box before the user can click Open.
    getDeviceList();
}
// Destructor: stop the capture pipeline before destroying the UI so no late
// frame callback can touch widgets that are already gone, then release the
// heap objects the constructor allocated (the original leaked videoCapturer
// and videoFrame, and deleted ui while capture was still running).
MainWindow::~MainWindow()
{
    // Disconnect first so Stop() cannot trigger callbacks into this object.
    videoCapturer->SignalFrameCaptured.disconnect(this);
    videoCapturer->SignalStateChange.disconnect(this);
    videoCapturer->Stop();
    delete videoFrame;
    delete videoCapturer;
    delete ui;
}
// Receives one captured frame, converts it to 32-bit ARGB and displays it.
// @param capturer  the capturer that produced the frame (unused)
// @param frame     the raw captured frame (typically a YUV format)
void MainWindow::OnFrameCaptured(cricket::VideoCapturer* capturer, const cricket::CapturedFrame* frame)
{
    // The signal is connected before the RGB buffer is allocated in
    // on_pushButtonOpen_clicked(); drop any frame that arrives before the
    // buffer exists instead of dereferencing a null pointer.
    if (!videoImage)
        return;
    videoFrame->Init(frame, frame->width, frame->height, true);
    // Convert to ARGB: 32 bits per pixel, stride = width * 4 bytes.
    const int bytesPerPixel = 32 / 8;
    videoFrame->ConvertToRgbBuffer(cricket::FOURCC_ARGB,
                                   videoImage.get(),
                                   videoFrame->width() * videoFrame->height() * bytesPerPixel,
                                   videoFrame->width() * bytesPerPixel);
    // QImage::Format_RGB32 matches the 0xffRRGGBB layout produced by FOURCC_ARGB.
    QImage image(videoImage.get(), videoFrame->width(), videoFrame->height(), QImage::Format_RGB32);
    ui->label->setPixmap(QPixmap::fromImage(image));
}
// Capture-state callback required by the sigslot connection.
// Intentionally empty: state transitions are not surfaced in the UI.
void MainWindow::OnStateChange(cricket::VideoCapturer* capturer, cricket::CaptureState state)
{
}
void MainWindow::getDeviceList()
{
deviceNameList.clear();
deviceIDList.clear();
webrtc::VideoCaptureModule::DeviceInfo *info=webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
int deviceNum=info->NumberOfDevices();
for (int i = 0; i < deviceNum; ++i)
{
const uint32_t kSize = 256;
char name[kSize] = {0};
char id[kSize] = {0};
if (info->GetDeviceName(i, name, kSize, id, kSize) != -1)
{
deviceNameList.append(QString(name));
deviceIDList.append(QString(id));
ui->comboBoxDeviceList->addItem(QString(name));
}
}
if(deviceNum==0)
{
ui->pushButtonOpen->setEnabled(false);
}
}
void MainWindow::on_pushButtonOpen_clicked()
{
static bool flag=true;
if(flag)
{
ui->pushButtonOpen->setText(QStringLiteral("关闭"));
const std::string kDeviceName = ui->comboBoxDeviceList->currentText().toStdString();
const std::string kDeviceId = deviceIDList.at(ui->comboBoxDeviceList->currentIndex()).toStdString();
videoCapturer->Init(cricket::Device(kDeviceName, kDeviceId));
int width=videoCapturer->GetSupportedFormats()->at(0).width;
int height=videoCapturer->GetSupportedFormats()->at(0).height;
cricket::VideoFormat format(videoCapturer->GetSupportedFormats()->at(0));
//开始捕捉
if(cricket::CS_STARTING == videoCapturer->Start(format))
{
qDebug()<<"Capture is started";
}
//连接WebRTC的信号和槽
videoCapturer->SignalFrameCaptured.connect(this,&MainWindow::OnFrameCaptured);
videoCapturer->SignalStateChange.connect(this,&MainWindow::OnStateChange);
if(videoCapturer->IsRunning())
{
qDebug()<<"Capture is running";
}
videoImage.reset(new uint8_t[width*height*32/8]);
}
else
{
ui->pushButtonOpen->setText(QStringLiteral("打开"));
//重复连接会报错,需要先断开,才能再次连接
videoCapturer->SignalFrameCaptured.disconnect(this);
videoCapturer->SignalStateChange.disconnect(this);
videoCapturer->Stop();
if(!videoCapturer->IsRunning())
{
qDebug()<<"Capture is stoped";
}
ui->label->clear();
}
flag=!flag;
}
main.cpp
#include "mainwindow.h"
#include <QApplication>
// Hybrid event loop: WebRTC (rtc::Thread) and Qt each need their message
// queues pumped, and neither a.exec() nor rtc::Thread alone services both.
// The loop exits when the main window closes — the original `while(true)`
// kept the process alive forever after the user closed the window.
int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    MainWindow w;
    w.show();
    while (w.isVisible())
    {
        // Drain pending WebRTC messages without blocking (0 ms budget).
        rtc::Thread::Current()->ProcessMessages(0);
        rtc::Thread::Current()->SleepMs(1); // yield so the loop doesn't spin at 100% CPU
        // Drain pending Qt events (paint, input, window close).
        a.processEvents();
    }
    return 0;
}
注意main函数中对WebRTC和Qt消息循环的处理,这是用Qt调用WebRTC进行摄像头捕捉和显示的关键。
三.效果