Note: this code has a problem: the program does not exit when the close button is clicked. QTimer-based and Qt-thread-based handling were added later; a sketch of the QTimer idea is included after the code.
Code: videocapture.h
#ifndef VIDEOCAPTURE_H
#define VIDEOCAPTURE_H
#include <iostream>
#include <QWidget>
#include <QLabel>
#include <QPushButton>
#include <QCloseEvent>
extern "C"
{
#include <stdio.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#include <libavdevice/avdevice.h>
#include <libavformat/version.h>
#include <libavutil/time.h>
#include <libavutil/mathematics.h>
//#include <libavutil/imgutils.h>
#include <SDL2/SDL.h>
}
//event message
#define REFRESH_EVENT (SDL_USEREVENT + 1)
#define QUIT_EVENT (SDL_USEREVENT + 2)
QT_BEGIN_NAMESPACE
namespace Ui { class VideoCapture; }
QT_END_NAMESPACE
class VideoCapture : public QWidget
{
Q_OBJECT
public:
VideoCapture(QWidget *parent = nullptr);
~VideoCapture();
void close_play();
private:
QLabel *label;
Ui::VideoCapture *ui;
QPushButton *start_btn;
QRect rect;
bool isplay;
private slots:
void start_play();
protected:
void closeEvent(QCloseEvent *event);
};
#endif // VIDEOCAPTURE_H
Code: videocapture.cpp
#include "videocapture.h"
#include "ui_videocapture.h"
#include <QDebug>
#include <QTimer>
VideoCapture::VideoCapture(QWidget *parent)
: QWidget(parent)
, ui(new Ui::VideoCapture)
{
ui->setupUi(this);
qDebug() << "123";
this->setFixedSize(1280,720);
label = new QLabel(this);
label->resize(1280,720);
qDebug() << "456";
label->move(0,0);
rect = label->geometry(); //record the label geometry so it can be restored later
isplay = false;
start_btn = new QPushButton("btn1" ,this);
connect(start_btn,&QPushButton::clicked,this,&VideoCapture::start_play);
}
VideoCapture::~VideoCapture()
{
delete ui;
}
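//create_frame allocates an AVFrame with the given width, height and pixel format and a 32-byte-aligned data buffer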
static AVFrame* create_frame(int width, int height, AVPixelFormat pix_fmt)
{
int ret = 0;
AVFrame* frame = NULL;
frame = av_frame_alloc();
if (!frame)
{
printf("Error, No Memory!\n");
goto __ERROR;
}
//set frame parameters
frame->width = width;
frame->height = height;
frame->format = pix_fmt;
//alloc inner memory
ret = av_frame_get_buffer(frame, 32); //allocate the frame buffer with 32-byte alignment (required for video frames)
if (ret < 0)
{
printf("Error, Failed to alloc buffer for frame!\n");
goto __ERROR;
}
return frame;
__ERROR:
if (frame)
{
av_frame_free(&frame);
}
return NULL;
}
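//refresh_video_timer runs on a separate SDL thread: it pushes a REFRESH_EVENT roughly every 40 ms
//(about 25 fps) until thread_exit is set, then pushes a final QUIT_EVENT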
int thread_exit = 0;
int refresh_video_timer(void *udata)
{
thread_exit = 0;
while (!thread_exit)
{
SDL_Event event;
event.type = REFRESH_EVENT;
SDL_PushEvent(&event);
SDL_Delay(40);
}
thread_exit = 0;
//push quit event
SDL_Event event;
event.type = QUIT_EVENT;
SDL_PushEvent(&event);
return 0;
}
void VideoCapture::close_play()
{
thread_exit = 1;
SDL_Event event;
event.type = QUIT_EVENT;
SDL_PushEvent(&event);
}
void VideoCapture::closeEvent(QCloseEvent *event)
{
//NOTE: while start_play() is running, its blocking SDL_WaitEvent loop never returns
//to the Qt event loop, so this handler is not reached and the window cannot close
thread_exit = 1;
isplay = false;
QWidget::closeEvent(event);
}
void VideoCapture::start_play()
{
//capture video with the FFmpeg dshow input device
int ret = 0;
AVInputFormat *in_format = nullptr;
AVFormatContext* fmt_ctx = nullptr;
AVDictionary* options = nullptr;
char device_name[256] = "video=Integrated Webcam";
isplay = true;
SDL_Rect sdlRect;
//av_register_all();
avdevice_register_all();
in_format = av_find_input_format("dshow");
if (in_format == nullptr) {
std::cout<< "av_find_input_format error!" << std::endl;
//goto _END;
}
//set capture options
av_dict_set(&options, "video_size", "1280x720", 0); //FFmpeg expects WxH, not W*H
av_dict_set(&options, "framerate", "10", 0);
av_dict_set(&options, "pixel_format", "yuyv422", 0);
if ((ret = avformat_open_input(&fmt_ctx, device_name, in_format, &options)) != 0) {
std::cout << "Failed to open video device," << ret << std::endl;
//goto _END;
}
AVPacket* pkt = av_packet_alloc(); //av_packet_alloc() already initializes the packet, av_init_packet() is deprecated
//SDL initialization ------------ start -------------
if (SDL_Init(SDL_INIT_VIDEO)) {
qDebug() << "Could not initialize SDL";
return;
}
SDL_Window* screen;
//SDL 2.0 Support for multiple windows
screen = SDL_CreateWindowFrom((void *)label->winId());
//screen = SDL_CreateWindow("Simplest Video Play SDL2", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
// screen_w, screen_h, SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE);
if (!screen) {
printf("SDL: could not create window - exiting:%s\n", SDL_GetError());
return;
}
SDL_Renderer* sdlRenderer = SDL_CreateRenderer(screen, -1, 0);
Uint32 pixformat = 0;
//IYUV: Y + U + V (3 planes)
//YV12: Y + V + U (3 planes)
pixformat = SDL_PIXELFORMAT_IYUV;
const int pixel_w = 1280, pixel_h = 720;
SDL_Texture* sdlTexture1 = SDL_CreateTexture(sdlRenderer, pixformat, SDL_TEXTUREACCESS_STREAMING, pixel_w, pixel_h);
//SDL initialization ------------- end --------------
//pixel format conversion ------------------------------ start ----------------------
//conversion parameters
AVFrame* frame_yuyv422 = NULL;
AVFrame* frame_yuv420 = NULL;
struct SwsContext* img_convert_ctx = NULL;
AVPixelFormat in_pix_fmt = AV_PIX_FMT_YUYV422;
AVPixelFormat out_pix_fmt = AV_PIX_FMT_YUV420P;
static int sws_flags = SWS_BICUBIC; //interpolation algorithm: bicubic
//allocate source and destination AVFrames
frame_yuyv422 = create_frame(1280, 720, in_pix_fmt);
frame_yuv420 = create_frame(1280, 720, out_pix_fmt);
//create the conversion context
if (img_convert_ctx == NULL)
{
img_convert_ctx = sws_getContext(1280, 720,
(AVPixelFormat)in_pix_fmt,
1280,
720,
(AVPixelFormat)out_pix_fmt,
sws_flags, NULL, NULL, NULL);
if (img_convert_ctx == NULL)
{
std::cout << "Cannot initialize the conversion context\n" << std::endl;
}
}
//pixel format conversion ------------------------------ end ----------------------
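//main event loop: SDL_WaitEvent blocks here, and on each REFRESH_EVENT the inner av_read_frame
//loop keeps reading from the device without ever returning to the Qt event loop; this is the
//root cause of the close-button problem mentioned in the note at the top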
SDL_Event event;
SDL_Thread *timer_thread = NULL;
timer_thread = SDL_CreateThread(refresh_video_timer, "refresh_timer", NULL);
while (isplay) {
SDL_WaitEvent(&event);
if (event.type == REFRESH_EVENT){
while (!av_read_frame(fmt_ctx, pkt)) {
printf("Size of collected data %d\n", pkt->size);
// bytes per frame = width*height * (yuv420 = 1.5 / yuv422 = 2 / yuv444 = 3)
//fwrite(pkt->data, 1, 1280 * 720 *2, out_file);
int numBytes = avpicture_get_size(out_pix_fmt, 1280, 720); //size of the YUV420P destination buffer
uint8_t *out_buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t));
ret = avpicture_fill((AVPicture*)frame_yuv420, out_buffer, (AVPixelFormat)out_pix_fmt, 1280, 720);
ret = avpicture_fill((AVPicture*)frame_yuyv422, (unsigned char*)pkt->data, (AVPixelFormat)in_pix_fmt, 1280, 720);
std::cout << "avpicture_fill: " << ret << std::endl;
// av_image_fill_arrays(frame_yuyv422->data, frame_yuyv422->linesize, pkt->data, in_pix_fmt, 1280, 720, 4);
//yuyv422 -> yuv420p
ret = sws_scale(img_convert_ctx, frame_yuyv422->data, frame_yuyv422->linesize,
0, 720, frame_yuv420->data, frame_yuv420->linesize);
std::cout << "sws_scale-ret:" << ret<< std::endl;
//SDL_UpdateTexture(sdlTexture1, NULL, pkt->data, pixel_w);
SDL_UpdateTexture(sdlTexture1, NULL, frame_yuv420->data[0], 1280);
SDL_RenderClear(sdlRenderer);
sdlRect.x = 0;
sdlRect.y = 0;
sdlRect.w = pixel_w;
sdlRect.h = pixel_h;
SDL_RenderCopy(sdlRenderer, sdlTexture1, NULL, &sdlRect);
SDL_RenderPresent(sdlRenderer);
//Delay 40ms
SDL_Delay(40);
std::cout << "delay 40ms" << std::endl;
if (out_buffer)
av_free(out_buffer);
av_packet_unref(pkt); //release the packet returned by av_read_frame before reading the next one
}
} else if (event.type == SDL_WINDOWEVENT) {
//if resize
//SDL_GetWindowSize(window, &w_width, &w_height);
}
else if (event.type == SDL_QUIT) {
thread_exit = 1;
} else if (event.type == QUIT_EVENT) {
break;
}
}
//_END:
if (pkt) {
av_packet_free(&pkt);
pkt = nullptr;
}
if (fmt_ctx) {
avformat_close_input(&fmt_ctx);
fmt_ctx = nullptr;
}
if (frame_yuyv422)
{
av_frame_free(&frame_yuyv422);
frame_yuyv422 = NULL;
}
if (frame_yuv420)
{
av_frame_free(&frame_yuv420);
frame_yuv420 = NULL;
}
SDL_DestroyTexture(sdlTexture1);
SDL_DestroyRenderer(sdlRenderer);
SDL_DestroyWindow(screen);
SDL_Quit();
return;
}
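Why the close button fails: start_play() runs entirely on the GUI thread, and its SDL_WaitEvent loop plus the inner av_read_frame loop never return control to the Qt event loop, so closeEvent() is never delivered. Below is a minimal sketch of the QTimer approach mentioned in the note at the top. It is only an illustration, not the final implementation: the class name CaptureTimerDemo and the slot grab_one_frame() are made-up names, and the actual conversion/rendering is left as a placeholder comment. The idea is that every timer tick reads exactly one packet and returns immediately, so Qt keeps processing events (including the close button) between ticks.
Code (sketch): capture_timer_demo.h
//capture_timer_demo.h -- hypothetical QTimer-driven variant (sketch only, names are illustrative)
#ifndef CAPTURE_TIMER_DEMO_H
#define CAPTURE_TIMER_DEMO_H
#include <QWidget>
#include <QTimer>
#include <QCloseEvent>
extern "C"
{
#include <libavformat/avformat.h>
#include <libavdevice/avdevice.h>
}
class CaptureTimerDemo : public QWidget
{
Q_OBJECT
public:
explicit CaptureTimerDemo(QWidget *parent = nullptr) : QWidget(parent)
{
avdevice_register_all();
AVInputFormat *in_format = av_find_input_format("dshow");
AVDictionary *options = nullptr;
av_dict_set(&options, "video_size", "1280x720", 0);
av_dict_set(&options, "framerate", "10", 0);
av_dict_set(&options, "pixel_format", "yuyv422", 0);
//same device string as in start_play() above
if (avformat_open_input(&fmt_ctx, "video=Integrated Webcam", in_format, &options) == 0)
{
pkt = av_packet_alloc();
//one tick every 40 ms; each tick reads a single packet and returns,
//so the Qt event loop (and closeEvent) keeps running between ticks
connect(&timer, &QTimer::timeout, this, &CaptureTimerDemo::grab_one_frame);
timer.start(40);
}
av_dict_free(&options);
}
~CaptureTimerDemo()
{
if (pkt) av_packet_free(&pkt);
if (fmt_ctx) avformat_close_input(&fmt_ctx);
}
protected:
void closeEvent(QCloseEvent *event) override
{
timer.stop(); //no blocking loop anywhere, so this handler is actually reached
QWidget::closeEvent(event);
}
private slots:
void grab_one_frame()
{
if (!fmt_ctx || av_read_frame(fmt_ctx, pkt) < 0)
return;
//convert (sws_scale) and render the packet here, then release it
av_packet_unref(pkt);
}
private:
QTimer timer;
AVFormatContext *fmt_ctx = nullptr;
AVPacket *pkt = nullptr;
};
#endif // CAPTURE_TIMER_DEMO_H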