C++: DXGI + FFmpeg screen capture, saving the screen data as MP4 to implement screen recording


Preface


Full project download:
https://download.csdn.net/download/weixin_43174052/85168036

Note: what follows is the main body of the article; the example below is provided for reference.

1. What are DXGI and FFmpeg?

The Desktop Duplication API delivers desktop images through the Microsoft DirectX Graphics Infrastructure (DXGI) and is extremely fast. Because DXGI does its work on the GPU, CPU usage stays very low and performance is high. Regardless of how the display mode is configured, the desktop data returned by the Duplication API is always 32-bit RGBA, which makes screen capture much simpler: there is no need to handle the various display modes individually.
FFmpeg's video capture capabilities are very powerful: it can grab frames from capture cards or USB cameras, record the screen, and stream video over RTP to RTSP-capable media servers, which makes it suitable for live-streaming applications.
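Putting the two together, the recording loop in this article has the following shape (a sketch only; the helper names here are illustrative stand-ins for the functions implemented below):

// Overall pipeline (sketch):
//   Desktop Duplication (DXGI)  ->  32-bit BGRA pixels in system memory
//   libswscale                  ->  YUV420P AVFrame
//   libavcodec                  ->  compressed AVPacket
//   libavformat                 ->  packets muxed into the .mp4 file
while (recording)
{
    captureDesktop(buffer);      // DXGI Desktop Duplication, GPU-side copy
    convertToYUV420P(buffer);    // sws_scale
    encodeAndWrite();            // avcodec_encode_video2 + av_write_frame
}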

2. Usage: 1. Include the libraries

The code is as follows (example):
//FFmpeg
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>
}

//DXGI
#include "DXGICapture.h"
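The FFmpeg import libraries also have to be linked. Assuming a typical MSVC setup with prebuilt FFmpeg .lib files on the linker path (using the same #pragma comment style this article later uses for the D3D11 libraries):

//FFmpeg linker inputs (library names from the usual Windows builds; adjust to your distribution)
#pragma comment(lib, "avformat.lib")
#pragma comment(lib, "avcodec.lib")
#pragma comment(lib, "avutil.lib")
#pragma comment(lib, "swscale.lib")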

2. Initialize FFmpeg

void th_run::Ini(QString savePath)
{
//QString folder_path = QFileDialog::getExistingDirectory(this, "Choose a directory", "",
//	QFileDialog::ShowDirsOnly | QFileDialog::DontResolveSymlinks);
//if (folder_path.isEmpty()) return;

QString outFilename = savePath;  // e.g. "recode.mp4"


tmp_frame = av_frame_alloc();
av_init_packet(&kAVPacket);

av_register_all();     // 1. Register all containers and codecs (deprecated and a no-op since FFmpeg 4.0)

/* 2. Allocate the output media context */
avformat_alloc_output_context2(&kAVFormatContext, NULL, NULL, outFilename.toLocal8Bit().data());      // context, format (NULL = guess from file name), file type, path

if (!kAVFormatContext)        // if no format could be deduced from the file name, fall back to MPEG
{
	ret = avformat_alloc_output_context2(&kAVFormatContext, NULL, "mpeg", outFilename.toLocal8Bit().data());
}

kAVOutputFormat = kAVFormatContext->oformat;    // keep a pointer to the output format
if (!kAVOutputFormat) { qDebug() << "No output format available."; }

if (!(kAVOutputFormat->flags & AVFMT_NOFILE))   /* open the output file, if the format needs one */
{
	ret = avio_open(&kAVFormatContext->pb, outFilename.toLocal8Bit().data(), AVIO_FLAG_WRITE);  // 3. Open the output file
	if (ret < 0) { qDebug() << "Failed to open the output file." << ret; }
}

/* Add the video stream using the output format's default codec and initialize the codec. */
if (kAVOutputFormat->video_codec != AV_CODEC_ID_NONE)
{
	video_codec = avcodec_find_encoder(kAVOutputFormat->video_codec);   // 4. Find the encoder
	if (!video_codec) { qDebug() << "Encoder not found."; }

	kAVStream = avformat_new_stream(kAVFormatContext, NULL);         // 5. Create a new output stream
	if (!kAVStream) { qDebug() << "Failed to create stream kAVStream."; }

	kAVCodecContext = avcodec_alloc_context3(video_codec);      // allocate an AVCodecContext for the encoder
	if (!kAVCodecContext) { qDebug() << "Failed to initialize kAVCodecContext with video_codec's default parameters"; }

	switch (video_codec->type)
	{
	case AVMEDIA_TYPE_VIDEO:

		kAVCodecContext->codec_id = video_codec->id;
		kAVCodecContext->bit_rate = 1000 * 1000;
		kAVCodecContext->width = 1920;
		kAVCodecContext->height = 1080;
		kAVStream->time_base = time;
		kAVCodecContext->time_base = time;
		kAVCodecContext->gop_size = 10; /* emit one intra frame at most every 10 frames */
		kAVCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
		if (kAVCodecContext->codec_id == AV_CODEC_ID_MPEG2VIDEO)
		{
			kAVCodecContext->max_b_frames = 2;  /* just for testing, we also add B-frames */
		}
		if (kAVCodecContext->codec_id == AV_CODEC_ID_MPEG1VIDEO)
		{
			/* Needed to avoid using macroblocks in which some coeffs overflow.
			 * This does not happen with normal video, it only happens when the
			 * motion of the chroma plane does not match the luma plane. */
			kAVCodecContext->mb_decision = 2;
		}

		break;
	case AVMEDIA_TYPE_AUDIO:
		break;

	default:
		break;
	}

	if (kAVOutputFormat->flags & AVFMT_GLOBALHEADER)  /* some formats want stream headers to be separate */
	{
		kAVCodecContext->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
	}
}

av_dict_copy(&opt, kAVDictionary, 0);
ret = avcodec_open2(kAVCodecContext, video_codec, &opt);      // 6. Open the encoder
if (ret < 0) { qDebug() << "Failed to open the video codec" << ret; }
av_dict_free(&opt);

frame = av_frame_alloc();
if (!frame) { qDebug() << "Failed to allocate the frame."; }
frame->format = kAVCodecContext->pix_fmt;
frame->width = kAVCodecContext->width;
frame->height = kAVCodecContext->height;

ret = av_frame_get_buffer(frame, 32);   // allocate the frame's buffer; format/width/height above must be set first, 32 is the byte alignment
if (ret < 0) { qDebug() << "Failed to allocate the frame buffer"; }

ret = avcodec_parameters_from_context(kAVStream->codecpar, kAVCodecContext);  /* copy the stream parameters to the muxer */
if (ret < 0) { qDebug() << "Failed to copy the stream parameters"; }

av_dump_format(kAVFormatContext, 0, outFilename.toLocal8Bit().data(), 1);     // print detailed information about the output file



ret = avformat_write_header(kAVFormatContext, &kAVDictionary);  /* 7. Write the stream header, if any */
if (ret < 0) { qDebug() << "Failed to write the stream header"; }

}
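The member variables used by Ini(), Enc() and end() live in the th_run header, which the article does not show. For reference, a declaration block consistent with the code above could look like this; the exact types of `time` and `isbegin` are guesses:

// Assumed members of th_run (presumably a QThread subclass), reconstructed
// from how Ini()/Enc()/end() use them:
AVFormatContext* kAVFormatContext = nullptr;
AVOutputFormat*  kAVOutputFormat = nullptr;
AVStream*        kAVStream = nullptr;
AVCodecContext*  kAVCodecContext = nullptr;
AVCodec*         video_codec = nullptr;
AVDictionary*    kAVDictionary = nullptr;
AVDictionary*    opt = nullptr;
AVPacket         kAVPacket;
AVFrame*         frame = nullptr;       // YUV420P frame handed to the encoder
AVFrame*         tmp_frame = nullptr;   // wraps the RGB24 input buffer
SwsContext*      kSwsContext = nullptr;
uint8_t*         intBuffer = nullptr;
AVRational       time = { 1, 25 };      // assumed 25 fps time base
int64_t          next_pts = 0;
int              got_packer = 0;
int              ret = 0;
volatile bool    isbegin = true;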

3. Push each image into the MP4 stream

void th_run::Enc(QImage img)
{
	// 8. Encode and write one frame

	//        if (av_compare_ts(next_pts, kAVCodecContext->time_base, 10.0, time_1) > 0)    /* only generate 10 seconds of video */
	//        {
	//            qDebug() << "No new frame needed yet"; break;
	//        }
	if (av_frame_make_writable(frame) < 0) {   }  /* when we pass a frame to the encoder, it may keep a reference to it internally; make sure we do not overwrite it here */

	/*QImage img(tr("%1/%2.jpg").arg(folder_path).arg(i + 1));
	if (img.isNull()) { qDebug() << "Failed to open the image"; break; }
	img = img.convertToFormat(QImage::Format_RGB888);*/
	//QImage img = getDeskImg();

	kSwsContext = sws_getContext(kAVCodecContext->width, kAVCodecContext->height,
		AV_PIX_FMT_RGB24, kAVCodecContext->width, kAVCodecContext->height,
		AV_PIX_FMT_YUV420P, SWS_LANCZOS | SWS_ACCURATE_RND, NULL, NULL, NULL);
	if (!kSwsContext) { qDebug() << "Failed to initialize the image converter"; }


	intBuffer = (uint8_t*)malloc(sizeof(uint8_t)*img.bytesPerLine()*img.height());
	memcpy(intBuffer, img.constBits(), sizeof(uint8_t)*img.bytesPerLine()*img.height());
	avpicture_fill((AVPicture *)tmp_frame, intBuffer, AV_PIX_FMT_RGB24, kAVCodecContext->width, kAVCodecContext->height); // tmp_frame's data pointers now point into intBuffer

	sws_scale(kSwsContext, (const uint8_t * const *)tmp_frame->data,
		tmp_frame->linesize, 0, kAVCodecContext->height, frame->data, frame->linesize);   // convert the frame to the encoder's pixel format
	sws_freeContext(kSwsContext);
	kSwsContext = 0;

	frame->pts = next_pts++;
	//frame->pts = i*(kAVStream->time_base.den)/((kAVStream->time_base.num)*24);

	got_packer = 0;
	ret = avcodec_encode_video2(kAVCodecContext, &kAVPacket, frame, &got_packer);  // 8. Encode: compress one frame into an AVPacket
	if (ret < 0) { qDebug() << "Error encoding video frame" << ret; }

	if (got_packer)
	{
		av_packet_rescale_ts(&kAVPacket, kAVCodecContext->time_base, kAVStream->time_base);
		kAVPacket.stream_index = kAVStream->index;
		ret = av_write_frame(kAVFormatContext, &kAVPacket); /* write the compressed frame to the media file */
		av_free_packet(&kAVPacket);
	}
	else
	{
		ret = 0;
	}
	free(intBuffer); intBuffer = 0;
	if (ret < 0) { qDebug() << "Failed to write to the video file" << ret << kAVPacket.stream_index; }

}
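avcodec_encode_video2() and av_free_packet() have been deprecated since FFmpeg 3.1 and removed in recent releases. On a newer FFmpeg the encode-and-write part of Enc() can be expressed with the send/receive API; a minimal sketch, reusing the same member names as above:

// Sketch: the same encode-and-mux step with the send/receive API (FFmpeg >= 3.1)
frame->pts = next_pts++;
int ret = avcodec_send_frame(kAVCodecContext, frame);
AVPacket* pkt = av_packet_alloc();
while (ret >= 0)
{
    ret = avcodec_receive_packet(kAVCodecContext, pkt);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        break;   // the encoder needs more input, or is fully drained
    av_packet_rescale_ts(pkt, kAVCodecContext->time_base, kAVStream->time_base);
    pkt->stream_index = kAVStream->index;
    av_write_frame(kAVFormatContext, pkt);
    av_packet_unref(pkt);
}
av_packet_free(&pkt);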

4. Close the stream to finish generating the MP4

void th_run::end()
{
/* Write the trailer, if any. The trailer must be written before you close the
 * CodecContexts that were open when you wrote the header; otherwise
 * av_write_trailer() may try to use memory that was freed by avcodec_close(). */
av_write_trailer(kAVFormatContext);

/* Close each codec. */
avcodec_free_context(&kAVCodecContext);
av_frame_free(&frame);

if (!(kAVOutputFormat->flags & AVFMT_NOFILE))
	avio_closep(&kAVFormatContext->pb);  /* close the output file */

avformat_free_context(kAVFormatContext);   /* free the context */

}
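One caveat: with max_b_frames = 2 the encoder may still be holding buffered frames when recording stops, and end() never drains them, so the tail of the video can be lost. A minimal flush (a sketch, using the same deprecated API as the rest of this article) could be inserted at the top of end():

// Sketch: drain delayed frames (e.g. buffered B-frames) before writing the trailer
int got = 1;
while (got)
{
    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = NULL; pkt.size = 0;
    if (avcodec_encode_video2(kAVCodecContext, &pkt, NULL, &got) < 0)
        break;
    if (got)
    {
        av_packet_rescale_ts(&pkt, kAVCodecContext->time_base, kAVStream->time_base);
        pkt.stream_index = kAVStream->index;
        av_write_frame(kAVFormatContext, &pkt);
        av_free_packet(&pkt);
    }
}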

5. Entry point

void th_run::run()
{
Ini(commom::GetString("tb_path").data() + QDateTime::currentDateTime().toString("yyyy_MM_dd_hh_mm_ss") + ".mp4");
//getdesk _getdesk;
while (isbegin)
{
	//GDI capture variant
	/*
	QImage img = getDeskImg();
	Enc(img); */

	//DXGI variant
	QImage img;
	getdesk(img);
	Enc(img);
	Sleep(30);   // ~30 ms between frames; see the pacing note after this function
}

end();
emit sendmsg(QStringLiteral("Done"));
//MessageBoxA(NULL, "Done", "", 0);

}
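Sleep(30) yields roughly 33 fps at best, but it ignores the time spent in getdesk() and Enc(), so the encoded timestamps drift away from wall-clock time on slow frames. A steadier loop subtracts the elapsed work time from the interval; a sketch with QElapsedTimer (the 40 ms interval assumes a 25 fps time base):

// Sketch: fixed-interval pacing instead of a fixed sleep (needs <QElapsedTimer>)
QElapsedTimer timer;
timer.start();
const qint64 frameIntervalMs = 40;          // 25 fps
qint64 nextDeadline = frameIntervalMs;
while (isbegin)
{
    QImage img;
    getdesk(img);
    Enc(img);
    qint64 wait = nextDeadline - timer.elapsed();
    if (wait > 0)
        Sleep(static_cast<DWORD>(wait));    // sleep only for the remainder of the interval
    nextDeadline += frameIntervalMs;
}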

6. Grabbing the desktop image

CDXGICapture* pCapture;
void getdesk(QImage& _img)
{
	int width = 1920, height = 1080;
	if (_first)   // create the capturer once, on first use
	{
		pCapture = new CDXGICapture(1920, 1080);
		_first = false;
	}

	int t1 = GetTickCount();
	char* pImageData = (char*)malloc(width * height * 4);
	memset(pImageData, 0xff, width * height * 4);
	int nLen = 0;

	BOOL bRet = pCapture->CaptureImage(pImageData, &nLen);

	char t[100] = { 0 };
	sprintf(t, "%d\r\n", GetTickCount() - t1);   // log the capture time in milliseconds
	OutputDebugStringA(t);

	QImage img(width, height, QImage::Format::Format_RGB32);
	memcpy(img.bits(), pImageData, width * 4 * height);


	///add mouse
	QPainter p;
	p.begin(&img);


	QPoint point = QCursor::pos();

	QPen pen;
	pen.setWidth(4);
	pen.setColor(Qt::red);
	p.setPen(pen);
	//p.drawPoint(point.x(), point.y());
	p.drawEllipse(point, 5, 5);

	if (_lastpoint.x() != point.x() || _lastpoint.y() != point.y())
	{
		pen.setWidth(4);
		pen.setColor(Qt::green);
		p.setPen(pen);
		p.drawLine(_lastpoint.x(), _lastpoint.y(), point.x(), point.y());
	}
	_lastpoint = point;
	p.end();
	///add mouse

	img = img.convertToFormat(QImage::Format::Format_RGB888);
	 // img.save("a.bmp");
	_img= img;
	free(pImageData);

}
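Note that getdesk() converts the 32-bit capture to Format_RGB888 only so that Enc() can treat it as AV_PIX_FMT_RGB24; libswscale can consume the DXGI buffer directly and skip that extra full-frame conversion. A sketch, assuming the duplication output is AV_PIX_FMT_BGRA (the usual DXGI_FORMAT_B8G8R8A8_UNORM desktop format):

// Sketch: feed the raw DXGI BGRA buffer straight into swscale
SwsContext* ctx = sws_getContext(width, height, AV_PIX_FMT_BGRA,
	width, height, AV_PIX_FMT_YUV420P,
	SWS_LANCZOS | SWS_ACCURATE_RND, NULL, NULL, NULL);
const uint8_t* srcData[4] = { (const uint8_t*)pImageData, NULL, NULL, NULL };
int srcStride[4] = { width * 4, 0, 0, 0 };
sws_scale(ctx, srcData, srcStride, 0, height, frame->data, frame->linesize);
sws_freeContext(ctx);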

DXGICapture.h

#ifndef DXGICAPTURE_H
#define DXGICAPTURE_H
#include <d3d11.h>
#include <dxgi1_2.h>
#define RELEASE_OBJECT(obj) {if(obj) obj->Release(); obj = NULL;}
class CDXGICapture
{
public:
	CDXGICapture(int maxWidth, int maxHeight);
	virtual ~CDXGICapture();

	virtual BOOL CaptureImage(char*& pImageData, int* nLen);

private:
	BOOL Init();
	void DelInit();
	// attach the input desktop to this thread
	static BOOL AttachToThread();
	BOOL QueryFrame(char*& pImageData, int* nImageSize);

private:
	BOOL m_isInitSuccess;
	ID3D11Device* m_pDevice;
	ID3D11DeviceContext* m_deviceContext;
	DXGI_OUTPUT_DESC m_dxgiOutputDesc;
	IDXGIOutputDuplication* m_pDeskDuplication;
	static BOOL s_isAttach;
	unsigned char* m_pImageData;
	int m_maxWidth;
	int m_maxHeight;
};

#endif

DXGICapture.cpp

#include "DXGICapture.h"
#pragma comment(lib, "d3d11.lib")
#pragma comment(lib, "dxgi.lib")
BOOL CDXGICapture::s_isAttach = FALSE;
CDXGICapture::CDXGICapture(int maxWidth, int maxHeight) :
	m_isInitSuccess(FALSE),
	m_pDevice(NULL),
	m_deviceContext(NULL),
	m_pDeskDuplication(NULL),
	m_pImageData(NULL),
	m_maxWidth(maxWidth),
	m_maxHeight(maxHeight)
{
	m_pImageData = new unsigned char[maxWidth * maxHeight * 4];
	Init();
}
CDXGICapture::~CDXGICapture()
{
	DelInit();                // release the D3D/DXGI objects
	delete[] m_pImageData;    // free the frame buffer
	m_pImageData = NULL;
}

BOOL CDXGICapture::Init()
{
	do
	{
		HRESULT hr = S_OK;
		if (m_isInitSuccess)
		{
			// already initialized
			break;
		}
		// Supported driver types; see
		// https://docs.microsoft.com/zh-cn/windows/win32/api/d3dcommon/ne-d3dcommon-d3d_driver_type
		D3D_DRIVER_TYPE driverTypes[] =
		{
			D3D_DRIVER_TYPE_HARDWARE,   // hardware driver: the hardware supports all Direct3D features
			D3D_DRIVER_TYPE_WARP,       // WARP driver: a high-performance software rasterizer supporting feature levels 9_1 through 10.1
			D3D_DRIVER_TYPE_REFERENCE   // reference driver: a software implementation supporting every Direct3D feature
		};
		unsigned int numDriverTypes = ARRAYSIZE(driverTypes);
		// Feature sets the Direct3D device may target; see
		// https://docs.microsoft.com/zh-cn/windows/win32/api/d3dcommon/ne-d3dcommon-d3d_feature_level
		D3D_FEATURE_LEVEL featureLevels[] =
		{
			D3D_FEATURE_LEVEL_11_0,   // features supported by Direct3D 11.0, including shader model 5
			D3D_FEATURE_LEVEL_10_1,   // features supported by Direct3D 10.1, including shader model 4
			D3D_FEATURE_LEVEL_10_0,   // features supported by Direct3D 10.0, including shader model 4
			D3D_FEATURE_LEVEL_9_1     // features supported by feature level 9_1, including shader model 2
		};
		unsigned int numFeatureLevels = ARRAYSIZE(featureLevels);
		D3D_FEATURE_LEVEL featureLevel;
		// 1. Create the D3D device
		/* Note: if the Direct3D 11.1 runtime is installed and pFeatureLevels is NULL,
		 * this function will not create a D3D_FEATURE_LEVEL_11_1 device; to create one,
		 * you must explicitly pass a D3D_FEATURE_LEVEL array containing D3D_FEATURE_LEVEL_11_1.
		 * On a machine without the Direct3D 11.1 runtime, passing such an array makes the
		 * call fail immediately with E_INVALIDARG. */
		for (unsigned int driverTypeIndex = 0; driverTypeIndex < numDriverTypes; ++driverTypeIndex)
		{
			hr = D3D11CreateDevice(NULL, driverTypes[driverTypeIndex], NULL, 0, featureLevels, numFeatureLevels, D3D11_SDK_VERSION, &m_pDevice, &featureLevel, &m_deviceContext);
			if (SUCCEEDED(hr))
			{
				break;
			}
		}
		if (FAILED(hr))
		{
			break;
		}
		// 2. Create the DXGI device
		IDXGIDevice* pDxgiDevice = NULL;
		hr = m_pDevice->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&pDxgiDevice));
		if (FAILED(hr))
		{
			break;
		}
		// 3. Get the DXGI adapter
		IDXGIAdapter* pDxgiAdapter = NULL;
		hr = pDxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast<void**>(&pDxgiAdapter));
		RELEASE_OBJECT(pDxgiDevice);
		if (FAILED(hr))
		{
			break;
		}
		// 4. Get the IDXGIOutput
		int nOutput = 0;
		IDXGIOutput* pDxgiOutput = NULL;
		// enumerate the adapter (video card) outputs
		hr = pDxgiAdapter->EnumOutputs(nOutput, &pDxgiOutput);
		RELEASE_OBJECT(pDxgiAdapter);
		if (FAILED(hr))
		{
			break;
		}
		// 5. Get the DXGI_OUTPUT_DESC parameters
		pDxgiOutput->GetDesc(&m_dxgiOutputDesc);
		// 6. Get the IDXGIOutput1
		IDXGIOutput1* pDxgiOutput1 = NULL;
		hr = pDxgiOutput->QueryInterface(__uuidof(IDXGIOutput1), reinterpret_cast<void**>(&pDxgiOutput1));
		RELEASE_OBJECT(pDxgiOutput);
		if (FAILED(hr))
		{
			break;
		}
		// 7. Create the desktop duplication
		hr = pDxgiOutput1->DuplicateOutput(m_pDevice, &m_pDeskDuplication);
		RELEASE_OBJECT(pDxgiOutput1);
		if (FAILED(hr))
		{
			break;
		}
		// initialization succeeded
		m_isInitSuccess = TRUE;
	} while (0);
	return m_isInitSuccess;
}

void CDXGICapture::DelInit()
{
	if (!m_isInitSuccess)
	{
		return;
	}
	m_isInitSuccess = FALSE;
	RELEASE_OBJECT(m_pDeskDuplication);
	RELEASE_OBJECT(m_pDevice);
	RELEASE_OBJECT(m_deviceContext);
}
BOOL CDXGICapture::AttachToThread()
{
	if (s_isAttach)
	{
		return TRUE;
	}
	HDESK hCurrentDesktop = OpenInputDesktop(0, FALSE, GENERIC_ALL);
	if (!hCurrentDesktop)
	{
		return FALSE;
	}
	// attach the input desktop to this thread
	BOOL isDesktopAttached = SetThreadDesktop(hCurrentDesktop);
	CloseDesktop(hCurrentDesktop);
	hCurrentDesktop = NULL;
	s_isAttach = TRUE;
	return isDesktopAttached;
}
int buffsize = 0;   // size of one full frame in bytes, set on the first successful capture
BOOL CDXGICapture::QueryFrame(char*& pImageData, int* nImageSize)
{
	*nImageSize = 0;
	if (!m_isInitSuccess || !AttachToThread())
	{
		return FALSE;
	}

	IDXGIResource* pDesktopResource = NULL;
	DXGI_OUTDUPL_FRAME_INFO frameInfo;
	HRESULT hr = m_pDeskDuplication->AcquireNextFrame(0, &frameInfo, &pDesktopResource);
	if (FAILED(hr))
	{
		// On some Windows 10 systems AcquireNextFrame fails when the desktop
		// has not changed; hand back the previously captured frame instead.
		memcpy(pImageData, m_pImageData, buffsize);
		return TRUE;
	}
	// query the next frame's resource
	ID3D11Texture2D* pAcquiredDesktopImage = NULL;
	hr = pDesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast<void**>(&pAcquiredDesktopImage));
	RELEASE_OBJECT(pDesktopResource);
	if (FAILED(hr))
	{
		return FALSE;
	}
	// copy the old description
	D3D11_TEXTURE2D_DESC frameDescriptor;
	pAcquiredDesktopImage->GetDesc(&frameDescriptor);

	// create a new staging buffer to receive the frame image
	ID3D11Texture2D* pNewDesktopImage = NULL;
frameDescriptor.Usage = D3D11_USAGE_STAGING;
frameDescriptor.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
frameDescriptor.BindFlags = 0;
frameDescriptor.MiscFlags = 0;
frameDescriptor.MipLevels = 1;
frameDescriptor.ArraySize = 1;
frameDescriptor.SampleDesc.Count = 1;
hr = m_pDevice->CreateTexture2D(&frameDescriptor, NULL, &pNewDesktopImage);
if (FAILED(hr))
{
	RELEASE_OBJECT(pAcquiredDesktopImage);
	m_pDeskDuplication->ReleaseFrame();
	return FALSE;
}

	// copy the acquired desktop image into the staging texture
m_deviceContext->CopyResource(pNewDesktopImage, pAcquiredDesktopImage);
RELEASE_OBJECT(pAcquiredDesktopImage);
m_pDeskDuplication->ReleaseFrame();

	// wrap the staging texture in an IDXGISurface so its bits can be mapped
IDXGISurface* pStagingSurf = NULL;
hr = pNewDesktopImage->QueryInterface(__uuidof(IDXGISurface), (void**)(&pStagingSurf));
RELEASE_OBJECT(pNewDesktopImage);
if (FAILED(hr))
{
	return FALSE;
}
//copy bits to user space 
DXGI_MAPPED_RECT mappedRect;
hr = pStagingSurf->Map(&mappedRect, DXGI_MAP_READ);
if (SUCCEEDED(hr))
{
	*nImageSize = m_dxgiOutputDesc.DesktopCoordinates.right * m_dxgiOutputDesc.DesktopCoordinates.bottom * 4;
	if (buffsize == 0)
	{
		buffsize = *nImageSize;   // remember the frame size for the cached-frame path above
	}

	memcpy(m_pImageData, mappedRect.pBits, buffsize);   // keep an internal copy, reused when the desktop has not changed
	memcpy(pImageData, m_pImageData, buffsize);

	pStagingSurf->Unmap();
}
RELEASE_OBJECT(pStagingSurf);
return SUCCEEDED(hr);

}

BOOL CDXGICapture::CaptureImage(char*& pImageData, int* nLen)
{
	return QueryFrame(pImageData, nLen);
}
