Environment: VS 2015
[Figure 1]

Run the program to generate the RGB image data:
[Figure 2]

Then convert the RGB image data to YUV format with an ffmpeg command, as follows:
[Figure 3]

[Figure 4]
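
As a minimal sketch of that conversion step (assuming the 640x480, little-endian RGB565 dump at D:\thumb.rgb that the code below produces), a command along these lines does the job:

    REM hypothetical example - adjust the path, size and pixel format to match your output
    ffmpeg -f rawvideo -pixel_format rgb565le -video_size 640x480 -i D:\thumb.rgb -pix_fmt yuv420p -f rawvideo D:\thumb.yuv

The same input options also let ffplay display the raw dump directly: ffplay -f rawvideo -pixel_format rgb565le -video_size 640x480 D:\thumb.rgb.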

Complete code:

    #include "stdafx.h"

    extern "C"
    {
    #include "libavformat/avformat.h"
    #include "libswscale/swscale.h"
    }

    #define DEBUG_SPEND_TIME 1
    #ifdef DEBUG_SPEND_TIME
    #ifdef _WIN32
    #include "windows.h"
    #include "mmsystem.h"
    #pragma comment(lib, "winmm.lib")    // timeGetTime()
    #else
    #include <sys/time.h>
    #endif
    #endif

    AVFormatContext* m_pFormatContext = NULL;
    AVCodecContext*  m_pCodecContext  = NULL;
    int              m_nStreamIndex[AVMEDIA_TYPE_NB] = { -1 };
    AVCodec*         m_pVideoCodec    = NULL;
    AVFrame*         m_pAVFrame       = NULL;
    AVFrame*         m_pThumbFrame    = NULL;

    const char* strInputFileName[] = {
        "C:\\oppo.mp4",
    };
    const char strThumbFileName[] = "D:\\thumb.rgb";

    // Thumbnail size and the average-luma range used to reject all-black / all-white frames.
    #define THUMB_WIDTH      640
    #define THUMB_HEIGHT     480
    #define BRIGHTNESS_VALUE 0xF0
    #define DARKNESS_VALUE   0x30

    // Register all muxers/demuxers and codecs (needed with the pre-4.0 FFmpeg APIs used here).
    int initFFmpegContext()
    {
        avcodec_register_all();
        av_register_all();
        return 0;
    }

    int setDataSource(const char* url)
    {
        int ret = -1;
        // Release any context left over from a previous file.
        if (m_pFormatContext)
        {
            avformat_close_input(&m_pFormatContext);
        }
        m_pFormatContext = avformat_alloc_context();
        if (!m_pFormatContext)
        {
            return -1;
        }
        ret = avformat_open_input(&m_pFormatContext, url, NULL, NULL);
        if (ret != 0)
        {
            // On failure avformat_open_input frees the context and sets the pointer to NULL.
            return ret;
        }
        ret = avformat_find_stream_info(m_pFormatContext, NULL);
        if (ret < 0)
        {
            avformat_close_input(&m_pFormatContext);
            return ret;
        }
        m_nStreamIndex[AVMEDIA_TYPE_VIDEO] = av_find_best_stream(m_pFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
        m_nStreamIndex[AVMEDIA_TYPE_AUDIO] = av_find_best_stream(m_pFormatContext, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0);
        return 0;
    }

    int openDecoder()
    {
        int ret = -1;
        if (m_nStreamIndex[AVMEDIA_TYPE_VIDEO] < 0)
        {
            return ret;
        }
        // streams[]->codec and avcodec_decode_video2() below are the old decoding API.
        m_pCodecContext = m_pFormatContext->streams[m_nStreamIndex[AVMEDIA_TYPE_VIDEO]]->codec;
        if (m_pCodecContext)
        {
            m_pVideoCodec = avcodec_find_decoder(m_pCodecContext->codec_id);
            ret = avcodec_open2(m_pCodecContext, m_pVideoCodec, NULL);
            if (ret != 0)
            {
                return ret;
            }
            avcodec_flush_buffers(m_pCodecContext);
        }
        return ret;
    }

    void closeDecoder()
    {
        if (m_pVideoCodec)
        {
            avcodec_close(m_pCodecContext);
            m_pCodecContext = NULL;
        }
    }

    // Read packets until one key frame is decoded whose average luma is neither too dark
    // nor too bright; the decoded picture is returned in pFrame.
    int decodeOneFrame(AVFrame* pFrame)
    {
        int ret = 0;
        bool frame_found = false;
        int decoded_frame_count = 0;
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = NULL;
        pkt.size = 0;
        do
        {
            int got_frame = 0;
            ret = av_read_frame(m_pFormatContext, &pkt);
            if (ret < 0)
            {
                break;
            }
            // Only key frames are decoded, so a single packet yields a complete picture.
            if (!(pkt.flags & AV_PKT_FLAG_KEY))
            {
                av_free_packet(&pkt);
                continue;
            }
            if (pkt.stream_index == m_nStreamIndex[AVMEDIA_TYPE_VIDEO])
            {
                ret = avcodec_decode_video2(m_pCodecContext, pFrame, &got_frame, &pkt);
                if (got_frame && ret >= 0)
                {
                    // The stream resolution changed; remember the new size and try the next frame.
                    if (pFrame->width != m_pCodecContext->width || pFrame->height != m_pCodecContext->height)
                    {
                        m_pCodecContext->width = pFrame->width;
                        m_pCodecContext->height = pFrame->height;
                        decoded_frame_count++;
                        av_free_packet(&pkt);
                        continue;
                    }
                    decoded_frame_count++;
                    // Skip (nearly) black or white pictures: sample every third luma byte and
                    // compare the running average of the first part with the overall average.
                    uint32_t y_value = 0;
                    uint32_t y_half = 0;
                    uint32_t y_count = 0;
                    int pixel_count = pFrame->width * pFrame->height;
                    bool bHalf = false;
                    for (int i = 0; i < pixel_count; i += 3)
                    {
                        uint8_t y_temp = pFrame->data[0][i];
                        y_value += y_temp;
                        y_count++;
                        if (!bHalf && i > pixel_count / 6)
                        {
                            y_half = y_value / y_count;
                            bHalf = true;
                        }
                    }
                    y_value /= y_count;
                    // Identical partial and full averages usually mean a flat, uniform picture.
                    if (y_half == y_value)
                    {
                        printf("decoded frame count = %d y_half=%d == y_value=%d, skip this frame!\n", decoded_frame_count, y_half, y_value);
                        av_free_packet(&pkt);
                        continue;
                    }
                    if (y_value < BRIGHTNESS_VALUE && y_value > DARKNESS_VALUE)
                    {
                        frame_found = true;
                        printf("frame_found = true -----------------------decoded frame count = %d\n", decoded_frame_count);
                    }
                }
    #ifdef SAVE_YUV_FRAME
                // Optionally dump the decoded YUV420 picture for debugging.
                char szName[128];
                sprintf(szName, "D:\\test_%d.yuv", decoded_frame_count);
                FILE* pFile = fopen(szName, "ab");
                if (pFile)
                {
                    fwrite(pFrame->data[0], 1, pFrame->width * pFrame->height, pFile);
                    fwrite(pFrame->data[1], 1, pFrame->width * pFrame->height / 4, pFile);
                    fwrite(pFrame->data[2], 1, pFrame->width * pFrame->height / 4, pFile);
                    fclose(pFile);
                }
    #endif
            }
            av_free_packet(&pkt);
        } while (!frame_found && ret >= 0);
        av_free_packet(&pkt);
        return ret;
    }

    // Scale/convert the decoded frame into a desW x desH RGB565 thumbnail.
    int getThumbnail(AVFrame* pInputFrame, AVFrame* pOutputFrame, int desW, int desH)
    {
        if (pInputFrame == NULL || pOutputFrame == NULL)
        {
            return -1;
        }
        SwsContext* pSwsContext = sws_getCachedContext(NULL, pInputFrame->width, pInputFrame->height, (AVPixelFormat)pInputFrame->format,
            desW, desH, AV_PIX_FMT_RGB565, SWS_BICUBIC, NULL, NULL, NULL);
        if (pSwsContext == NULL)
        {
            return -1;
        }
        pOutputFrame->width = desW;
        pOutputFrame->height = desH;
        pOutputFrame->format = AV_PIX_FMT_RGB565;
        if (av_frame_get_buffer(pOutputFrame, 16) < 0)
        {
            sws_freeContext(pSwsContext);
            return -1;
        }
        sws_scale(pSwsContext, pInputFrame->data, pInputFrame->linesize, 0, pInputFrame->height, pOutputFrame->data, pOutputFrame->linesize);
        sws_freeContext(pSwsContext);
        return 0;
    }

    // Seek, decode one suitable frame, scale it, and append the RGB565 data to strThumbFileName.
    int getFrameAt(int64_t timeUs, int width, int height)
    {
        int ret = -1;
        AVFrame* pFrame = NULL;
        // With stream_index -1 the timestamp is in AV_TIME_BASE (microsecond) units; -1 seeks to the start.
        ret = avformat_seek_file(m_pFormatContext, -1, INT64_MIN, timeUs, INT64_MAX, 0);
        pFrame = av_frame_alloc();
        m_pThumbFrame = av_frame_alloc();
        ret = openDecoder();
        if (ret != 0)
        {
            av_frame_free(&pFrame);
            av_frame_free(&m_pThumbFrame);
            return ret;
        }
    #ifdef DEBUG_SPEND_TIME
    #ifdef _WIN32
        DWORD start_time = timeGetTime();
    #else
        struct timeval start, end;
        gettimeofday(&start, NULL);
    #endif
    #endif
        ret = decodeOneFrame(pFrame);
        if (ret < 0)
        {
            av_frame_free(&pFrame);
            av_frame_free(&m_pThumbFrame);
            closeDecoder();
            return ret;
        }
    #ifdef DEBUG_SPEND_TIME
    #ifdef _WIN32
        DWORD end_time = timeGetTime();
        printf("decodeOneFrame spend time = %d ms\n", end_time - start_time);
    #else
        gettimeofday(&end, NULL);
        int spend_time = (end.tv_sec - start.tv_sec) * 1000 + (end.tv_usec - start.tv_usec) / 1000;
        printf("spend_time = %d ms\n", spend_time);
    #endif
    #endif
        ret = getThumbnail(pFrame, m_pThumbFrame, width, height);
        if (ret < 0)
        {
            av_frame_free(&pFrame);
            av_frame_free(&m_pThumbFrame);
            closeDecoder();
            return ret;
        }
        // Append the raw RGB565 thumbnail to the output file.
        FILE* pFile = fopen(strThumbFileName, "ab");
        if (pFile)
        {
            fwrite(m_pThumbFrame->data[0], 1, m_pThumbFrame->width * m_pThumbFrame->height * 2, pFile);
            fclose(pFile);
        }
        av_frame_free(&pFrame);
        av_frame_free(&m_pThumbFrame);
        closeDecoder();
        return ret;
    }

    int _tmain(int argc, _TCHAR* argv[])
    {
        int ret = -1;
        initFFmpegContext();
        int file_count = sizeof(strInputFileName) / sizeof(strInputFileName[0]);
        for (int i = 0; i < file_count; i++)
        {
            const char* pFileName = strInputFileName[i];
            ret = setDataSource(pFileName);
            if (ret != 0)
            {
                printf("setDataSource(%s) failed, ret = %d\n", pFileName, ret);
                continue;
            }
            getFrameAt(-1, THUMB_WIDTH, THUMB_HEIGHT);
        }
        if (m_pFormatContext)
        {
            avformat_close_input(&m_pFormatContext);
        }
        // pause
        printf("finished, pause ....\n");
        getchar();
        return 0;
    }
