In an earlier article, "SkeyeRTSPLive: a Solution for Bringing Traditional Video Surveillance to the Internet+", I described the RTSP-to-RTMP relay process. The simplified flow is: pull the RTSP stream with SkeyeRTSPClient to obtain the encoded audio and video data, then push it back out with SkeyeRTMPPusher. The flow sounds trivial, but during actual development we found it is not as simple as it looks. First, the RTSP protocol supports many audio and video codecs: audio may be AAC, G711, G726 and so on, and video may be H264, H265, MJPEG, MPEG and more, whereas SkeyeRTMPPusher only pushes H264 (now extended to support H265). For audio we can therefore transcode to AAC with SkeyeAACEncoder; for video we can decode to raw frames with SkeyeVideoDecoder and then re-encode the raw data with SkeyeVideoEncoder into the format required for RTMP pushing. This article focuses on the software decoding flow of SkeyeVideoDecoder.
1. The SkeyeVideoDecoder soft-decoding interface is declared as follows:
#ifndef __SKEYE_DECODER_API_H__
#define __SKEYE_DECODER_API_H__
#include <windows.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#define SKEYEDECODER_API __declspec(dllexport)
//=======================================================
//Decoder
#ifndef DECODER_H264
#define DECODER_H264 0x1C //28
#endif
#ifndef DECODER_MPEG4
#define DECODER_MPEG4 0x0D //13
#endif
#ifndef DECODER_MPEG2
#define DECODER_MPEG2 0x02 //2
#endif
#ifndef DECODER_MJPEG
#define DECODER_MJPEG 0x08 //8
#endif
#ifndef DECODER_MP3
#define DECODER_MP3 0x15001 //86017
#endif
#ifndef DECODER_AAC
#define DECODER_AAC 0x15002 //86018
#endif
//=======================================================
// Output pixel formats
#ifndef OUTPUT_PIX_FMT_YUV420P
#define OUTPUT_PIX_FMT_YUV420P 0
#endif
#ifndef OUTPUT_PIX_FMT_YUYV422
#define OUTPUT_PIX_FMT_YUYV422 1
#endif
#ifndef OUTPUT_PIX_FMT_RGB565LE
#define OUTPUT_PIX_FMT_RGB565LE 44
#endif
#ifndef OUTPUT_PIX_FMT_RGBA
#define OUTPUT_PIX_FMT_RGBA 28
#endif
//=======================================================
// Image processing
//=======================================================
typedef enum __VIDEO_FILTER_TYPE
{
    VIDEO_ROTATION_90_0 = 0,    // rotate 90 degrees clockwise
    VIDEO_ROTATION_90_1,        // rotate 90 degrees counterclockwise
    VIDEO_ROTATION_90_0_FLIP,   // rotate 90 degrees clockwise, then flip horizontally
    VIDEO_ROTATION_90_1_FLIP,   // rotate 90 degrees counterclockwise, then flip vertically
    VIDEO_TEXT,
} VIDEO_FILTER_TYPE;
//=======================================================
typedef void *SKEYEDEC_HANDLE;
//=======================================================
extern "C"
{int SKEYEDECODER_API SKEYEDECODER_Init(SKEYEDEC_HANDLE *_handle);
int SKEYEDECODER_API SKEYEDECODER_Deinit(SKEYEDEC_HANDLE *_handle);
int SKEYEDECODER_API SKEYEDECODER_SetVideoDecoderParam(SKEYEDEC_HANDLE _handle, int _width, int _height, int _decoder, int _outformat);
int SKEYEDECODER_API SKEYEDECODER_SetAudioDecoderParam(SKEYEDEC_HANDLE _handle, unsigned char _channel, unsigned int _sample_rate, unsigned int _decoder);
int SKEYEDECODER_API SKEYEDECODER_GetVideoDecoderInfo(SKEYEDEC_HANDLE _handle, int *_decoder, int *_width, int *_height);
int SKEYEDECODER_API SKEYEDECODER_DecodeVideo(SKEYEDEC_HANDLE _handle, char *pInBuf, int inputSize, char **pOutBuf, int dstW, int dstH);
//desc: 解码后的数据,间接送到指定的内存中
int SKEYEDECODER_API SKEYEDECODER_DecodeVideo2Buf(SKEYEDEC_HANDLE _handle, char *_inbuf, int _bufsize, void *_outbuf[8], int _pitch);
int SKEYEDECODER_API SKEYEDECODER_DecodeVideo3(SKEYEDEC_HANDLE _handle, char *_inbuf, int _bufsize, void *yuvbuf, int dstW, int dstH);
int SKEYEDECODER_API SKEYEDECODER_DecodeVideoPacket(SKEYEDEC_HANDLE _handle, char *pCodecCtx, unsigned char *avPacket, char **_outbuf);
int SKEYEDECODER_API SKEYEDECODER_DecodeAudio(SKEYEDEC_HANDLE _handle, char *pInBuf, int inputSize, char *pOutBuf, int *outSize);
int SKEYEDECODER_API SKEYEDECODER_DecodeAudioPacket(SKEYEDEC_HANDLE _handle, char *pCodecCtx, unsigned char *avPacket, char *pOutBuf, int *outSize);
};
#endif
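For orientation, here is a minimal calling sequence for the interface declared above. This is only a sketch under assumptions: the header file name SkeyeDecoderAPI.h, the 1920x1080 stream size, and the exact return-value semantics are not documented in the header, and the input buffer is assumed to come from a SkeyeRTSPClient data callback.
// Minimal usage sketch of the SKEYEDECODER_* API (assumptions noted inline).
#include "SkeyeDecoderAPI.h"   // hypothetical file name for the declarations above

int main()
{
    SKEYEDEC_HANDLE hDec = NULL;
    if (SKEYEDECODER_Init(&hDec) != 0)          // create a decoder instance
        return -1;

    // Configure an H.264 decoder that outputs YUV420P for a 1920x1080 stream.
    SKEYEDECODER_SetVideoDecoderParam(hDec, 1920, 1080, DECODER_H264, OUTPUT_PIX_FMT_YUV420P);

    // inBuf/inSize would normally be filled by the SkeyeRTSPClient data callback;
    // outBuf points into decoder-owned memory after a successful call.
    char *inBuf  = NULL;
    int   inSize = 0;
    char *outBuf = NULL;
    if (inBuf != NULL && SKEYEDECODER_DecodeVideo(hDec, inBuf, inSize, &outBuf, 1920, 1080) == 0)
    {
        // outBuf now holds one decoded YUV420P frame, ready to hand to SkeyeVideoEncoder.
    }

    SKEYEDECODER_Deinit(&hDec);                 // release the decoder instance
    return 0;
}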
2. Soft-decoding flow implemented with ffmpeg
The call sequence is similar to most ffmpeg decoding examples found online. The soft-decoding implementation breaks down into four steps (a sketch of the same decode step on the newer avcodec_send_packet / avcodec_receive_frame API follows the four steps):
- Step 1: register the ffmpeg codecs globally
avcodec_register_all(); /* register all encoders and decoders */
av_register_all();      // register all decodable formats
- Step 2: initialize the video decoder parameters
int InitVideoDecoder(int _width, int _height, int _videoCodec, int _outformat)
{
    if (NULL != decoderObj.pVideoCodecCtx) return -1; // or call DeinitVideoDecoder();

    AVCodec *pAVCodec = avcodec_find_decoder((AVCodecID)_videoCodec);
    if (NULL == pAVCodec) return -1;

    int ret = 0;
    int numBytes = 0;

    decoderObj.pVideoCodecCtx = avcodec_alloc_context3(pAVCodec);
    decoderObj.pVideoCodecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
    decoderObj.pVideoCodecCtx->width   = _width;
    decoderObj.pVideoCodecCtx->height  = _height;
    //decoderObj.pVideoCodecCtx->thread_count = 2;
    //decoderObj.pVideoCodecCtx->active_thread_type = decoderObj.pVideoCodecCtx->thread_type = FF_THREAD_FRAME;

    ret = avcodec_open2(decoderObj.pVideoCodecCtx, pAVCodec, NULL);
    if (ret < 0) goto $fail;

    numBytes = avpicture_get_size(AV_PIX_FMT_YUV420P, _width, _height);
    decoderObj.pBuffYuv420    = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
    decoderObj.mVideoFrame420 = av_frame_alloc();
    // Bind the allocated buffer to the frame; use _width/_height here because
    // decoderObj.width/height are only assigned further below.
    if (avpicture_fill((AVPicture *)decoderObj.mVideoFrame420, decoderObj.pBuffYuv420, AV_PIX_FMT_YUV420P,
                       _width, _height) < 0)
    { }

#ifdef ADD_VIDEO_FILTER
    SetVideoFilter(VIDEO_TEXT, NULL);
#endif

    av_init_packet(&decoderObj.avVidPacket);
    decoderObj.outputFormat = _outformat;
    decoderObj.codec  = _videoCodec;
    decoderObj.width  = _width;
    decoderObj.height = _height;
    return 0;

$fail:
    DeinitVideoDecoder();
    return -1;
}
- Step 3: decode a video frame and output it directly in the requested color format
int DecodeVideo(char *_inbuf, int _bufsize, char **_outbuf, int dstW, int dstH)
{
    if (NULL == _inbuf) return -1;
    if (1 > _bufsize) return -1;
    //if (NULL == decoderObj.pSws_ctx) return -2;
    if (NULL == decoderObj.mVideoFrame420) decoderObj.mVideoFrame420 = av_frame_alloc();

    decoderObj.avVidPacket.size = _bufsize;
    decoderObj.avVidPacket.data = (uint8_t *)_inbuf;

    int frameFinished = 0;
    int nDecode = avcodec_decode_video2(decoderObj.pVideoCodecCtx, decoderObj.mVideoFrame420, &frameFinished, &decoderObj.avVidPacket);
    if (nDecode < 0) return -3;     // decode error
    if (!frameFinished) return -4;  // no complete frame produced yet

    // Lazily allocate the output frame in the requested pixel format and size.
    if (NULL == decoderObj.pAvFrameYuv)
    {
        int numBytes = avpicture_get_size((AVPixelFormat)decoderObj.outputFormat, dstW, dstH);
        decoderObj.pBuffYuv    = (uint8_t *)av_malloc(numBytes * sizeof(uint8_t));
        decoderObj.pAvFrameYuv = av_frame_alloc();
        if (avpicture_fill((AVPicture *)decoderObj.pAvFrameYuv, decoderObj.pBuffYuv, (AVPixelFormat)decoderObj.outputFormat,
                           dstW, dstH) < 0)
        { }
    }
    // Lazily create the swscale context that converts/scales YUV420P to the output format.
    if (NULL == decoderObj.pSws_ctx)
    {
        decoderObj.pSws_ctx = sws_getCachedContext(decoderObj.pSws_ctx, decoderObj.width, decoderObj.height, AV_PIX_FMT_YUV420P,
                                                   dstW, dstH, (AVPixelFormat)decoderObj.outputFormat, SWS_BICUBIC, NULL, NULL, NULL);
    }
    if (NULL == decoderObj.pSws_ctx) return -1;

    sws_scale(decoderObj.pSws_ctx, decoderObj.mVideoFrame420->data, decoderObj.mVideoFrame420->linesize, 0, decoderObj.pVideoCodecCtx->height,
              decoderObj.pAvFrameYuv->data, decoderObj.pAvFrameYuv->linesize);
    //sws_freeContext(decoderObj.pSws_ctx);
    //decoderObj.pSws_ctx = NULL;

    *_outbuf = (char *)decoderObj.pAvFrameYuv->data[0];
    return 0;
}
- Step 4: after decoding stops, release the resources allocated by the decoder
void DeinitVideoDecoder()
{
    if (NULL != decoderObj.mVideoFrame420)
    {
        av_frame_free(&decoderObj.mVideoFrame420);
        decoderObj.mVideoFrame420 = NULL;
    }
    if (NULL != decoderObj.pBuffYuv420)
    {
        av_free(decoderObj.pBuffYuv420);
        decoderObj.pBuffYuv420 = NULL;
    }
    if (NULL != decoderObj.pAvFrameSws)
    {
        av_frame_free(&decoderObj.pAvFrameSws);
        decoderObj.pAvFrameSws = NULL;
    }
    if (NULL != decoderObj.pAvFrameYuv)
    {
        av_frame_free(&decoderObj.pAvFrameYuv);
        decoderObj.pAvFrameYuv = NULL;
    }
    if (NULL != decoderObj.pBuffYuv)
    {
        av_free(decoderObj.pBuffYuv);
        decoderObj.pBuffYuv = NULL;
    }
    if (NULL != decoderObj.pSws_ctx)
    {
        sws_freeContext(decoderObj.pSws_ctx);
        decoderObj.pSws_ctx = NULL;
    }
    if (NULL != decoderObj.pVideoCodecCtx)
    {
        avcodec_close(decoderObj.pVideoCodecCtx);
        av_free(decoderObj.pVideoCodecCtx);
        decoderObj.pVideoCodecCtx = NULL;
    }
}
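One caveat on the ffmpeg calls used above: avcodec_decode_video2, avpicture_get_size and avpicture_fill are deprecated in current ffmpeg releases, and av_register_all/avcodec_register_all were removed in ffmpeg 5.x (registration is no longer needed there). The sketch below shows, as an assumption about how the decode step alone could be ported, the equivalent send/receive calls on the newer API; it is an illustration, not part of the SkeyeVideoDecoder source.
// Decoding one encoded packet with the post-3.1 ffmpeg API
// (avcodec_send_packet / avcodec_receive_frame). ctx is an opened AVCodecContext
// as created in InitVideoDecoder above; frame is an allocated AVFrame.
extern "C" {
#include <libavcodec/avcodec.h>
}

static int DecodeOnePacket(AVCodecContext *ctx, const uint8_t *inbuf, int insize, AVFrame *frame)
{
    AVPacket *pkt = av_packet_alloc();
    if (!pkt)
        return -1;
    pkt->data = (uint8_t *)inbuf;   // packet borrows the caller's buffer (the decoder copies it)
    pkt->size = insize;

    int ret = avcodec_send_packet(ctx, pkt);    // feed the encoded data to the decoder
    av_packet_free(&pkt);
    if (ret < 0)
        return ret;

    ret = avcodec_receive_frame(ctx, frame);    // fetch a decoded frame, if one is ready
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
        return 1;                               // no frame available yet, not a hard error
    return ret;                                 // 0 on success, <0 on decode error
}
The avpicture_get_size/avpicture_fill pair used above maps, in the same spirit, onto av_image_get_buffer_size and av_image_fill_arrays from libavutil/imgutils.h.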
If you have any technical questions, you are welcome to contact me:
295222688@qq.com
You can also join the SKEYEPlayer streaming media player QQ group for discussion:
102644504