zlmediakit source code study (extending transcoding support for H265/H264)

Published 2023-08-18 13:29:24  Author: 飞翔天空energy

Continuing to explore H265/H264 transcoding support on top of the zlmediakit source code. See the previous post: https://www.cnblogs.com/feixiang-energy/p/17623567.html

 

The author has already wrapped ffmpeg-based decoding, encoding, video pixel-format conversion, audio resampling and related interfaces in https://gitee.com/xia-chu/ZLMediaKit/blob/feature/transcode/src/Codec/Transcode.h . Based on ZLMediaKit's feature/transcode branch, transcoding is implemented here directly in code instead of shelling out to an external ffmpeg command.
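For orientation, the core pattern exposed by Transcode.h, exactly as used in the steps below, is to build an FFmpegDecoder from the source track and an FFmpegEncoder from the target track and chain them through their callbacks. A minimal sketch; h265_track, h264_track and h265_frame are hypothetical placeholders for objects you already have:

// Minimal sketch of the decode -> re-encode chain built on Transcode.h.
// The constructors, setOnDecode/setOnEncode and inputFrame calls are the same
// ones used in FFmpegMuxer::addTrack / inputFrame later in this post.
auto decoder = std::make_shared<FFmpegDecoder>(h265_track);   // source track (H265)
auto encoder = std::make_shared<FFmpegEncoder>(h264_track);   // target track (H264)

encoder->setOnEncode([](const Frame::Ptr &frame) {
    // frame is now an H264 Frame::Ptr, ready to be published
});
decoder->setOnDecode([encoder](const FFmpegFrame::Ptr &frame) {
    encoder->inputFrame(frame, false);   // re-encode the decoded picture
});
decoder->inputFrame(h265_frame, true, false, false);  // feed compressed H265 frames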

------------------------------------------------

The key code is as follows:

1. In FFmpegTranscoder.h, add a MediaSource used to publish the transcoded video: _muxerServer
#ifndef FFMPEGTRANSCODER_H_
#define FFMPEGTRANSCODER_H_
#include <list>
#include <mutex>
#include <memory>
#include <string>
#include "Util/util.h"
#include "Util/logger.h"
#include "Util/TimeTicker.h"
#include "Common/MediaSink.h"
#include "Transcoder.h"
#include "FFmpegMuxer.h"
#include "Common/MultiMediaSourceMuxer.h"
namespace mediakit {
class FFmpegTranscoder: public MediaSinkInterface, public MediaSourceEvent {
public:
    using Ptr = std::shared_ptr<FFmpegTranscoder>;
    FFmpegTranscoder(
        const std::string &path, const std::string &vhost, const std::string &app, const std::string &stream_id,
        size_t max_second);
    ~FFmpegTranscoder() override;
    /**
     * Reset all tracks
     */
    void resetTracks() override;
    /**
     * Input a frame
     */
    bool inputFrame(const Frame::Ptr &frame) override;
    /**
     * Add a track that is already in the ready state
     */
    bool addTrack(const Track::Ptr &track) override;
    bool onNewFrame(const Frame::Ptr &frame);
private:
    void stop();
    void asyncStop();
private:
    bool _have_video = false;
    size_t _max_second;
    std::string _folder_path;
    std::string _full_path;
    std::string _full_path_tmp;
    TranscodeInfo _info;
    FFmpegMuxer::Ptr _muxer;
    std::list<Track::Ptr> _tracks;
    MultiMediaSourceMuxer::Ptr _muxerServer;
};
} // namespace mediakit
#endif /* FFMPEGTRANSCODER_H_ */
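
The header declares inputFrame() and resetTracks(), whose bodies this post does not show. A minimal sketch of the forwarding implementations, assuming frames are simply handed to the internal FFmpegMuxer:

// Sketch (not shown in the post): forward incoming frames to the internal
// FFmpegMuxer, which performs the actual decode/re-encode; reset clears state.
bool FFmpegTranscoder::inputFrame(const Frame::Ptr &frame) {
    return _muxer ? _muxer->inputFrame(frame) : false;
}

void FFmpegTranscoder::resetTracks() {
    _tracks.clear();
    _have_video = false;
    if (_muxer) {
        _muxer->resetTracks();
    }
}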

2. Define _muxerServer in the FFmpegTranscoder constructor.

FFmpegTranscoder::FFmpegTranscoder(const std::string &path, const std::string &vhost, const std::string &app, const std::string &stream_id, size_t max_second) {
    _folder_path = path;
    ///// record business logic /////
    _info.app = app;
    _info.stream = stream_id;
    _info.vhost = vhost;
    _info.folder = path;
    GET_CONFIG(size_t, recordSec, Record::kFileSecond);
    _max_second = max_second ? max_second : recordSec;
    _muxer = std::make_shared<FFmpegMuxer>(_folder_path, _max_second);
    ProtocolOption option;
    // when reading the mp4 file and streaming it, do not generate mp4/hls files again
    option.enable_mp4 = false;
    option.enable_hls = false;
    _muxerServer = std::make_shared<MultiMediaSourceMuxer>(vhost, app, stream_id + "_new", 0.0, option);
}
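
The destructor and stop() are not shown either; a minimal teardown sketch, assuming stop() only needs to shut down the internal muxer and release the published source:

// Hypothetical teardown sketch: stop the internal FFmpegMuxer and drop the
// published MediaSource; the destructor simply delegates to stop().
FFmpegTranscoder::~FFmpegTranscoder() {
    stop();
}

void FFmpegTranscoder::stop() {
    if (_muxer) {
        _muxer->stop();
    }
    _muxerServer = nullptr;
}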

3. FFmpegTranscoder::addTrack. When a track is added to the transcoder, a corresponding track is also added to _muxerServer. For now only the video track is handled.

bool FFmpegTranscoder::addTrack(const Track::Ptr &track) {
    // save all tracks in preparation for creating the MP4 muxer file
    _tracks.emplace_back(track);
    if (track->getTrackType() == TrackVideo) {
        _have_video = true;
        _muxer->addTrack(track);
        // create a new video track and add it to the publishing object
        H264Track::Ptr newTrack(new H264Track());
        VideoTrack::Ptr video = static_pointer_cast<VideoTrack>(track);
        newTrack->setVideoWidth(video->getVideoWidth());
        newTrack->setVideoHeight(video->getVideoHeight());
        newTrack->setBitRate(video->getBitRate());
        _muxerServer->addTrack(newTrack);
        _muxerServer->addTrackCompleted();
        // set the muxer's frame callback: write each re-encoded video frame into the publishing object
        _muxer->setOnFrame([this](const Frame::Ptr &frame) {
            _muxerServer->inputFrame(frame);
        });
    }
    return true;
}
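
How the transcoder itself is attached to a live stream (proxy player, broadcast hook, etc.) is not covered in this post. A purely hypothetical wiring sketch, assuming you already have the source stream's tracks and receive its frames somewhere; the folder path and the source_tracks/frame variables are placeholders:

// Hypothetical wiring (not from the post): create the transcoder for /live/test
// and feed it the source stream's tracks and frames. The re-encoded H264 stream
// is then published as /live/test_new by _muxerServer.
auto transcoder = std::make_shared<FFmpegTranscoder>(
    "/path/to/record/", "__defaultVhost__", "live", "test", 0);

for (auto &track : source_tracks) {   // std::vector<Track::Ptr> obtained elsewhere
    transcoder->addTrack(track);
}

// called for every frame received from the original H265 stream:
transcoder->inputFrame(frame);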

4. In FFmpegMuxer.h, define a callback std::function<void(const Frame::Ptr &)> for the frame data produced after transcoding

#include "Common/MediaSink.h"
#include "Codec/Transcode.h"
namespace mediakit {
class FFmpegMuxer : public MediaSinkInterface {
public:
    using onFrame = std::function<void(const Frame::Ptr &)>;
    typedef std::shared_ptr<FFmpegMuxer> Ptr;
   
    FFmpegMuxer(std::string filePath = "", int gapTime = 1);
    ~FFmpegMuxer() override;
    /**
     * Add a track that is already in the ready state
     */
    bool addTrack(const Track::Ptr &track) override;
    /**
     * Input a frame
     */
    bool inputFrame(const Frame::Ptr &frame) override;
    /**
     * Reset all tracks
     */
    void resetTracks() override;
    /**
     * Whether a video track is present
     */
    bool haveVideo() const;
    int frameToImage(AVFrame *frame, enum AVCodecID codecID, uint8_t *outbuf, size_t outbufSize);
    void stop();
    void setOnFrame(onFrame cb);
private:
    std::shared_ptr<FFmpegDecoder> _audio_dec;
    std::shared_ptr<FFmpegDecoder> _video_dec;
    std::shared_ptr<FFmpegEncoder> _audio_enc;
    std::shared_ptr<FFmpegEncoder> _video_enc;
    int _index;
    time_t _last_time;
    std::string _folder_path;
    int _gapTime;
    onFrame _cb;
};
}
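
The FFmpegMuxer constructor, destructor and setOnFrame() are not shown in the post; minimal sketches, assuming they only initialize members and store the callback:

// Sketch: store the snapshot folder and frame-grab interval; setOnFrame keeps
// the callback that later receives every re-encoded H264 frame.
FFmpegMuxer::FFmpegMuxer(std::string filePath, int gapTime)
    : _index(0)
    , _last_time(0)
    , _folder_path(std::move(filePath))
    , _gapTime(gapTime) {}

FFmpegMuxer::~FFmpegMuxer() = default;

void FFmpegMuxer::setOnFrame(onFrame cb) {
    _cb = std::move(cb);
}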

5. FFmpegMuxer::addTrack. Modify addTrack to set up the decoder and encoder callbacks.

bool FFmpegMuxer::addTrack(const Track::Ptr &track) {
    if (track->getTrackType() == TrackVideo) {
        _video_dec.reset(new FFmpegDecoder(track));
        // create a new H264 video track and hand it to the encoder
        H264Track::Ptr newTrack(new H264Track());
        VideoTrack::Ptr video = static_pointer_cast<VideoTrack>(track);
        newTrack->setVideoWidth(video->getVideoWidth());
        newTrack->setVideoHeight(video->getVideoHeight());
        newTrack->setBitRate(video->getBitRate());
        _video_enc.reset(new FFmpegEncoder(newTrack));
        // set the video encoder callback: each frame re-encoded to H264 is passed back up and eventually written into FFmpegTranscoder's _muxerServer
        _video_enc->setOnEncode([this](const Frame::Ptr &frame) {
            if (_cb) {
                _cb(frame);
            }
        });
        
        // set the video decoder callback: each decoded YUV picture is fed into the video encoder for re-encoding
        _video_dec->setOnDecode([this](const FFmpegFrame::Ptr &frame) {
            // transcode
            _video_enc->inputFrame(frame, false);
            /*
            // --- frame grab (snapshot) begin
            time_t now = ::time(NULL);
            if (now - _last_time >= _gapTime) {
                AVFrame *avFrame = frame->get();
                int bufSize = av_image_get_buffer_size(AV_PIX_FMT_BGRA, avFrame->width, avFrame->height, 64);
                uint8_t *buf = (uint8_t *)av_malloc(bufSize);
                int picSize = frameToImage(avFrame, AV_CODEC_ID_MJPEG, buf, bufSize);
                if (picSize > 0) {
                    auto file_path = _folder_path + getTimeStr("%H-%M-%S_") + std::to_string(_index) + ".jpeg";
                    auto f = fopen(file_path.c_str(), "wb+");
                    if (f) {
                        // write only the encoded JPEG bytes (picSize), not the whole buffer
                        fwrite(buf, sizeof(uint8_t), picSize, f);
                        fclose(f);
                    }
                }
                av_free(buf);
                _index++;
                _last_time = now;
            }
            // --- frame grab (snapshot) end
            */
        });
    }
    return true;
}
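
frameToImage() is referenced by the commented-out snapshot code above, but its body is not shown. A rough sketch using the ffmpeg encode API (requires extern "C" { #include <libavcodec/avcodec.h> } and <cstring>); it assumes the decoded frame is already in a pixel format the MJPEG encoder accepts, e.g. AV_PIX_FMT_YUVJ420P, otherwise convert with sws_scale first:

// Sketch (not from the post): encode one decoded AVFrame into a single JPEG
// packet and copy it into outbuf; returns the JPEG size or -1 on failure.
int FFmpegMuxer::frameToImage(AVFrame *frame, enum AVCodecID codecID, uint8_t *outbuf, size_t outbufSize) {
    int ret = -1;
    const AVCodec *codec = avcodec_find_encoder(codecID);
    if (!codec) {
        return -1;
    }
    AVCodecContext *ctx = avcodec_alloc_context3(codec);
    if (!ctx) {
        return -1;
    }
    ctx->width = frame->width;
    ctx->height = frame->height;
    ctx->pix_fmt = AV_PIX_FMT_YUVJ420P;   // assumed input format for the MJPEG encoder
    ctx->time_base = AVRational{1, 25};
    if (avcodec_open2(ctx, codec, nullptr) >= 0) {
        AVPacket *pkt = av_packet_alloc();
        if (pkt &&
            avcodec_send_frame(ctx, frame) >= 0 &&
            avcodec_receive_packet(ctx, pkt) >= 0 &&
            (size_t)pkt->size <= outbufSize) {
            memcpy(outbuf, pkt->data, pkt->size);
            ret = pkt->size;
        }
        av_packet_free(&pkt);
    }
    avcodec_free_context(&ctx);
    return ret;
}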
6. FFmpegMuxer::inputFrame. When an H265 video frame is received, write it into the decoder for decoding.
bool FFmpegMuxer::inputFrame(const Frame::Ptr &frame) {
    if(frame->getTrackType() == TrackVideo) {
        if (frame->getCodecId() == CodecH265 /* || frame->getCodecId() == CodecH264 */) {
            if (_video_dec != nullptr) {
                _video_dec->inputFrame(frame, true, false, false);
            }
        }
    }
    return true;
}
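
As the commented-out CodecH264 check hints, the source might already be H264. A possible variant of inputFrame (not in the post): frames that are already H264 skip the decode/encode round-trip and are handed straight to the callback, so _muxerServer republishes them unchanged, while H265 frames still go through the decoder.

// Possible variant: pass through H264 frames unchanged, transcode H265 frames.
bool FFmpegMuxer::inputFrame(const Frame::Ptr &frame) {
    if (frame->getTrackType() != TrackVideo) {
        return true;
    }
    if (frame->getCodecId() == CodecH264) {
        if (_cb) {
            _cb(frame);   // already H264: forward directly to the publisher
        }
        return true;
    }
    if (frame->getCodecId() == CodecH265 && _video_dec) {
        _video_dec->inputFrame(frame, true, false, false);
    }
    return true;
}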

7. Transcoding results:

    1) Original RTSP stream: H265
    2) The /live/test.live.flv stream republished by zlmediakit: H265
    3) The new /live/test_new.live.flv stream after transcoding is enabled: H264