Commit 1970f601 by xiongziliang

Optimize audio/video synchronization

parent 0779a4be
......@@ -42,8 +42,8 @@ void Stamp::setPlayBack(bool playback) {
_playback = playback;
}
void Stamp::makeRelation(Stamp &other){
_related = &other;
void Stamp::syncTo(Stamp &other){
_sync_master = &other;
}
void Stamp::revise(int64_t dts, int64_t pts, int64_t &dts_out, int64_t &pts_out,bool modifyStamp) {
......@@ -53,23 +53,21 @@ void Stamp::revise(int64_t dts, int64_t pts, int64_t &dts_out, int64_t &pts_out,
return;
}
if(_related && _related->_last_dts){
if(_sync_master && _sync_master->_last_dts){
//current dts difference between audio and video
int64_t dts_diff = _last_dts - _related->_last_dts;
int64_t dts_diff = _last_dts - _sync_master->_last_dts;
if(ABS(dts_diff) < 5000){
//if the absolute difference is under 5 seconds, the two streams share the same starting timestamp, so force them into sync
_last_relativeStamp = _relativeStamp;
_relativeStamp = _related->_relativeStamp + dts_diff;
dts_out += dts_diff;
pts_out += dts_diff;
// DebugL << "audio/video sync time difference: " << dts_diff;
_relativeStamp = _sync_master->_relativeStamp + dts_diff;
}
//no need to force sync again next time
_related = nullptr;
_sync_master = nullptr;
}
if(dts_out < 0){
//the relative timestamp is negative, which means it was caused by timestamp synchronization; during this transition we keep returning the last result (to prevent the timestamp from rolling back)
if(dts_out < 0 || dts_out < _last_relativeStamp){
//the relative timestamp is negative or smaller than the last one,
//which means it was caused by timestamp synchronization; during this transition we keep returning the last result (to prevent the timestamp from rolling back)
pts_out = _last_relativeStamp + (pts_out - dts_out);
dts_out = _last_relativeStamp;
}
......
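A quick illustration of the widened rollback guard above (a standalone sketch, not part of the commit; clampRollback and the sample numbers are hypothetical):

#include <cstdint>

// Mirrors the clamp in Stamp::revise(): if the synced dts would go negative or
// move backwards past the last relative stamp, keep returning the last relative
// stamp while preserving the current frame's pts - dts offset.
static void clampRollback(int64_t last_relative_stamp, int64_t &dts_out, int64_t &pts_out) {
    if (dts_out < 0 || dts_out < last_relative_stamp) {
        pts_out = last_relative_stamp + (pts_out - dts_out);
        dts_out = last_relative_stamp;
    }
}

int main() {
    int64_t dts_out = 3500, pts_out = 3540;   // a forced sync pulled dts back from 4000 ms
    clampRollback(4000, dts_out, pts_out);
    // dts_out == 4000, pts_out == 4040: no rollback, the 40 ms pts offset is preserved
    return 0;
}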
......@@ -69,9 +69,10 @@ public:
void setPlayBack(bool playback = true);
/**
* Establish a relation between stamps, used for audio/video synchronization
* For audio/video synchronization; the audio should be synced to the video (only the audio timestamps are modified),
* because modifying the audio timestamps does not affect playback speed
*/
void makeRelation(Stamp &other);
void syncTo(Stamp &other);
private:
void revise_l(int64_t dts, int64_t pts, int64_t &dts_out, int64_t &pts_out,bool modifyStamp = false);
......@@ -81,7 +82,7 @@ private:
int64_t _last_dts = 0;
SmoothTicker _ticker;
bool _playback = false;
Stamp *_related = nullptr;
Stamp *_sync_master = nullptr;
};
//dts generator,
......
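For context, a minimal usage sketch of the renamed API (not from the commit; demoSync and the frame timestamps are made up, and revise()'s modifyStamp argument is passed explicitly):

#include "Common/Stamp.h"
using namespace mediakit;

// One Stamp per track; the audio stamp is slaved to the video stamp so that
// only the audio timestamps are adjusted and playback speed is unaffected.
void demoSync() {
    Stamp video_stamp, audio_stamp;
    audio_stamp.syncTo(video_stamp);   // replaces the old makeRelation()

    int64_t dts_out = 0, pts_out = 0;
    // the video frame drives the master clock
    video_stamp.revise(1000, 1000, dts_out, pts_out, false);
    // on an early audio revise() call, the audio's relative stamp is forced onto the
    // video's relative stamp plus their current dts gap (a one-shot adjustment)
    audio_stamp.revise(1023, 1023, dts_out, pts_out, false);
}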
......@@ -76,8 +76,8 @@ Frame::Ptr Frame::getCacheAbleFrame(const Frame::Ptr &frame){
}
#define SWITCH_CASE(codec_id) case codec_id : return #codec_id
const char *CodecInfo::getCodecName() {
switch (getCodecId()) {
const char *getCodecName(CodecId codecId) {
switch (codecId) {
SWITCH_CASE(CodecH264);
SWITCH_CASE(CodecH265);
SWITCH_CASE(CodecAAC);
......@@ -88,8 +88,8 @@ const char *CodecInfo::getCodecName() {
}
}
TrackType CodecInfo::getTrackType(){
switch (getCodecId()){
TrackType getTrackType(CodecId codecId){
switch (codecId){
case CodecH264:
case CodecH265: return TrackVideo;
case CodecAAC:
......@@ -100,4 +100,11 @@ TrackType CodecInfo::getTrackType(){
}
}
const char *CodecInfo::getCodecName() {
return mediakit::getCodecName(getCodecId());
}
TrackType CodecInfo::getTrackType() {
return mediakit::getTrackType(getCodecId());
}
}//namespace mediakit
......@@ -41,6 +41,16 @@ typedef enum {
} TrackType;
/**
* Get the codec name
*/
const char *getCodecName(CodecId codecId);
/**
* Get the track type (audio or video)
*/
TrackType getTrackType(CodecId codecId);
/**
* Abstract interface for codec information
*/
class CodecInfo {
......
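The codec helpers are now free functions, so callers such as the new stampSync() implementations can classify a bare CodecId without a CodecInfo instance. A minimal sketch (demoCodecHelpers is a made-up name and the include path is assumed):

#include "Frame.h"   // header shown above; actual include path may differ
using namespace mediakit;

// Classify a codec id directly, without going through CodecInfo.
void demoCodecHelpers(CodecId codec_id) {
    const char *name = getCodecName(codec_id);   // e.g. "CodecH264"
    switch (getTrackType(codec_id)) {
        case TrackVideo: /* CodecH264 / CodecH265 */ break;
        case TrackAudio: /* CodecAAC / ...         */ break;
        default: break;
    }
    (void) name;
}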
......@@ -134,6 +134,26 @@ static uint8_t getObject(CodecId codecId){
}
}
void MP4Muxer::stampSync(){
if(_codec_to_trackid.size() < 2){
return;
}
Stamp *audio = nullptr, *video = nullptr;
for(auto &pr : _codec_to_trackid){
switch (getTrackType((CodecId) pr.first)){
case TrackAudio : audio = &pr.second.stamp; break;
case TrackVideo : video = &pr.second.stamp; break;
default : break;
}
}
if(audio && video){
//sync the audio timestamps to the video, because modifying the audio timestamps does not affect playback
audio->syncTo(*video);
}
}
void MP4Muxer::addTrack(const Track::Ptr &track) {
auto mp4_object = getObject(track->getCodecId());
if (!mp4_object) {
......@@ -261,6 +281,9 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
default: WarnL << "MP4 recording does not support this codec: " << track->getCodecName(); break;
}
//try to synchronize audio and video
stampSync();
}
}//namespace mediakit
......
......@@ -45,13 +45,14 @@ public:
private:
void openMP4();
void closeMP4();
void stampSync();
private:
struct track_info{
struct track_info {
int track_id = -1;
Stamp stamp;
};
unordered_map<int,track_info> _codec_to_trackid;
unordered_map<int, track_info> _codec_to_trackid;
List<Frame::Ptr> _frameCached;
bool _started = false;
bool _have_video = false;
......
......@@ -23,6 +23,26 @@ TsMuxer::~TsMuxer() {
uninit();
}
void TsMuxer::stampSync(){
if(_codec_to_trackid.size() < 2){
return;
}
Stamp *audio = nullptr, *video = nullptr;
for(auto &pr : _codec_to_trackid){
switch (getTrackType((CodecId) pr.first)){
case TrackAudio : audio = &pr.second.stamp; break;
case TrackVideo : video = &pr.second.stamp; break;
default : break;
}
}
if(audio && video){
//sync the audio timestamps to the video, because modifying the audio timestamps does not affect playback
audio->syncTo(*video);
}
}
void TsMuxer::addTrack(const Track::Ptr &track) {
switch (track->getCodecId()) {
case CodecH264: {
......@@ -52,9 +72,11 @@ void TsMuxer::addTrack(const Track::Ptr &track) {
break;
}
default:
break;
default: WarnL << "mpeg-ts does not support this codec, ignored: " << track->getCodecName(); break;
}
//try to synchronize audio and video
stampSync();
}
void TsMuxer::inputFrame(const Frame::Ptr &frame) {
......
......@@ -17,37 +17,59 @@
#include "Util/File.h"
#include "Common/MediaSink.h"
#include "Common/Stamp.h"
using namespace toolkit;
namespace mediakit {
//this class is used to generate MPEG-TS
class TsMuxer : public MediaSinkInterface {
public:
TsMuxer();
virtual ~TsMuxer();
/**
* Add an audio/video track
*/
void addTrack(const Track::Ptr &track) override;
/**
* Reset the audio/video tracks
*/
void resetTracks() override;
/**
* Input frame data
*/
void inputFrame(const Frame::Ptr &frame) override;
protected:
/**
* Callback that outputs mpegts data
* @param packet mpegts data
* @param bytes length of the mpegts data in bytes
* @param timestamp timestamp in milliseconds
* @param is_idr_fast_packet whether this is the first TS packet of a keyframe, used to ensure the first frame of a ts segment is a keyframe
*/
virtual void onTs(const void *packet, int bytes,uint32_t timestamp,bool is_idr_fast_packet) = 0;
private:
void init();
void uninit();
//used for audio/video timestamp synchronization
void stampSync();
private:
void *_context = nullptr;
char *_tsbuf[188];
void *_context = nullptr;
char _tsbuf[188];
uint32_t _timestamp = 0;
struct track_info{
struct track_info {
int track_id = -1;
Stamp stamp;
};
unordered_map<int,track_info> _codec_to_trackid;
unordered_map<int, track_info> _codec_to_trackid;
List<Frame::Ptr> _frameCached;
bool _is_idr_fast_packet = false;
bool _have_video = false;
};
}//namespace mediakit
#endif //TSMUXER_H
#endif //TSMUXER_H
\ No newline at end of file
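Because onTs() remains pure virtual, concrete sinks still subclass TsMuxer. A hypothetical example (not part of the commit; FileTsSink and the file handling are made up) that simply appends each muxed TS packet to a file, showing the callback documented above in use:

#include <cstdio>
#include "TsMuxer.h"   // header shown above; actual include path may differ
using namespace mediakit;

// Hypothetical sink: writes the generated MPEG-TS stream to a plain .ts file.
class FileTsSink : public TsMuxer {
public:
    explicit FileTsSink(const char *path) { _fp = fopen(path, "wb"); }
    ~FileTsSink() override { if (_fp) { fclose(_fp); } }

protected:
    void onTs(const void *packet, int bytes, uint32_t timestamp, bool is_idr_fast_packet) override {
        // timestamp (ms) and is_idr_fast_packet could drive segment cutting;
        // here the 188-byte packets are just appended as-is.
        if (_fp && packet && bytes > 0) {
            fwrite(packet, 1, (size_t) bytes, _fp);
        }
    }

private:
    FILE *_fp = nullptr;
};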
......@@ -52,7 +52,7 @@ void FlvMuxer::start(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr &
});
//sync the audio to the video
_stamp[0].makeRelation( _stamp[1]);
_stamp[0].syncTo(_stamp[1]);
_ring_reader->setReadCB([weakSelf](const RtmpMediaSource::RingDataType &pkt){
auto strongSelf = weakSelf.lock();
if(!strongSelf){
......
......@@ -267,7 +267,7 @@ void RtmpSession::sendPlayResponse(const string &err,const RtmpMediaSource::Ptr
});
//sync the audio to the video
_stamp[0].makeRelation( _stamp[1]);
_stamp[0].syncTo(_stamp[1]);
_pRingReader = src->getRing()->attach(getPoller());
weak_ptr<RtmpSession> weakSelf = dynamic_pointer_cast<RtmpSession>(shared_from_this());
_pRingReader->setReadCB([weakSelf](const RtmpMediaSource::RingDataType &pkt) {
......