Commit 607d8ba0 by cqm

Implement audio transcoding:

- Enable the rtc:// schema
- Add bitrate configuration for audio transcoding
- AAC transcoding keeps the source channel count
parent cb165025
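For reference, a minimal config sketch of the switches this commit introduces, assuming GENERAL_FIELD and PROTOCOL_FIELD resolve to the usual [general] and [protocol] sections of config.ini (the section names are an assumption; the key names and default values below are taken from the diff):

[general]
# target bitrates used by the audio transcoders (defaults added by this commit)
opusBitrate=64000
aacBitrate=64000

[protocol]
# also publish an rtc:// stream alongside rtsp/rtmp/ts/fmp4
enable_rtc=1
# transcode audio: AAC -> Opus for the rtc stream, other audio codecs -> AAC for rtmp
audio_transcode=1
# generate the rtc stream on demand only (0 = always generate)
rtc_demand=0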
......@@ -436,8 +436,6 @@ include_directories(${CMAKE_CURRENT_SOURCE_DIR}/3rdpart)
add_subdirectory(3rdpart)
add_subdirectory(src)
if(ENABLE_SRT)
add_subdirectory(srt)
endif()
......@@ -450,6 +448,8 @@ if(ENABLE_API)
add_subdirectory(api)
endif()
add_subdirectory(src)
##############################################################################
if(ENABLE_PLAYER AND ENABLE_FFMPEG)
......
......@@ -11,6 +11,7 @@
#include "mk_media.h"
#include "Util/logger.h"
#include "Common/Device.h"
#include "Util/mini.h"
using namespace std;
using namespace toolkit;
......
......@@ -68,7 +68,9 @@ ProtocolOption::ProtocolOption() {
GET_CONFIG(bool, s_enable_rtmp, Protocol::kEnableRtmp);
GET_CONFIG(bool, s_enable_ts, Protocol::kEnableTS);
GET_CONFIG(bool, s_enable_fmp4, Protocol::kEnableFMP4);
GET_CONFIG(bool, s_enable_rtc, Protocol::kEnableRtc);
GET_CONFIG(bool, s_audio_transcode, Protocol::kAudioTranscode);
GET_CONFIG(bool, s_rtc_demand, Protocol::kRtcDemand);
GET_CONFIG(bool, s_hls_demand, Protocol::kHlsDemand);
GET_CONFIG(bool, s_rtsp_demand, Protocol::kRtspDemand);
GET_CONFIG(bool, s_rtmp_demand, Protocol::kRtmpDemand);
......@@ -94,7 +96,9 @@ ProtocolOption::ProtocolOption() {
enable_rtmp = s_enable_rtmp;
enable_ts = s_enable_ts;
enable_fmp4 = s_enable_fmp4;
enable_rtc = s_enable_rtc;
audio_transcode = s_audio_transcode;
rtc_demand = s_rtc_demand;
hls_demand = s_hls_demand;
rtsp_demand = s_rtsp_demand;
rtmp_demand = s_rtmp_demand;
......
......@@ -165,7 +165,7 @@ public:
bool enable_hls_fmp4;
// Whether to enable MP4 recording
bool enable_mp4;
// Whether to enable conversion to rtsp/webrtc
// Whether to enable conversion to rtsp
bool enable_rtsp;
// Whether to enable conversion to rtmp/flv
bool enable_rtmp;
......@@ -173,7 +173,11 @@ public:
bool enable_ts;
// Whether to enable conversion to http-fmp4/ws-fmp4
bool enable_fmp4;
// Whether to enable conversion to webrtc
bool enable_rtc;
// Whether to enable audio transcoding
bool audio_transcode;
bool rtc_demand;
// Whether hls is generated on demand; if hls.segNum is set to 0 (which means hls recording), hls is always generated regardless of this switch
bool hls_demand;
// Whether rtsp[s] is generated on demand
......@@ -214,6 +218,9 @@ public:
GET_OPT_VALUE(enable_rtmp);
GET_OPT_VALUE(enable_ts);
GET_OPT_VALUE(enable_fmp4);
GET_OPT_VALUE(enable_rtc);
GET_OPT_VALUE(audio_transcode);
GET_OPT_VALUE(rtc_demand);
GET_OPT_VALUE(hls_demand);
GET_OPT_VALUE(rtsp_demand);
......
......@@ -11,7 +11,20 @@
#include <math.h>
#include "Common/config.h"
#include "MultiMediaSourceMuxer.h"
#include "Extension/AAC.h"
#include "Extension/Opus.h"
#include "Extension/G711.h"
#include "Rtp/RtpSender.h"
#include "Record/HlsRecorder.h"
#include "Record/HlsMediaSource.h"
#include "Rtsp/RtspMediaSourceMuxer.h"
#include "Rtmp/RtmpMediaSourceMuxer.h"
#include "TS/TSMediaSourceMuxer.h"
#include "FMP4/FMP4MediaSourceMuxer.h"
#ifdef ENABLE_FFMPEG
#include "Codec/Transcode.h"
#endif
using namespace std;
using namespace toolkit;
......@@ -107,6 +120,11 @@ MultiMediaSourceMuxer::MultiMediaSourceMuxer(const MediaTuple& tuple, float dur_
if (option.enable_rtsp) {
_rtsp = std::make_shared<RtspMediaSourceMuxer>(_tuple, option, std::make_shared<TitleSdp>(dur_sec));
}
if (option.enable_rtc) {
#if defined(ENABLE_WEBRTC)
_rtc = std::make_shared<RtspMediaSourceMuxer>(_tuple, option, std::make_shared<TitleSdp>(dur_sec), RTC_SCHEMA);
#endif
}
if (option.enable_hls) {
_hls = dynamic_pointer_cast<HlsRecorder>(Recorder::createRecorder(Recorder::type_hls, _tuple, option));
}
......@@ -119,6 +137,15 @@ MultiMediaSourceMuxer::MultiMediaSourceMuxer(const MediaTuple& tuple, float dur_
if (option.enable_ts) {
_ts = dynamic_pointer_cast<TSMediaSourceMuxer>(Recorder::createRecorder(Recorder::type_ts, _tuple, option));
}
if (option.audio_transcode) {
#if defined(ENABLE_FFMPEG)
_audio_transcode = option.audio_transcode;
InfoL << "enable audio_transcode";
#else
InfoL << "without ffmpeg disable audio_transcode";
#endif
}
if (option.enable_fmp4) {
_fmp4 = dynamic_pointer_cast<FMP4MediaSourceMuxer>(Recorder::createRecorder(Recorder::type_fmp4, _tuple, option));
}
......@@ -142,6 +169,10 @@ void MultiMediaSourceMuxer::setMediaListener(const std::weak_ptr<MediaSourceEven
if (_ts) {
_ts->setListener(self);
}
if (_rtc) {
_rtc->setListener(self);
}
if (_fmp4) {
_fmp4->setListener(self);
}
......@@ -160,6 +191,7 @@ void MultiMediaSourceMuxer::setTrackListener(const std::weak_ptr<Listener> &list
int MultiMediaSourceMuxer::totalReaderCount() const {
return (_rtsp ? _rtsp->readerCount() : 0) +
(_rtmp ? _rtmp->readerCount() : 0) +
(_rtc ? _rtc->readerCount() : 0) +
(_ts ? _ts->readerCount() : 0) +
(_fmp4 ? _fmp4->readerCount() : 0) +
(_mp4 ? _option.mp4_as_player : 0) +
......@@ -375,15 +407,73 @@ std::shared_ptr<MultiMediaSourceMuxer> MultiMediaSourceMuxer::getMuxer(MediaSour
bool MultiMediaSourceMuxer::onTrackReady(const Track::Ptr &track) {
bool ret = false;
if (_rtmp) {
ret = _rtmp->addTrack(track) ? true : ret;
}
if (_rtsp) {
ret = _rtsp->addTrack(track) ? true : ret;
}
if (_ts) {
ret = _ts->addTrack(track) ? true : ret;
auto rtmp = _rtmp;
auto rtc = _rtc;
#if defined(ENABLE_FFMPEG)
if (_audio_transcode) {
if (track->getCodecId() == CodecAAC) {
if (rtmp) {
rtmp->addTrack(track);
rtmp = nullptr;
}
_audio_dec = nullptr;
_audio_enc = nullptr;
if (rtc) {
Track::Ptr newTrack(new OpusTrack());
GET_CONFIG(int, bitrate, General::kOpusBitrate);
newTrack->setBitRate(bitrate);
rtc->addTrack(newTrack);
rtc = nullptr;
// aac to opus
_audio_dec.reset(new FFmpegDecoder(track));
_audio_enc.reset(new FFmpegEncoder(newTrack));
_audio_dec->setOnDecode([this](const FFmpegFrame::Ptr & frame) {
_audio_enc->inputFrame(frame, false);
});
_audio_enc->setOnEncode([this](const Frame::Ptr& frame) {
// fill data to _rtc
if (_rtc && _rtc->isEnabled())
_rtc->inputFrame(frame);
});
}
}
else if (track->getTrackType() == TrackAudio) {
if (rtc) {
rtc->addTrack(track);
rtc = nullptr;
}
_audio_dec = nullptr;
_audio_enc = nullptr;
if (rtmp) {
Track::Ptr newTrack(new AACTrack(44100, std::dynamic_pointer_cast<AudioTrack>(track)->getAudioChannel()));
GET_CONFIG(int, bitrate, General::kAacBitrate);
newTrack->setBitRate(bitrate);
rtmp->addTrack(newTrack);
rtmp = nullptr;
_audio_dec.reset(new FFmpegDecoder(track));
_audio_enc.reset(new FFmpegEncoder(newTrack));
_audio_dec->setOnDecode([this](const FFmpegFrame::Ptr & frame) {
_audio_enc->inputFrame(frame, false);
});
_audio_enc->setOnEncode([this](const Frame::Ptr& frame) {
// fill aac frame to rtmp
if (_rtmp && _rtmp->isEnabled())
_rtmp->inputFrame(frame);
});
}
}
}
#endif
if (rtc && rtc->addTrack(track))
ret = true;
if (rtmp && rtmp->addTrack(track))
ret = true;
if (_rtsp && _rtsp->addTrack(track))
ret = true;
if (_ts && _ts->addTrack(track))
ret = true;
if (_fmp4) {
ret = _fmp4->addTrack(track) ? true : ret;
}
......@@ -418,6 +508,9 @@ void MultiMediaSourceMuxer::onAllTrackReady() {
if (_fmp4) {
_fmp4->addTrackCompleted();
}
if (_rtc) {
_rtc->addTrackCompleted();
}
if (_hls) {
_hls->addTrackCompleted();
}
......@@ -472,6 +565,13 @@ void MultiMediaSourceMuxer::resetTracks() {
if (_ts) {
_ts->resetTracks();
}
if (_rtc) {
_rtc->resetTracks();
}
#if defined(ENABLE_FFMPEG)
_audio_dec = nullptr;
_audio_enc = nullptr;
#endif
if (_fmp4) {
_fmp4->resetTracks();
}
......@@ -494,18 +594,47 @@ bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
}
bool ret = false;
if (_rtmp) {
ret = _rtmp->inputFrame(frame) ? true : ret;
}
if (_rtsp) {
ret = _rtsp->inputFrame(frame) ? true : ret;
}
if (_ts) {
ret = _ts->inputFrame(frame) ? true : ret;
RtspMediaSourceMuxer::Ptr rtc;
RtmpMediaSourceMuxer::Ptr rtmp;
if (_rtc && _rtc->isEnabled())
rtc = _rtc;
if (_rtmp && _rtmp->isEnabled())
rtmp = _rtmp;
#if defined(ENABLE_FFMPEG)
if (_audio_transcode) {
if (frame->getCodecId() == CodecAAC) {
if (rtc) {
if (_audio_dec && rtc->readerCount())
_audio_dec->inputFrame(frame, true, false, false);
rtc = nullptr;
}
}
else if (frame->getTrackType() == TrackAudio) {
if (rtmp) {
if (_audio_dec && rtmp->readerCount())
_audio_dec->inputFrame(frame, true, false, false);
rtmp = nullptr;
}
}
}
#endif
if (rtc && rtc->inputFrame(frame))
ret = true;
if (_hls) {
ret = _hls->inputFrame(frame) ? true : ret;
if (rtmp && rtmp->inputFrame(frame))
ret = true;
if (_rtsp && _rtsp->inputFrame(frame))
ret = true;
if (_ts && _ts->inputFrame(frame))
ret = true;
// Copy the smart pointer to guard against data races when recording-related APIs are called from other threads
// Copying the smart pointer here gives thread safety with better performance than a mutex
auto hls = _hls;
if (hls) {
ret = hls->inputFrame(frame) ? true : ret;
}
if (_hls_fmp4) {
......@@ -541,6 +670,7 @@ bool MultiMediaSourceMuxer::isEnabled(){
// While someone is watching, wait a while before re-checking whether nobody is watching (saves CPU)
_is_enable = (_rtmp ? _rtmp->isEnabled() : false) ||
(_rtsp ? _rtsp->isEnabled() : false) ||
(_rtc ? _rtc->isEnabled() : false) ||
(_ts ? _ts->isEnabled() : false) ||
(_fmp4 ? _fmp4->isEnabled() : false) ||
(_ring ? (bool)_ring->readerCount() : false) ||
......
......@@ -15,15 +15,19 @@
#include "Common/MediaSource.h"
#include "Common/MediaSink.h"
#include "Record/Recorder.h"
#include "Rtp/RtpSender.h"
#include "Record/HlsRecorder.h"
#include "Record/HlsMediaSource.h"
#include "Rtsp/RtspMediaSourceMuxer.h"
#include "Rtmp/RtmpMediaSourceMuxer.h"
#include "TS/TSMediaSourceMuxer.h"
#include "FMP4/FMP4MediaSourceMuxer.h"
#include "Util/RingBuffer.h"
namespace mediakit {
class HlsRecorder;
class HlsFMP4Recorder;
class RtspMediaSourceMuxer;
class RtmpMediaSourceMuxer;
class TSMediaSourceMuxer;
class FMP4MediaSourceMuxer;
class RtpSender;
#ifdef ENABLE_FFMPEG
class FFmpegDecoder;
class FFmpegEncoder;
#endif
class MultiMediaSourceMuxer : public MediaSourceEventInterceptor, public MediaSink, public std::enable_shared_from_this<MultiMediaSourceMuxer>{
public:
......@@ -169,13 +173,19 @@ private:
Stamp _stamp[2];
std::weak_ptr<Listener> _track_listener;
std::unordered_map<std::string, RingType::RingReader::Ptr> _rtp_sender;
FMP4MediaSourceMuxer::Ptr _fmp4;
RtmpMediaSourceMuxer::Ptr _rtmp;
RtspMediaSourceMuxer::Ptr _rtsp;
TSMediaSourceMuxer::Ptr _ts;
std::shared_ptr<FMP4MediaSourceMuxer> _fmp4;
std::shared_ptr<RtmpMediaSourceMuxer> _rtmp;
std::shared_ptr<RtspMediaSourceMuxer> _rtsp;
std::shared_ptr<TSMediaSourceMuxer> _ts;
std::shared_ptr<RtspMediaSourceMuxer> _rtc;
#if defined(ENABLE_FFMPEG)
bool _audio_transcode = false;
std::shared_ptr<FFmpegDecoder> _audio_dec;
std::shared_ptr<FFmpegEncoder> _audio_enc;
#endif
MediaSinkInterface::Ptr _mp4;
HlsRecorder::Ptr _hls;
HlsFMP4Recorder::Ptr _hls_fmp4;
std::shared_ptr<HlsRecorder> _hls;
std::shared_ptr<HlsFMP4Recorder> _hls_fmp4;
toolkit::EventPoller::Ptr _poller;
RingType::Ptr _ring;
......
......@@ -76,12 +76,16 @@ const string kEnableFFmpegLog = GENERAL_FIELD "enable_ffmpeg_log";
const string kWaitTrackReadyMS = GENERAL_FIELD "wait_track_ready_ms";
const string kWaitAddTrackMS = GENERAL_FIELD "wait_add_track_ms";
const string kUnreadyFrameCache = GENERAL_FIELD "unready_frame_cache";
const string kOpusBitrate = GENERAL_FIELD "opusBitrate";
const string kAacBitrate = GENERAL_FIELD "aacBitrate";
static onceToken token([]() {
mINI::Instance()[kFlowThreshold] = 1024;
mINI::Instance()[kStreamNoneReaderDelayMS] = 20 * 1000;
mINI::Instance()[kMaxStreamWaitTimeMS] = 15 * 1000;
mINI::Instance()[kEnableVhost] = 0;
mINI::Instance()[kOpusBitrate] = 64000;
mINI::Instance()[kAacBitrate] = 64000;
mINI::Instance()[kResetWhenRePlay] = 1;
mINI::Instance()[kMergeWriteMS] = 0;
mINI::Instance()[kMediaServerId] = makeRandStr(16);
......@@ -109,6 +113,8 @@ const string kEnableRtsp = PROTOCOL_FIELD "enable_rtsp";
const string kEnableRtmp = PROTOCOL_FIELD "enable_rtmp";
const string kEnableTS = PROTOCOL_FIELD "enable_ts";
const string kEnableFMP4 = PROTOCOL_FIELD "enable_fmp4";
const string kEnableRtc = PROTOCOL_FIELD "enable_rtc";
const string kAudioTranscode = PROTOCOL_FIELD "audio_transcode";
const string kMP4AsPlayer = PROTOCOL_FIELD "mp4_as_player";
const string kMP4MaxSecond = PROTOCOL_FIELD "mp4_max_second";
......@@ -116,6 +122,7 @@ const string kMP4SavePath = PROTOCOL_FIELD "mp4_save_path";
const string kHlsSavePath = PROTOCOL_FIELD "hls_save_path";
const string kRtcDemand = PROTOCOL_FIELD "rtc_demand";
const string kHlsDemand = PROTOCOL_FIELD "hls_demand";
const string kRtspDemand = PROTOCOL_FIELD "rtsp_demand";
const string kRtmpDemand = PROTOCOL_FIELD "rtmp_demand";
......@@ -136,6 +143,8 @@ static onceToken token([]() {
mINI::Instance()[kEnableRtmp] = 1;
mINI::Instance()[kEnableTS] = 1;
mINI::Instance()[kEnableFMP4] = 1;
mINI::Instance()[kEnableRtc] = 1;
mINI::Instance()[kAudioTranscode] = 1;
mINI::Instance()[kMP4AsPlayer] = 0;
mINI::Instance()[kMP4MaxSecond] = 3600;
......@@ -143,6 +152,7 @@ static onceToken token([]() {
mINI::Instance()[kHlsSavePath] = "./www";
mINI::Instance()[kRtcDemand] = 0;
mINI::Instance()[kHlsDemand] = 0;
mINI::Instance()[kRtspDemand] = 0;
mINI::Instance()[kRtmpDemand] = 0;
......
......@@ -181,6 +181,8 @@ extern const std::string kWaitTrackReadyMS;
extern const std::string kWaitAddTrackMS;
// If the track is not ready yet, cache the frames first, with an upper limit (100 frames is roughly 4 seconds) to avoid running out of memory
extern const std::string kUnreadyFrameCache;
extern const std::string kOpusBitrate;
extern const std::string kAacBitrate;
} // namespace General
namespace Protocol {
......@@ -211,6 +213,8 @@ extern const std::string kEnableRtmp;
extern const std::string kEnableTS;
// Whether to enable conversion to http-fmp4/ws-fmp4
extern const std::string kEnableFMP4;
extern const std::string kEnableRtc;
extern const std::string kAudioTranscode;
// Whether mp4 recording counts as a viewer
extern const std::string kMP4AsPlayer;
......@@ -223,6 +227,7 @@ extern const std::string kMP4SavePath;
extern const std::string kHlsSavePath;
// Switches for on-demand protocol conversion
extern const std::string kRtcDemand;
extern const std::string kHlsDemand;
extern const std::string kRtspDemand;
extern const std::string kRtmpDemand;
......
......@@ -17,6 +17,7 @@
#include "RtpSplitter.h"
#include "RtpProcess.h"
#include "Util/TimeTicker.h"
#include "Util/mini.h"
namespace mediakit{
......
......@@ -42,7 +42,7 @@ public:
* @param stream_id stream id
* @param ring_size a fixed ring buffer size can be set; 0 means adaptive
*/
RtspMediaSource(const MediaTuple& tuple, int ring_size = RTP_GOP_SIZE): MediaSource(RTSP_SCHEMA, tuple), _ring_size(ring_size) {}
RtspMediaSource(const MediaTuple& tuple, const std::string &schema = RTSP_SCHEMA, int ring_size = RTP_GOP_SIZE): MediaSource(schema, tuple), _ring_size(ring_size) {}
~RtspMediaSource() override { flush(); }
......
......@@ -74,7 +74,7 @@ void RtspMediaSource::onWrite(RtpPacket::Ptr rtp, bool keyPos) {
PacketCache<RtpPacket>::inputPacket(stamp, is_video, std::move(rtp), keyPos);
}
RtspMediaSourceImp::RtspMediaSourceImp(const MediaTuple& tuple, int ringSize): RtspMediaSource(tuple, ringSize)
RtspMediaSourceImp::RtspMediaSourceImp(const MediaTuple& tuple, const std::string& schema, int ringSize): RtspMediaSource(tuple, schema, ringSize)
{
_demuxer = std::make_shared<RtspDemuxer>();
_demuxer->setTrackListener(this);
......@@ -112,7 +112,12 @@ void RtspMediaSourceImp::setProtocolOption(const ProtocolOption &option)
// With direct proxy mode enabled, rtsp is proxied as-is instead of being re-muxed; however, some rtsp publishers omit sps/pps from the rtp stream because the sdp already carries them,
// which makes rtc unplayable, so when publishing via rtsp and playing via rtc it is recommended to disable direct proxy mode
_option = option;
_option.enable_rtsp = !direct_proxy;
if (direct_proxy) {
if (getSchema() == RTC_SCHEMA)
_option.enable_rtc = false;
if (getSchema() == RTSP_SCHEMA)
_option.enable_rtsp = false;
}
_muxer = std::make_shared<MultiMediaSourceMuxer>(_tuple, _demuxer->getDuration(), _option);
_muxer->setMediaListener(getListener());
_muxer->setTrackListener(std::static_pointer_cast<RtspMediaSourceImp>(shared_from_this()));
......@@ -128,11 +133,48 @@ void RtspMediaSourceImp::setProtocolOption(const ProtocolOption &option)
RtspMediaSource::Ptr RtspMediaSourceImp::clone(const std::string &stream) {
auto tuple = _tuple;
tuple.stream = stream;
auto src_imp = std::make_shared<RtspMediaSourceImp>(tuple);
auto src_imp = std::make_shared<RtspMediaSourceImp>(tuple, getSchema());
src_imp->setSdp(getSdp());
src_imp->setProtocolOption(getProtocolOption());
return src_imp;
}
bool RtspMediaSourceImp::addTrack(const Track::Ptr &track)
{
if (_muxer) {
if (_muxer->addTrack(track)) {
track->addDelegate(_muxer);
return true;
}
}
return false;
}
void RtspMediaSourceImp::addTrackCompleted()
{
if (_muxer) {
_muxer->addTrackCompleted();
}
}
void RtspMediaSourceImp::resetTracks()
{
if (_muxer) {
_muxer->resetTracks();
}
}
void RtspMediaSourceImp::setListener(const std::weak_ptr<MediaSourceEvent> &listener)
{
if (_muxer) {
// Events the _muxer object cannot handle are forwarded to the listener
_muxer->setMediaListener(listener);
}
else {
// No _muxer object has been created, so all events go to the listener
MediaSource::setListener(listener);
}
}
}
......@@ -12,12 +12,11 @@
#define SRC_RTSP_RTSPTORTMPMEDIASOURCE_H_
#include "RtspMediaSource.h"
#include "RtspDemuxer.h"
#include "Common/MultiMediaSourceMuxer.h"
namespace mediakit {
class RtspDemuxer;
class RtspMediaSourceImp final : public RtspMediaSource, private TrackListener, public MultiMediaSourceMuxer::Listener {
class RtspMediaSourceImp : public RtspMediaSource, private TrackListener, public MultiMediaSourceMuxer::Listener {
public:
using Ptr = std::shared_ptr<RtspMediaSourceImp>;
......@@ -28,7 +27,7 @@ public:
* @param id stream id
* @param ringSize ring buffer size
*/
RtspMediaSourceImp(const MediaTuple& tuple, int ringSize = RTP_GOP_SIZE);
RtspMediaSourceImp(const MediaTuple& tuple, const std::string &schema = RTSP_SCHEMA, int ringSize = RTP_GOP_SIZE);
~RtspMediaSourceImp() override = default;
......@@ -61,30 +60,14 @@ public:
/**
* Track-added event triggered by _demuxer
*/
bool addTrack(const Track::Ptr &track) override {
if (_muxer) {
if (_muxer->addTrack(track)) {
track->addDelegate(_muxer);
return true;
}
}
return false;
}
bool addTrack(const Track::Ptr &track) override;
/**
* Event triggered by _demuxer when all tracks have been added
*/
void addTrackCompleted() override {
if (_muxer) {
_muxer->addTrackCompleted();
}
}
void addTrackCompleted() override;
void resetTracks() override {
if (_muxer) {
_muxer->resetTracks();
}
}
void resetTracks() override;
/**
* Event triggered by _muxer when all tracks are ready
......@@ -97,21 +80,13 @@ public:
* Set the event listener
* @param listener the listener
*/
void setListener(const std::weak_ptr<MediaSourceEvent> &listener) override{
if (_muxer) {
//_muxer对象不能处理的事件再给listener处理
_muxer->setMediaListener(listener);
} else {
//未创建_muxer对象,事件全部给listener处理
MediaSource::setListener(listener);
}
}
void setListener(const std::weak_ptr<MediaSourceEvent> &listener) override;
RtspMediaSource::Ptr clone(const std::string& stream) override;
private:
protected:
bool _all_track_ready = false;
ProtocolOption _option;
RtspDemuxer::Ptr _demuxer;
std::shared_ptr<RtspDemuxer> _demuxer;
MultiMediaSourceMuxer::Ptr _muxer;
};
} /* namespace mediakit */
......
......@@ -16,16 +16,19 @@
namespace mediakit {
class RtspMediaSourceMuxer final : public RtspMuxer, public MediaSourceEventInterceptor,
class RtspMediaSourceMuxer : public RtspMuxer, public MediaSourceEventInterceptor,
public std::enable_shared_from_this<RtspMediaSourceMuxer> {
public:
using Ptr = std::shared_ptr<RtspMediaSourceMuxer>;
RtspMediaSourceMuxer(const MediaTuple& tuple,
const ProtocolOption &option,
const TitleSdp::Ptr &title = nullptr) : RtspMuxer(title) {
const TitleSdp::Ptr &title = nullptr,
const std::string &schema = RTSP_SCHEMA
): RtspMuxer(title) {
_option = option;
_media_src = std::make_shared<RtspMediaSource>(tuple);
_on_demand = schema == RTSP_SCHEMA ? option.rtsp_demand : option.rtc_demand;
_media_src = std::make_shared<RtspMediaSource>(tuple, schema);
getRtpRing()->setDelegate(_media_src);
}
......@@ -50,19 +53,19 @@ public:
}
void onReaderChanged(MediaSource &sender, int size) override {
_enabled = _option.rtsp_demand ? size : true;
if (!size && _option.rtsp_demand) {
_enabled = _on_demand ? size : true;
if (!size && _on_demand) {
_clear_cache = true;
}
MediaSourceEventInterceptor::onReaderChanged(sender, size);
}
bool inputFrame(const Frame::Ptr &frame) override {
if (_clear_cache && _option.rtsp_demand) {
if (_clear_cache && _on_demand) {
_clear_cache = false;
_media_src->clearCache();
}
if (_enabled || !_option.rtsp_demand) {
if (_enabled || !_on_demand) {
return RtspMuxer::inputFrame(frame);
}
return false;
......@@ -70,12 +73,13 @@ public:
bool isEnabled() {
// While the cache has not been cleared yet, inputFrame may still be triggered so the cache can be flushed in time
return _option.rtsp_demand ? (_clear_cache ? true : _enabled) : true;
return _on_demand ? (_clear_cache ? true : _enabled) : true;
}
private:
protected:
bool _enabled = true;
bool _clear_cache = false;
bool _on_demand = false;
ProtocolOption _option;
RtspMediaSource::Ptr _media_src;
};
......
......@@ -9,7 +9,7 @@
*/
#include "Nack.h"
#include <algorithm>
using namespace std;
using namespace toolkit;
......
......@@ -1176,10 +1176,10 @@ void push_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
cb(WebRtcException(SockException(Err_other, err)));
return;
}
std::string schema = RTC_SCHEMA;
RtspMediaSourceImp::Ptr push_src;
std::shared_ptr<void> push_src_ownership;
auto src = MediaSource::find(RTSP_SCHEMA, info.vhost, info.app, info.stream);
auto src = MediaSource::find(schema, info.vhost, info.app, info.stream);
auto push_failed = (bool)src;
while (src) {
......@@ -1206,7 +1206,7 @@ void push_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
}
if (!push_src) {
push_src = std::make_shared<RtspMediaSourceImp>(info);
push_src = std::make_shared<RtspMediaSourceImp>(info, schema);
push_src_ownership = push_src->getOwnership();
push_src->setProtocolOption(option);
}
......@@ -1233,9 +1233,8 @@ void play_plugin(Session &sender, const WebRtcArgs &args, const WebRtcPluginMana
cb(WebRtcException(SockException(Err_other, err)));
return;
}
// webrtc playback uses the rtsp source
info.schema = RTSP_SCHEMA;
info.schema = RTC_SCHEMA;
MediaSource::findAsync(info, session_ptr, [=](const MediaSource::Ptr &src_in) mutable {
auto src = dynamic_pointer_cast<RtspMediaSource>(src_in);
if (!src) {
......