Commit ac3d7bd2 by cqm

add addDelegate with std::function, remove FrameWriterInterfaceHelper

parent d2c64b1f
@@ -232,18 +232,18 @@ JNI_API(jlong, createMediaPlayer, jstring url, jobject callback){
         auto viedoTrack = strongPlayer->getTrack(TrackVideo);
         if (viedoTrack) {
-            viedoTrack->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([globalWeakRef](const Frame::Ptr &frame) {
+            viedoTrack->addDelegate([globalWeakRef](const Frame::Ptr &frame) {
                 emitEvent((jobject)globalWeakRef,"onData","(L" MediaFrameSign ";)V",makeJavaFrame(env,frame));
                 return true;
-            }));
+            });
         }
         auto audioTrack = strongPlayer->getTrack(TrackAudio);
         if (audioTrack) {
-            audioTrack->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([globalWeakRef](const Frame::Ptr &frame) {
+            audioTrack->addDelegate([globalWeakRef](const Frame::Ptr &frame) {
                 emitEvent((jobject)globalWeakRef,"onData","(L" MediaFrameSign ";)V",makeJavaFrame(env,frame));
                 return true;
-            }));
+            });
         }
     });
...
@@ -213,9 +213,9 @@ git submodule update --init
         WarnL << "none video Track!";
         return;
     }
-    viedoTrack->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([](const Frame::Ptr &frame) {
+    viedoTrack->addDelegate([](const Frame::Ptr &frame) {
        //please decode video here
-    }));
+    });
 });
 player->setOnShutdown([](const SockException &ex) {
...
@@ -115,12 +115,10 @@ API_EXPORT int API_CALL mk_track_bit_rate(mk_track track) {
 API_EXPORT void *API_CALL mk_track_add_delegate(mk_track track, on_mk_frame_out cb, void *user_data) {
     assert(track && cb);
-    auto delegate = std::make_shared<FrameWriterInterfaceHelper>([cb, user_data](const Frame::Ptr &frame) {
+    return (*((Track::Ptr *) track))->addDelegate([cb, user_data](const Frame::Ptr &frame) {
         cb(user_data, (mk_frame) &frame);
         return true;
     });
-    (*((Track::Ptr *) track))->addDelegate(delegate);
-    return delegate.get();
 }
 
 API_EXPORT void API_CALL mk_track_del_delegate(mk_track track, void *tag) {
...
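With this change, mk_track_add_delegate returns the pointer handed back by the new C++ addDelegate overload, and that same pointer is what mk_track_del_delegate expects as its tag. Below is a minimal usage sketch from the C-API side, not part of this commit: the on_mk_frame_out signature is assumed to be void(void *user_data, mk_frame frame), consistent with the cb(user_data, (mk_frame) &frame) call above, the header name is assumed, and attach_then_detach/on_frame are hypothetical names.

#include "mk_track.h"   // assumed header for the mk_track C API

// Hypothetical callback; signature assumed from cb(user_data, (mk_frame) &frame) above.
static void on_frame(void *user_data, mk_frame frame) {
    (void) user_data;
    (void) frame;       // hand the frame to the application here
}

// Hypothetical helper: register a delegate, then remove it with the returned tag.
static void attach_then_detach(mk_track track) {
    // The returned pointer is the delegate object itself and doubles as the removal tag.
    void *tag = mk_track_add_delegate(track, on_frame, nullptr);
    // ... frames are delivered to on_frame while the delegate is attached ...
    mk_track_del_delegate(track, tag);
}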
@@ -83,10 +83,9 @@ int main(int argc, char *argv[]) {
                 return true;
             });
         });
-        auto delegate = std::make_shared<FrameWriterInterfaceHelper>([decoder](const Frame::Ptr &frame) {
+        videoTrack->addDelegate([decoder](const Frame::Ptr &frame) {
             return decoder->inputFrame(frame, false, true);
         });
-        videoTrack->addDelegate(delegate);
     }
 
     if (audioTrack) {
@@ -105,10 +104,9 @@ int main(int argc, char *argv[]) {
             auto len = pcm->get()->nb_samples * pcm->get()->channels * av_get_bytes_per_sample((enum AVSampleFormat)pcm->get()->format);
             audio_player->playPCM((const char *) (pcm->get()->data[0]), MIN(len, frame->get()->linesize[0]));
         });
-        auto audio_delegate = std::make_shared<FrameWriterInterfaceHelper>([decoder](const Frame::Ptr &frame) {
+        audioTrack->addDelegate([decoder](const Frame::Ptr &frame) {
            return decoder->inputFrame(frame, false, true);
         });
-        audioTrack->addDelegate(audio_delegate);
     }
 });
...
@@ -37,7 +37,7 @@ bool MediaSink::addTrack(const Track::Ptr &track_in) {
     };
     _ticker.resetTime();
-    track->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([this](const Frame::Ptr &frame) {
+    track->addDelegate([this](const Frame::Ptr &frame) {
         if (_all_track_ready) {
             return onTrackFrame(frame);
         }
@@ -52,7 +52,7 @@ bool MediaSink::addTrack(const Track::Ptr &track_in) {
         // Some tracks are not ready yet; cache the frame for now
         frame_unread.emplace_back(Frame::getCacheAbleFrame(frame));
         return true;
-    }));
+    });
     return true;
 }
@@ -247,13 +247,13 @@ bool MediaSink::addMuteAudioTrack() {
     }
     auto audio = std::make_shared<AACTrack>(makeAacConfig(MUTE_ADTS_DATA, ADTS_HEADER_LEN));
     _track_map[audio->getTrackType()] = std::make_pair(audio, true);
-    audio->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([this](const Frame::Ptr &frame) {
+    audio->addDelegate([this](const Frame::Ptr &frame) {
         return onTrackFrame(frame);
-    }));
+    });
     _mute_audio_maker = std::make_shared<MuteAudioMaker>();
-    _mute_audio_maker->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([audio](const Frame::Ptr &frame) {
+    _mute_audio_maker->addDelegate([audio](const Frame::Ptr &frame) {
         return audio->inputFrame(frame);
-    }));
+    });
     onTrackReady(audio);
     TraceL << "mute aac track added";
     return true;
...
@@ -318,6 +318,12 @@ public:
         _delegates.emplace(delegate.get(), delegate);
     }
+    FrameWriterInterface* addDelegate(const std::function<bool(const Frame::Ptr &frame)> &cb) {
+        auto delegate = std::make_shared<FrameWriterInterfaceHelper>(cb);
+        std::lock_guard<std::mutex> lck(_mtx);
+        _delegates.emplace(delegate.get(), delegate);
+        return delegate.get();
+    }
 
     /**
      * Remove a delegate
      */
...
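The overload above is what every call site in this commit now relies on: the dispatcher wraps the std::function in a FrameWriterInterfaceHelper internally and hands back the raw pointer so the caller can still detach later. A minimal before/after sketch follows; it is not from the commit, it assumes the relevant ZLMediaKit headers are included, that "track" is any Track::Ptr (or other object derived from the dispatcher class in Frame.h), and that removal is done via a delDelegate(FrameWriterInterface *) method keyed on the same raw pointer stored in _delegates, as implied by the removal comment above.

// Sketch only; attachAndDetach is a hypothetical helper name.
void attachAndDetach(const Track::Ptr &track) {
    // Before this commit: wrap the lambda manually and keep the helper as the removal tag.
    auto helper = std::make_shared<FrameWriterInterfaceHelper>([](const Frame::Ptr &frame) {
        return true;    // value is forwarded as the delegate's inputFrame() result
    });
    track->addDelegate(helper);

    // After this commit: pass the lambda directly; the returned raw pointer is the tag.
    auto *tag = track->addDelegate([](const Frame::Ptr &frame) {
        return true;
    });
    track->delDelegate(tag);    // removal via the pointer returned above (assumed API, see lead-in)
}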
@@ -112,9 +112,9 @@ bool Demuxer::addTrack(const Track::Ptr &track) {
     }
     if (_sink->addTrack(track)) {
-        track->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([this](const Frame::Ptr &frame) {
+        track->addDelegate([this](const Frame::Ptr &frame) {
            return _sink->inputFrame(frame);
-        }));
+        });
         return true;
     }
     return false;
...
@@ -136,10 +136,10 @@ bool GB28181Process::inputRtp(bool, const char *data, size_t data_len) {
             }
         }
         // Set up the frame callback
-        _rtp_decoder[pt]->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([this](const Frame::Ptr &frame) {
+        _rtp_decoder[pt]->addDelegate([this](const Frame::Ptr &frame) {
             onRtpDecode(frame);
             return true;
-        }));
+        });
     }
     return ref->inputRtp(TrackVideo, (unsigned char *)data, data_len);
...