Commit fbd711a6 by xiongziliang

Tidy up code

parent 42559146
@@ -174,7 +174,7 @@ public:
     }
     uint32_t getStamp(const RtmpPacket::Ptr &packet) {
-        return packet->timeStamp;
+        return packet->time_stamp;
     }
     bool isFlushAble(bool is_video, bool is_key, uint32_t new_stamp, int cache_size);
......
@@ -144,7 +144,7 @@ int dumpAacConfig(const string &config, int length, uint8_t *out, int out_size)
 #ifndef ENABLE_MP4
     AdtsHeader header;
     parseAacConfig(config, header);
-    header.aac_frame_length = length;
+    header.aac_frame_length = ADTS_HEADER_LEN + length;
     dumpAdtsHeader(header, out);
     return ADTS_HEADER_LEN;
 #else
......
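The only functional change in this hunk: the ADTS aac_frame_length field must count the 7-byte ADTS header as well as the raw AAC payload. A minimal sketch of how that 13-bit field is packed, assuming a CRC-less 7-byte header (this is not the project's actual dumpAdtsHeader):

#include <cstdint>

// aac_frame_length is a 13-bit field spanning bytes 3..5 of the ADTS header.
// It must equal header size + payload size, hence ADTS_HEADER_LEN + length above.
static const int kAdtsHeaderLen = 7; // assumption: protection_absent = 1, no CRC

void write_adts_frame_length(uint8_t adts[7], int aac_payload_len) {
    uint32_t frame_len = kAdtsHeaderLen + aac_payload_len;
    adts[3] = uint8_t((adts[3] & 0xFC) | ((frame_len >> 11) & 0x03)); // top 2 bits
    adts[4] = uint8_t((frame_len >> 3) & 0xFF);                        // middle 8 bits
    adts[5] = uint8_t((adts[5] & 0x1F) | ((frame_len & 0x07) << 5));   // low 3 bits
}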
@@ -21,11 +21,11 @@ static string getAacCfg(const RtmpPacket &thiz) {
     if (!thiz.isCfgFrame()) {
         return ret;
     }
-    if (thiz.strBuf.size() < 4) {
+    if (thiz.buffer.size() < 4) {
         WarnL << "bad aac cfg!";
         return ret;
     }
-    ret = thiz.strBuf.substr(2);
+    ret = thiz.buffer.substr(2);
     return ret;
 }
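For context on the substr(2) above: in an RTMP/FLV AAC message, byte 0 is the audio flags byte and byte 1 is the AACPacketType (0 = sequence header, 1 = raw data), so the AudioSpecificConfig starts at offset 2. A hedged sketch of the same extraction (names are illustrative, not the project's API):

#include <string>

// Returns the AudioSpecificConfig from an RTMP AAC sequence-header body,
// or an empty string if this is not a sequence header.
std::string extract_aac_config(const std::string &rtmp_body) {
    if (rtmp_body.size() < 4 || rtmp_body[1] != 0) {
        return "";
    }
    return rtmp_body.substr(2); // typically 2 bytes for AAC-LC
}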
...@@ -37,7 +37,7 @@ bool AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool) { ...@@ -37,7 +37,7 @@ bool AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool) {
} }
if (!_aac_cfg.empty()) { if (!_aac_cfg.empty()) {
onGetAAC(pkt->strBuf.data() + 2, pkt->strBuf.size() - 2, pkt->timeStamp); onGetAAC(pkt->buffer.data() + 2, pkt->buffer.size() - 2, pkt->time_stamp);
} }
return false; return false;
} }
...@@ -97,21 +97,21 @@ void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) { ...@@ -97,21 +97,21 @@ void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if(!_aac_cfg.empty()){ if(!_aac_cfg.empty()){
RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj(); RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj();
rtmpPkt->strBuf.clear(); rtmpPkt->buffer.clear();
//header //header
uint8_t is_config = false; uint8_t is_config = false;
rtmpPkt->strBuf.push_back(_audio_flv_flags); rtmpPkt->buffer.push_back(_audio_flv_flags);
rtmpPkt->strBuf.push_back(!is_config); rtmpPkt->buffer.push_back(!is_config);
//aac data //aac data
rtmpPkt->strBuf.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize()); rtmpPkt->buffer.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
rtmpPkt->bodySize = rtmpPkt->strBuf.size(); rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunkId = CHUNK_AUDIO; rtmpPkt->chunk_id = CHUNK_AUDIO;
rtmpPkt->streamId = STREAM_MEDIA; rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->timeStamp = frame->dts(); rtmpPkt->time_stamp = frame->dts();
rtmpPkt->typeId = MSG_AUDIO; rtmpPkt->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(rtmpPkt, false); RtmpCodec::inputRtmp(rtmpPkt, false);
} }
} }
...@@ -119,20 +119,20 @@ void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) { ...@@ -119,20 +119,20 @@ void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
void AACRtmpEncoder::makeAudioConfigPkt() { void AACRtmpEncoder::makeAudioConfigPkt() {
_audio_flv_flags = getAudioRtmpFlags(std::make_shared<AACTrack>(_aac_cfg)); _audio_flv_flags = getAudioRtmpFlags(std::make_shared<AACTrack>(_aac_cfg));
RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj(); RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj();
rtmpPkt->strBuf.clear(); rtmpPkt->buffer.clear();
//header //header
uint8_t is_config = true; uint8_t is_config = true;
rtmpPkt->strBuf.push_back(_audio_flv_flags); rtmpPkt->buffer.push_back(_audio_flv_flags);
rtmpPkt->strBuf.push_back(!is_config); rtmpPkt->buffer.push_back(!is_config);
//aac config //aac config
rtmpPkt->strBuf.append(_aac_cfg); rtmpPkt->buffer.append(_aac_cfg);
rtmpPkt->bodySize = rtmpPkt->strBuf.size(); rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunkId = CHUNK_AUDIO; rtmpPkt->chunk_id = CHUNK_AUDIO;
rtmpPkt->streamId = STREAM_MEDIA; rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->timeStamp = 0; rtmpPkt->time_stamp = 0;
rtmpPkt->typeId = MSG_AUDIO; rtmpPkt->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(rtmpPkt, false); RtmpCodec::inputRtmp(rtmpPkt, false);
} }
......
...@@ -31,8 +31,8 @@ void CommonRtmpDecoder::obtainFrame() { ...@@ -31,8 +31,8 @@ void CommonRtmpDecoder::obtainFrame() {
bool CommonRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &rtmp, bool) { bool CommonRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &rtmp, bool) {
//拷贝负载 //拷贝负载
_frame->_buffer.assign(rtmp->strBuf.data() + 1, rtmp->strBuf.size() - 1); _frame->_buffer.assign(rtmp->buffer.data() + 1, rtmp->buffer.size() - 1);
_frame->_dts = rtmp->timeStamp; _frame->_dts = rtmp->time_stamp;
//写入环形缓存 //写入环形缓存
RtmpCodec::inputFrame(_frame); RtmpCodec::inputFrame(_frame);
//创建下一帧 //创建下一帧
...@@ -51,16 +51,16 @@ void CommonRtmpEncoder::inputFrame(const Frame::Ptr &frame) { ...@@ -51,16 +51,16 @@ void CommonRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
return; return;
} }
RtmpPacket::Ptr rtmp = ResourcePoolHelper<RtmpPacket>::obtainObj(); RtmpPacket::Ptr rtmp = ResourcePoolHelper<RtmpPacket>::obtainObj();
rtmp->strBuf.clear(); rtmp->buffer.clear();
//header //header
rtmp->strBuf.push_back(_audio_flv_flags); rtmp->buffer.push_back(_audio_flv_flags);
//data //data
rtmp->strBuf.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize()); rtmp->buffer.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
rtmp->bodySize = rtmp->strBuf.size(); rtmp->body_size = rtmp->buffer.size();
rtmp->chunkId = CHUNK_AUDIO; rtmp->chunk_id = CHUNK_AUDIO;
rtmp->streamId = STREAM_MEDIA; rtmp->stream_index = STREAM_MEDIA;
rtmp->timeStamp = frame->dts(); rtmp->time_stamp = frame->dts();
rtmp->typeId = MSG_AUDIO; rtmp->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(rtmp, false); RtmpCodec::inputRtmp(rtmp, false);
} }
......
...@@ -39,18 +39,18 @@ static string getH264SPS(const RtmpPacket &thiz) { ...@@ -39,18 +39,18 @@ static string getH264SPS(const RtmpPacket &thiz) {
if (!thiz.isCfgFrame()) { if (!thiz.isCfgFrame()) {
return ret; return ret;
} }
if (thiz.strBuf.size() < 13) { if (thiz.buffer.size() < 13) {
WarnL << "bad H264 cfg!"; WarnL << "bad H264 cfg!";
return ret; return ret;
} }
uint16_t sps_size ; uint16_t sps_size ;
memcpy(&sps_size, thiz.strBuf.data() + 11,2); memcpy(&sps_size, thiz.buffer.data() + 11, 2);
sps_size = ntohs(sps_size); sps_size = ntohs(sps_size);
if ((int) thiz.strBuf.size() < 13 + sps_size) { if ((int) thiz.buffer.size() < 13 + sps_size) {
WarnL << "bad H264 cfg!"; WarnL << "bad H264 cfg!";
return ret; return ret;
} }
ret.assign(thiz.strBuf.data() + 13, sps_size); ret.assign(thiz.buffer.data() + 13, sps_size);
return ret; return ret;
} }
...@@ -66,27 +66,27 @@ static string getH264PPS(const RtmpPacket &thiz) { ...@@ -66,27 +66,27 @@ static string getH264PPS(const RtmpPacket &thiz) {
if (!thiz.isCfgFrame()) { if (!thiz.isCfgFrame()) {
return ret; return ret;
} }
if (thiz.strBuf.size() < 13) { if (thiz.buffer.size() < 13) {
WarnL << "bad H264 cfg!"; WarnL << "bad H264 cfg!";
return ret; return ret;
} }
uint16_t sps_size ; uint16_t sps_size ;
memcpy(&sps_size,thiz.strBuf.data() + 11,2); memcpy(&sps_size, thiz.buffer.data() + 11, 2);
sps_size = ntohs(sps_size); sps_size = ntohs(sps_size);
if ((int) thiz.strBuf.size() < 13 + sps_size + 1 + 2) { if ((int) thiz.buffer.size() < 13 + sps_size + 1 + 2) {
WarnL << "bad H264 cfg!"; WarnL << "bad H264 cfg!";
return ret; return ret;
} }
uint16_t pps_size ; uint16_t pps_size ;
memcpy(&pps_size, thiz.strBuf.data() + 13 + sps_size + 1,2); memcpy(&pps_size, thiz.buffer.data() + 13 + sps_size + 1, 2);
pps_size = ntohs(pps_size); pps_size = ntohs(pps_size);
if ((int) thiz.strBuf.size() < 13 + sps_size + 1 + 2 + pps_size) { if ((int) thiz.buffer.size() < 13 + sps_size + 1 + 2 + pps_size) {
WarnL << "bad H264 cfg!"; WarnL << "bad H264 cfg!";
return ret; return ret;
} }
ret.assign(thiz.strBuf.data() + 13 + sps_size + 1 + 2, pps_size); ret.assign(thiz.buffer.data() + 13 + sps_size + 1 + 2, pps_size);
return ret; return ret;
} }
...@@ -95,27 +95,27 @@ bool H264RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) { ...@@ -95,27 +95,27 @@ bool H264RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) {
//缓存sps pps,后续插入到I帧之前 //缓存sps pps,后续插入到I帧之前
_sps = getH264SPS(*pkt); _sps = getH264SPS(*pkt);
_pps = getH264PPS(*pkt); _pps = getH264PPS(*pkt);
onGetH264(_sps.data(), _sps.size(), pkt->timeStamp , pkt->timeStamp); onGetH264(_sps.data(), _sps.size(), pkt->time_stamp , pkt->time_stamp);
onGetH264(_pps.data(), _pps.size(), pkt->timeStamp , pkt->timeStamp); onGetH264(_pps.data(), _pps.size(), pkt->time_stamp , pkt->time_stamp);
return false; return false;
} }
if (pkt->strBuf.size() > 9) { if (pkt->buffer.size() > 9) {
uint32_t iTotalLen = pkt->strBuf.size(); uint32_t iTotalLen = pkt->buffer.size();
uint32_t iOffset = 5; uint32_t iOffset = 5;
uint8_t *cts_ptr = (uint8_t *) (pkt->strBuf.data() + 2); uint8_t *cts_ptr = (uint8_t *) (pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000; int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->timeStamp + cts; auto pts = pkt->time_stamp + cts;
while(iOffset + 4 < iTotalLen){ while(iOffset + 4 < iTotalLen){
uint32_t iFrameLen; uint32_t iFrameLen;
memcpy(&iFrameLen, pkt->strBuf.data() + iOffset, 4); memcpy(&iFrameLen, pkt->buffer.data() + iOffset, 4);
iFrameLen = ntohl(iFrameLen); iFrameLen = ntohl(iFrameLen);
iOffset += 4; iOffset += 4;
if(iFrameLen + iOffset > iTotalLen){ if(iFrameLen + iOffset > iTotalLen){
break; break;
} }
onGetH264(pkt->strBuf.data() + iOffset, iFrameLen, pkt->timeStamp , pts); onGetH264(pkt->buffer.data() + iOffset, iFrameLen, pkt->time_stamp , pts);
iOffset += iFrameLen; iOffset += iFrameLen;
} }
} }
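The expression (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | cts_ptr[2]) + 0xff800000) ^ 0xff800000 above sign-extends the 24-bit big-endian composition-time offset stored in the FLV AVC tag. A small standalone sketch of the same trick:

#include <cstdint>

// Decode the signed 24-bit composition time (CTS) from an FLV AVC/HEVC tag.
// Adding and then XOR-ing 0xff800000 sign-extends bit 23 into a 32-bit int,
// e.g. FF FF FF -> -1 and 00 00 28 -> 40; the PTS is then dts + cts.
int32_t decode_cts24(const uint8_t *p) {
    uint32_t raw = (uint32_t(p[0]) << 16) | (uint32_t(p[1]) << 8) | p[2];
    return int32_t((raw + 0xff800000u) ^ 0xff800000u);
}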
...@@ -190,7 +190,7 @@ void H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) { ...@@ -190,7 +190,7 @@ void H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
} }
} }
if(_lastPacket && _lastPacket->timeStamp != frame->dts()) { if(_lastPacket && _lastPacket->time_stamp != frame->dts()) {
RtmpCodec::inputRtmp(_lastPacket, _lastPacket->isVideoKeyFrame()); RtmpCodec::inputRtmp(_lastPacket, _lastPacket->isVideoKeyFrame());
_lastPacket = nullptr; _lastPacket = nullptr;
} }
...@@ -202,23 +202,23 @@ void H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) { ...@@ -202,23 +202,23 @@ void H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
flags |= (((frame->configFrame() || frame->keyFrame()) ? FLV_KEY_FRAME : FLV_INTER_FRAME) << 4); flags |= (((frame->configFrame() || frame->keyFrame()) ? FLV_KEY_FRAME : FLV_INTER_FRAME) << 4);
_lastPacket = ResourcePoolHelper<RtmpPacket>::obtainObj(); _lastPacket = ResourcePoolHelper<RtmpPacket>::obtainObj();
_lastPacket->strBuf.clear(); _lastPacket->buffer.clear();
_lastPacket->strBuf.push_back(flags); _lastPacket->buffer.push_back(flags);
_lastPacket->strBuf.push_back(!is_config); _lastPacket->buffer.push_back(!is_config);
auto cts = frame->pts() - frame->dts(); auto cts = frame->pts() - frame->dts();
cts = htonl(cts); cts = htonl(cts);
_lastPacket->strBuf.append((char *)&cts + 1, 3); _lastPacket->buffer.append((char *)&cts + 1, 3);
_lastPacket->chunkId = CHUNK_VIDEO; _lastPacket->chunk_id = CHUNK_VIDEO;
_lastPacket->streamId = STREAM_MEDIA; _lastPacket->stream_index = STREAM_MEDIA;
_lastPacket->timeStamp = frame->dts(); _lastPacket->time_stamp = frame->dts();
_lastPacket->typeId = MSG_VIDEO; _lastPacket->type_id = MSG_VIDEO;
} }
auto size = htonl(iLen); auto size = htonl(iLen);
_lastPacket->strBuf.append((char *) &size, 4); _lastPacket->buffer.append((char *) &size, 4);
_lastPacket->strBuf.append(pcData, iLen); _lastPacket->buffer.append(pcData, iLen);
_lastPacket->bodySize = _lastPacket->strBuf.size(); _lastPacket->body_size = _lastPacket->buffer.size();
} }
void H264RtmpEncoder::makeVideoConfigPkt() { void H264RtmpEncoder::makeVideoConfigPkt() {
...@@ -227,38 +227,38 @@ void H264RtmpEncoder::makeVideoConfigPkt() { ...@@ -227,38 +227,38 @@ void H264RtmpEncoder::makeVideoConfigPkt() {
bool is_config = true; bool is_config = true;
RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj(); RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj();
rtmpPkt->strBuf.clear(); rtmpPkt->buffer.clear();
//header //header
rtmpPkt->strBuf.push_back(flags); rtmpPkt->buffer.push_back(flags);
rtmpPkt->strBuf.push_back(!is_config); rtmpPkt->buffer.push_back(!is_config);
//cts //cts
rtmpPkt->strBuf.append("\x0\x0\x0", 3); rtmpPkt->buffer.append("\x0\x0\x0", 3);
//AVCDecoderConfigurationRecord start //AVCDecoderConfigurationRecord start
rtmpPkt->strBuf.push_back(1); // version rtmpPkt->buffer.push_back(1); // version
rtmpPkt->strBuf.push_back(_sps[1]); // profile rtmpPkt->buffer.push_back(_sps[1]); // profile
rtmpPkt->strBuf.push_back(_sps[2]); // compat rtmpPkt->buffer.push_back(_sps[2]); // compat
rtmpPkt->strBuf.push_back(_sps[3]); // level rtmpPkt->buffer.push_back(_sps[3]); // level
rtmpPkt->strBuf.push_back(0xff); // 6 bits reserved + 2 bits nal size length - 1 (11) rtmpPkt->buffer.push_back(0xff); // 6 bits reserved + 2 bits nal size length - 1 (11)
rtmpPkt->strBuf.push_back(0xe1); // 3 bits reserved + 5 bits number of sps (00001) rtmpPkt->buffer.push_back(0xe1); // 3 bits reserved + 5 bits number of sps (00001)
//sps //sps
uint16_t size = _sps.size(); uint16_t size = _sps.size();
size = htons(size); size = htons(size);
rtmpPkt->strBuf.append((char *) &size, 2); rtmpPkt->buffer.append((char *) &size, 2);
rtmpPkt->strBuf.append(_sps); rtmpPkt->buffer.append(_sps);
//pps //pps
rtmpPkt->strBuf.push_back(1); // version rtmpPkt->buffer.push_back(1); // version
size = _pps.size(); size = _pps.size();
size = htons(size); size = htons(size);
rtmpPkt->strBuf.append((char *) &size, 2); rtmpPkt->buffer.append((char *) &size, 2);
rtmpPkt->strBuf.append(_pps); rtmpPkt->buffer.append(_pps);
rtmpPkt->bodySize = rtmpPkt->strBuf.size(); rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunkId = CHUNK_VIDEO; rtmpPkt->chunk_id = CHUNK_VIDEO;
rtmpPkt->streamId = STREAM_MEDIA; rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->timeStamp = 0; rtmpPkt->time_stamp = 0;
rtmpPkt->typeId = MSG_VIDEO; rtmpPkt->type_id = MSG_VIDEO;
RtmpCodec::inputRtmp(rtmpPkt, false); RtmpCodec::inputRtmp(rtmpPkt, false);
} }
......
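makeVideoConfigPkt() above serializes an AVCDecoderConfigurationRecord after the two-byte FLV header and the three CTS bytes. A hedged sketch of that record for the single-SPS/single-PPS case; note that the byte written before the PPS length is numOfPictureParameterSets, i.e. a count of 1, even though the inline comment calls it "version":

#include <cstdint>
#include <string>
#include <arpa/inet.h> // htons; assumption: POSIX headers, as in the surrounding code

std::string make_avc_decoder_configuration_record(const std::string &sps, const std::string &pps) {
    std::string rec;
    rec.push_back(1);            // configurationVersion
    rec.push_back(sps[1]);       // AVCProfileIndication
    rec.push_back(sps[2]);       // profile_compatibility
    rec.push_back(sps[3]);       // AVCLevelIndication
    rec.push_back((char) 0xff);  // reserved(6) + lengthSizeMinusOne = 3 (4-byte NALU lengths)
    rec.push_back((char) 0xe1);  // reserved(3) + numOfSequenceParameterSets = 1
    uint16_t size = htons((uint16_t) sps.size());
    rec.append((char *) &size, 2);
    rec.append(sps);
    rec.push_back(1);            // numOfPictureParameterSets = 1
    size = htons((uint16_t) pps.size());
    rec.append((char *) &size, 2);
    rec.append(pps);
    return rec;
}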
...@@ -43,13 +43,13 @@ static bool getH265ConfigFrame(const RtmpPacket &thiz,string &frame) { ...@@ -43,13 +43,13 @@ static bool getH265ConfigFrame(const RtmpPacket &thiz,string &frame) {
if (!thiz.isCfgFrame()) { if (!thiz.isCfgFrame()) {
return false; return false;
} }
if (thiz.strBuf.size() < 6) { if (thiz.buffer.size() < 6) {
WarnL << "bad H265 cfg!"; WarnL << "bad H265 cfg!";
return false; return false;
} }
auto extra = thiz.strBuf.data() + 5; auto extra = thiz.buffer.data() + 5;
auto bytes = thiz.strBuf.size() - 5; auto bytes = thiz.buffer.size() - 5;
struct mpeg4_hevc_t hevc = {0}; struct mpeg4_hevc_t hevc = {0};
if (mpeg4_hevc_decoder_configuration_record_load((uint8_t *) extra, bytes, &hevc) > 0) { if (mpeg4_hevc_decoder_configuration_record_load((uint8_t *) extra, bytes, &hevc) > 0) {
...@@ -70,7 +70,7 @@ bool H265RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) { ...@@ -70,7 +70,7 @@ bool H265RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) {
#ifdef ENABLE_MP4 #ifdef ENABLE_MP4
string config; string config;
if(getH265ConfigFrame(*pkt,config)){ if(getH265ConfigFrame(*pkt,config)){
onGetH265(config.data(), config.size(), pkt->timeStamp , pkt->timeStamp); onGetH265(config.data(), config.size(), pkt->time_stamp , pkt->time_stamp);
} }
#else #else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善"; WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善";
...@@ -78,22 +78,22 @@ bool H265RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) { ...@@ -78,22 +78,22 @@ bool H265RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) {
return false; return false;
} }
if (pkt->strBuf.size() > 9) { if (pkt->buffer.size() > 9) {
uint32_t iTotalLen = pkt->strBuf.size(); uint32_t iTotalLen = pkt->buffer.size();
uint32_t iOffset = 5; uint32_t iOffset = 5;
uint8_t *cts_ptr = (uint8_t *) (pkt->strBuf.data() + 2); uint8_t *cts_ptr = (uint8_t *) (pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000; int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->timeStamp + cts; auto pts = pkt->time_stamp + cts;
while(iOffset + 4 < iTotalLen){ while(iOffset + 4 < iTotalLen){
uint32_t iFrameLen; uint32_t iFrameLen;
memcpy(&iFrameLen, pkt->strBuf.data() + iOffset, 4); memcpy(&iFrameLen, pkt->buffer.data() + iOffset, 4);
iFrameLen = ntohl(iFrameLen); iFrameLen = ntohl(iFrameLen);
iOffset += 4; iOffset += 4;
if(iFrameLen + iOffset > iTotalLen){ if(iFrameLen + iOffset > iTotalLen){
break; break;
} }
onGetH265(pkt->strBuf.data() + iOffset, iFrameLen, pkt->timeStamp , pts); onGetH265(pkt->buffer.data() + iOffset, iFrameLen, pkt->time_stamp , pts);
iOffset += iFrameLen; iOffset += iFrameLen;
} }
} }
...@@ -176,7 +176,7 @@ void H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) { ...@@ -176,7 +176,7 @@ void H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
return; return;
} }
if(_lastPacket && _lastPacket->timeStamp != frame->dts()) { if(_lastPacket && _lastPacket->time_stamp != frame->dts()) {
RtmpCodec::inputRtmp(_lastPacket, _lastPacket->isVideoKeyFrame()); RtmpCodec::inputRtmp(_lastPacket, _lastPacket->isVideoKeyFrame());
_lastPacket = nullptr; _lastPacket = nullptr;
} }
...@@ -188,23 +188,23 @@ void H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) { ...@@ -188,23 +188,23 @@ void H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
flags |= (((frame->configFrame() || frame->keyFrame()) ? FLV_KEY_FRAME : FLV_INTER_FRAME) << 4); flags |= (((frame->configFrame() || frame->keyFrame()) ? FLV_KEY_FRAME : FLV_INTER_FRAME) << 4);
_lastPacket = ResourcePoolHelper<RtmpPacket>::obtainObj(); _lastPacket = ResourcePoolHelper<RtmpPacket>::obtainObj();
_lastPacket->strBuf.clear(); _lastPacket->buffer.clear();
_lastPacket->strBuf.push_back(flags); _lastPacket->buffer.push_back(flags);
_lastPacket->strBuf.push_back(!is_config); _lastPacket->buffer.push_back(!is_config);
auto cts = frame->pts() - frame->dts(); auto cts = frame->pts() - frame->dts();
cts = htonl(cts); cts = htonl(cts);
_lastPacket->strBuf.append((char *)&cts + 1, 3); _lastPacket->buffer.append((char *)&cts + 1, 3);
_lastPacket->chunkId = CHUNK_VIDEO; _lastPacket->chunk_id = CHUNK_VIDEO;
_lastPacket->streamId = STREAM_MEDIA; _lastPacket->stream_index = STREAM_MEDIA;
_lastPacket->timeStamp = frame->dts(); _lastPacket->time_stamp = frame->dts();
_lastPacket->typeId = MSG_VIDEO; _lastPacket->type_id = MSG_VIDEO;
} }
auto size = htonl(iLen); auto size = htonl(iLen);
_lastPacket->strBuf.append((char *) &size, 4); _lastPacket->buffer.append((char *) &size, 4);
_lastPacket->strBuf.append(pcData, iLen); _lastPacket->buffer.append(pcData, iLen);
_lastPacket->bodySize = _lastPacket->strBuf.size(); _lastPacket->body_size = _lastPacket->buffer.size();
} }
void H265RtmpEncoder::makeVideoConfigPkt() { void H265RtmpEncoder::makeVideoConfigPkt() {
...@@ -214,13 +214,13 @@ void H265RtmpEncoder::makeVideoConfigPkt() { ...@@ -214,13 +214,13 @@ void H265RtmpEncoder::makeVideoConfigPkt() {
bool is_config = true; bool is_config = true;
RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj(); RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj();
rtmpPkt->strBuf.clear(); rtmpPkt->buffer.clear();
//header //header
rtmpPkt->strBuf.push_back(flags); rtmpPkt->buffer.push_back(flags);
rtmpPkt->strBuf.push_back(!is_config); rtmpPkt->buffer.push_back(!is_config);
//cts //cts
rtmpPkt->strBuf.append("\x0\x0\x0", 3); rtmpPkt->buffer.append("\x0\x0\x0", 3);
struct mpeg4_hevc_t hevc = {0}; struct mpeg4_hevc_t hevc = {0};
string vps_sps_pps = string("\x00\x00\x00\x01", 4) + _vps + string vps_sps_pps = string("\x00\x00\x00\x01", 4) + _vps +
...@@ -235,13 +235,13 @@ void H265RtmpEncoder::makeVideoConfigPkt() { ...@@ -235,13 +235,13 @@ void H265RtmpEncoder::makeVideoConfigPkt() {
} }
//HEVCDecoderConfigurationRecord //HEVCDecoderConfigurationRecord
rtmpPkt->strBuf.append((char *)extra_data, extra_data_size); rtmpPkt->buffer.append((char *)extra_data, extra_data_size);
rtmpPkt->bodySize = rtmpPkt->strBuf.size(); rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunkId = CHUNK_VIDEO; rtmpPkt->chunk_id = CHUNK_VIDEO;
rtmpPkt->streamId = STREAM_MEDIA; rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->timeStamp = 0; rtmpPkt->time_stamp = 0;
rtmpPkt->typeId = MSG_VIDEO; rtmpPkt->type_id = MSG_VIDEO;
RtmpCodec::inputRtmp(rtmpPkt, false); RtmpCodec::inputRtmp(rtmpPkt, false);
#else #else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善"; WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善";
......
...@@ -73,10 +73,10 @@ void FlvMuxer::onWriteFlvHeader(const RtmpMediaSource::Ptr &mediaSrc) { ...@@ -73,10 +73,10 @@ void FlvMuxer::onWriteFlvHeader(const RtmpMediaSource::Ptr &mediaSrc) {
bool is_have_audio = false,is_have_video = false; bool is_have_audio = false,is_have_video = false;
mediaSrc->getConfigFrame([&](const RtmpPacket::Ptr &pkt){ mediaSrc->getConfigFrame([&](const RtmpPacket::Ptr &pkt){
if(pkt->typeId == MSG_VIDEO){ if(pkt->type_id == MSG_VIDEO){
is_have_video = true; is_have_video = true;
} }
if(pkt->typeId == MSG_AUDIO){ if(pkt->type_id == MSG_AUDIO){
is_have_audio = true; is_have_audio = true;
} }
}); });
...@@ -133,16 +133,16 @@ public: ...@@ -133,16 +133,16 @@ public:
#pragma pack(pop) #pragma pack(pop)
#endif // defined(_WIN32) #endif // defined(_WIN32)
void FlvMuxer::onWriteFlvTag(const RtmpPacket::Ptr &pkt, uint32_t ui32TimeStamp , bool flush) { void FlvMuxer::onWriteFlvTag(const RtmpPacket::Ptr &pkt, uint32_t time_stamp , bool flush) {
onWriteFlvTag(pkt->typeId,pkt,ui32TimeStamp, flush); onWriteFlvTag(pkt->type_id, pkt, time_stamp, flush);
} }
void FlvMuxer::onWriteFlvTag(uint8_t ui8Type, const Buffer::Ptr &buffer, uint32_t ui32TimeStamp, bool flush) { void FlvMuxer::onWriteFlvTag(uint8_t type, const Buffer::Ptr &buffer, uint32_t time_stamp, bool flush) {
RtmpTagHeader header; RtmpTagHeader header;
header.type = ui8Type; header.type = type;
set_be24(header.data_size, buffer->size()); set_be24(header.data_size, buffer->size());
header.timestamp_ex = (uint8_t) ((ui32TimeStamp >> 24) & 0xff); header.timestamp_ex = (uint8_t) ((time_stamp >> 24) & 0xff);
set_be24(header.timestamp,ui32TimeStamp & 0xFFFFFF); set_be24(header.timestamp, time_stamp & 0xFFFFFF);
//tag header //tag header
onWrite(std::make_shared<BufferRaw>((char *)&header, sizeof(header)), false); onWrite(std::make_shared<BufferRaw>((char *)&header, sizeof(header)), false);
//tag data //tag data
...@@ -154,7 +154,7 @@ void FlvMuxer::onWriteFlvTag(uint8_t ui8Type, const Buffer::Ptr &buffer, uint32_ ...@@ -154,7 +154,7 @@ void FlvMuxer::onWriteFlvTag(uint8_t ui8Type, const Buffer::Ptr &buffer, uint32_
void FlvMuxer::onWriteRtmp(const RtmpPacket::Ptr &pkt,bool flush) { void FlvMuxer::onWriteRtmp(const RtmpPacket::Ptr &pkt,bool flush) {
int64_t dts_out; int64_t dts_out;
_stamp[pkt->typeId % 2].revise(pkt->timeStamp, 0, dts_out, dts_out); _stamp[pkt->type_id % 2].revise(pkt->time_stamp, 0, dts_out, dts_out);
onWriteFlvTag(pkt, dts_out,flush); onWriteFlvTag(pkt, dts_out,flush);
} }
......
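onWriteFlvTag() above splits the 32-bit timestamp across the FLV tag header: the low 24 bits go into the big-endian timestamp field and bits 31..24 into timestamp_ex. A minimal sketch of that split:

#include <cstdint>

struct FlvTagStamp {
    uint8_t timestamp[3]; // big-endian, bits 23..0
    uint8_t timestamp_ex; // bits 31..24
};

FlvTagStamp split_flv_timestamp(uint32_t time_stamp) {
    FlvTagStamp s;
    s.timestamp_ex = uint8_t((time_stamp >> 24) & 0xff);
    s.timestamp[0] = uint8_t((time_stamp >> 16) & 0xff);
    s.timestamp[1] = uint8_t((time_stamp >> 8) & 0xff);
    s.timestamp[2] = uint8_t(time_stamp & 0xff);
    return s;
}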
...@@ -25,21 +25,23 @@ public: ...@@ -25,21 +25,23 @@ public:
FlvMuxer(); FlvMuxer();
virtual ~FlvMuxer(); virtual ~FlvMuxer();
void stop(); void stop();
protected: protected:
void start(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr &media); void start(const EventPoller::Ptr &poller, const RtmpMediaSource::Ptr &media);
virtual void onWrite(const Buffer::Ptr &data, bool flush) = 0; virtual void onWrite(const Buffer::Ptr &data, bool flush) = 0;
virtual void onDetach() = 0; virtual void onDetach() = 0;
virtual std::shared_ptr<FlvMuxer> getSharedPtr() = 0; virtual std::shared_ptr<FlvMuxer> getSharedPtr() = 0;
private: private:
void onWriteFlvHeader(const RtmpMediaSource::Ptr &media); void onWriteFlvHeader(const RtmpMediaSource::Ptr &media);
void onWriteRtmp(const RtmpPacket::Ptr &pkt,bool flush); void onWriteRtmp(const RtmpPacket::Ptr &pkt, bool flush);
void onWriteFlvTag(const RtmpPacket::Ptr &pkt, uint32_t ui32TimeStamp, bool flush); void onWriteFlvTag(const RtmpPacket::Ptr &pkt, uint32_t time_stamp, bool flush);
void onWriteFlvTag(uint8_t ui8Type, const Buffer::Ptr &buffer, uint32_t ui32TimeStamp, bool flush); void onWriteFlvTag(uint8_t type, const Buffer::Ptr &buffer, uint32_t time_stamp, bool flush);
 private:
-    RtmpMediaSource::RingType::RingReader::Ptr _ring_reader;
     //时间戳修整器
     Stamp _stamp[2];
+    RtmpMediaSource::RingType::RingReader::Ptr _ring_reader;
}; };
class FlvRecorder : public FlvMuxer , public std::enable_shared_from_this<FlvRecorder>{ class FlvRecorder : public FlvMuxer , public std::enable_shared_from_this<FlvRecorder>{
...@@ -47,12 +49,14 @@ public: ...@@ -47,12 +49,14 @@ public:
typedef std::shared_ptr<FlvRecorder> Ptr; typedef std::shared_ptr<FlvRecorder> Ptr;
FlvRecorder(); FlvRecorder();
virtual ~FlvRecorder(); virtual ~FlvRecorder();
void startRecord(const EventPoller::Ptr &poller,const string &vhost,const string &app,const string &stream,const string &file_path); void startRecord(const EventPoller::Ptr &poller, const RtmpMediaSource::Ptr &media, const string &file_path);
void startRecord(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr &media,const string &file_path); void startRecord(const EventPoller::Ptr &poller, const string &vhost, const string &app, const string &stream, const string &file_path);
private: private:
virtual void onWrite(const Buffer::Ptr &data, bool flush) override ; virtual void onWrite(const Buffer::Ptr &data, bool flush) override ;
virtual void onDetach() override; virtual void onDetach() override;
virtual std::shared_ptr<FlvMuxer> getSharedPtr() override; virtual std::shared_ptr<FlvMuxer> getSharedPtr() override;
private: private:
std::shared_ptr<FILE> _file; std::shared_ptr<FILE> _file;
recursive_mutex _file_mtx; recursive_mutex _file_mtx;
...@@ -60,5 +64,4 @@ private: ...@@ -60,5 +64,4 @@ private:
}//namespace mediakit }//namespace mediakit
#endif //ZLMEDIAKIT_FLVMUXER_H #endif //ZLMEDIAKIT_FLVMUXER_H
@@ -23,9 +23,6 @@
 using namespace toolkit;
-#define PORT 1935
-#define DEFAULT_CHUNK_LEN 128
 #if !defined(_WIN32)
 #define PACKED __attribute__((packed))
 #else
@@ -33,6 +30,7 @@ using namespace toolkit;
 #endif //!defined(_WIN32)
+#define DEFAULT_CHUNK_LEN 128
 #define HANDSHAKE_PLAINTEXT 0x03
 #define RANDOM_LEN (1536 - 8)
...@@ -91,22 +89,24 @@ class RtmpHandshake { ...@@ -91,22 +89,24 @@ class RtmpHandshake {
public: public:
RtmpHandshake(uint32_t _time, uint8_t *_random = nullptr) { RtmpHandshake(uint32_t _time, uint8_t *_random = nullptr) {
_time = htonl(_time); _time = htonl(_time);
memcpy(timeStamp, &_time, 4); memcpy(time_stamp, &_time, 4);
if (!_random) { if (!_random) {
random_generate((char *) random, sizeof(random)); random_generate((char *) random, sizeof(random));
} else { } else {
memcpy(random, _random, sizeof(random)); memcpy(random, _random, sizeof(random));
} }
} }
-    uint8_t timeStamp[4];
+    uint8_t time_stamp[4];
     uint8_t zero[4] = {0};
     uint8_t random[RANDOM_LEN];
-    void random_generate(char* bytes, int size) {
-        static char cdata[] = { 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x2d, 0x72,
-                0x74, 0x6d, 0x70, 0x2d, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
-                0x2d, 0x77, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x2d, 0x77, 0x69,
-                0x6e, 0x74, 0x65, 0x72, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
-                0x40, 0x31, 0x32, 0x36, 0x2e, 0x63, 0x6f, 0x6d };
+    void random_generate(char *bytes, int size) {
+        static char cdata[] = {0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x2d, 0x72,
+                               0x74, 0x6d, 0x70, 0x2d, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
+                               0x2d, 0x77, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x2d, 0x77, 0x69,
+                               0x6e, 0x74, 0x65, 0x72, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
+                               0x40, 0x31, 0x32, 0x36, 0x2e, 0x63, 0x6f, 0x6d};
for (int i = 0; i < size; i++) { for (int i = 0; i < size; i++) {
bytes[i] = cdata[rand() % (sizeof(cdata) - 1)]; bytes[i] = cdata[rand() % (sizeof(cdata) - 1)];
} }
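RtmpHandshake above mirrors the plain (un-encrypted) C1/S1 handshake packet: 4 bytes of time, 4 zero bytes, then 1528 random bytes, 1536 bytes in total, which is why RANDOM_LEN is 1536 - 8. A minimal sketch of filling such a packet, assuming POSIX htonl:

#include <cstdint>
#include <cstring>
#include <arpa/inet.h> // htonl

static const int kC1Size = 1536;
static const int kRandomLen = kC1Size - 8;

void fill_plain_c1(uint8_t out[kC1Size], uint32_t now, const uint8_t *random_bytes) {
    uint32_t be_time = htonl(now);
    memcpy(out, &be_time, 4);                   // time
    memset(out + 4, 0, 4);                      // zero field (plaintext handshake)
    memcpy(out + 8, random_bytes, kRandomLen);  // random payload
}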
...@@ -116,10 +116,10 @@ public: ...@@ -116,10 +116,10 @@ public:
class RtmpHeader { class RtmpHeader {
public: public:
uint8_t flags; uint8_t flags;
uint8_t timeStamp[3]; uint8_t time_stamp[3];
uint8_t bodySize[3]; uint8_t body_size[3];
uint8_t typeId; uint8_t type_id;
uint8_t streamId[4]; /* Note, this is little-endian while others are BE */ uint8_t stream_index[4]; /* Note, this is little-endian while others are BE */
}PACKED; }PACKED;
#if defined(_WIN32) #if defined(_WIN32)
...@@ -129,21 +129,23 @@ public: ...@@ -129,21 +129,23 @@ public:
class RtmpPacket : public Buffer{ class RtmpPacket : public Buffer{
public: public:
typedef std::shared_ptr<RtmpPacket> Ptr; typedef std::shared_ptr<RtmpPacket> Ptr;
uint8_t typeId; uint8_t type_id;
uint32_t bodySize = 0; uint32_t body_size = 0;
uint32_t timeStamp = 0; uint32_t time_stamp = 0;
bool hasAbsStamp = false; bool is_abs_stamp = false;
uint32_t tsField = 0; uint32_t ts_field = 0;
uint32_t streamId; uint32_t stream_index;
uint32_t chunkId; uint32_t chunk_id;
std::string strBuf; std::string buffer;
public: public:
char *data() const override{ char *data() const override{
return (char*)strBuf.data(); return (char*)buffer.data();
} }
uint32_t size() const override { uint32_t size() const override {
return strBuf.size(); return buffer.size();
}; }
public: public:
RtmpPacket() = default; RtmpPacket() = default;
RtmpPacket(const RtmpPacket &that) = delete; RtmpPacket(const RtmpPacket &that) = delete;
...@@ -151,58 +153,64 @@ public: ...@@ -151,58 +153,64 @@ public:
RtmpPacket &operator=(RtmpPacket &&that) = delete; RtmpPacket &operator=(RtmpPacket &&that) = delete;
RtmpPacket(RtmpPacket &&that){ RtmpPacket(RtmpPacket &&that){
typeId = that.typeId; type_id = that.type_id;
bodySize = that.bodySize; body_size = that.body_size;
timeStamp = that.timeStamp; time_stamp = that.time_stamp;
hasAbsStamp = that.hasAbsStamp; is_abs_stamp = that.is_abs_stamp;
tsField = that.tsField; ts_field = that.ts_field;
streamId = that.streamId; stream_index = that.stream_index;
chunkId = that.chunkId; chunk_id = that.chunk_id;
strBuf = std::move(that.strBuf); buffer = std::move(that.buffer);
} }
bool isVideoKeyFrame() const { bool isVideoKeyFrame() const {
return typeId == MSG_VIDEO && (uint8_t) strBuf[0] >> 4 == FLV_KEY_FRAME && (uint8_t) strBuf[1] == 1; return type_id == MSG_VIDEO && (uint8_t) buffer[0] >> 4 == FLV_KEY_FRAME && (uint8_t) buffer[1] == 1;
} }
bool isCfgFrame() const { bool isCfgFrame() const {
switch (typeId){ switch (type_id){
case MSG_VIDEO : return strBuf[1] == 0; case MSG_VIDEO : return buffer[1] == 0;
case MSG_AUDIO : { case MSG_AUDIO : {
switch (getMediaType()){ switch (getMediaType()){
case FLV_CODEC_AAC : return strBuf[1] == 0; case FLV_CODEC_AAC : return buffer[1] == 0;
default : return false; default : return false;
} }
} }
default : return false; default : return false;
} }
} }
int getMediaType() const { int getMediaType() const {
switch (typeId) { switch (type_id) {
case MSG_VIDEO : return (uint8_t) strBuf[0] & 0x0F; case MSG_VIDEO : return (uint8_t) buffer[0] & 0x0F;
case MSG_AUDIO : return (uint8_t) strBuf[0] >> 4; case MSG_AUDIO : return (uint8_t) buffer[0] >> 4;
default : return 0; default : return 0;
} }
} }
int getAudioSampleRate() const { int getAudioSampleRate() const {
if (typeId != MSG_AUDIO) { if (type_id != MSG_AUDIO) {
return 0; return 0;
} }
int flvSampleRate = ((uint8_t) strBuf[0] & 0x0C) >> 2; int flvSampleRate = ((uint8_t) buffer[0] & 0x0C) >> 2;
const static int sampleRate[] = { 5512, 11025, 22050, 44100 }; const static int sampleRate[] = { 5512, 11025, 22050, 44100 };
return sampleRate[flvSampleRate]; return sampleRate[flvSampleRate];
} }
int getAudioSampleBit() const { int getAudioSampleBit() const {
if (typeId != MSG_AUDIO) { if (type_id != MSG_AUDIO) {
return 0; return 0;
} }
int flvSampleBit = ((uint8_t) strBuf[0] & 0x02) >> 1; int flvSampleBit = ((uint8_t) buffer[0] & 0x02) >> 1;
const static int sampleBit[] = { 8, 16 }; const static int sampleBit[] = { 8, 16 };
return sampleBit[flvSampleBit]; return sampleBit[flvSampleBit];
} }
int getAudioChannel() const { int getAudioChannel() const {
if (typeId != MSG_AUDIO) { if (type_id != MSG_AUDIO) {
return 0; return 0;
} }
int flvStereoOrMono = (uint8_t) strBuf[0] & 0x01; int flvStereoOrMono = (uint8_t) buffer[0] & 0x01;
const static int channel[] = { 1, 2 }; const static int channel[] = { 1, 2 };
return channel[flvStereoOrMono]; return channel[flvStereoOrMono];
} }
......
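The RtmpPacket getters above all decode the first body byte of an audio message, the FLV audio flags: codec id in bits 7..4, sample-rate index in bits 3..2, sample size in bit 1 and channel count in bit 0. A compact sketch of the same decoding:

#include <cstdint>

struct FlvAudioInfo {
    int codec_id;    // e.g. 10 = AAC
    int sample_rate; // Hz
    int sample_bit;  // bits per sample
    int channels;
};

FlvAudioInfo parse_flv_audio_flags(uint8_t flags) {
    static const int rates[] = {5512, 11025, 22050, 44100};
    static const int bits[] = {8, 16};
    static const int chans[] = {1, 2};
    return {flags >> 4, rates[(flags & 0x0C) >> 2], bits[(flags & 0x02) >> 1], chans[flags & 0x01]};
}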
...@@ -23,14 +23,14 @@ public: ...@@ -23,14 +23,14 @@ public:
typedef std::shared_ptr<RtmpRing> Ptr; typedef std::shared_ptr<RtmpRing> Ptr;
typedef RingBuffer<RtmpPacket::Ptr> RingType; typedef RingBuffer<RtmpPacket::Ptr> RingType;
RtmpRing(){} RtmpRing() {}
virtual ~RtmpRing(){} virtual ~RtmpRing() {}
/** /**
* 获取rtmp环形缓存 * 获取rtmp环形缓存
* @return * @return
*/ */
virtual RingType::Ptr getRtmpRing() const{ virtual RingType::Ptr getRtmpRing() const {
return _rtmpRing; return _rtmpRing;
} }
...@@ -38,7 +38,7 @@ public: ...@@ -38,7 +38,7 @@ public:
* 设置rtmp环形缓存 * 设置rtmp环形缓存
* @param ring * @param ring
*/ */
virtual void setRtmpRing(const RingType::Ptr &ring){ virtual void setRtmpRing(const RingType::Ptr &ring) {
_rtmpRing = ring; _rtmpRing = ring;
} }
...@@ -48,17 +48,17 @@ public: ...@@ -48,17 +48,17 @@ public:
* @param key_pos 是否为关键帧 * @param key_pos 是否为关键帧
* @return 是否为关键帧 * @return 是否为关键帧
*/ */
virtual bool inputRtmp(const RtmpPacket::Ptr &rtmp, bool key_pos){ virtual bool inputRtmp(const RtmpPacket::Ptr &rtmp, bool key_pos) {
if(_rtmpRing){ if (_rtmpRing) {
_rtmpRing->write(rtmp,key_pos); _rtmpRing->write(rtmp, key_pos);
} }
return key_pos; return key_pos;
} }
protected: protected:
RingType::Ptr _rtmpRing; RingType::Ptr _rtmpRing;
}; };
class RtmpCodec : public RtmpRing, public FrameDispatcher , public CodecInfo{ class RtmpCodec : public RtmpRing, public FrameDispatcher , public CodecInfo{
public: public:
typedef std::shared_ptr<RtmpCodec> Ptr; typedef std::shared_ptr<RtmpCodec> Ptr;
...@@ -69,5 +69,4 @@ public: ...@@ -69,5 +69,4 @@ public:
}//namespace mediakit }//namespace mediakit
#endif //ZLMEDIAKIT_RTMPCODEC_H #endif //ZLMEDIAKIT_RTMPCODEC_H
...@@ -66,27 +66,27 @@ bool RtmpDemuxer::loadMetaData(const AMFValue &val){ ...@@ -66,27 +66,27 @@ bool RtmpDemuxer::loadMetaData(const AMFValue &val){
} }
bool RtmpDemuxer::inputRtmp(const RtmpPacket::Ptr &pkt) { bool RtmpDemuxer::inputRtmp(const RtmpPacket::Ptr &pkt) {
switch (pkt->typeId) { switch (pkt->type_id) {
case MSG_VIDEO: { case MSG_VIDEO: {
if(!_tryedGetVideoTrack){ if(!_try_get_video_track){
_tryedGetVideoTrack = true; _try_get_video_track = true;
auto codec = AMFValue(pkt->getMediaType()); auto codec = AMFValue(pkt->getMediaType());
makeVideoTrack(codec); makeVideoTrack(codec);
} }
if(_videoRtmpDecoder){ if(_video_rtmp_decoder){
return _videoRtmpDecoder->inputRtmp(pkt, true); return _video_rtmp_decoder->inputRtmp(pkt, true);
} }
return false; return false;
} }
case MSG_AUDIO: { case MSG_AUDIO: {
if(!_tryedGetAudioTrack) { if(!_try_get_audio_track) {
_tryedGetAudioTrack = true; _try_get_audio_track = true;
auto codec = AMFValue(pkt->getMediaType()); auto codec = AMFValue(pkt->getMediaType());
makeAudioTrack(codec, pkt->getAudioSampleRate(), pkt->getAudioChannel(), pkt->getAudioSampleBit()); makeAudioTrack(codec, pkt->getAudioSampleRate(), pkt->getAudioChannel(), pkt->getAudioSampleBit());
} }
if(_audioRtmpDecoder){ if(_audio_rtmp_decoder){
_audioRtmpDecoder->inputRtmp(pkt, false); _audio_rtmp_decoder->inputRtmp(pkt, false);
return false; return false;
} }
return false; return false;
...@@ -101,12 +101,12 @@ void RtmpDemuxer::makeVideoTrack(const AMFValue &videoCodec) { ...@@ -101,12 +101,12 @@ void RtmpDemuxer::makeVideoTrack(const AMFValue &videoCodec) {
_videoTrack = dynamic_pointer_cast<VideoTrack>(Factory::getVideoTrackByAmf(videoCodec)); _videoTrack = dynamic_pointer_cast<VideoTrack>(Factory::getVideoTrackByAmf(videoCodec));
if (_videoTrack) { if (_videoTrack) {
//生成rtmpCodec对象以便解码rtmp //生成rtmpCodec对象以便解码rtmp
_videoRtmpDecoder = Factory::getRtmpCodecByTrack(_videoTrack, false); _video_rtmp_decoder = Factory::getRtmpCodecByTrack(_videoTrack, false);
if (_videoRtmpDecoder) { if (_video_rtmp_decoder) {
//设置rtmp解码器代理,生成的frame写入该Track //设置rtmp解码器代理,生成的frame写入该Track
_videoRtmpDecoder->addDelegate(_videoTrack); _video_rtmp_decoder->addDelegate(_videoTrack);
onAddTrack(_videoTrack); onAddTrack(_videoTrack);
_tryedGetVideoTrack = true; _try_get_video_track = true;
} else { } else {
//找不到相应的rtmp解码器,该track无效 //找不到相应的rtmp解码器,该track无效
_videoTrack.reset(); _videoTrack.reset();
...@@ -119,12 +119,12 @@ void RtmpDemuxer::makeAudioTrack(const AMFValue &audioCodec,int sample_rate, int ...@@ -119,12 +119,12 @@ void RtmpDemuxer::makeAudioTrack(const AMFValue &audioCodec,int sample_rate, int
_audioTrack = dynamic_pointer_cast<AudioTrack>(Factory::getAudioTrackByAmf(audioCodec, sample_rate, channels, sample_bit)); _audioTrack = dynamic_pointer_cast<AudioTrack>(Factory::getAudioTrackByAmf(audioCodec, sample_rate, channels, sample_bit));
if (_audioTrack) { if (_audioTrack) {
//生成rtmpCodec对象以便解码rtmp //生成rtmpCodec对象以便解码rtmp
_audioRtmpDecoder = Factory::getRtmpCodecByTrack(_audioTrack, false); _audio_rtmp_decoder = Factory::getRtmpCodecByTrack(_audioTrack, false);
if (_audioRtmpDecoder) { if (_audio_rtmp_decoder) {
//设置rtmp解码器代理,生成的frame写入该Track //设置rtmp解码器代理,生成的frame写入该Track
_audioRtmpDecoder->addDelegate(_audioTrack); _audio_rtmp_decoder->addDelegate(_audioTrack);
onAddTrack(_audioTrack); onAddTrack(_audioTrack);
_tryedGetAudioTrack = true; _try_get_audio_track = true;
} else { } else {
//找不到相应的rtmp解码器,该track无效 //找不到相应的rtmp解码器,该track无效
_audioTrack.reset(); _audioTrack.reset();
......
...@@ -28,7 +28,7 @@ public: ...@@ -28,7 +28,7 @@ public:
typedef std::shared_ptr<RtmpDemuxer> Ptr; typedef std::shared_ptr<RtmpDemuxer> Ptr;
RtmpDemuxer() = default; RtmpDemuxer() = default;
virtual ~RtmpDemuxer() = default; ~RtmpDemuxer() override = default;
bool loadMetaData(const AMFValue &metadata); bool loadMetaData(const AMFValue &metadata);
...@@ -38,14 +38,16 @@ public: ...@@ -38,14 +38,16 @@ public:
* @return true 代表是i帧 * @return true 代表是i帧
*/ */
bool inputRtmp(const RtmpPacket::Ptr &pkt); bool inputRtmp(const RtmpPacket::Ptr &pkt);
private: private:
void makeVideoTrack(const AMFValue &val); void makeVideoTrack(const AMFValue &val);
void makeAudioTrack(const AMFValue &val, int sample_rate, int channels, int sample_bit); void makeAudioTrack(const AMFValue &val, int sample_rate, int channels, int sample_bit);
private: private:
bool _tryedGetVideoTrack = false; bool _try_get_video_track = false;
bool _tryedGetAudioTrack = false; bool _try_get_audio_track = false;
RtmpCodec::Ptr _audioRtmpDecoder; RtmpCodec::Ptr _audio_rtmp_decoder;
RtmpCodec::Ptr _videoRtmpDecoder; RtmpCodec::Ptr _video_rtmp_decoder;
}; };
} /* namespace mediakit */ } /* namespace mediakit */
......
...@@ -122,17 +122,17 @@ public: ...@@ -122,17 +122,17 @@ public:
*/ */
void onWrite(const RtmpPacket::Ptr &pkt, bool key = true) override { void onWrite(const RtmpPacket::Ptr &pkt, bool key = true) override {
lock_guard<recursive_mutex> lock(_mtx); lock_guard<recursive_mutex> lock(_mtx);
if(pkt->typeId == MSG_VIDEO){ if(pkt->type_id == MSG_VIDEO){
//有视频,那么启用GOP缓存 //有视频,那么启用GOP缓存
_have_video = true; _have_video = true;
} }
if (pkt->isCfgFrame()) { if (pkt->isCfgFrame()) {
_config_frame_map[pkt->typeId] = pkt; _config_frame_map[pkt->type_id] = pkt;
return; return;
} }
//保存当前时间戳 //保存当前时间戳
_track_stamps_map[pkt->typeId] = pkt->timeStamp; _track_stamps_map[pkt->type_id] = pkt->time_stamp;
if (!_ring) { if (!_ring) {
weak_ptr<RtmpMediaSource> weakSelf = dynamic_pointer_cast<RtmpMediaSource>(shared_from_this()); weak_ptr<RtmpMediaSource> weakSelf = dynamic_pointer_cast<RtmpMediaSource>(shared_from_this());
...@@ -154,7 +154,7 @@ public: ...@@ -154,7 +154,7 @@ public:
regist(); regist();
} }
} }
PacketCache<RtmpPacket>::inputPacket(pkt->typeId == MSG_VIDEO, pkt, key); PacketCache<RtmpPacket>::inputPacket(pkt->type_id == MSG_VIDEO, pkt, key);
} }
/** /**
......
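onWrite() above keeps the latest config frame per type_id (H.264/H.265 sequence header, AAC sequence header) outside the GOP ring, so that a reader joining mid-stream can be primed with them before regular packets. A hypothetical mini-model of that bookkeeping (not the project's PacketCache):

#include <cstdint>
#include <map>
#include <memory>

template <typename Packet>
class ConfigFrameCache {
public:
    void input(const std::shared_ptr<Packet> &pkt) {
        if (pkt->isCfgFrame()) {
            _config_frame_map[pkt->type_id] = pkt;          // replace the previous config frame
            return;
        }
        _track_stamps_map[pkt->type_id] = pkt->time_stamp;  // newest stamp per track
    }

    template <typename Func>
    void for_each_config(const Func &func) const {
        for (auto &pr : _config_frame_map) {
            func(pr.second); // replayed to new readers before live packets
        }
    }

private:
    std::map<int, std::shared_ptr<Packet>> _config_frame_map;
    std::map<int, uint32_t> _track_stamps_map;
};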
...@@ -161,11 +161,12 @@ public: ...@@ -161,11 +161,12 @@ public:
} }
 private:
-    RtmpDemuxer::Ptr _demuxer;
-    MultiMediaSourceMuxer::Ptr _muxer;
-    AMFValue _metadata;
     bool _all_track_ready = false;
     bool _recreate_metadata = false;
+    AMFValue _metadata;
+    RtmpDemuxer::Ptr _demuxer;
+    MultiMediaSourceMuxer::Ptr _muxer;
}; };
} /* namespace mediakit */ } /* namespace mediakit */
......
...@@ -24,34 +24,35 @@ public: ...@@ -24,34 +24,35 @@ public:
const string &strApp, const string &strApp,
const string &strId, const string &strId,
const TitleMeta::Ptr &title = nullptr) : RtmpMuxer(title){ const TitleMeta::Ptr &title = nullptr) : RtmpMuxer(title){
_mediaSouce = std::make_shared<RtmpMediaSource>(vhost,strApp,strId); _media_src = std::make_shared<RtmpMediaSource>(vhost, strApp, strId);
getRtmpRing()->setDelegate(_mediaSouce); getRtmpRing()->setDelegate(_media_src);
} }
virtual ~RtmpMediaSourceMuxer(){} virtual ~RtmpMediaSourceMuxer(){}
void setListener(const std::weak_ptr<MediaSourceEvent> &listener){ void setListener(const std::weak_ptr<MediaSourceEvent> &listener){
_mediaSouce->setListener(listener); _media_src->setListener(listener);
} }
void setTimeStamp(uint32_t stamp){ void setTimeStamp(uint32_t stamp){
_mediaSouce->setTimeStamp(stamp); _media_src->setTimeStamp(stamp);
} }
int readerCount() const{ int readerCount() const{
return _mediaSouce->readerCount(); return _media_src->readerCount();
} }
void onAllTrackReady(){ void onAllTrackReady(){
makeConfigPacket(); makeConfigPacket();
_mediaSouce->setMetaData(getMetadata()); _media_src->setMetaData(getMetadata());
} }
// 设置TrackSource // 设置TrackSource
void setTrackSource(const std::weak_ptr<TrackSource> &track_src){ void setTrackSource(const std::weak_ptr<TrackSource> &track_src){
_mediaSouce->setTrackSource(track_src); _media_src->setTrackSource(track_src);
} }
private: private:
RtmpMediaSource::Ptr _mediaSouce; RtmpMediaSource::Ptr _media_src;
}; };
......
...@@ -19,7 +19,7 @@ RtmpMuxer::RtmpMuxer(const TitleMeta::Ptr &title) { ...@@ -19,7 +19,7 @@ RtmpMuxer::RtmpMuxer(const TitleMeta::Ptr &title) {
}else{ }else{
_metadata = title->getMetadata(); _metadata = title->getMetadata();
} }
_rtmpRing = std::make_shared<RtmpRing::RingType>(); _rtmp_ring = std::make_shared<RtmpRing::RingType>();
} }
void RtmpMuxer::addTrack(const Track::Ptr &track) { void RtmpMuxer::addTrack(const Track::Ptr &track) {
...@@ -31,7 +31,7 @@ void RtmpMuxer::addTrack(const Track::Ptr &track) { ...@@ -31,7 +31,7 @@ void RtmpMuxer::addTrack(const Track::Ptr &track) {
} }
//设置rtmp输出环形缓存 //设置rtmp输出环形缓存
encoder->setRtmpRing(_rtmpRing); encoder->setRtmpRing(_rtmp_ring);
//添加metadata //添加metadata
Metadata::addTrack(_metadata,track); Metadata::addTrack(_metadata,track);
...@@ -57,7 +57,7 @@ const AMFValue &RtmpMuxer::getMetadata() const { ...@@ -57,7 +57,7 @@ const AMFValue &RtmpMuxer::getMetadata() const {
} }
RtmpRing::RingType::Ptr RtmpMuxer::getRtmpRing() const { RtmpRing::RingType::Ptr RtmpMuxer::getRtmpRing() const {
return _rtmpRing; return _rtmp_ring;
} }
void RtmpMuxer::resetTracks() { void RtmpMuxer::resetTracks() {
......
...@@ -61,7 +61,7 @@ public: ...@@ -61,7 +61,7 @@ public:
*/ */
void makeConfigPacket(); void makeConfigPacket();
private: private:
RtmpRing::RingType::Ptr _rtmpRing; RtmpRing::RingType::Ptr _rtmp_ring;
AMFValue _metadata; AMFValue _metadata;
RtmpCodec::Ptr _encoder[TrackMax]; RtmpCodec::Ptr _encoder[TrackMax];
}; };
......
...@@ -8,8 +8,8 @@ ...@@ -8,8 +8,8 @@
* may be found in the AUTHORS file in the root of the source tree. * may be found in the AUTHORS file in the root of the source tree.
*/ */
-#ifndef SRC_RTMP_RtmpPlayer2_H_
-#define SRC_RTMP_RtmpPlayer2_H_
+#ifndef SRC_RTMP_RtmpPlayer_H_
+#define SRC_RTMP_RtmpPlayer_H_
#include <memory> #include <memory>
#include <string> #include <string>
...@@ -28,21 +28,24 @@ using namespace toolkit; ...@@ -28,21 +28,24 @@ using namespace toolkit;
using namespace mediakit::Client; using namespace mediakit::Client;
namespace mediakit { namespace mediakit {
//实现了rtmp播放器协议部分的功能,及数据接收功能 //实现了rtmp播放器协议部分的功能,及数据接收功能
class RtmpPlayer:public PlayerBase, public TcpClient, public RtmpProtocol{ class RtmpPlayer : public PlayerBase, public TcpClient, public RtmpProtocol {
public: public:
typedef std::shared_ptr<RtmpPlayer> Ptr; typedef std::shared_ptr<RtmpPlayer> Ptr;
RtmpPlayer(const EventPoller::Ptr &poller); RtmpPlayer(const EventPoller::Ptr &poller);
virtual ~RtmpPlayer(); ~RtmpPlayer() override;
void play(const string &strUrl) override; void play(const string &strUrl) override;
void pause(bool bPause) override; void pause(bool bPause) override;
void teardown() override; void teardown() override;
protected: protected:
virtual bool onCheckMeta(const AMFValue &val) =0; virtual bool onCheckMeta(const AMFValue &val) =0;
virtual void onMediaData(const RtmpPacket::Ptr &chunkData) =0; virtual void onMediaData(const RtmpPacket::Ptr &chunkData) =0;
uint32_t getProgressMilliSecond() const; uint32_t getProgressMilliSecond() const;
void seekToMilliSecond(uint32_t ms); void seekToMilliSecond(uint32_t ms);
protected: protected:
void onMediaData_l(const RtmpPacket::Ptr &chunkData); void onMediaData_l(const RtmpPacket::Ptr &chunkData);
//在获取config帧后才触发onPlayResult_l(而不是收到play命令回复),所以此时所有track都初始化完毕了 //在获取config帧后才触发onPlayResult_l(而不是收到play命令回复),所以此时所有track都初始化完毕了
...@@ -59,15 +62,15 @@ protected: ...@@ -59,15 +62,15 @@ protected:
send(buffer); send(buffer);
} }
template<typename FUN> template<typename FUNC>
inline void addOnResultCB(const FUN &fun) { inline void addOnResultCB(const FUNC &func) {
lock_guard<recursive_mutex> lck(_mtxOnResultCB); lock_guard<recursive_mutex> lck(_mtx_on_result);
_mapOnResultCB.emplace(_iReqID, fun); _map_on_result.emplace(_send_req_id, func);
} }
template<typename FUN> template<typename FUNC>
inline void addOnStatusCB(const FUN &fun) { inline void addOnStatusCB(const FUNC &func) {
lock_guard<recursive_mutex> lck(_mtxOnStatusCB); lock_guard<recursive_mutex> lck(_mtx_on_status);
_dqOnStatusCB.emplace_back(fun); _deque_on_status.emplace_back(func);
} }
void onCmd_result(AMFDecoder &dec); void onCmd_result(AMFDecoder &dec);
...@@ -78,34 +81,37 @@ protected: ...@@ -78,34 +81,37 @@ protected:
inline void send_createStream(); inline void send_createStream();
inline void send_play(); inline void send_play();
inline void send_pause(bool bPause); inline void send_pause(bool bPause);
 private:
-    string _strApp;
-    string _strStream;
-    string _strTcUrl;
-    bool _bPaused = false;
-    unordered_map<int, function<void(AMFDecoder &dec)> > _mapOnResultCB;
-    recursive_mutex _mtxOnResultCB;
-    deque<function<void(AMFValue &dec)> > _dqOnStatusCB;
-    recursive_mutex _mtxOnStatusCB;
-    //超时功能实现
-    Ticker _mediaTicker;
-    std::shared_ptr<Timer> _pMediaTimer;
-    std::shared_ptr<Timer> _pPlayTimer;
-    //心跳定时器
-    std::shared_ptr<Timer> _pBeatTimer;
-    //播放进度控制
-    uint32_t _iSeekTo = 0;
-    uint32_t _aiFistStamp[2] = { 0, 0 };
-    uint32_t _aiNowStamp[2] = { 0, 0 };
-    Ticker _aNowStampTicker[2];
+    string _app;
+    string _stream_id;
+    string _tc_url;
+    bool _paused = false;
     bool _metadata_got = false;
     //是否为性能测试模式
     bool _benchmark_mode = false;
+    //播放进度控制
+    uint32_t _seek_ms = 0;
+    uint32_t _fist_stamp[2] = {0, 0};
+    uint32_t _now_stamp[2] = {0, 0};
+    Ticker _now_stamp_ticker[2];
+    recursive_mutex _mtx_on_result;
+    recursive_mutex _mtx_on_status;
+    deque<function<void(AMFValue &dec)> > _deque_on_status;
+    unordered_map<int, function<void(AMFDecoder &dec)> > _map_on_result;
+    //rtmp接收超时计时器
+    Ticker _rtmp_recv_ticker;
+    //心跳发送定时器
+    std::shared_ptr<Timer> _beat_timer;
+    //播放超时定时器
+    std::shared_ptr<Timer> _play_timer;
+    //rtmp接收超时定时器
+    std::shared_ptr<Timer> _rtmp_recv_timer;
 };
 } /* namespace mediakit */
-#endif /* SRC_RTMP_RtmpPlayer2_H_ */
+#endif /* SRC_RTMP_RtmpPlayer_H_ */
...@@ -27,51 +27,59 @@ namespace mediakit { ...@@ -27,51 +27,59 @@ namespace mediakit {
class RtmpPlayerImp: public PlayerImp<RtmpPlayer,RtmpDemuxer> { class RtmpPlayerImp: public PlayerImp<RtmpPlayer,RtmpDemuxer> {
public: public:
typedef std::shared_ptr<RtmpPlayerImp> Ptr; typedef std::shared_ptr<RtmpPlayerImp> Ptr;
RtmpPlayerImp(const EventPoller::Ptr &poller) : PlayerImp<RtmpPlayer,RtmpDemuxer>(poller){};
virtual ~RtmpPlayerImp(){ RtmpPlayerImp(const EventPoller::Ptr &poller) : PlayerImp<RtmpPlayer, RtmpDemuxer>(poller) {};
DebugL<<endl;
}; ~RtmpPlayerImp() override {
float getProgress() const override{ DebugL << endl;
if(getDuration() > 0){ }
float getProgress() const override {
if (getDuration() > 0) {
return getProgressMilliSecond() / (getDuration() * 1000); return getProgressMilliSecond() / (getDuration() * 1000);
} }
return PlayerBase::getProgress(); return PlayerBase::getProgress();
}; }
void seekTo(float fProgress) override{
fProgress = MAX(float(0),MIN(fProgress,float(1.0))); void seekTo(float fProgress) override {
fProgress = MAX(float(0), MIN(fProgress, float(1.0)));
seekToMilliSecond(fProgress * getDuration() * 1000); seekToMilliSecond(fProgress * getDuration() * 1000);
}; }
void play(const string &strUrl) override { void play(const string &strUrl) override {
PlayerImp<RtmpPlayer,RtmpDemuxer>::play(strUrl); PlayerImp<RtmpPlayer, RtmpDemuxer>::play(strUrl);
} }
private: private:
//派生类回调函数 //派生类回调函数
bool onCheckMeta(const AMFValue &val) override { bool onCheckMeta(const AMFValue &val) override {
_pRtmpMediaSrc = dynamic_pointer_cast<RtmpMediaSource>(_pMediaSrc); _rtmp_src = dynamic_pointer_cast<RtmpMediaSource>(_pMediaSrc);
if(_pRtmpMediaSrc){ if (_rtmp_src) {
_pRtmpMediaSrc->setMetaData(val); _rtmp_src->setMetaData(val);
_set_meta_data = true; _set_meta_data = true;
} }
_delegate.reset(new RtmpDemuxer); _delegate.reset(new RtmpDemuxer);
_delegate->loadMetaData(val); _delegate->loadMetaData(val);
return true; return true;
} }
void onMediaData(const RtmpPacket::Ptr &chunkData) override { void onMediaData(const RtmpPacket::Ptr &chunkData) override {
if(_pRtmpMediaSrc){ if (_rtmp_src) {
if(!_set_meta_data && !chunkData->isCfgFrame()){ if (!_set_meta_data && !chunkData->isCfgFrame()) {
_set_meta_data = true; _set_meta_data = true;
_pRtmpMediaSrc->setMetaData(TitleMeta().getMetadata()); _rtmp_src->setMetaData(TitleMeta().getMetadata());
} }
_pRtmpMediaSrc->onWrite(chunkData); _rtmp_src->onWrite(chunkData);
} }
if(!_delegate){ if (!_delegate) {
//这个流没有metadata //这个流没有metadata
_delegate.reset(new RtmpDemuxer()); _delegate.reset(new RtmpDemuxer());
} }
_delegate->inputRtmp(chunkData); _delegate->inputRtmp(chunkData);
} }
private: private:
RtmpMediaSource::Ptr _pRtmpMediaSrc; RtmpMediaSource::Ptr _rtmp_src;
bool _set_meta_data = false; bool _set_meta_data = false;
}; };
......
...@@ -31,45 +31,42 @@ class RtmpProtocol { ...@@ -31,45 +31,42 @@ class RtmpProtocol {
public: public:
RtmpProtocol(); RtmpProtocol();
virtual ~RtmpProtocol(); virtual ~RtmpProtocol();
void onParseRtmp(const char *data, int size);
//作为客户端发送c0c1,等待s0s1s2并且回调 //作为客户端发送c0c1,等待s0s1s2并且回调
void startClientSession(const function<void()> &cb); void startClientSession(const function<void()> &cb);
void onParseRtmp(const char *pcRawData,int iSize);
void reset();
protected: protected:
virtual void onSendRawData(const Buffer::Ptr &buffer) = 0; virtual void onSendRawData(const Buffer::Ptr &buffer) = 0;
virtual void onRtmpChunk(RtmpPacket &chunkData) = 0; virtual void onRtmpChunk(RtmpPacket &chunk_data) = 0;
virtual void onStreamBegin(uint32_t ui32StreamId){ virtual void onStreamBegin(uint32_t stream_index){
_ui32StreamId = ui32StreamId; _stream_index = stream_index;
} }
virtual void onStreamEof(uint32_t ui32StreamId){}; virtual void onStreamEof(uint32_t stream_index){};
virtual void onStreamDry(uint32_t ui32StreamId){}; virtual void onStreamDry(uint32_t stream_index){};
protected:
void sendAcknowledgement(uint32_t ui32Size);
void sendAcknowledgementSize(uint32_t ui32Size);
void sendPeerBandwidth(uint32_t ui32Size);
void sendChunkSize(uint32_t ui32Size);
void sendPingRequest(uint32_t ui32TimeStamp = ::time(NULL));
void sendPingResponse(uint32_t ui32TimeStamp = ::time(NULL));
void sendSetBufferLength(uint32_t ui32StreamId, uint32_t ui32Length);
void sendUserControl(uint16_t ui16EventType, uint32_t ui32EventData);
void sendUserControl(uint16_t ui16EventType, const string &strEventData);
void sendInvoke(const string &strCmd, const AMFValue &val);
void sendRequest(int iCmd, const string &str);
void sendResponse(int iType, const string &str);
void sendRtmp(uint8_t ui8Type, uint32_t ui32StreamId, const std::string &strBuf, uint32_t ui32TimeStamp, int iChunkID);
void sendRtmp(uint8_t ui8Type, uint32_t ui32StreamId, const Buffer::Ptr &buffer, uint32_t ui32TimeStamp, int iChunkID);
protected: protected:
int _iReqID = 0; void reset();
uint32_t _ui32StreamId = STREAM_CONTROL; BufferRaw::Ptr obtainBuffer();
int _iNowStreamID = 0; BufferRaw::Ptr obtainBuffer(const void *data, int len);
int _iNowChunkID = 0;
bool _bDataStarted = false; void sendAcknowledgement(uint32_t size);
inline BufferRaw::Ptr obtainBuffer(); void sendAcknowledgementSize(uint32_t size);
inline BufferRaw::Ptr obtainBuffer(const void *data, int len); void sendPeerBandwidth(uint32_t size);
//ResourcePool<BufferRaw,MAX_SEND_PKT> _bufferPool; void sendChunkSize(uint32_t size);
void sendPingRequest(uint32_t ti = ::time(NULL));
void sendPingResponse(uint32_t time_stamp = ::time(NULL));
void sendSetBufferLength(uint32_t stream_index, uint32_t len);
void sendUserControl(uint16_t event_type, uint32_t event_data);
void sendUserControl(uint16_t event_type, const string &event_data);
void sendInvoke(const string &cmd, const AMFValue &val);
void sendRequest(int cmd, const string &str);
void sendResponse(int type, const string &str);
void sendRtmp(uint8_t type, uint32_t stream_index, const std::string &buffer, uint32_t stamp, int chunk_id);
void sendRtmp(uint8_t type, uint32_t stream_index, const Buffer::Ptr &buffer, uint32_t stamp, int chunk_id);
private: private:
void handle_S0S1S2(const function<void()> &cb); void handle_S0S1S2(const function<void()> &func);
void handle_C0C1(); void handle_C0C1();
void handle_C1_simple(); void handle_C1_simple();
#ifdef ENABLE_OPENSSL #ifdef ENABLE_OPENSSL
...@@ -82,26 +79,32 @@ private: ...@@ -82,26 +79,32 @@ private:
void handle_C2(); void handle_C2();
void handle_rtmp(); void handle_rtmp();
void handle_rtmpChunk(RtmpPacket &chunkData); void handle_rtmpChunk(RtmpPacket &chunk_data);
protected:
int _send_req_id = 0;
uint32_t _stream_index = STREAM_CONTROL;
private: private:
int _now_stream_index = 0;
int _now_chunk_id = 0;
bool _data_started = false;
////////////ChunkSize//////////// ////////////ChunkSize////////////
size_t _iChunkLenIn = DEFAULT_CHUNK_LEN; size_t _chunk_size_in = DEFAULT_CHUNK_LEN;
size_t _iChunkLenOut = DEFAULT_CHUNK_LEN; size_t _chunk_size_out = DEFAULT_CHUNK_LEN;
////////////Acknowledgement//////////// ////////////Acknowledgement////////////
uint32_t _ui32ByteSent = 0; uint32_t _bytes_sent = 0;
uint32_t _ui32LastSent = 0; uint32_t _bytes_sent_last = 0;
uint32_t _ui32WinSize = 0; uint32_t _windows_size = 0;
///////////PeerBandwidth/////////// ///////////PeerBandwidth///////////
uint32_t _ui32Bandwidth = 2500000; uint32_t _bandwidth = 2500000;
uint8_t _ui8LimitType = 2; uint8_t _band_limit_type = 2;
////////////Chunk////////////
unordered_map<int, RtmpPacket> _mapChunkData;
//////////Rtmp parser////////// //////////Rtmp parser//////////
string _strRcvBuf; string _recv_data_buf;
function<void()> _nextHandle; function<void()> _next_step_func;
////////////Chunk////////////
unordered_map<int, RtmpPacket> _map_chunk_data;
}; };
} /* namespace mediakit */ } /* namespace mediakit */
#endif /* SRC_RTMP_RTMPPROTOCOL_H_ */ #endif /* SRC_RTMP_RTMPPROTOCOL_H_ */
...@@ -164,13 +164,13 @@ inline void RtmpPusher::send_createStream() { ...@@ -164,13 +164,13 @@ inline void RtmpPusher::send_createStream() {
addOnResultCB([this](AMFDecoder &dec){ addOnResultCB([this](AMFDecoder &dec){
//TraceL << "createStream result"; //TraceL << "createStream result";
dec.load<AMFValue>(); dec.load<AMFValue>();
_ui32StreamId = dec.load<int>(); _stream_index = dec.load<int>();
send_publish(); send_publish();
}); });
} }
inline void RtmpPusher::send_publish() { inline void RtmpPusher::send_publish() {
AMFEncoder enc; AMFEncoder enc;
enc << "publish" << ++_iReqID << nullptr << _strStream << _strApp ; enc << "publish" << ++_send_req_id << nullptr << _strStream << _strApp ;
sendRequest(MSG_CMD, enc.data()); sendRequest(MSG_CMD, enc.data());
addOnStatusCB([this](AMFValue &val) { addOnStatusCB([this](AMFValue &val) {
...@@ -195,7 +195,7 @@ inline void RtmpPusher::send_metaData(){ ...@@ -195,7 +195,7 @@ inline void RtmpPusher::send_metaData(){
sendRequest(MSG_DATA, enc.data()); sendRequest(MSG_DATA, enc.data());
src->getConfigFrame([&](const RtmpPacket::Ptr &pkt){ src->getConfigFrame([&](const RtmpPacket::Ptr &pkt){
sendRtmp(pkt->typeId, _ui32StreamId, pkt, pkt->timeStamp, pkt->chunkId ); sendRtmp(pkt->type_id, _stream_index, pkt, pkt->time_stamp, pkt->chunk_id );
}); });
_pRtmpReader = src->getRing()->attach(getPoller()); _pRtmpReader = src->getRing()->attach(getPoller());
...@@ -213,7 +213,7 @@ inline void RtmpPusher::send_metaData(){ ...@@ -213,7 +213,7 @@ inline void RtmpPusher::send_metaData(){
if(++i == size){ if(++i == size){
strongSelf->setSendFlushFlag(true); strongSelf->setSendFlushFlag(true);
} }
strongSelf->sendRtmp(rtmp->typeId, strongSelf->_ui32StreamId, rtmp, rtmp->timeStamp, rtmp->chunkId); strongSelf->sendRtmp(rtmp->type_id, strongSelf->_stream_index, rtmp, rtmp->time_stamp, rtmp->chunk_id);
}); });
}); });
_pRtmpReader->setDetachCB([weakSelf](){ _pRtmpReader->setDetachCB([weakSelf](){
...@@ -275,7 +275,7 @@ void RtmpPusher::onCmd_onStatus(AMFDecoder &dec) { ...@@ -275,7 +275,7 @@ void RtmpPusher::onCmd_onStatus(AMFDecoder &dec) {
} }
void RtmpPusher::onRtmpChunk(RtmpPacket &chunkData) { void RtmpPusher::onRtmpChunk(RtmpPacket &chunkData) {
switch (chunkData.typeId) { switch (chunkData.type_id) {
case MSG_CMD: case MSG_CMD:
case MSG_CMD3: { case MSG_CMD3: {
typedef void (RtmpPusher::*rtmpCMDHandle)(AMFDecoder &dec); typedef void (RtmpPusher::*rtmpCMDHandle)(AMFDecoder &dec);
...@@ -284,9 +284,9 @@ void RtmpPusher::onRtmpChunk(RtmpPacket &chunkData) { ...@@ -284,9 +284,9 @@ void RtmpPusher::onRtmpChunk(RtmpPacket &chunkData) {
g_mapCmd.emplace("_error",&RtmpPusher::onCmd_result); g_mapCmd.emplace("_error",&RtmpPusher::onCmd_result);
g_mapCmd.emplace("_result",&RtmpPusher::onCmd_result); g_mapCmd.emplace("_result",&RtmpPusher::onCmd_result);
g_mapCmd.emplace("onStatus",&RtmpPusher::onCmd_onStatus); g_mapCmd.emplace("onStatus",&RtmpPusher::onCmd_onStatus);
}, []() {}); });
AMFDecoder dec(chunkData.strBuf, 0); AMFDecoder dec(chunkData.buffer, 0);
std::string type = dec.load<std::string>(); std::string type = dec.load<std::string>();
auto it = g_mapCmd.find(type); auto it = g_mapCmd.find(type);
if(it != g_mapCmd.end()){ if(it != g_mapCmd.end()){
...@@ -298,7 +298,7 @@ void RtmpPusher::onRtmpChunk(RtmpPacket &chunkData) { ...@@ -298,7 +298,7 @@ void RtmpPusher::onRtmpChunk(RtmpPacket &chunkData) {
} }
break; break;
default: default:
//WarnL << "unhandled message:" << (int) chunkData.typeId << hexdump(chunkData.strBuf.data(), chunkData.strBuf.size()); //WarnL << "unhandled message:" << (int) chunkData.type_id << hexdump(chunkData.buffer.data(), chunkData.buffer.size());
break; break;
} }
} }
......
...@@ -52,7 +52,7 @@ private: ...@@ -52,7 +52,7 @@ private:
template<typename FUN> template<typename FUN>
inline void addOnResultCB(const FUN &fun) { inline void addOnResultCB(const FUN &fun) {
lock_guard<recursive_mutex> lck(_mtxOnResultCB); lock_guard<recursive_mutex> lck(_mtxOnResultCB);
_mapOnResultCB.emplace(_iReqID, fun); _mapOnResultCB.emplace(_send_req_id, fun);
} }
template<typename FUN> template<typename FUN>
inline void addOnStatusCB(const FUN &fun) { inline void addOnStatusCB(const FUN &fun) {
......
...@@ -30,11 +30,13 @@ namespace mediakit { ...@@ -30,11 +30,13 @@ namespace mediakit {
class RtmpSession: public TcpSession ,public RtmpProtocol , public MediaSourceEvent{ class RtmpSession: public TcpSession ,public RtmpProtocol , public MediaSourceEvent{
public: public:
typedef std::shared_ptr<RtmpSession> Ptr; typedef std::shared_ptr<RtmpSession> Ptr;
RtmpSession(const Socket::Ptr &_sock); RtmpSession(const Socket::Ptr &sock);
virtual ~RtmpSession(); ~RtmpSession() override;
void onRecv(const Buffer::Ptr &pBuf) override;
void onRecv(const Buffer::Ptr &buf) override;
void onError(const SockException &err) override; void onError(const SockException &err) override;
void onManager() override; void onManager() override;
private: private:
void onProcessCmd(AMFDecoder &dec); void onProcessCmd(AMFDecoder &dec);
void onCmd_connect(AMFDecoder &dec); void onCmd_connect(AMFDecoder &dec);
...@@ -55,15 +57,15 @@ private: ...@@ -55,15 +57,15 @@ private:
void onSendMedia(const RtmpPacket::Ptr &pkt); void onSendMedia(const RtmpPacket::Ptr &pkt);
void onSendRawData(const Buffer::Ptr &buffer) override{ void onSendRawData(const Buffer::Ptr &buffer) override{
_ui64TotalBytes += buffer->size(); _total_bytes += buffer->size();
send(buffer); send(buffer);
} }
void onRtmpChunk(RtmpPacket &chunkData) override; void onRtmpChunk(RtmpPacket &chunk_data) override;
template<typename first, typename second> template<typename first, typename second>
inline void sendReply(const char *str, const first &reply, const second &status) { inline void sendReply(const char *str, const first &reply, const second &status) {
AMFEncoder invoke; AMFEncoder invoke;
invoke << str << _dNowReqID << reply << status; invoke << str << _recv_req_id << reply << status;
sendResponse(MSG_CMD, invoke.data()); sendResponse(MSG_CMD, invoke.data());
} }
...@@ -74,29 +76,30 @@ private: ...@@ -74,29 +76,30 @@ private:
void setSocketFlags(); void setSocketFlags();
string getStreamId(const string &str); string getStreamId(const string &str);
void dumpMetadata(const AMFValue &metadata); void dumpMetadata(const AMFValue &metadata);
private: private:
std::string _strTcUrl; bool _paused = false;
MediaInfo _mediaInfo;
double _dNowReqID = 0;
bool _set_meta_data = false; bool _set_meta_data = false;
Ticker _ticker;//data reception time double _recv_req_id = 0;
RtmpMediaSource::RingType::RingReader::Ptr _pRingReader; //total bytes consumed
std::shared_ptr<RtmpMediaSourceImp> _pPublisherSrc; uint64_t _total_bytes = 0;
std::weak_ptr<RtmpMediaSource> _pPlayerSrc;
std::string _tc_url;
//timestamp smoother //timestamp smoother
Stamp _stamp[2]; Stamp _stamp[2];
//total bytes consumed //data receive timeout ticker
uint64_t _ui64TotalBytes = 0; Ticker _ticker;
bool _paused = false; MediaInfo _media_info;
std::weak_ptr<RtmpMediaSource> _player_src;
std::shared_ptr<RtmpMediaSourceImp> _publisher_src;
RtmpMediaSource::RingType::RingReader::Ptr _ring_reader;
}; };
/** /**
* RTMP server with SSL encryption support * RTMP server with SSL encryption support
*/ */
typedef TcpSessionWithSSL<RtmpSession> RtmpSessionWithSSL; typedef TcpSessionWithSSL<RtmpSession> RtmpSessionWithSSL;
} /* namespace mediakit */ } /* namespace mediakit */
#endif /* SRC_RTMP_RTMPSESSION_H_ */ #endif /* SRC_RTMP_RTMPSESSION_H_ */
...@@ -733,28 +733,12 @@ void RtspPlayer::onRecvRTP_l(const RtpPacket::Ptr &rtp, const SdpTrack::Ptr &tra ...@@ -733,28 +733,12 @@ void RtspPlayer::onRecvRTP_l(const RtpPacket::Ptr &rtp, const SdpTrack::Ptr &tra
} }
void RtspPlayer::onPlayResult_l(const SockException &ex , bool handshakeCompleted) { void RtspPlayer::onPlayResult_l(const SockException &ex , bool handshakeCompleted) {
WarnL << ex.getErrCode() << " " << ex.what(); if (ex.getErrCode() == Err_shutdown) {
//shutdown initiated by ourselves, do not trigger the callback
if(!ex){ return;
//play succeeded, reset the rtp receive timeout ticker
_rtp_recv_ticker.resetTime();
weak_ptr<RtspPlayer> weakSelf = dynamic_pointer_cast<RtspPlayer>(shared_from_this());
int timeoutMS = (*this)[kMediaTimeoutMS].as<int>();
//create a timer to detect rtp receive timeout
_rtp_check_timer.reset(new Timer(timeoutMS / 2000.0, [weakSelf,timeoutMS]() {
auto strongSelf=weakSelf.lock();
if(!strongSelf) {
return false;
}
if(strongSelf->_rtp_recv_ticker.elapsedTime() > timeoutMS) {
//timed out receiving rtp media packets
strongSelf->onPlayResult_l(SockException(Err_timeout,"receive rtp timeout"), true);
return false;
}
return true;
}, getPoller()));
} }
WarnL << ex.getErrCode() << " " << ex.what();
if (!handshakeCompleted) { if (!handshakeCompleted) {
//still in the play-start phase //still in the play-start phase
_play_check_timer.reset(); _play_check_timer.reset();
...@@ -769,7 +753,26 @@ void RtspPlayer::onPlayResult_l(const SockException &ex , bool handshakeComplete ...@@ -769,7 +753,26 @@ void RtspPlayer::onPlayResult_l(const SockException &ex , bool handshakeComplete
onResume(); onResume();
} }
if(ex){ if (!ex) {
//play succeeded, reset the rtp receive timeout ticker
_rtp_recv_ticker.resetTime();
int timeoutMS = (*this)[kMediaTimeoutMS].as<int>();
weak_ptr<RtspPlayer> weakSelf = dynamic_pointer_cast<RtspPlayer>(shared_from_this());
auto lam = [weakSelf, timeoutMS]() {
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
return false;
}
if (strongSelf->_rtp_recv_ticker.elapsedTime() > timeoutMS) {
//timed out receiving rtp media packets
strongSelf->onPlayResult_l(SockException(Err_timeout, "receive rtp timeout"), true);
return false;
}
return true;
};
//create a timer to detect rtp receive timeout
_rtp_check_timer = std::make_shared<Timer>(timeoutMS / 2000.0, lam, getPoller());
} else {
teardown(); teardown();
} }
} }
......
...@@ -1123,7 +1123,7 @@ inline void RtspSession::onSendRtpPacket(const RtpPacket::Ptr &pkt){ ...@@ -1123,7 +1123,7 @@ inline void RtspSession::onSendRtpPacket(const RtpPacket::Ptr &pkt){
//send rtcp every 5 second //send rtcp every 5 second
ticker.resetTime(); ticker.resetTime();
//store directly in network byte order //store directly in network byte order
memcpy(&counter.timeStamp, pkt->data() + 8, 4); memcpy(&counter.time_stamp, pkt->data() + 8, 4);
sendSenderReport(_rtp_type == Rtsp::RTP_TCP, track_index); sendSenderReport(_rtp_type == Rtsp::RTP_TCP, track_index);
} }
#endif #endif
......