Commit fbd711a6 by xiongziliang

Tidy up code

parent 42559146
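In substance, this commit renames camelCase members to snake_case throughout the RTMP code: in RtmpPacket, typeId->type_id, bodySize->body_size, timeStamp->time_stamp, hasAbsStamp->is_abs_stamp, tsField->ts_field, streamId->stream_index, chunkId->chunk_id and strBuf->buffer, with matching renames in RtmpDemuxer, FlvMuxer, RtmpPlayer, RtmpProtocol and RtmpSession, plus smaller cleanups (member reordering, an ADTS frame-length fix in dumpAacConfig, reshuffled timer setup in RtspPlayer::onPlayResult_l). A minimal sketch of packet-building code after the rename, mirroring the CommonRtmpEncoder hunk below (std::make_shared stands in for the ResourcePoolHelper used there):

// Building an RTMP audio packet with the renamed RtmpPacket fields (post-commit style).
static RtmpPacket::Ptr makeAudioPacket(uint8_t flv_flags, const char *data, size_t len, uint32_t dts) {
    auto pkt = std::make_shared<RtmpPacket>();
    pkt->buffer.push_back((char) flv_flags);   // FLV AudioTagHeader flags byte
    pkt->buffer.append(data, len);             // raw payload
    pkt->body_size = pkt->buffer.size();
    pkt->chunk_id = CHUNK_AUDIO;
    pkt->stream_index = STREAM_MEDIA;
    pkt->time_stamp = dts;
    pkt->type_id = MSG_AUDIO;
    return pkt;
}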
......@@ -174,7 +174,7 @@ public:
}
uint32_t getStamp(const RtmpPacket::Ptr &packet) {
return packet->timeStamp;
return packet->time_stamp;
}
bool isFlushAble(bool is_video, bool is_key, uint32_t new_stamp, int cache_size);
......
......@@ -144,7 +144,7 @@ int dumpAacConfig(const string &config, int length, uint8_t *out, int out_size)
#ifndef ENABLE_MP4
AdtsHeader header;
parseAacConfig(config, header);
header.aac_frame_length = length;
header.aac_frame_length = ADTS_HEADER_LEN + length;
dumpAdtsHeader(header, out);
return ADTS_HEADER_LEN;
#else
......
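For context on the fix above: the 13-bit aac_frame_length field of an ADTS header counts the whole frame, that is the 7-byte header (ADTS_HEADER_LEN, no CRC) plus the raw AAC payload, so adding ADTS_HEADER_LEN to length gives the correct value, assuming length is the raw payload size. A minimal sketch of how that 13-bit value lands in header bytes 3 to 5 (helper name is hypothetical; dumpAdtsHeader in the project presumably does the equivalent):

// Pack aac_frame_length = header + payload bytes into an ADTS header (no CRC).
static void writeAdtsFrameLength(uint8_t hdr[7], size_t payload_len) {
    uint16_t frame_len = (uint16_t) (7 /*ADTS_HEADER_LEN*/ + payload_len);
    hdr[3] = (hdr[3] & 0xFC) | ((frame_len >> 11) & 0x03); // bits 12..11; keep channel/copyright bits
    hdr[4] = (frame_len >> 3) & 0xFF;                       // bits 10..3
    hdr[5] = (hdr[5] & 0x1F) | ((frame_len & 0x07) << 5);   // bits 2..0; keep buffer-fullness bits
}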
......@@ -21,11 +21,11 @@ static string getAacCfg(const RtmpPacket &thiz) {
if (!thiz.isCfgFrame()) {
return ret;
}
if (thiz.strBuf.size() < 4) {
if (thiz.buffer.size() < 4) {
WarnL << "bad aac cfg!";
return ret;
}
ret = thiz.strBuf.substr(2);
ret = thiz.buffer.substr(2);
return ret;
}
......@@ -37,7 +37,7 @@ bool AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool) {
}
if (!_aac_cfg.empty()) {
onGetAAC(pkt->strBuf.data() + 2, pkt->strBuf.size() - 2, pkt->timeStamp);
onGetAAC(pkt->buffer.data() + 2, pkt->buffer.size() - 2, pkt->time_stamp);
}
return false;
}
......@@ -97,21 +97,21 @@ void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if(!_aac_cfg.empty()){
RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj();
rtmpPkt->strBuf.clear();
rtmpPkt->buffer.clear();
//header
uint8_t is_config = false;
rtmpPkt->strBuf.push_back(_audio_flv_flags);
rtmpPkt->strBuf.push_back(!is_config);
rtmpPkt->buffer.push_back(_audio_flv_flags);
rtmpPkt->buffer.push_back(!is_config);
//aac data
rtmpPkt->strBuf.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
rtmpPkt->buffer.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
rtmpPkt->bodySize = rtmpPkt->strBuf.size();
rtmpPkt->chunkId = CHUNK_AUDIO;
rtmpPkt->streamId = STREAM_MEDIA;
rtmpPkt->timeStamp = frame->dts();
rtmpPkt->typeId = MSG_AUDIO;
rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunk_id = CHUNK_AUDIO;
rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->time_stamp = frame->dts();
rtmpPkt->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(rtmpPkt, false);
}
}
......@@ -119,20 +119,20 @@ void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
void AACRtmpEncoder::makeAudioConfigPkt() {
_audio_flv_flags = getAudioRtmpFlags(std::make_shared<AACTrack>(_aac_cfg));
RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj();
rtmpPkt->strBuf.clear();
rtmpPkt->buffer.clear();
//header
uint8_t is_config = true;
rtmpPkt->strBuf.push_back(_audio_flv_flags);
rtmpPkt->strBuf.push_back(!is_config);
rtmpPkt->buffer.push_back(_audio_flv_flags);
rtmpPkt->buffer.push_back(!is_config);
//aac config
rtmpPkt->strBuf.append(_aac_cfg);
rtmpPkt->buffer.append(_aac_cfg);
rtmpPkt->bodySize = rtmpPkt->strBuf.size();
rtmpPkt->chunkId = CHUNK_AUDIO;
rtmpPkt->streamId = STREAM_MEDIA;
rtmpPkt->timeStamp = 0;
rtmpPkt->typeId = MSG_AUDIO;
rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunk_id = CHUNK_AUDIO;
rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->time_stamp = 0;
rtmpPkt->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(rtmpPkt, false);
}
......
......@@ -31,8 +31,8 @@ void CommonRtmpDecoder::obtainFrame() {
bool CommonRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &rtmp, bool) {
//copy the payload
_frame->_buffer.assign(rtmp->strBuf.data() + 1, rtmp->strBuf.size() - 1);
_frame->_dts = rtmp->timeStamp;
_frame->_buffer.assign(rtmp->buffer.data() + 1, rtmp->buffer.size() - 1);
_frame->_dts = rtmp->time_stamp;
//write into the ring buffer
RtmpCodec::inputFrame(_frame);
//create the next frame
......@@ -51,16 +51,16 @@ void CommonRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
return;
}
RtmpPacket::Ptr rtmp = ResourcePoolHelper<RtmpPacket>::obtainObj();
rtmp->strBuf.clear();
rtmp->buffer.clear();
//header
rtmp->strBuf.push_back(_audio_flv_flags);
rtmp->buffer.push_back(_audio_flv_flags);
//data
rtmp->strBuf.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
rtmp->bodySize = rtmp->strBuf.size();
rtmp->chunkId = CHUNK_AUDIO;
rtmp->streamId = STREAM_MEDIA;
rtmp->timeStamp = frame->dts();
rtmp->typeId = MSG_AUDIO;
rtmp->buffer.append(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize());
rtmp->body_size = rtmp->buffer.size();
rtmp->chunk_id = CHUNK_AUDIO;
rtmp->stream_index = STREAM_MEDIA;
rtmp->time_stamp = frame->dts();
rtmp->type_id = MSG_AUDIO;
RtmpCodec::inputRtmp(rtmp, false);
}
......
......@@ -39,18 +39,18 @@ static string getH264SPS(const RtmpPacket &thiz) {
if (!thiz.isCfgFrame()) {
return ret;
}
if (thiz.strBuf.size() < 13) {
if (thiz.buffer.size() < 13) {
WarnL << "bad H264 cfg!";
return ret;
}
uint16_t sps_size ;
memcpy(&sps_size, thiz.strBuf.data() + 11,2);
memcpy(&sps_size, thiz.buffer.data() + 11, 2);
sps_size = ntohs(sps_size);
if ((int) thiz.strBuf.size() < 13 + sps_size) {
if ((int) thiz.buffer.size() < 13 + sps_size) {
WarnL << "bad H264 cfg!";
return ret;
}
ret.assign(thiz.strBuf.data() + 13, sps_size);
ret.assign(thiz.buffer.data() + 13, sps_size);
return ret;
}
......@@ -66,27 +66,27 @@ static string getH264PPS(const RtmpPacket &thiz) {
if (!thiz.isCfgFrame()) {
return ret;
}
if (thiz.strBuf.size() < 13) {
if (thiz.buffer.size() < 13) {
WarnL << "bad H264 cfg!";
return ret;
}
uint16_t sps_size ;
memcpy(&sps_size,thiz.strBuf.data() + 11,2);
memcpy(&sps_size, thiz.buffer.data() + 11, 2);
sps_size = ntohs(sps_size);
if ((int) thiz.strBuf.size() < 13 + sps_size + 1 + 2) {
if ((int) thiz.buffer.size() < 13 + sps_size + 1 + 2) {
WarnL << "bad H264 cfg!";
return ret;
}
uint16_t pps_size ;
memcpy(&pps_size, thiz.strBuf.data() + 13 + sps_size + 1,2);
memcpy(&pps_size, thiz.buffer.data() + 13 + sps_size + 1, 2);
pps_size = ntohs(pps_size);
if ((int) thiz.strBuf.size() < 13 + sps_size + 1 + 2 + pps_size) {
if ((int) thiz.buffer.size() < 13 + sps_size + 1 + 2 + pps_size) {
WarnL << "bad H264 cfg!";
return ret;
}
ret.assign(thiz.strBuf.data() + 13 + sps_size + 1 + 2, pps_size);
ret.assign(thiz.buffer.data() + 13 + sps_size + 1 + 2, pps_size);
return ret;
}
......@@ -95,27 +95,27 @@ bool H264RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) {
//cache sps/pps so they can be inserted before subsequent I-frames
_sps = getH264SPS(*pkt);
_pps = getH264PPS(*pkt);
onGetH264(_sps.data(), _sps.size(), pkt->timeStamp , pkt->timeStamp);
onGetH264(_pps.data(), _pps.size(), pkt->timeStamp , pkt->timeStamp);
onGetH264(_sps.data(), _sps.size(), pkt->time_stamp , pkt->time_stamp);
onGetH264(_pps.data(), _pps.size(), pkt->time_stamp , pkt->time_stamp);
return false;
}
if (pkt->strBuf.size() > 9) {
uint32_t iTotalLen = pkt->strBuf.size();
if (pkt->buffer.size() > 9) {
uint32_t iTotalLen = pkt->buffer.size();
uint32_t iOffset = 5;
uint8_t *cts_ptr = (uint8_t *) (pkt->strBuf.data() + 2);
uint8_t *cts_ptr = (uint8_t *) (pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->timeStamp + cts;
auto pts = pkt->time_stamp + cts;
while(iOffset + 4 < iTotalLen){
uint32_t iFrameLen;
memcpy(&iFrameLen, pkt->strBuf.data() + iOffset, 4);
memcpy(&iFrameLen, pkt->buffer.data() + iOffset, 4);
iFrameLen = ntohl(iFrameLen);
iOffset += 4;
if(iFrameLen + iOffset > iTotalLen){
break;
}
onGetH264(pkt->strBuf.data() + iOffset, iFrameLen, pkt->timeStamp , pts);
onGetH264(pkt->buffer.data() + iOffset, iFrameLen, pkt->time_stamp , pts);
iOffset += iFrameLen;
}
}
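The (x + 0xff800000) ^ 0xff800000 expression above sign-extends the 24-bit FLV CompositionTime offset so that pts = time_stamp + cts also works for negative offsets; the H265 decoder further down uses the same trick. An equivalent, more explicit formulation (illustrative only, the decoders keep the bit-trick form):

// Sign-extend a 24-bit CompositionTime field read from an FLV/RTMP video tag.
static int32_t signExtend24(uint32_t cts24) {
    cts24 &= 0x00FFFFFF;                        // keep only the 24-bit field
    if (cts24 & 0x00800000) {                   // sign bit set, negative offset
        return (int32_t) (cts24 | 0xFF000000);  // fill the upper byte with ones
    }
    return (int32_t) cts24;
}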
......@@ -190,7 +190,7 @@ void H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
}
}
if(_lastPacket && _lastPacket->timeStamp != frame->dts()) {
if(_lastPacket && _lastPacket->time_stamp != frame->dts()) {
RtmpCodec::inputRtmp(_lastPacket, _lastPacket->isVideoKeyFrame());
_lastPacket = nullptr;
}
......@@ -202,23 +202,23 @@ void H264RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
flags |= (((frame->configFrame() || frame->keyFrame()) ? FLV_KEY_FRAME : FLV_INTER_FRAME) << 4);
_lastPacket = ResourcePoolHelper<RtmpPacket>::obtainObj();
_lastPacket->strBuf.clear();
_lastPacket->strBuf.push_back(flags);
_lastPacket->strBuf.push_back(!is_config);
_lastPacket->buffer.clear();
_lastPacket->buffer.push_back(flags);
_lastPacket->buffer.push_back(!is_config);
auto cts = frame->pts() - frame->dts();
cts = htonl(cts);
_lastPacket->strBuf.append((char *)&cts + 1, 3);
_lastPacket->buffer.append((char *)&cts + 1, 3);
_lastPacket->chunkId = CHUNK_VIDEO;
_lastPacket->streamId = STREAM_MEDIA;
_lastPacket->timeStamp = frame->dts();
_lastPacket->typeId = MSG_VIDEO;
_lastPacket->chunk_id = CHUNK_VIDEO;
_lastPacket->stream_index = STREAM_MEDIA;
_lastPacket->time_stamp = frame->dts();
_lastPacket->type_id = MSG_VIDEO;
}
auto size = htonl(iLen);
_lastPacket->strBuf.append((char *) &size, 4);
_lastPacket->strBuf.append(pcData, iLen);
_lastPacket->bodySize = _lastPacket->strBuf.size();
_lastPacket->buffer.append((char *) &size, 4);
_lastPacket->buffer.append(pcData, iLen);
_lastPacket->body_size = _lastPacket->buffer.size();
}
void H264RtmpEncoder::makeVideoConfigPkt() {
......@@ -227,38 +227,38 @@ void H264RtmpEncoder::makeVideoConfigPkt() {
bool is_config = true;
RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj();
rtmpPkt->strBuf.clear();
rtmpPkt->buffer.clear();
//header
rtmpPkt->strBuf.push_back(flags);
rtmpPkt->strBuf.push_back(!is_config);
rtmpPkt->buffer.push_back(flags);
rtmpPkt->buffer.push_back(!is_config);
//cts
rtmpPkt->strBuf.append("\x0\x0\x0", 3);
rtmpPkt->buffer.append("\x0\x0\x0", 3);
//AVCDecoderConfigurationRecord start
rtmpPkt->strBuf.push_back(1); // version
rtmpPkt->strBuf.push_back(_sps[1]); // profile
rtmpPkt->strBuf.push_back(_sps[2]); // compat
rtmpPkt->strBuf.push_back(_sps[3]); // level
rtmpPkt->strBuf.push_back(0xff); // 6 bits reserved + 2 bits nal size length - 1 (11)
rtmpPkt->strBuf.push_back(0xe1); // 3 bits reserved + 5 bits number of sps (00001)
rtmpPkt->buffer.push_back(1); // version
rtmpPkt->buffer.push_back(_sps[1]); // profile
rtmpPkt->buffer.push_back(_sps[2]); // compat
rtmpPkt->buffer.push_back(_sps[3]); // level
rtmpPkt->buffer.push_back(0xff); // 6 bits reserved + 2 bits nal size length - 1 (11)
rtmpPkt->buffer.push_back(0xe1); // 3 bits reserved + 5 bits number of sps (00001)
//sps
uint16_t size = _sps.size();
size = htons(size);
rtmpPkt->strBuf.append((char *) &size, 2);
rtmpPkt->strBuf.append(_sps);
rtmpPkt->buffer.append((char *) &size, 2);
rtmpPkt->buffer.append(_sps);
//pps
rtmpPkt->strBuf.push_back(1); // version
rtmpPkt->buffer.push_back(1); // version
size = _pps.size();
size = htons(size);
rtmpPkt->strBuf.append((char *) &size, 2);
rtmpPkt->strBuf.append(_pps);
rtmpPkt->buffer.append((char *) &size, 2);
rtmpPkt->buffer.append(_pps);
rtmpPkt->bodySize = rtmpPkt->strBuf.size();
rtmpPkt->chunkId = CHUNK_VIDEO;
rtmpPkt->streamId = STREAM_MEDIA;
rtmpPkt->timeStamp = 0;
rtmpPkt->typeId = MSG_VIDEO;
rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunk_id = CHUNK_VIDEO;
rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->time_stamp = 0;
rtmpPkt->type_id = MSG_VIDEO;
RtmpCodec::inputRtmp(rtmpPkt, false);
}
......
......@@ -43,13 +43,13 @@ static bool getH265ConfigFrame(const RtmpPacket &thiz,string &frame) {
if (!thiz.isCfgFrame()) {
return false;
}
if (thiz.strBuf.size() < 6) {
if (thiz.buffer.size() < 6) {
WarnL << "bad H265 cfg!";
return false;
}
auto extra = thiz.strBuf.data() + 5;
auto bytes = thiz.strBuf.size() - 5;
auto extra = thiz.buffer.data() + 5;
auto bytes = thiz.buffer.size() - 5;
struct mpeg4_hevc_t hevc = {0};
if (mpeg4_hevc_decoder_configuration_record_load((uint8_t *) extra, bytes, &hevc) > 0) {
......@@ -70,7 +70,7 @@ bool H265RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) {
#ifdef ENABLE_MP4
string config;
if(getH265ConfigFrame(*pkt,config)){
onGetH265(config.data(), config.size(), pkt->timeStamp , pkt->timeStamp);
onGetH265(config.data(), config.size(), pkt->time_stamp , pkt->time_stamp);
}
#else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善";
......@@ -78,22 +78,22 @@ bool H265RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) {
return false;
}
if (pkt->strBuf.size() > 9) {
uint32_t iTotalLen = pkt->strBuf.size();
if (pkt->buffer.size() > 9) {
uint32_t iTotalLen = pkt->buffer.size();
uint32_t iOffset = 5;
uint8_t *cts_ptr = (uint8_t *) (pkt->strBuf.data() + 2);
uint8_t *cts_ptr = (uint8_t *) (pkt->buffer.data() + 2);
int32_t cts = (((cts_ptr[0] << 16) | (cts_ptr[1] << 8) | (cts_ptr[2])) + 0xff800000) ^ 0xff800000;
auto pts = pkt->timeStamp + cts;
auto pts = pkt->time_stamp + cts;
while(iOffset + 4 < iTotalLen){
uint32_t iFrameLen;
memcpy(&iFrameLen, pkt->strBuf.data() + iOffset, 4);
memcpy(&iFrameLen, pkt->buffer.data() + iOffset, 4);
iFrameLen = ntohl(iFrameLen);
iOffset += 4;
if(iFrameLen + iOffset > iTotalLen){
break;
}
onGetH265(pkt->strBuf.data() + iOffset, iFrameLen, pkt->timeStamp , pts);
onGetH265(pkt->buffer.data() + iOffset, iFrameLen, pkt->time_stamp , pts);
iOffset += iFrameLen;
}
}
......@@ -176,7 +176,7 @@ void H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
return;
}
if(_lastPacket && _lastPacket->timeStamp != frame->dts()) {
if(_lastPacket && _lastPacket->time_stamp != frame->dts()) {
RtmpCodec::inputRtmp(_lastPacket, _lastPacket->isVideoKeyFrame());
_lastPacket = nullptr;
}
......@@ -188,23 +188,23 @@ void H265RtmpEncoder::inputFrame(const Frame::Ptr &frame) {
flags |= (((frame->configFrame() || frame->keyFrame()) ? FLV_KEY_FRAME : FLV_INTER_FRAME) << 4);
_lastPacket = ResourcePoolHelper<RtmpPacket>::obtainObj();
_lastPacket->strBuf.clear();
_lastPacket->strBuf.push_back(flags);
_lastPacket->strBuf.push_back(!is_config);
_lastPacket->buffer.clear();
_lastPacket->buffer.push_back(flags);
_lastPacket->buffer.push_back(!is_config);
auto cts = frame->pts() - frame->dts();
cts = htonl(cts);
_lastPacket->strBuf.append((char *)&cts + 1, 3);
_lastPacket->buffer.append((char *)&cts + 1, 3);
_lastPacket->chunkId = CHUNK_VIDEO;
_lastPacket->streamId = STREAM_MEDIA;
_lastPacket->timeStamp = frame->dts();
_lastPacket->typeId = MSG_VIDEO;
_lastPacket->chunk_id = CHUNK_VIDEO;
_lastPacket->stream_index = STREAM_MEDIA;
_lastPacket->time_stamp = frame->dts();
_lastPacket->type_id = MSG_VIDEO;
}
auto size = htonl(iLen);
_lastPacket->strBuf.append((char *) &size, 4);
_lastPacket->strBuf.append(pcData, iLen);
_lastPacket->bodySize = _lastPacket->strBuf.size();
_lastPacket->buffer.append((char *) &size, 4);
_lastPacket->buffer.append(pcData, iLen);
_lastPacket->body_size = _lastPacket->buffer.size();
}
void H265RtmpEncoder::makeVideoConfigPkt() {
......@@ -214,13 +214,13 @@ void H265RtmpEncoder::makeVideoConfigPkt() {
bool is_config = true;
RtmpPacket::Ptr rtmpPkt = ResourcePoolHelper<RtmpPacket>::obtainObj();
rtmpPkt->strBuf.clear();
rtmpPkt->buffer.clear();
//header
rtmpPkt->strBuf.push_back(flags);
rtmpPkt->strBuf.push_back(!is_config);
rtmpPkt->buffer.push_back(flags);
rtmpPkt->buffer.push_back(!is_config);
//cts
rtmpPkt->strBuf.append("\x0\x0\x0", 3);
rtmpPkt->buffer.append("\x0\x0\x0", 3);
struct mpeg4_hevc_t hevc = {0};
string vps_sps_pps = string("\x00\x00\x00\x01", 4) + _vps +
......@@ -235,13 +235,13 @@ void H265RtmpEncoder::makeVideoConfigPkt() {
}
//HEVCDecoderConfigurationRecord
rtmpPkt->strBuf.append((char *)extra_data, extra_data_size);
rtmpPkt->buffer.append((char *)extra_data, extra_data_size);
rtmpPkt->bodySize = rtmpPkt->strBuf.size();
rtmpPkt->chunkId = CHUNK_VIDEO;
rtmpPkt->streamId = STREAM_MEDIA;
rtmpPkt->timeStamp = 0;
rtmpPkt->typeId = MSG_VIDEO;
rtmpPkt->body_size = rtmpPkt->buffer.size();
rtmpPkt->chunk_id = CHUNK_VIDEO;
rtmpPkt->stream_index = STREAM_MEDIA;
rtmpPkt->time_stamp = 0;
rtmpPkt->type_id = MSG_VIDEO;
RtmpCodec::inputRtmp(rtmpPkt, false);
#else
WarnL << "请开启MP4相关功能并使能\"ENABLE_MP4\",否则对H265-RTMP支持不完善";
......
......@@ -73,10 +73,10 @@ void FlvMuxer::onWriteFlvHeader(const RtmpMediaSource::Ptr &mediaSrc) {
bool is_have_audio = false,is_have_video = false;
mediaSrc->getConfigFrame([&](const RtmpPacket::Ptr &pkt){
if(pkt->typeId == MSG_VIDEO){
if(pkt->type_id == MSG_VIDEO){
is_have_video = true;
}
if(pkt->typeId == MSG_AUDIO){
if(pkt->type_id == MSG_AUDIO){
is_have_audio = true;
}
});
......@@ -133,16 +133,16 @@ public:
#pragma pack(pop)
#endif // defined(_WIN32)
void FlvMuxer::onWriteFlvTag(const RtmpPacket::Ptr &pkt, uint32_t ui32TimeStamp , bool flush) {
onWriteFlvTag(pkt->typeId,pkt,ui32TimeStamp, flush);
void FlvMuxer::onWriteFlvTag(const RtmpPacket::Ptr &pkt, uint32_t time_stamp , bool flush) {
onWriteFlvTag(pkt->type_id, pkt, time_stamp, flush);
}
void FlvMuxer::onWriteFlvTag(uint8_t ui8Type, const Buffer::Ptr &buffer, uint32_t ui32TimeStamp, bool flush) {
void FlvMuxer::onWriteFlvTag(uint8_t type, const Buffer::Ptr &buffer, uint32_t time_stamp, bool flush) {
RtmpTagHeader header;
header.type = ui8Type;
header.type = type;
set_be24(header.data_size, buffer->size());
header.timestamp_ex = (uint8_t) ((ui32TimeStamp >> 24) & 0xff);
set_be24(header.timestamp,ui32TimeStamp & 0xFFFFFF);
header.timestamp_ex = (uint8_t) ((time_stamp >> 24) & 0xff);
set_be24(header.timestamp, time_stamp & 0xFFFFFF);
//tag header
onWrite(std::make_shared<BufferRaw>((char *)&header, sizeof(header)), false);
//tag data
......@@ -154,7 +154,7 @@ void FlvMuxer::onWriteFlvTag(uint8_t ui8Type, const Buffer::Ptr &buffer, uint32_
void FlvMuxer::onWriteRtmp(const RtmpPacket::Ptr &pkt,bool flush) {
int64_t dts_out;
_stamp[pkt->typeId % 2].revise(pkt->timeStamp, 0, dts_out, dts_out);
_stamp[pkt->type_id % 2].revise(pkt->time_stamp, 0, dts_out, dts_out);
onWriteFlvTag(pkt, dts_out,flush);
}
......
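For reference while reading onWriteFlvTag above: an FLV tag header is 11 bytes with big-endian 24-bit fields, and the upper 8 bits of the 32-bit timestamp go into a separate extension byte, which is exactly how the code splits time_stamp. A sketch of the layout (field names follow the code where visible; stream_id is per the FLV spec and assumed here):

#pragma pack(push, 1)
struct FlvTagHeaderSketch {
    uint8_t type;          // 8 = audio, 9 = video, 18 = script data
    uint8_t data_size[3];  // tag payload size, big-endian 24 bit
    uint8_t timestamp[3];  // lower 24 bits of the DTS, big-endian
    uint8_t timestamp_ex;  // bits 24..31 of the DTS
    uint8_t stream_id[3];  // always 0
};
#pragma pack(pop)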
......@@ -25,21 +25,23 @@ public:
FlvMuxer();
virtual ~FlvMuxer();
void stop();
protected:
void start(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr &media);
void start(const EventPoller::Ptr &poller, const RtmpMediaSource::Ptr &media);
virtual void onWrite(const Buffer::Ptr &data, bool flush) = 0;
virtual void onDetach() = 0;
virtual std::shared_ptr<FlvMuxer> getSharedPtr() = 0;
private:
void onWriteFlvHeader(const RtmpMediaSource::Ptr &media);
void onWriteRtmp(const RtmpPacket::Ptr &pkt,bool flush);
void onWriteFlvTag(const RtmpPacket::Ptr &pkt, uint32_t ui32TimeStamp, bool flush);
void onWriteFlvTag(uint8_t ui8Type, const Buffer::Ptr &buffer, uint32_t ui32TimeStamp, bool flush);
void onWriteRtmp(const RtmpPacket::Ptr &pkt, bool flush);
void onWriteFlvTag(const RtmpPacket::Ptr &pkt, uint32_t time_stamp, bool flush);
void onWriteFlvTag(uint8_t type, const Buffer::Ptr &buffer, uint32_t time_stamp, bool flush);
private:
RtmpMediaSource::RingType::RingReader::Ptr _ring_reader;
//timestamp smoother
Stamp _stamp[2];
RtmpMediaSource::RingType::RingReader::Ptr _ring_reader;
};
class FlvRecorder : public FlvMuxer , public std::enable_shared_from_this<FlvRecorder>{
......@@ -47,12 +49,14 @@ public:
typedef std::shared_ptr<FlvRecorder> Ptr;
FlvRecorder();
virtual ~FlvRecorder();
void startRecord(const EventPoller::Ptr &poller,const string &vhost,const string &app,const string &stream,const string &file_path);
void startRecord(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr &media,const string &file_path);
void startRecord(const EventPoller::Ptr &poller, const RtmpMediaSource::Ptr &media, const string &file_path);
void startRecord(const EventPoller::Ptr &poller, const string &vhost, const string &app, const string &stream, const string &file_path);
private:
virtual void onWrite(const Buffer::Ptr &data, bool flush) override ;
virtual void onDetach() override;
virtual std::shared_ptr<FlvMuxer> getSharedPtr() override;
private:
std::shared_ptr<FILE> _file;
recursive_mutex _file_mtx;
......@@ -60,5 +64,4 @@ private:
}//namespace mediakit
#endif //ZLMEDIAKIT_FLVMUXER_H
......@@ -23,9 +23,6 @@
using namespace toolkit;
#define PORT 1935
#define DEFAULT_CHUNK_LEN 128
#if !defined(_WIN32)
#define PACKED __attribute__((packed))
#else
......@@ -33,6 +30,7 @@ using namespace toolkit;
#endif //!defined(_WIN32)
#define DEFAULT_CHUNK_LEN 128
#define HANDSHAKE_PLAINTEXT 0x03
#define RANDOM_LEN (1536 - 8)
......@@ -91,22 +89,24 @@ class RtmpHandshake {
public:
RtmpHandshake(uint32_t _time, uint8_t *_random = nullptr) {
_time = htonl(_time);
memcpy(timeStamp, &_time, 4);
memcpy(time_stamp, &_time, 4);
if (!_random) {
random_generate((char *) random, sizeof(random));
} else {
memcpy(random, _random, sizeof(random));
}
}
uint8_t timeStamp[4];
uint8_t time_stamp[4];
uint8_t zero[4] = {0};
uint8_t random[RANDOM_LEN];
void random_generate(char* bytes, int size) {
static char cdata[] = { 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x2d, 0x72,
0x74, 0x6d, 0x70, 0x2d, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
0x2d, 0x77, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x2d, 0x77, 0x69,
0x6e, 0x74, 0x65, 0x72, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
0x40, 0x31, 0x32, 0x36, 0x2e, 0x63, 0x6f, 0x6d };
void random_generate(char *bytes, int size) {
static char cdata[] = {0x73, 0x69, 0x6d, 0x70, 0x6c, 0x65, 0x2d, 0x72,
0x74, 0x6d, 0x70, 0x2d, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
0x2d, 0x77, 0x69, 0x6e, 0x6c, 0x69, 0x6e, 0x2d, 0x77, 0x69,
0x6e, 0x74, 0x65, 0x72, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72,
0x40, 0x31, 0x32, 0x36, 0x2e, 0x63, 0x6f, 0x6d};
for (int i = 0; i < size; i++) {
bytes[i] = cdata[rand() % (sizeof(cdata) - 1)];
}
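The RtmpHandshake block above corresponds to the RTMP C1/S1 handshake payload: 4 bytes of timestamp, 4 zero bytes and RANDOM_LEN (1536 - 8 = 1528) random bytes, 1536 bytes in total, while C0/S0 is the single version byte HANDSHAKE_PLAINTEXT (0x03). A hedged sketch of emitting C0 + C1, assuming the struct packs to exactly 1536 bytes and using a generic send callback (the real client path is startClientSession in RtmpProtocol):

// Illustrative only: write C0 (version byte) followed by C1 (time + zero + random).
static void sendC0C1(const std::function<void(const char *, size_t)> &send) {
    char c0 = HANDSHAKE_PLAINTEXT;                 // 0x03, plain-text handshake
    RtmpHandshake c1((uint32_t) ::time(nullptr));  // fills 4B time, 4B zero, 1528B random
    send(&c0, 1);
    send((const char *) &c1, sizeof(c1));          // 1536 bytes
}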
......@@ -116,10 +116,10 @@ public:
class RtmpHeader {
public:
uint8_t flags;
uint8_t timeStamp[3];
uint8_t bodySize[3];
uint8_t typeId;
uint8_t streamId[4]; /* Note, this is little-endian while others are BE */
uint8_t time_stamp[3];
uint8_t body_size[3];
uint8_t type_id;
uint8_t stream_index[4]; /* Note, this is little-endian while others are BE */
}PACKED;
#if defined(_WIN32)
......@@ -129,21 +129,23 @@ public:
class RtmpPacket : public Buffer{
public:
typedef std::shared_ptr<RtmpPacket> Ptr;
uint8_t typeId;
uint32_t bodySize = 0;
uint32_t timeStamp = 0;
bool hasAbsStamp = false;
uint32_t tsField = 0;
uint32_t streamId;
uint32_t chunkId;
std::string strBuf;
uint8_t type_id;
uint32_t body_size = 0;
uint32_t time_stamp = 0;
bool is_abs_stamp = false;
uint32_t ts_field = 0;
uint32_t stream_index;
uint32_t chunk_id;
std::string buffer;
public:
char *data() const override{
return (char*)strBuf.data();
return (char*)buffer.data();
}
uint32_t size() const override {
return strBuf.size();
};
return buffer.size();
}
public:
RtmpPacket() = default;
RtmpPacket(const RtmpPacket &that) = delete;
......@@ -151,58 +153,64 @@ public:
RtmpPacket &operator=(RtmpPacket &&that) = delete;
RtmpPacket(RtmpPacket &&that){
typeId = that.typeId;
bodySize = that.bodySize;
timeStamp = that.timeStamp;
hasAbsStamp = that.hasAbsStamp;
tsField = that.tsField;
streamId = that.streamId;
chunkId = that.chunkId;
strBuf = std::move(that.strBuf);
type_id = that.type_id;
body_size = that.body_size;
time_stamp = that.time_stamp;
is_abs_stamp = that.is_abs_stamp;
ts_field = that.ts_field;
stream_index = that.stream_index;
chunk_id = that.chunk_id;
buffer = std::move(that.buffer);
}
bool isVideoKeyFrame() const {
return typeId == MSG_VIDEO && (uint8_t) strBuf[0] >> 4 == FLV_KEY_FRAME && (uint8_t) strBuf[1] == 1;
return type_id == MSG_VIDEO && (uint8_t) buffer[0] >> 4 == FLV_KEY_FRAME && (uint8_t) buffer[1] == 1;
}
bool isCfgFrame() const {
switch (typeId){
case MSG_VIDEO : return strBuf[1] == 0;
switch (type_id){
case MSG_VIDEO : return buffer[1] == 0;
case MSG_AUDIO : {
switch (getMediaType()){
case FLV_CODEC_AAC : return strBuf[1] == 0;
case FLV_CODEC_AAC : return buffer[1] == 0;
default : return false;
}
}
default : return false;
}
}
int getMediaType() const {
switch (typeId) {
case MSG_VIDEO : return (uint8_t) strBuf[0] & 0x0F;
case MSG_AUDIO : return (uint8_t) strBuf[0] >> 4;
switch (type_id) {
case MSG_VIDEO : return (uint8_t) buffer[0] & 0x0F;
case MSG_AUDIO : return (uint8_t) buffer[0] >> 4;
default : return 0;
}
}
int getAudioSampleRate() const {
if (typeId != MSG_AUDIO) {
if (type_id != MSG_AUDIO) {
return 0;
}
int flvSampleRate = ((uint8_t) strBuf[0] & 0x0C) >> 2;
int flvSampleRate = ((uint8_t) buffer[0] & 0x0C) >> 2;
const static int sampleRate[] = { 5512, 11025, 22050, 44100 };
return sampleRate[flvSampleRate];
}
int getAudioSampleBit() const {
if (typeId != MSG_AUDIO) {
if (type_id != MSG_AUDIO) {
return 0;
}
int flvSampleBit = ((uint8_t) strBuf[0] & 0x02) >> 1;
int flvSampleBit = ((uint8_t) buffer[0] & 0x02) >> 1;
const static int sampleBit[] = { 8, 16 };
return sampleBit[flvSampleBit];
}
int getAudioChannel() const {
if (typeId != MSG_AUDIO) {
if (type_id != MSG_AUDIO) {
return 0;
}
int flvStereoOrMono = (uint8_t) strBuf[0] & 0x01;
int flvStereoOrMono = (uint8_t) buffer[0] & 0x01;
const static int channel[] = { 1, 2 };
return channel[flvStereoOrMono];
}
......
......@@ -23,14 +23,14 @@ public:
typedef std::shared_ptr<RtmpRing> Ptr;
typedef RingBuffer<RtmpPacket::Ptr> RingType;
RtmpRing(){}
virtual ~RtmpRing(){}
RtmpRing() {}
virtual ~RtmpRing() {}
/**
* Get the rtmp ring buffer
* @return
*/
virtual RingType::Ptr getRtmpRing() const{
virtual RingType::Ptr getRtmpRing() const {
return _rtmpRing;
}
......@@ -38,7 +38,7 @@ public:
* Set the rtmp ring buffer
* @param ring
*/
virtual void setRtmpRing(const RingType::Ptr &ring){
virtual void setRtmpRing(const RingType::Ptr &ring) {
_rtmpRing = ring;
}
......@@ -48,17 +48,17 @@ public:
* @param key_pos whether this is a key frame
* @return whether this is a key frame
*/
virtual bool inputRtmp(const RtmpPacket::Ptr &rtmp, bool key_pos){
if(_rtmpRing){
_rtmpRing->write(rtmp,key_pos);
virtual bool inputRtmp(const RtmpPacket::Ptr &rtmp, bool key_pos) {
if (_rtmpRing) {
_rtmpRing->write(rtmp, key_pos);
}
return key_pos;
}
protected:
RingType::Ptr _rtmpRing;
};
class RtmpCodec : public RtmpRing, public FrameDispatcher , public CodecInfo{
public:
typedef std::shared_ptr<RtmpCodec> Ptr;
......@@ -69,5 +69,4 @@ public:
}//namespace mediakit
#endif //ZLMEDIAKIT_RTMPCODEC_H
......@@ -66,27 +66,27 @@ bool RtmpDemuxer::loadMetaData(const AMFValue &val){
}
bool RtmpDemuxer::inputRtmp(const RtmpPacket::Ptr &pkt) {
switch (pkt->typeId) {
switch (pkt->type_id) {
case MSG_VIDEO: {
if(!_tryedGetVideoTrack){
_tryedGetVideoTrack = true;
if(!_try_get_video_track){
_try_get_video_track = true;
auto codec = AMFValue(pkt->getMediaType());
makeVideoTrack(codec);
}
if(_videoRtmpDecoder){
return _videoRtmpDecoder->inputRtmp(pkt, true);
if(_video_rtmp_decoder){
return _video_rtmp_decoder->inputRtmp(pkt, true);
}
return false;
}
case MSG_AUDIO: {
if(!_tryedGetAudioTrack) {
_tryedGetAudioTrack = true;
if(!_try_get_audio_track) {
_try_get_audio_track = true;
auto codec = AMFValue(pkt->getMediaType());
makeAudioTrack(codec, pkt->getAudioSampleRate(), pkt->getAudioChannel(), pkt->getAudioSampleBit());
}
if(_audioRtmpDecoder){
_audioRtmpDecoder->inputRtmp(pkt, false);
if(_audio_rtmp_decoder){
_audio_rtmp_decoder->inputRtmp(pkt, false);
return false;
}
return false;
......@@ -101,12 +101,12 @@ void RtmpDemuxer::makeVideoTrack(const AMFValue &videoCodec) {
_videoTrack = dynamic_pointer_cast<VideoTrack>(Factory::getVideoTrackByAmf(videoCodec));
if (_videoTrack) {
//create an rtmpCodec object so the rtmp data can be decoded
_videoRtmpDecoder = Factory::getRtmpCodecByTrack(_videoTrack, false);
if (_videoRtmpDecoder) {
_video_rtmp_decoder = Factory::getRtmpCodecByTrack(_videoTrack, false);
if (_video_rtmp_decoder) {
//set the rtmp decoder delegate; generated frames are written into this Track
_videoRtmpDecoder->addDelegate(_videoTrack);
_video_rtmp_decoder->addDelegate(_videoTrack);
onAddTrack(_videoTrack);
_tryedGetVideoTrack = true;
_try_get_video_track = true;
} else {
//no matching rtmp decoder found, this track is invalid
_videoTrack.reset();
......@@ -119,12 +119,12 @@ void RtmpDemuxer::makeAudioTrack(const AMFValue &audioCodec,int sample_rate, int
_audioTrack = dynamic_pointer_cast<AudioTrack>(Factory::getAudioTrackByAmf(audioCodec, sample_rate, channels, sample_bit));
if (_audioTrack) {
//create an rtmpCodec object so the rtmp data can be decoded
_audioRtmpDecoder = Factory::getRtmpCodecByTrack(_audioTrack, false);
if (_audioRtmpDecoder) {
_audio_rtmp_decoder = Factory::getRtmpCodecByTrack(_audioTrack, false);
if (_audio_rtmp_decoder) {
//set the rtmp decoder delegate; generated frames are written into this Track
_audioRtmpDecoder->addDelegate(_audioTrack);
_audio_rtmp_decoder->addDelegate(_audioTrack);
onAddTrack(_audioTrack);
_tryedGetAudioTrack = true;
_try_get_audio_track = true;
} else {
//no matching rtmp decoder found, this track is invalid
_audioTrack.reset();
......
......@@ -28,7 +28,7 @@ public:
typedef std::shared_ptr<RtmpDemuxer> Ptr;
RtmpDemuxer() = default;
virtual ~RtmpDemuxer() = default;
~RtmpDemuxer() override = default;
bool loadMetaData(const AMFValue &metadata);
......@@ -38,14 +38,16 @@ public:
* @return true means this is an I-frame
*/
bool inputRtmp(const RtmpPacket::Ptr &pkt);
private:
void makeVideoTrack(const AMFValue &val);
void makeAudioTrack(const AMFValue &val, int sample_rate, int channels, int sample_bit);
private:
bool _tryedGetVideoTrack = false;
bool _tryedGetAudioTrack = false;
RtmpCodec::Ptr _audioRtmpDecoder;
RtmpCodec::Ptr _videoRtmpDecoder;
bool _try_get_video_track = false;
bool _try_get_audio_track = false;
RtmpCodec::Ptr _audio_rtmp_decoder;
RtmpCodec::Ptr _video_rtmp_decoder;
};
} /* namespace mediakit */
......
......@@ -122,17 +122,17 @@ public:
*/
void onWrite(const RtmpPacket::Ptr &pkt, bool key = true) override {
lock_guard<recursive_mutex> lock(_mtx);
if(pkt->typeId == MSG_VIDEO){
if(pkt->type_id == MSG_VIDEO){
//there is video, so enable the GOP cache
_have_video = true;
}
if (pkt->isCfgFrame()) {
_config_frame_map[pkt->typeId] = pkt;
_config_frame_map[pkt->type_id] = pkt;
return;
}
//save the current timestamp
_track_stamps_map[pkt->typeId] = pkt->timeStamp;
_track_stamps_map[pkt->type_id] = pkt->time_stamp;
if (!_ring) {
weak_ptr<RtmpMediaSource> weakSelf = dynamic_pointer_cast<RtmpMediaSource>(shared_from_this());
......@@ -154,7 +154,7 @@ public:
regist();
}
}
PacketCache<RtmpPacket>::inputPacket(pkt->typeId == MSG_VIDEO, pkt, key);
PacketCache<RtmpPacket>::inputPacket(pkt->type_id == MSG_VIDEO, pkt, key);
}
/**
......
......@@ -161,11 +161,12 @@ public:
}
private:
RtmpDemuxer::Ptr _demuxer;
MultiMediaSourceMuxer::Ptr _muxer;
AMFValue _metadata;
bool _all_track_ready = false;
bool _recreate_metadata = false;
AMFValue _metadata;
RtmpDemuxer::Ptr _demuxer;
MultiMediaSourceMuxer::Ptr _muxer;
};
} /* namespace mediakit */
......
......@@ -24,34 +24,35 @@ public:
const string &strApp,
const string &strId,
const TitleMeta::Ptr &title = nullptr) : RtmpMuxer(title){
_mediaSouce = std::make_shared<RtmpMediaSource>(vhost,strApp,strId);
getRtmpRing()->setDelegate(_mediaSouce);
_media_src = std::make_shared<RtmpMediaSource>(vhost, strApp, strId);
getRtmpRing()->setDelegate(_media_src);
}
virtual ~RtmpMediaSourceMuxer(){}
void setListener(const std::weak_ptr<MediaSourceEvent> &listener){
_mediaSouce->setListener(listener);
_media_src->setListener(listener);
}
void setTimeStamp(uint32_t stamp){
_mediaSouce->setTimeStamp(stamp);
_media_src->setTimeStamp(stamp);
}
int readerCount() const{
return _mediaSouce->readerCount();
return _media_src->readerCount();
}
void onAllTrackReady(){
makeConfigPacket();
_mediaSouce->setMetaData(getMetadata());
_media_src->setMetaData(getMetadata());
}
// set the TrackSource
void setTrackSource(const std::weak_ptr<TrackSource> &track_src){
_mediaSouce->setTrackSource(track_src);
_media_src->setTrackSource(track_src);
}
private:
RtmpMediaSource::Ptr _mediaSouce;
RtmpMediaSource::Ptr _media_src;
};
......
......@@ -19,7 +19,7 @@ RtmpMuxer::RtmpMuxer(const TitleMeta::Ptr &title) {
}else{
_metadata = title->getMetadata();
}
_rtmpRing = std::make_shared<RtmpRing::RingType>();
_rtmp_ring = std::make_shared<RtmpRing::RingType>();
}
void RtmpMuxer::addTrack(const Track::Ptr &track) {
......@@ -31,7 +31,7 @@ void RtmpMuxer::addTrack(const Track::Ptr &track) {
}
//set the rtmp output ring buffer
encoder->setRtmpRing(_rtmpRing);
encoder->setRtmpRing(_rtmp_ring);
//add metadata
Metadata::addTrack(_metadata,track);
......@@ -57,7 +57,7 @@ const AMFValue &RtmpMuxer::getMetadata() const {
}
RtmpRing::RingType::Ptr RtmpMuxer::getRtmpRing() const {
return _rtmpRing;
return _rtmp_ring;
}
void RtmpMuxer::resetTracks() {
......
......@@ -61,7 +61,7 @@ public:
*/
void makeConfigPacket();
private:
RtmpRing::RingType::Ptr _rtmpRing;
RtmpRing::RingType::Ptr _rtmp_ring;
AMFValue _metadata;
RtmpCodec::Ptr _encoder[TrackMax];
};
......
......@@ -8,8 +8,8 @@
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SRC_RTMP_RtmpPlayer2_H_
#define SRC_RTMP_RtmpPlayer2_H_
#ifndef SRC_RTMP_RtmpPlayer_H_
#define SRC_RTMP_RtmpPlayer_H_
#include <memory>
#include <string>
......@@ -28,21 +28,24 @@ using namespace toolkit;
using namespace mediakit::Client;
namespace mediakit {
//implements the protocol part of the rtmp player as well as data reception
class RtmpPlayer:public PlayerBase, public TcpClient, public RtmpProtocol{
class RtmpPlayer : public PlayerBase, public TcpClient, public RtmpProtocol {
public:
typedef std::shared_ptr<RtmpPlayer> Ptr;
RtmpPlayer(const EventPoller::Ptr &poller);
virtual ~RtmpPlayer();
~RtmpPlayer() override;
void play(const string &strUrl) override;
void pause(bool bPause) override;
void teardown() override;
protected:
virtual bool onCheckMeta(const AMFValue &val) =0;
virtual void onMediaData(const RtmpPacket::Ptr &chunkData) =0;
uint32_t getProgressMilliSecond() const;
void seekToMilliSecond(uint32_t ms);
protected:
void onMediaData_l(const RtmpPacket::Ptr &chunkData);
//onPlayResult_l is triggered only after the config frame is received (not when the play command reply arrives), so all tracks are fully initialized by then
......@@ -59,15 +62,15 @@ protected:
send(buffer);
}
template<typename FUN>
inline void addOnResultCB(const FUN &fun) {
lock_guard<recursive_mutex> lck(_mtxOnResultCB);
_mapOnResultCB.emplace(_iReqID, fun);
template<typename FUNC>
inline void addOnResultCB(const FUNC &func) {
lock_guard<recursive_mutex> lck(_mtx_on_result);
_map_on_result.emplace(_send_req_id, func);
}
template<typename FUN>
inline void addOnStatusCB(const FUN &fun) {
lock_guard<recursive_mutex> lck(_mtxOnStatusCB);
_dqOnStatusCB.emplace_back(fun);
template<typename FUNC>
inline void addOnStatusCB(const FUNC &func) {
lock_guard<recursive_mutex> lck(_mtx_on_status);
_deque_on_status.emplace_back(func);
}
void onCmd_result(AMFDecoder &dec);
......@@ -78,34 +81,37 @@ protected:
inline void send_createStream();
inline void send_play();
inline void send_pause(bool bPause);
private:
string _strApp;
string _strStream;
string _strTcUrl;
bool _bPaused = false;
unordered_map<int, function<void(AMFDecoder &dec)> > _mapOnResultCB;
recursive_mutex _mtxOnResultCB;
deque<function<void(AMFValue &dec)> > _dqOnStatusCB;
recursive_mutex _mtxOnStatusCB;
//timeout handling
Ticker _mediaTicker;
std::shared_ptr<Timer> _pMediaTimer;
std::shared_ptr<Timer> _pPlayTimer;
//heartbeat timer
std::shared_ptr<Timer> _pBeatTimer;
string _app;
string _stream_id;
string _tc_url;
//playback progress control
uint32_t _iSeekTo = 0;
uint32_t _aiFistStamp[2] = { 0, 0 };
uint32_t _aiNowStamp[2] = { 0, 0 };
Ticker _aNowStampTicker[2];
bool _paused = false;
bool _metadata_got = false;
//whether benchmark (performance test) mode is enabled
bool _benchmark_mode = false;
//playback progress control
uint32_t _seek_ms = 0;
uint32_t _fist_stamp[2] = {0, 0};
uint32_t _now_stamp[2] = {0, 0};
Ticker _now_stamp_ticker[2];
recursive_mutex _mtx_on_result;
recursive_mutex _mtx_on_status;
deque<function<void(AMFValue &dec)> > _deque_on_status;
unordered_map<int, function<void(AMFDecoder &dec)> > _map_on_result;
//rtmp receive timeout ticker
Ticker _rtmp_recv_ticker;
//heartbeat send timer
std::shared_ptr<Timer> _beat_timer;
//play timeout timer
std::shared_ptr<Timer> _play_timer;
//rtmp receive timeout timer
std::shared_ptr<Timer> _rtmp_recv_timer;
};
} /* namespace mediakit */
#endif /* SRC_RTMP_RtmpPlayer2_H_ */
#endif /* SRC_RTMP_RtmpPlayer_H_ */
......@@ -27,51 +27,59 @@ namespace mediakit {
class RtmpPlayerImp: public PlayerImp<RtmpPlayer,RtmpDemuxer> {
public:
typedef std::shared_ptr<RtmpPlayerImp> Ptr;
RtmpPlayerImp(const EventPoller::Ptr &poller) : PlayerImp<RtmpPlayer,RtmpDemuxer>(poller){};
virtual ~RtmpPlayerImp(){
DebugL<<endl;
};
float getProgress() const override{
if(getDuration() > 0){
RtmpPlayerImp(const EventPoller::Ptr &poller) : PlayerImp<RtmpPlayer, RtmpDemuxer>(poller) {};
~RtmpPlayerImp() override {
DebugL << endl;
}
float getProgress() const override {
if (getDuration() > 0) {
return getProgressMilliSecond() / (getDuration() * 1000);
}
return PlayerBase::getProgress();
};
void seekTo(float fProgress) override{
fProgress = MAX(float(0),MIN(fProgress,float(1.0)));
}
void seekTo(float fProgress) override {
fProgress = MAX(float(0), MIN(fProgress, float(1.0)));
seekToMilliSecond(fProgress * getDuration() * 1000);
};
}
void play(const string &strUrl) override {
PlayerImp<RtmpPlayer,RtmpDemuxer>::play(strUrl);
PlayerImp<RtmpPlayer, RtmpDemuxer>::play(strUrl);
}
private:
//derived-class callback functions
bool onCheckMeta(const AMFValue &val) override {
_pRtmpMediaSrc = dynamic_pointer_cast<RtmpMediaSource>(_pMediaSrc);
if(_pRtmpMediaSrc){
_pRtmpMediaSrc->setMetaData(val);
_rtmp_src = dynamic_pointer_cast<RtmpMediaSource>(_pMediaSrc);
if (_rtmp_src) {
_rtmp_src->setMetaData(val);
_set_meta_data = true;
}
_delegate.reset(new RtmpDemuxer);
_delegate->loadMetaData(val);
return true;
}
void onMediaData(const RtmpPacket::Ptr &chunkData) override {
if(_pRtmpMediaSrc){
if(!_set_meta_data && !chunkData->isCfgFrame()){
if (_rtmp_src) {
if (!_set_meta_data && !chunkData->isCfgFrame()) {
_set_meta_data = true;
_pRtmpMediaSrc->setMetaData(TitleMeta().getMetadata());
_rtmp_src->setMetaData(TitleMeta().getMetadata());
}
_pRtmpMediaSrc->onWrite(chunkData);
_rtmp_src->onWrite(chunkData);
}
if(!_delegate){
if (!_delegate) {
//this stream has no metadata
_delegate.reset(new RtmpDemuxer());
}
_delegate->inputRtmp(chunkData);
}
private:
RtmpMediaSource::Ptr _pRtmpMediaSrc;
RtmpMediaSource::Ptr _rtmp_src;
bool _set_meta_data = false;
};
......
......@@ -31,45 +31,42 @@ class RtmpProtocol {
public:
RtmpProtocol();
virtual ~RtmpProtocol();
void onParseRtmp(const char *data, int size);
//as a client: send C0C1, wait for S0S1S2, then invoke the callback
void startClientSession(const function<void()> &cb);
void onParseRtmp(const char *pcRawData,int iSize);
void reset();
protected:
virtual void onSendRawData(const Buffer::Ptr &buffer) = 0;
virtual void onRtmpChunk(RtmpPacket &chunkData) = 0;
virtual void onStreamBegin(uint32_t ui32StreamId){
_ui32StreamId = ui32StreamId;
virtual void onRtmpChunk(RtmpPacket &chunk_data) = 0;
virtual void onStreamBegin(uint32_t stream_index){
_stream_index = stream_index;
}
virtual void onStreamEof(uint32_t ui32StreamId){};
virtual void onStreamDry(uint32_t ui32StreamId){};
protected:
void sendAcknowledgement(uint32_t ui32Size);
void sendAcknowledgementSize(uint32_t ui32Size);
void sendPeerBandwidth(uint32_t ui32Size);
void sendChunkSize(uint32_t ui32Size);
void sendPingRequest(uint32_t ui32TimeStamp = ::time(NULL));
void sendPingResponse(uint32_t ui32TimeStamp = ::time(NULL));
void sendSetBufferLength(uint32_t ui32StreamId, uint32_t ui32Length);
void sendUserControl(uint16_t ui16EventType, uint32_t ui32EventData);
void sendUserControl(uint16_t ui16EventType, const string &strEventData);
virtual void onStreamEof(uint32_t stream_index){};
virtual void onStreamDry(uint32_t stream_index){};
void sendInvoke(const string &strCmd, const AMFValue &val);
void sendRequest(int iCmd, const string &str);
void sendResponse(int iType, const string &str);
void sendRtmp(uint8_t ui8Type, uint32_t ui32StreamId, const std::string &strBuf, uint32_t ui32TimeStamp, int iChunkID);
void sendRtmp(uint8_t ui8Type, uint32_t ui32StreamId, const Buffer::Ptr &buffer, uint32_t ui32TimeStamp, int iChunkID);
protected:
int _iReqID = 0;
uint32_t _ui32StreamId = STREAM_CONTROL;
int _iNowStreamID = 0;
int _iNowChunkID = 0;
bool _bDataStarted = false;
inline BufferRaw::Ptr obtainBuffer();
inline BufferRaw::Ptr obtainBuffer(const void *data, int len);
//ResourcePool<BufferRaw,MAX_SEND_PKT> _bufferPool;
void reset();
BufferRaw::Ptr obtainBuffer();
BufferRaw::Ptr obtainBuffer(const void *data, int len);
void sendAcknowledgement(uint32_t size);
void sendAcknowledgementSize(uint32_t size);
void sendPeerBandwidth(uint32_t size);
void sendChunkSize(uint32_t size);
void sendPingRequest(uint32_t ti = ::time(NULL));
void sendPingResponse(uint32_t time_stamp = ::time(NULL));
void sendSetBufferLength(uint32_t stream_index, uint32_t len);
void sendUserControl(uint16_t event_type, uint32_t event_data);
void sendUserControl(uint16_t event_type, const string &event_data);
void sendInvoke(const string &cmd, const AMFValue &val);
void sendRequest(int cmd, const string &str);
void sendResponse(int type, const string &str);
void sendRtmp(uint8_t type, uint32_t stream_index, const std::string &buffer, uint32_t stamp, int chunk_id);
void sendRtmp(uint8_t type, uint32_t stream_index, const Buffer::Ptr &buffer, uint32_t stamp, int chunk_id);
private:
void handle_S0S1S2(const function<void()> &cb);
void handle_S0S1S2(const function<void()> &func);
void handle_C0C1();
void handle_C1_simple();
#ifdef ENABLE_OPENSSL
......@@ -82,26 +79,32 @@ private:
void handle_C2();
void handle_rtmp();
void handle_rtmpChunk(RtmpPacket &chunkData);
void handle_rtmpChunk(RtmpPacket &chunk_data);
protected:
int _send_req_id = 0;
uint32_t _stream_index = STREAM_CONTROL;
private:
int _now_stream_index = 0;
int _now_chunk_id = 0;
bool _data_started = false;
////////////ChunkSize////////////
size_t _iChunkLenIn = DEFAULT_CHUNK_LEN;
size_t _iChunkLenOut = DEFAULT_CHUNK_LEN;
size_t _chunk_size_in = DEFAULT_CHUNK_LEN;
size_t _chunk_size_out = DEFAULT_CHUNK_LEN;
////////////Acknowledgement////////////
uint32_t _ui32ByteSent = 0;
uint32_t _ui32LastSent = 0;
uint32_t _ui32WinSize = 0;
uint32_t _bytes_sent = 0;
uint32_t _bytes_sent_last = 0;
uint32_t _windows_size = 0;
///////////PeerBandwidth///////////
uint32_t _ui32Bandwidth = 2500000;
uint8_t _ui8LimitType = 2;
////////////Chunk////////////
unordered_map<int, RtmpPacket> _mapChunkData;
uint32_t _bandwidth = 2500000;
uint8_t _band_limit_type = 2;
//////////Rtmp parser//////////
string _strRcvBuf;
function<void()> _nextHandle;
string _recv_data_buf;
function<void()> _next_step_func;
////////////Chunk////////////
unordered_map<int, RtmpPacket> _map_chunk_data;
};
} /* namespace mediakit */
#endif /* SRC_RTMP_RTMPPROTOCOL_H_ */
......@@ -164,13 +164,13 @@ inline void RtmpPusher::send_createStream() {
addOnResultCB([this](AMFDecoder &dec){
//TraceL << "createStream result";
dec.load<AMFValue>();
_ui32StreamId = dec.load<int>();
_stream_index = dec.load<int>();
send_publish();
});
}
inline void RtmpPusher::send_publish() {
AMFEncoder enc;
enc << "publish" << ++_iReqID << nullptr << _strStream << _strApp ;
enc << "publish" << ++_send_req_id << nullptr << _strStream << _strApp ;
sendRequest(MSG_CMD, enc.data());
addOnStatusCB([this](AMFValue &val) {
......@@ -195,7 +195,7 @@ inline void RtmpPusher::send_metaData(){
sendRequest(MSG_DATA, enc.data());
src->getConfigFrame([&](const RtmpPacket::Ptr &pkt){
sendRtmp(pkt->typeId, _ui32StreamId, pkt, pkt->timeStamp, pkt->chunkId );
sendRtmp(pkt->type_id, _stream_index, pkt, pkt->time_stamp, pkt->chunk_id );
});
_pRtmpReader = src->getRing()->attach(getPoller());
......@@ -213,7 +213,7 @@ inline void RtmpPusher::send_metaData(){
if(++i == size){
strongSelf->setSendFlushFlag(true);
}
strongSelf->sendRtmp(rtmp->typeId, strongSelf->_ui32StreamId, rtmp, rtmp->timeStamp, rtmp->chunkId);
strongSelf->sendRtmp(rtmp->type_id, strongSelf->_stream_index, rtmp, rtmp->time_stamp, rtmp->chunk_id);
});
});
_pRtmpReader->setDetachCB([weakSelf](){
......@@ -275,7 +275,7 @@ void RtmpPusher::onCmd_onStatus(AMFDecoder &dec) {
}
void RtmpPusher::onRtmpChunk(RtmpPacket &chunkData) {
switch (chunkData.typeId) {
switch (chunkData.type_id) {
case MSG_CMD:
case MSG_CMD3: {
typedef void (RtmpPusher::*rtmpCMDHandle)(AMFDecoder &dec);
......@@ -284,9 +284,9 @@ void RtmpPusher::onRtmpChunk(RtmpPacket &chunkData) {
g_mapCmd.emplace("_error",&RtmpPusher::onCmd_result);
g_mapCmd.emplace("_result",&RtmpPusher::onCmd_result);
g_mapCmd.emplace("onStatus",&RtmpPusher::onCmd_onStatus);
}, []() {});
});
AMFDecoder dec(chunkData.strBuf, 0);
AMFDecoder dec(chunkData.buffer, 0);
std::string type = dec.load<std::string>();
auto it = g_mapCmd.find(type);
if(it != g_mapCmd.end()){
......@@ -298,7 +298,7 @@ void RtmpPusher::onRtmpChunk(RtmpPacket &chunkData) {
}
break;
default:
//WarnL << "unhandled message:" << (int) chunkData.typeId << hexdump(chunkData.strBuf.data(), chunkData.strBuf.size());
//WarnL << "unhandled message:" << (int) chunkData.type_id << hexdump(chunkData.buffer.data(), chunkData.buffer.size());
break;
}
}
......
......@@ -52,7 +52,7 @@ private:
template<typename FUN>
inline void addOnResultCB(const FUN &fun) {
lock_guard<recursive_mutex> lck(_mtxOnResultCB);
_mapOnResultCB.emplace(_iReqID, fun);
_mapOnResultCB.emplace(_send_req_id, fun);
}
template<typename FUN>
inline void addOnStatusCB(const FUN &fun) {
......
......@@ -30,11 +30,13 @@ namespace mediakit {
class RtmpSession: public TcpSession ,public RtmpProtocol , public MediaSourceEvent{
public:
typedef std::shared_ptr<RtmpSession> Ptr;
RtmpSession(const Socket::Ptr &_sock);
virtual ~RtmpSession();
void onRecv(const Buffer::Ptr &pBuf) override;
RtmpSession(const Socket::Ptr &sock);
~RtmpSession() override;
void onRecv(const Buffer::Ptr &buf) override;
void onError(const SockException &err) override;
void onManager() override;
private:
void onProcessCmd(AMFDecoder &dec);
void onCmd_connect(AMFDecoder &dec);
......@@ -55,15 +57,15 @@ private:
void onSendMedia(const RtmpPacket::Ptr &pkt);
void onSendRawData(const Buffer::Ptr &buffer) override{
_ui64TotalBytes += buffer->size();
_total_bytes += buffer->size();
send(buffer);
}
void onRtmpChunk(RtmpPacket &chunkData) override;
void onRtmpChunk(RtmpPacket &chunk_data) override;
template<typename first, typename second>
inline void sendReply(const char *str, const first &reply, const second &status) {
AMFEncoder invoke;
invoke << str << _dNowReqID << reply << status;
invoke << str << _recv_req_id << reply << status;
sendResponse(MSG_CMD, invoke.data());
}
......@@ -74,29 +76,30 @@ private:
void setSocketFlags();
string getStreamId(const string &str);
void dumpMetadata(const AMFValue &metadata);
private:
std::string _strTcUrl;
MediaInfo _mediaInfo;
double _dNowReqID = 0;
bool _paused = false;
bool _set_meta_data = false;
Ticker _ticker;//data receive time
RtmpMediaSource::RingType::RingReader::Ptr _pRingReader;
std::shared_ptr<RtmpMediaSourceImp> _pPublisherSrc;
std::weak_ptr<RtmpMediaSource> _pPlayerSrc;
double _recv_req_id = 0;
//total bytes consumed
uint64_t _total_bytes = 0;
std::string _tc_url;
//timestamp smoother
Stamp _stamp[2];
//total bytes consumed
uint64_t _ui64TotalBytes = 0;
bool _paused = false;
//data receive timeout ticker
Ticker _ticker;
MediaInfo _media_info;
std::weak_ptr<RtmpMediaSource> _player_src;
std::shared_ptr<RtmpMediaSourceImp> _publisher_src;
RtmpMediaSource::RingType::RingReader::Ptr _ring_reader;
};
/**
* rtmp server with ssl encryption support
*/
typedef TcpSessionWithSSL<RtmpSession> RtmpSessionWithSSL;
} /* namespace mediakit */
#endif /* SRC_RTMP_RTMPSESSION_H_ */
......@@ -733,28 +733,12 @@ void RtspPlayer::onRecvRTP_l(const RtpPacket::Ptr &rtp, const SdpTrack::Ptr &tra
}
void RtspPlayer::onPlayResult_l(const SockException &ex , bool handshakeCompleted) {
WarnL << ex.getErrCode() << " " << ex.what();
if(!ex){
//playback succeeded, reset the rtp receive timeout timer
_rtp_recv_ticker.resetTime();
weak_ptr<RtspPlayer> weakSelf = dynamic_pointer_cast<RtspPlayer>(shared_from_this());
int timeoutMS = (*this)[kMediaTimeoutMS].as<int>();
//create the rtp data receive timeout detection timer
_rtp_check_timer.reset(new Timer(timeoutMS / 2000.0, [weakSelf,timeoutMS]() {
auto strongSelf=weakSelf.lock();
if(!strongSelf) {
return false;
}
if(strongSelf->_rtp_recv_ticker.elapsedTime() > timeoutMS) {
//timed out receiving rtp media packets
strongSelf->onPlayResult_l(SockException(Err_timeout,"receive rtp timeout"), true);
return false;
}
return true;
}, getPoller()));
if (ex.getErrCode() == Err_shutdown) {
//shutdown was initiated locally, do not trigger the callback
return;
}
WarnL << ex.getErrCode() << " " << ex.what();
if (!handshakeCompleted) {
//initial playback phase
_play_check_timer.reset();
......@@ -769,7 +753,26 @@ void RtspPlayer::onPlayResult_l(const SockException &ex , bool handshakeComplete
onResume();
}
if(ex){
if (!ex) {
//playback succeeded, reset the rtp receive timeout timer
_rtp_recv_ticker.resetTime();
int timeoutMS = (*this)[kMediaTimeoutMS].as<int>();
weak_ptr<RtspPlayer> weakSelf = dynamic_pointer_cast<RtspPlayer>(shared_from_this());
auto lam = [weakSelf, timeoutMS]() {
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
return false;
}
if (strongSelf->_rtp_recv_ticker.elapsedTime() > timeoutMS) {
//timed out receiving rtp media packets
strongSelf->onPlayResult_l(SockException(Err_timeout, "receive rtp timeout"), true);
return false;
}
return true;
};
//create the rtp data receive timeout detection timer
_rtp_check_timer = std::make_shared<Timer>(timeoutMS / 2000.0, lam, getPoller());
} else {
teardown();
}
}
......
......@@ -1123,7 +1123,7 @@ inline void RtspSession::onSendRtpPacket(const RtpPacket::Ptr &pkt){
//send rtcp every 5 second
ticker.resetTime();
//store directly in network byte order
memcpy(&counter.timeStamp, pkt->data() + 8, 4);
memcpy(&counter.time_stamp, pkt->data() + 8, 4);
sendSenderReport(_rtp_type == Rtsp::RTP_TCP, track_index);
}
#endif
......