Commit a0de7577 by custompal

Merge branch 'master' into cus_pr

Sync code
parents 82f6bfc7 d4ad44c8
...@@ -71,6 +71,7 @@ SpaceBeforeInheritanceColon: true
SpaceBeforeParens: ControlStatements
# No space inside empty {}
SpaceInEmptyBlock: false
Standard: C++11
# Tab is 4 columns wide
TabWidth: 4
# Do not use tabs
......
name: Android
on: [push, pull_request]
jobs:
build:
......
name: Linux
on: [push, pull_request]
......
name: macOS
on: [push, pull_request]
......
name: style check
on: [pull_request]
jobs:
check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
# with all history
fetch-depth: 0
- name: Validate BOM
run: |
ret=0
for i in $(git diff --name-only origin/${GITHUB_BASE_REF}...${GITHUB_SHA}); do
if [ -f ${i} ]; then
case ${i} in
*.c|*.cc|*.cpp|*.h)
if file ${i} | grep -qv BOM; then
echo "Missing BOM in ${i}" && ret=1;
fi
;;
esac
fi
done
exit ${ret}
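The step above flags C/C++ sources whose `file` output does not report a byte-order mark. A minimal C++ sketch (not part of this commit; the helper and its file name are hypothetical) for prepending the UTF-8 BOM 0xEF 0xBB 0xBF to such a file:

// add_bom.cpp - hypothetical helper, not part of this commit: prepend a UTF-8 BOM to a file that lacks one
#include <fstream>
#include <iterator>
#include <iostream>
#include <string>

int main(int argc, char *argv[]) {
    if (argc < 2) { std::cerr << "usage: add_bom <file>\n"; return 1; }
    std::ifstream in(argv[1], std::ios::binary);
    std::string content((std::istreambuf_iterator<char>(in)), std::istreambuf_iterator<char>());
    in.close();
    static const std::string bom = "\xEF\xBB\xBF";
    if (content.compare(0, bom.size(), bom) == 0) { return 0; } // already starts with a BOM
    std::ofstream out(argv[1], std::ios::binary | std::ios::trunc);
    out << bom << content; // rewrite the file with the BOM in front
    return 0;
}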
name: Windows
on: [push, pull_request]
......
ZLToolKit @ 61f2c6c8
Subproject commit 61f2c6c8d4288c2c60299a84473d9cfec113891c
...@@ -22,6 +22,10 @@ endif()
set(CMAKE_CXX_STANDARD 11)
# Load custom modules
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
set(DEP_ROOT_DIR ${CMAKE_SOURCE_DIR}/3rdpart/external-${CMAKE_SYSTEM_NAME})
if(NOT EXISTS ${DEP_ROOT_DIR})
file(MAKE_DIRECTORY ${DEP_ROOT_DIR})
endif()
find_program(CCACHE_FOUND ccache)
if(CCACHE_FOUND)
...@@ -108,6 +112,7 @@ option(ENABLE_MSVC_MT "Enable MSVC Mt/Mtd lib" true)
option(ENABLE_API_STATIC_LIB "Enable mk_api static lib" false)
option(USE_SOLUTION_FOLDERS "Enable solution dir supported" ON)
option(ENABLE_SRT "Enable SRT" true)
option(ENABLE_JEMALLOC_STATIC "Enable static linking to the jemalloc library" false)
# ----------------------------------------------------------------------------
# Solution folders:
# ----------------------------------------------------------------------------
...@@ -299,6 +304,13 @@ if (ENABLE_FFMPEG)
endif ()
if(ENABLE_JEMALLOC_STATIC)
include(cmake/Jemalloc.cmake)
include_directories(${DEP_ROOT_DIR}/${JEMALLOC_NAME}/include/jemalloc)
link_directories(${DEP_ROOT_DIR}/${JEMALLOC_NAME}/lib)
set(JEMALLOC_ROOT_DIR "${DEP_ROOT_DIR}/${JEMALLOC_NAME}")
endif ()
# Link jemalloc by default to avoid memory fragmentation
find_package(JEMALLOC QUIET)
if (JEMALLOC_FOUND)
......
![logo](https://raw.githubusercontent.com/ZLMediaKit/ZLMediaKit/master/www/logo.png)
# A high-performance, carrier-grade streaming media service framework based on C++11
[![](https://img.shields.io/badge/license-MIT-green.svg)](https://github.com/ZLMediaKit/ZLMediaKit/blob/master/LICENSE)
[![](https://img.shields.io/badge/language-c++-red.svg)](https://en.cppreference.com/)
[![](https://img.shields.io/badge/platform-linux%20|%20macos%20|%20windows-blue.svg)](https://github.com/ZLMediaKit/ZLMediaKit)
[![](https://img.shields.io/badge/PRs-welcome-yellow.svg)](https://github.com/ZLMediaKit/ZLMediaKit/pulls)
[![](https://github.com/ZLMediaKit/ZLMediaKit/actions/workflows/android.yml/badge.svg)](https://github.com/ZLMediaKit/ZLMediaKit)
[![](https://github.com/ZLMediaKit/ZLMediaKit/actions/workflows/linux.yml/badge.svg)](https://github.com/ZLMediaKit/ZLMediaKit)
[![](https://github.com/ZLMediaKit/ZLMediaKit/actions/workflows/macos.yml/badge.svg)](https://github.com/ZLMediaKit/ZLMediaKit)
[![](https://github.com/ZLMediaKit/ZLMediaKit/actions/workflows/windows.yml/badge.svg)](https://github.com/ZLMediaKit/ZLMediaKit)
[![](https://github.com/ZLMediaKit/ZLMediaKit/actions/workflows/docker.yml/badge.svg)](https://hub.docker.com/r/zlmediakit/zlmediakit/tags)
[![](https://img.shields.io/docker/pulls/zlmediakit/zlmediakit)](https://hub.docker.com/r/zlmediakit/zlmediakit/tags)
## Features
...@@ -16,10 +22,10 @@
- Built on multiplexed/multi-threaded/asynchronous network I/O, with excellent concurrency and support for massive numbers of client connections.
- The code has gone through extensive long-term stability and performance testing and has been proven in commercial production for a long time.
- Supports all major platforms: Linux, macOS, iOS, Android and Windows.
- Supports instant playback and very low latency ([within 500 ms, as low as 100 ms](https://github.com/ZLMediaKit/ZLMediaKit/wiki/%E5%BB%B6%E6%97%B6%E6%B5%8B%E8%AF%95)).
- Provides a complete standard [C API](https://github.com/ZLMediaKit/ZLMediaKit/tree/master/api/include) that can be used as an SDK or called from other languages.
- Provides a full-featured [MediaServer](https://github.com/ZLMediaKit/ZLMediaKit/tree/master/server) that can be deployed as a commercial server out of the box, without any development.
- Provides a complete [RESTful API](https://github.com/ZLMediaKit/ZLMediaKit/wiki/MediaServer%E6%94%AF%E6%8C%81%E7%9A%84HTTP-API) and [web hook](https://github.com/ZLMediaKit/ZLMediaKit/wiki/MediaServer%E6%94%AF%E6%8C%81%E7%9A%84HTTP-HOOK-API) interface, supporting rich business logic.
- Bridges the video-surveillance protocol stack and the live-streaming protocol stack, with solid RTSP/RTMP support.
- Full support for H265/H264/AAC/G711/OPUS.
- Feature complete: clustering, on-demand protocol conversion, on-demand push/pull streaming, play-before-publish, resuming an interrupted push, and more.
...@@ -58,7 +64,7 @@
- Supports websocket-flv live streaming
- Supports H264/H265/AAC/G711/OPUS; other codecs can be forwarded but not converted to other protocols
- Supports [RTMP-H265](https://github.com/ksvc/FFmpeg/wiki)
- Supports [RTMP-OPUS](https://github.com/ZLMediaKit/ZLMediaKit/wiki/RTMP%E5%AF%B9H265%E5%92%8COPUS%E7%9A%84%E6%94%AF%E6%8C%81)
- HLS
- Supports HLS file generation, with a built-in HTTP file server
...@@ -126,15 +132,15 @@
## Building and testing
**Before building, be sure to carefully follow the wiki: [Quick Start](https://github.com/ZLMediaKit/ZLMediaKit/wiki/%E5%BF%AB%E9%80%9F%E5%BC%80%E5%A7%8B)!!!**
## How to use
There are three ways to use ZLMediaKit:
- 1. Use the C API as an SDK; see [here](https://github.com/ZLMediaKit/ZLMediaKit/tree/master/api/include).
- 2. Use it as a standalone media server, with no C/C++ development required; see the [RESTful API](https://github.com/ZLMediaKit/ZLMediaKit/wiki/MediaServer支持的HTTP-API) and [web hook](https://github.com/ZLMediaKit/ZLMediaKit/wiki/MediaServer支持的HTTP-HOOK-API) docs.
- 3. For C/C++ development, adding your own business logic and features, see the [test programs](https://github.com/ZLMediaKit/ZLMediaKit/tree/master/tests).
## Docker images
...@@ -167,6 +173,7 @@ bash build_docker_images.sh
- [Hikvision EHome server implemented in Go](https://github.com/tsingeye/FreeEhome)
- Clients
- [Complete C# wrapper for the C SDK](https://github.com/malegend/ZLMediaKit.Autogen)
- [Push-streaming client based on the C SDK](https://github.com/hctym1995/ZLM_ApiDemo)
- [C# HTTP API and Hook](https://github.com/chengxiaosheng/ZLMediaKit.HttpApi)
- [.NET Core RESTful client](https://github.com/MingZhuLiu/ZLMediaKit.DotNetCore.Sdk)
...@@ -195,7 +202,7 @@ bash build_docker_images.sh
- 1. Read the readme and wiki carefully first; check the issues if necessary.
- 2. If your problem is still not solved, open an issue.
- 3. Questions that would not be useful as a reference for others do not need an issue; ask them in the QQ group instead.
- 4. Free technical consulting and support is generally not provided via private QQ chat ([why private QQ chat is discouraged](https://github.com/ZLMediaKit/ZLMediaKit/wiki/%E4%B8%BA%E4%BB%80%E4%B9%88%E4%B8%8D%E5%BB%BA%E8%AE%AEQQ%E7%A7%81%E8%81%8A%E5%92%A8%E8%AF%A2%E9%97%AE%E9%A2%98%EF%BC%9F)).
## Special thanks
...@@ -278,4 +285,4 @@ bash build_docker_images.sh
The project has been recognized by quite a few companies and individual developers. According to the author's incomplete statistics,
companies using it include well-known internet giants, top-ranking domestic cloud service providers and several well-known AI unicorns,
as well as many small and medium-sized companies. Users can endorse the project by posting their company name and a short project description in [this issue](https://github.com/ZLMediaKit/ZLMediaKit/issues/511). Thanks for the support!
...@@ -187,7 +187,6 @@ API_EXPORT int API_CALL mk_media_init_video(mk_media ctx, int codec_id, int widt
info.iWidth = width;
info.iHeight = height;
info.iBitRate = bit_rate;
return (*obj)->getChannel()->initVideo(info);
}
......
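The change above removes a duplicated initVideo() call, so the function's return value now reflects the single initialization attempt. A hypothetical caller-side sketch (not part of this commit; argument values are made up and a non-zero return is assumed to mean success):

/* Hypothetical usage sketch, not part of this commit. */
mk_media media = mk_media_create("__defaultVhost__", "live", "test", 0, 0, 0);
/* codec_id 0 = H264 as in the test program; the 25 is assumed to be the fps argument */
int ok = mk_media_init_video(media, 0, 1920, 1080, 25, 2 * 1024 * 1024);
if (!ok) {
    /* assumption: a zero return means the video track could not be initialized */
}
mk_media_init_complete(media);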
...@@ -10,12 +10,13 @@
#include "mk_track.h"
#include "Extension/Track.h"
#include "Extension/Factory.h"
using namespace std;
using namespace toolkit;
using namespace mediakit;
class VideoTrackForC : public VideoTrack, public std::enable_shared_from_this<VideoTrackForC> {
public:
VideoTrackForC(int codec_id, codec_args *args) {
_codec_id = (CodecId) codec_id;
...@@ -49,7 +50,8 @@ public:
}
Track::Ptr clone() override {
auto track_in = std::shared_ptr<Track>(shared_from_this());
return Factory::getTrackByAbstractTrack(track_in);
}
Sdp::Ptr getSdp() override {
...@@ -61,7 +63,7 @@ private:
codec_args _args;
};
class AudioTrackForC : public AudioTrackImp, public std::enable_shared_from_this<AudioTrackForC> {
public:
~AudioTrackForC() override = default;
...@@ -69,7 +71,8 @@ public:
AudioTrackImp((CodecId) codec_id, args->audio.sample_rate, args->audio.channels, 16) {}
Track::Ptr clone() override {
auto track_in = std::shared_ptr<Track>(shared_from_this());
return Factory::getTrackByAbstractTrack(track_in);
}
Sdp::Ptr getSdp() override {
......
...@@ -64,8 +64,12 @@ int main(int argc, char *argv[]) {
mk_media media = mk_media_create("__defaultVhost__", "live", "test", 0, 0, 0);
// H264 codec
//mk_media_init_video(media, 0, 0, 0, 0, 2 * 104 * 1024);
codec_args v_args = {0};
mk_track v_track = mk_track_create(MKCodecH264, &v_args);
mk_media_init_track(media, v_track);
mk_media_init_complete(media);
mk_track_unref(v_track);
// Create the H264 frame splitter
mk_h264_splitter splitter = mk_h264_splitter_create(on_h264_frame, media);
......
# Tries to find Jemalloc headers and libraries.
#
# Usage of this module as follows:
#
#     find_package(jemalloc)
#
# Variables used by this module, they can change the default behaviour and need
# to be set before calling find_package:
#
#  JEMALLOC_ROOT_DIR  Set this variable to the root installation of
#                     Jemalloc if the module has problems finding
#                     the proper installation path.
#
# Variables defined by this module:
#
#  JEMALLOC_FOUND        System has Jemalloc libs/headers
#  JEMALLOC_LIBRARIES    The Jemalloc libraries
#  JEMALLOC_INCLUDE_DIR  The location of Jemalloc headers

if (ENABLE_JEMALLOC_STATIC)
    find_path(JEMALLOC_INCLUDE_DIR
        NAMES jemalloc.h
        HINTS ${JEMALLOC_ROOT_DIR}/include/jemalloc
        NO_DEFAULT_PATH)
    find_library(JEMALLOC_LIBRARIES
        NAMES jemalloc
        HINTS ${JEMALLOC_ROOT_DIR}/lib
        NO_DEFAULT_PATH)
else ()
    find_path(JEMALLOC_INCLUDE_DIR
        NAMES jemalloc/jemalloc.h)
    find_library(JEMALLOC_LIBRARIES
        NAMES jemalloc)
endif ()

include(FindPackageHandleStandardArgs)

find_package_handle_standard_args(JEMALLOC DEFAULT_MSG
    JEMALLOC_LIBRARIES
    JEMALLOC_INCLUDE_DIR)

mark_as_advanced(
    JEMALLOC_ROOT_DIR
    JEMALLOC_LIBRARIES
    JEMALLOC_INCLUDE_DIR)
# Download and build Jemalloc
set(JEMALLOC_VERSION 5.2.1)
set(JEMALLOC_NAME jemalloc-${JEMALLOC_VERSION})
set(JEMALLOC_TAR_PATH ${DEP_ROOT_DIR}/${JEMALLOC_NAME}.tar.bz2)
list(APPEND jemalloc_CONFIG_ARGS --disable-initial-exec-tls)
list(APPEND jemalloc_CONFIG_ARGS --without-export)
list(APPEND jemalloc_CONFIG_ARGS --disable-stats)
list(APPEND jemalloc_CONFIG_ARGS --disable-libdl)
#list(APPEND jemalloc_CONFIG_ARGS --disable-cxx)
#list(APPEND jemalloc_CONFIG_ARGS --with-jemalloc-prefix=je_)
#list(APPEND jemalloc_CONFIG_ARGS --enable-debug)
if(NOT EXISTS ${JEMALLOC_TAR_PATH})
message(STATUS "Downloading ${JEMALLOC_NAME}...")
file(DOWNLOAD https://github.com/jemalloc/jemalloc/releases/download/${JEMALLOC_VERSION}/${JEMALLOC_NAME}.tar.bz2
${JEMALLOC_TAR_PATH})
endif()
SET( DIR_CONTAINING_JEMALLOC ${DEP_ROOT_DIR}/${JEMALLOC_NAME} )
if(NOT EXISTS ${DIR_CONTAINING_JEMALLOC})
message(STATUS "Extracting jemalloc...")
execute_process(COMMAND ${CMAKE_COMMAND} -E tar xzf ${JEMALLOC_TAR_PATH} WORKING_DIRECTORY ${DEP_ROOT_DIR})
endif()
if(NOT EXISTS ${DIR_CONTAINING_JEMALLOC}/Makefile)
message("Configuring jemalloc locally...")
# Builds with "--with-jemalloc-prefix=je_" on OSX
# SET( BASH_COMMAND_TO_RUN bash -l -c "cd ${DIR_CONTAINING_JEMALLOC} && ./configure ${jemalloc_CONFIG_ARGS}" )
#
# EXECUTE_PROCESS( COMMAND ${BASH_COMMAND_TO_RUN}
# WORKING_DIRECTORY ${DIR_CONTAINING_JEMALLOC} RESULT_VARIABLE JEMALLOC_CONFIGURE )
execute_process(COMMAND ./configure ${jemalloc_CONFIG_ARGS} WORKING_DIRECTORY ${DIR_CONTAINING_JEMALLOC} RESULT_VARIABLE JEMALLOC_CONFIGURE)
if(NOT JEMALLOC_CONFIGURE EQUAL 0)
message(FATAL_ERROR "${JEMALLOC_NAME} configure failed!")
message("${JEMALLOC_CONFIGURE}")
endif()
endif()
if(NOT EXISTS ${DIR_CONTAINING_JEMALLOC}/lib/libjemalloc.a)
message("Building jemalloc locally...")
execute_process(COMMAND make "build_lib_static" WORKING_DIRECTORY ${DIR_CONTAINING_JEMALLOC})
if(NOT EXISTS ${DIR_CONTAINING_JEMALLOC}/lib/libjemalloc.a)
message(FATAL_ERROR "${JEMALLOC_NAME} build failed!")
endif()
endif()
\ No newline at end of file
...@@ -298,6 +298,7 @@ g711a_pt=8
#rtc publish/play timeout, in seconds
timeoutSec=15
#IP of this host as seen by rtc clients; when acting as a server this is usually the public IP. Multiple IPs can be given separated by ','. When left empty, the NIC IP is detected automatically
#Environment variables starting with $ are also supported, e.g. "$EXTERN_IP"; see: https://github.com/ZLMediaKit/ZLMediaKit/pull/1786
externIP=
#Listening port of the rtc UDP server; all rtc clients transfer stun/dtls/srtp/srtcp data through this port.
#The port is multi-threaded and also supports connection migration when a client switches networks
......
...@@ -47,13 +47,13 @@ static void on_ffmpeg_log(void *ctx, int level, const char *fmt, va_list args) {
}
LogLevel lev;
switch (level) {
case AV_LOG_FATAL:
case AV_LOG_ERROR: lev = LError; break;
case AV_LOG_WARNING: lev = LWarn; break;
case AV_LOG_INFO: lev = LInfo; break;
case AV_LOG_VERBOSE:
case AV_LOG_DEBUG: lev = LDebug; break;
case AV_LOG_TRACE:
default: lev = LTrace; break;
}
LoggerWrapper::printLogV(::toolkit::getLogger(), lev, __FILE__, ctx ? av_default_item_name(ctx) : "NULL", level, fmt, args);
...@@ -63,7 +63,9 @@ static bool setupFFmpeg_l() {
av_log_set_level(AV_LOG_TRACE);
av_log_set_flags(AV_LOG_PRINT_LEVEL);
av_log_set_callback(on_ffmpeg_log);
#if (LIBAVCODEC_VERSION_MAJOR < 58)
avcodec_register_all();
#endif
return true;
}
...@@ -243,14 +245,14 @@ AVFrame *FFmpegFrame::get() const {
void FFmpegFrame::fillPicture(AVPixelFormat target_format, int target_width, int target_height) {
assert(_data == nullptr);
_data = new char[av_image_get_buffer_size(target_format, target_width, target_height, 1)];
av_image_fill_arrays(_frame->data, _frame->linesize, (uint8_t *) _data, target_format, target_width, target_height, 1);
}
///////////////////////////////////////////////////////////////////////////
template<bool decoder = true>
static inline const AVCodec *getCodec_l(const char *name) {
auto codec = decoder ? avcodec_find_decoder_by_name(name) : avcodec_find_encoder_by_name(name);
if (codec) {
InfoL << (decoder ? "got decoder:" : "got encoder:") << name;
...@@ -261,7 +263,7 @@ static inline AVCodec *getCodec_l(const char *name) {
}
template<bool decoder = true>
static inline const AVCodec *getCodec_l(enum AVCodecID id) {
auto codec = decoder ? avcodec_find_decoder(id) : avcodec_find_encoder(id);
if (codec) {
InfoL << (decoder ? "got decoder:" : "got encoder:") << avcodec_get_name(id);
...@@ -277,7 +279,7 @@ public:
CodecName(enum AVCodecID id) : _id(id) {}
template <bool decoder>
const AVCodec *getCodec() const {
if (!_codec_name.empty()) {
return getCodec_l<decoder>(_codec_name.data());
}
...@@ -290,8 +292,8 @@ private:
};
template <bool decoder = true>
static inline const AVCodec *getCodec(const std::initializer_list<CodecName> &codec_list) {
const AVCodec *ret = nullptr;
for (int i = codec_list.size(); i >= 1; --i) {
ret = codec_list.begin()[i - 1].getCodec<decoder>();
if (ret) {
...@@ -303,8 +305,8 @@ static inline AVCodec *getCodec(const std::initializer_list<CodecName> &codec_li
FFmpegDecoder::FFmpegDecoder(const Track::Ptr &track, int thread_num) {
setupFFmpeg();
const AVCodec *codec = nullptr;
const AVCodec *codec_default = nullptr;
switch (track->getCodecId()) {
case CodecH264:
codec_default = getCodec({AV_CODEC_ID_H264});
...@@ -358,7 +360,9 @@ FFmpegDecoder::FFmpegDecoder(const Track::Ptr &track, int thread_num) {
}
// Keep a reference to the AVFrame
#ifdef FF_API_OLD_ENCDEC
_context->refcounted_frames = 1;
#endif
_context->flags |= AV_CODEC_FLAG_LOW_DELAY;
_context->flags2 |= AV_CODEC_FLAG2_FAST;
if (track->getTrackType() == TrackVideo) {
...@@ -539,7 +543,7 @@ FFmpegSwr::~FFmpegSwr() {
FFmpegFrame::Ptr FFmpegSwr::inputFrame(const FFmpegFrame::Ptr &frame) {
if (frame->get()->format == _target_format &&
frame->get()->channels == _target_channels &&
frame->get()->channel_layout == (uint64_t)_target_channel_layout &&
frame->get()->sample_rate == _target_samplerate) {
// No format conversion needed
return frame;
...@@ -596,7 +600,8 @@ int FFmpegSws::inputFrame(const FFmpegFrame::Ptr &frame, uint8_t *data) {
}
AVFrame dst;
memset(&dst, 0, sizeof(dst));
av_image_fill_arrays(dst.data, dst.linesize, data, _target_format, _target_width, _target_height, 1);
if (!_ctx) {
_ctx = sws_getContext(frame->get()->width, frame->get()->height, (enum AVPixelFormat) frame->get()->format,
_target_width, _target_height, _target_format, SWS_FAST_BILINEAR, NULL, NULL, NULL);
......
...@@ -25,6 +25,7 @@ extern "C" {
#include "libavcodec/avcodec.h"
#include "libswresample/swresample.h"
#include "libavutil/audio_fifo.h"
#include "libavutil/imgutils.h"
#ifdef __cplusplus
}
#endif
......
...@@ -354,64 +354,12 @@ void MultiMediaSourceMuxer::resetTracks() {
}
}
// This class implements frame-level timestamp override
class FrameModifyStamp : public Frame{
public:
typedef std::shared_ptr<FrameModifyStamp> Ptr;
FrameModifyStamp(const Frame::Ptr &frame, Stamp &stamp){
_frame = frame;
// Override the timestamp
stamp.revise(frame->dts(), frame->pts(), _dts, _pts, true);
}
~FrameModifyStamp() override {}
uint32_t dts() const override{
return (uint32_t)_dts;
}
uint32_t pts() const override{
return (uint32_t)_pts;
}
size_t prefixSize() const override {
return _frame->prefixSize();
}
bool keyFrame() const override {
return _frame->keyFrame();
}
bool configFrame() const override {
return _frame->configFrame();
}
bool cacheAble() const override {
return _frame->cacheAble();
}
char *data() const override {
return _frame->data();
}
size_t size() const override {
return _frame->size();
}
CodecId getCodecId() const override {
return _frame->getCodecId();
}
private:
int64_t _dts;
int64_t _pts;
Frame::Ptr _frame;
};
bool MultiMediaSourceMuxer::onTrackFrame(const Frame::Ptr &frame_in) {
GET_CONFIG(bool, modify_stamp, General::kModifyStamp);
auto frame = frame_in;
if (modify_stamp) {
// Timestamp override is enabled
frame = std::make_shared<FrameStamp>(frame, _stamp[frame->getTrackType()], true);
}
bool ret = false;
......
...@@ -274,6 +274,9 @@ bool AACTrack::inputFrame(const Frame::Ptr &frame) {
bool ret = false;
// Has an ADTS header, try to split into frames
int64_t dts = frame->dts();
int64_t pts = frame->pts();
auto ptr = frame->data();
auto end = frame->data() + frame->size();
while (ptr < end) {
...@@ -284,7 +287,7 @@ bool AACTrack::inputFrame(const Frame::Ptr &frame) {
if (frame_len == frame->size()) {
return inputFrame_l(frame);
}
auto sub_frame = std::make_shared<FrameTSInternal<FrameFromPtr> >(frame, (char *) ptr, frame_len, ADTS_HEADER_LEN, dts, pts);
ptr += frame_len;
if (ptr > end) {
WarnL << "invalid aac length in adts header: " << frame_len
...@@ -295,6 +298,8 @@ bool AACTrack::inputFrame(const Frame::Ptr &frame) {
if (inputFrame_l(sub_frame)) {
ret = true;
}
dts += 1024 * 1000 / getAudioSampleRate();
pts += 1024 * 1000 / getAudioSampleRate();
}
return ret;
}
......
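The dts/pts increment above corresponds to the duration of one AAC frame: every AAC frame carries 1024 PCM samples, so its duration in milliseconds is 1024 * 1000 / sample_rate. A small worked sketch (illustration only, not part of this commit):

// Duration of one AAC frame (1024 samples) in milliseconds, integer arithmetic as used above.
constexpr int64_t aac_frame_ms(int64_t sample_rate) {
    return 1024 * 1000 / sample_rate;
}
// At 44.1 kHz one frame lasts about 23.2 ms, at 48 kHz about 21.3 ms.
static_assert(aac_frame_ms(44100) == 23, "1024 samples at 44.1 kHz truncate to 23 ms");
static_assert(aac_frame_ms(48000) == 21, "1024 samples at 48 kHz truncate to 21 ms");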
...@@ -76,7 +76,7 @@ bool CommonRtpEncoder::inputFrame(const Frame::Ptr &frame){
auto len = frame->size() - frame->prefixSize();
auto remain_size = len;
auto max_size = getMaxSize();
bool is_key = frame->keyFrame();
bool mark = false;
while (remain_size > 0) {
size_t rtp_size;
...@@ -86,9 +86,10 @@ bool CommonRtpEncoder::inputFrame(const Frame::Ptr &frame){
rtp_size = remain_size;
mark = true;
}
RtpCodec::inputRtp(makeRtp(getTrackType(), ptr, rtp_size, mark, stamp), is_key);
ptr += rtp_size;
remain_size -= rtp_size;
is_key = false;
}
return len > 0;
}
\ No newline at end of file
...@@ -95,6 +95,32 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
}
}
Track::Ptr Factory::getTrackByAbstractTrack(const Track::Ptr& track) {
auto codec = track->getCodecId();
switch (codec) {
case CodecG711A:
case CodecG711U: {
auto audio_track = dynamic_pointer_cast<AudioTrackImp>(track);
return std::make_shared<G711Track>(codec, audio_track->getAudioSampleRate(), audio_track->getAudioChannel(), 16);
}
case CodecL16: {
auto audio_track = dynamic_pointer_cast<AudioTrackImp>(track);
return std::make_shared<L16Track>(audio_track->getAudioSampleRate(), audio_track->getAudioChannel());
}
case CodecAAC : return std::make_shared<AACTrack>();
case CodecOpus : return std::make_shared<OpusTrack>();
case CodecH265 : return std::make_shared<H265Track>();
case CodecH264 : return std::make_shared<H264Track>();
default: {
// Other codecs are not supported
WarnL << "暂不支持该编码类型创建Track:" << track->getCodecName();
return nullptr;
}
}
}
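The factory above is what the C-API wrapper tracks in mk_track.cpp now rely on in clone(): instead of copying the thin wrapper object, cloning yields a real codec-specific Track. A minimal sketch of that flow (illustration only, not part of this commit; error handling omitted and the argument values are made up):

// Given the abstract track created by the C API wrapper, obtain a concrete, codec-specific Track.
codec_args args = {0};
Track::Ptr abstract_track = std::make_shared<VideoTrackForC>(MKCodecH264, &args);
Track::Ptr concrete = Factory::getTrackByAbstractTrack(abstract_track); // an H264Track in this case
if (!concrete) {
    // the codec is not supported by the factory
}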
RtpCodec::Ptr Factory::getRtpEncoderBySdp(const Sdp::Ptr &sdp) {
GET_CONFIG(uint32_t,audio_mtu,Rtp::kAudioMtuSize);
GET_CONFIG(uint32_t,video_mtu,Rtp::kVideoMtuSize);
......
...@@ -28,6 +28,11 @@ public:
static Track::Ptr getTrackBySdp(const SdpTrack::Ptr &track);
/**
* Create a concrete Track object from the abstract Track used by the C API
*/
static Track::Ptr getTrackByAbstractTrack(const Track::Ptr& track);
/**
* Create an rtp encoder from an sdp
* @param sdp the sdp object
*/
......
...@@ -15,6 +15,7 @@
#include <functional>
#include "Util/RingBuffer.h"
#include "Network/Socket.h"
#include "Common/Stamp.h"
namespace mediakit{
...@@ -263,6 +264,27 @@ private:
};
/**
* A single Frame object may contain several frames (AAC) whose timestamps differ
* ZLMediaKit first splits such composite frames into individual frames before processing them
* A composite frame can be split into several sub-Frames without copying memory
* This class exists to avoid memory copies when splitting composite frames, improving performance
*/
template<typename Parent>
class FrameTSInternal : public Parent{
public:
typedef std::shared_ptr<FrameTSInternal> Ptr;
FrameTSInternal(const Frame::Ptr &parent_frame, char *ptr, size_t size, size_t prefix_size,uint32_t dts,uint32_t pts)
: Parent(ptr, size, dts, pts, prefix_size) {
_parent_frame = parent_frame;
}
bool cacheAble() const override {
return _parent_frame->cacheAble();
}
private:
Frame::Ptr _parent_frame;
};
/**
* 写帧接口的抽象接口类 * 写帧接口的抽象接口类
*/ */
class FrameWriterInterface { class FrameWriterInterface {
...@@ -369,17 +391,18 @@ class FrameFromPtr : public Frame{
public:
typedef std::shared_ptr<FrameFromPtr> Ptr;
FrameFromPtr(CodecId codec_id, char *ptr, size_t size, uint32_t dts, uint32_t pts = 0, size_t prefix_size = 0, bool is_key = false)
    : FrameFromPtr(ptr, size, dts, pts, prefix_size, is_key) {
_codec_id = codec_id;
}
FrameFromPtr(char *ptr, size_t size, uint32_t dts, uint32_t pts = 0, size_t prefix_size = 0, bool is_key = false) {
_ptr = ptr;
_size = size;
_dts = dts;
_pts = pts;
_prefix_size = prefix_size;
_is_key = is_key;
}
char *data() const override{
...@@ -418,7 +441,7 @@ public:
}
bool keyFrame() const override {
return _is_key;
}
bool configFrame() const override{
...@@ -435,6 +458,7 @@ protected:
size_t _size;
size_t _prefix_size;
CodecId _codec_id = CodecInvalid;
bool _is_key;
};
/**
...@@ -498,6 +522,58 @@ private:
FrameImp::Ptr _buffer;
};
// This class implements frame-level timestamp override
class FrameStamp : public Frame{
public:
typedef std::shared_ptr<FrameStamp> Ptr;
FrameStamp(const Frame::Ptr &frame, Stamp &stamp,bool modify_stamp){
_frame = frame;
// Override the timestamp
stamp.revise(frame->dts(), frame->pts(), _dts, _pts, modify_stamp);
}
~FrameStamp() override {}
uint32_t dts() const override{
return (uint32_t)_dts;
}
uint32_t pts() const override{
return (uint32_t)_pts;
}
size_t prefixSize() const override {
return _frame->prefixSize();
}
bool keyFrame() const override {
return _frame->keyFrame();
}
bool configFrame() const override {
return _frame->configFrame();
}
bool cacheAble() const override {
return _frame->cacheAble();
}
char *data() const override {
return _frame->data();
}
size_t size() const override {
return _frame->size();
}
CodecId getCodecId() const override {
return _frame->getCodecId();
}
private:
int64_t _dts;
int64_t _pts;
Frame::Ptr _frame;
};
/**
* This object converts a Buffer object into a cacheable Frame object
*/
......
...@@ -14,7 +14,8 @@ using namespace toolkit;
namespace mediakit {
void RtcpContext::onRtp(
    uint16_t /*seq*/, uint32_t stamp, uint64_t ntp_stamp_ms, uint32_t /*sample_rate*/, size_t bytes) {
++_packets;
_bytes += bytes;
_last_rtp_stamp = stamp;
...@@ -45,12 +46,16 @@ Buffer::Ptr RtcpContext::createRtcpRR(uint32_t rtcp_ssrc, uint32_t rtp_ssrc) {
throw std::runtime_error("没有实现, rtp发送者尝试发送rr包");
}
Buffer::Ptr RtcpContext::createRtcpXRDLRR(uint32_t rtcp_ssrc, uint32_t rtp_ssrc) {
throw std::runtime_error("没有实现, rtp发送者尝试发送xr dlrr包");
}
////////////////////////////////////////////////////////////////////////////////////
void RtcpContextForSend::onRtcp(RtcpHeader *rtcp) {
switch ((RtcpType)rtcp->pt) {
case RtcpType::RTCP_RR: {
auto rtcp_rr = (RtcpRR *)rtcp;
for (auto item : rtcp_rr->getItemList()) {
if (!item->last_sr_stamp) {
continue;
...@@ -59,20 +64,34 @@ void RtcpContextForSend::onRtcp(RtcpHeader *rtcp) {
if (it == _sender_report_ntp.end()) {
continue;
}
// Time elapsed between sending the sr and receiving this rr
auto ms_inc = getCurrentMillisecond() - it->second;
// Delay between the rtp receiver getting the sr and replying with the rr, already converted to milliseconds
auto delay_ms = (uint64_t)item->delay_since_last_sr * 1000 / 65536;
auto rtt = (int)(ms_inc - delay_ms);
if (rtt >= 0) {
// rtt can never be less than 0
_rtt[item->ssrc] = rtt;
// InfoL << "ssrc:" << item->ssrc << ",rtt:" << rtt;
}
}
break;
} }
case RtcpType::RTCP_XR: {
auto rtcp_xr = (RtcpXRRRTR *)rtcp;
if (rtcp_xr->bt == 4) {
_xr_xrrtr_recv_last_rr[rtcp_xr->ssrc]
= ((rtcp_xr->ntpmsw & 0xFFFF) << 16) | ((rtcp_xr->ntplsw >> 16) & 0xFFFF);
_xr_rrtr_recv_sys_stamp[rtcp_xr->ssrc] = getCurrentMillisecond();
} else if (rtcp_xr->bt == 5) {
TraceL << "for sender not recive dlrr";
} else {
TraceL << "not support xr bt " << rtcp_xr->bt;
} }
break;
}
default:
break;
}
}
...@@ -89,55 +108,85 @@ Buffer::Ptr RtcpContextForSend::createRtcpSR(uint32_t rtcp_ssrc) {
rtcp->setNtpStamp(_last_ntp_stamp_ms);
rtcp->rtpts = htonl(_last_rtp_stamp);
rtcp->ssrc = htonl(rtcp_ssrc);
rtcp->packet_count = htonl((uint32_t)_packets);
rtcp->octet_count = htonl((uint32_t)_bytes);
// Record the last sender report sent, used later to compute rtt
auto last_sr_lsr = ((ntohl(rtcp->ntpmsw) & 0xFFFF) << 16) | ((ntohl(rtcp->ntplsw) >> 16) & 0xFFFF);
_sender_report_ntp[last_sr_lsr] = getCurrentMillisecond();
if (_sender_report_ntp.size() >= 5) {
// Drop the oldest sr rtcp
_sender_report_ntp.erase(_sender_report_ntp.begin());
}
return RtcpHeader::toBuffer(std::move(rtcp));
}
toolkit::Buffer::Ptr RtcpContextForSend::createRtcpXRDLRR(uint32_t rtcp_ssrc, uint32_t rtp_ssrc) {
auto rtcp = RtcpXRDLRR::create(1);
rtcp->bt = 5;
rtcp->reserved = 0;
rtcp->block_length = htons(3);
rtcp->ssrc = htonl(rtcp_ssrc);
rtcp->items.ssrc = htonl(rtp_ssrc);
if (_xr_xrrtr_recv_last_rr.find(rtp_ssrc) == _xr_xrrtr_recv_last_rr.end()) {
rtcp->items.lrr = 0;
WarnL;
} else {
rtcp->items.lrr = htonl(_xr_xrrtr_recv_last_rr[rtp_ssrc]);
}
if (_xr_rrtr_recv_sys_stamp.find(rtp_ssrc) == _xr_rrtr_recv_sys_stamp.end()) {
rtcp->items.dlrr = 0;
WarnL;
} else {
// now - Last SR time, in milliseconds
auto delay = getCurrentMillisecond() - _xr_rrtr_recv_sys_stamp[rtp_ssrc];
// in units of 1/65536 seconds
auto dlsr = (uint32_t)(delay / 1000.0f * 65536);
rtcp->items.dlrr = htonl(dlsr);
}
return RtcpHeader::toBuffer(std::move(rtcp));
}
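Given the DLRR block built above, the peer that originally sent the XR RRTR can estimate round-trip time just like with SR/RR: RTT = arrival time of the DLRR (middle 32 bits of the local NTP time) - LRR - DLRR, all in 1/65536-second units. A hedged receiver-side sketch (illustration only, not part of this commit):

// Receiver-side RTT estimate from an XR DLRR sub-block (RFC 3611); all inputs are in 1/65536-second units.
// 'now_lsr' is the middle 32 bits of the local NTP time when the DLRR block arrived.
uint32_t estimateRttMs(uint32_t now_lsr, uint32_t lrr, uint32_t dlrr) {
    uint32_t rtt_units = now_lsr - lrr - dlrr;      // unsigned arithmetic keeps this wrap-around safe
    return (uint32_t)(rtt_units * 1000.0 / 65536);  // convert to milliseconds
}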
////////////////////////////////////////////////////////////////////////////////////
void RtcpContextForRecv::onRtp(
    uint16_t seq, uint32_t stamp, uint64_t ntp_stamp_ms, uint32_t sample_rate, size_t bytes) {
{
// Only the receiver does the heavier statistics
auto sys_stamp = getCurrentMillisecond();
if (_last_rtp_sys_stamp) {
// Compute the timestamp jitter
double diff = double(
    (int64_t(sys_stamp) - int64_t(_last_rtp_sys_stamp)) * (sample_rate / double(1000.0))
    - (int64_t(stamp) - int64_t(_last_rtp_stamp)));
if (diff < 0) {
diff = -diff;
}
// Jitter is expressed in sample counts
_jitter += (diff - _jitter) / 16.0;
} else {
_jitter = 0;
}
if (_last_rtp_seq > 0xFF00 && seq < 0xFF && (!_seq_cycles || _packets - _last_cycle_packets > 0x1FFF)) {
// The previous seq was above 0xFF00 and the current seq is below 0xFF,
// and either no wrap-around has been seen yet or more than 0x1FFF packets have passed since the last one, so treat this as a seq wrap-around
++_seq_cycles;
_last_cycle_packets = _packets;
_seq_max = seq;
} else if (seq > _seq_max) {
// Largest seq seen before the next wrap-around
_seq_max = seq;
}
if (!_seq_base) {
// Record the seq of the first rtp packet
_seq_base = seq;
} else if (!_seq_cycles && seq < _seq_base) {
// No wrap-around yet, so take the newest seq as the base seq
_seq_base = seq;
}
...@@ -148,9 +197,9 @@ void RtcpContextForRecv::onRtp(uint16_t seq, uint32_t stamp, uint64_t ntp_stamp_
}
void RtcpContextForRecv::onRtcp(RtcpHeader *rtcp) {
switch ((RtcpType)rtcp->pt) {
case RtcpType::RTCP_SR: {
auto rtcp_sr = (RtcpSR *)rtcp;
/**
last SR timestamp (LSR): 32 bits
The middle 32 bits out of 64 in the NTP timestamp (as explained in
...@@ -162,7 +211,8 @@ void RtcpContextForRecv::onRtcp(RtcpHeader *rtcp) {
_last_sr_ntp_sys = getCurrentMillisecond();
break;
}
default:
break;
}
}
...@@ -192,7 +242,7 @@ Buffer::Ptr RtcpContextForRecv::createRtcpRR(uint32_t rtcp_ssrc, uint32_t rtp_ss
auto rtcp = RtcpRR::create(1);
rtcp->ssrc = htonl(rtcp_ssrc);
ReportItem *item = (ReportItem *)&rtcp->items;
item->ssrc = htonl(rtp_ssrc);
uint8_t fraction = 0;
...@@ -211,9 +261,9 @@ Buffer::Ptr RtcpContextForRecv::createRtcpRR(uint32_t rtcp_ssrc, uint32_t rtp_ss
// now - Last SR time, in milliseconds
auto delay = getCurrentMillisecond() - _last_sr_ntp_sys;
// in units of 1/65536 seconds
auto dlsr = (uint32_t)(delay / 1000.0f * 65536);
item->delay_since_last_sr = htonl(_last_sr_lsr ? dlsr : 0);
return RtcpHeader::toBuffer(rtcp);
}
} // namespace mediakit
\ No newline at end of file
...@@ -11,9 +11,9 @@
#ifndef ZLMEDIAKIT_RTCPCONTEXT_H
#define ZLMEDIAKIT_RTCPCONTEXT_H
#include "Rtcp.h"
#include <stddef.h>
#include <stdint.h>
namespace mediakit {
...@@ -56,6 +56,13 @@ public:
virtual toolkit::Buffer::Ptr createRtcpSR(uint32_t rtcp_ssrc);
/**
* @brief Create an xr dlrr packet, used by the receiver to estimate rtt
*
* @return toolkit::Buffer::Ptr
*/
virtual toolkit::Buffer::Ptr createRtcpXRDLRR(uint32_t rtcp_ssrc, uint32_t rtp_ssrc);
/**
* 创建RR rtcp包 * 创建RR rtcp包
* @param rtcp_ssrc rtcp的ssrc * @param rtcp_ssrc rtcp的ssrc
* @param rtp_ssrc rtp的ssrc * @param rtp_ssrc rtp的ssrc
...@@ -74,11 +81,11 @@ public: ...@@ -74,11 +81,11 @@ public:
virtual size_t geLostInterval(); virtual size_t geLostInterval();
protected: protected:
//收到或发送的rtp的字节数 // 收到或发送的rtp的字节数
size_t _bytes = 0; size_t _bytes = 0;
//收到或发送的rtp的个数 // 收到或发送的rtp的个数
size_t _packets = 0; size_t _packets = 0;
//上次的rtp时间戳,毫秒 // 上次的rtp时间戳,毫秒
uint32_t _last_rtp_stamp = 0; uint32_t _last_rtp_stamp = 0;
uint64_t _last_ntp_stamp_ms = 0; uint64_t _last_ntp_stamp_ms = 0;
}; };
...@@ -86,8 +93,11 @@ protected: ...@@ -86,8 +93,11 @@ protected:
class RtcpContextForSend : public RtcpContext { class RtcpContextForSend : public RtcpContext {
public: public:
toolkit::Buffer::Ptr createRtcpSR(uint32_t rtcp_ssrc) override; toolkit::Buffer::Ptr createRtcpSR(uint32_t rtcp_ssrc) override;
void onRtcp(RtcpHeader *rtcp) override; void onRtcp(RtcpHeader *rtcp) override;
toolkit::Buffer::Ptr createRtcpXRDLRR(uint32_t rtcp_ssrc, uint32_t rtp_ssrc) override;
/**
* Get the rtt
* @param ssrc rtp ssrc
...@@ -96,8 +106,11 @@ public:
uint32_t getRtt(uint32_t ssrc) const;
private:
std::map<uint32_t /*ssrc*/, uint32_t /*rtt*/> _rtt;
std::map<uint32_t /*last_sr_lsr*/, uint64_t /*ntp stamp*/> _sender_report_ntp;
std::map<uint32_t /*ssrc*/, uint64_t /*xr rrtr sys stamp*/> _xr_rrtr_recv_sys_stamp;
std::map<uint32_t /*ssrc*/, uint32_t /*last rr */> _xr_xrrtr_recv_last_rr;
};
class RtcpContextForRecv : public RtcpContext {
...@@ -111,29 +124,29 @@ public:
void onRtcp(RtcpHeader *rtcp) override;
private:
// Timestamp jitter value
double _jitter = 0;
// Value of the first seq
uint16_t _seq_base = 0;
// Maximum rtp seq
uint16_t _seq_max = 0;
// Number of rtp seq wrap-arounds
uint16_t _seq_cycles = 0;
// Number of rtp packets recorded when the last wrap-around happened
size_t _last_cycle_packets = 0;
// The last seq
uint16_t _last_rtp_seq = 0;
// System timestamp (milliseconds) of the last rtp packet, used for jitter statistics
uint64_t _last_rtp_sys_stamp = 0;
// Total number of lost packets at the last statistics run
size_t _last_lost = 0;
// Total number of expected rtp packets at the last statistics run
size_t _last_expected = 0;
// Last SR timestamp computed when the last sr packet was received
uint32_t _last_sr_lsr = 0;
// System timestamp when the last sr was received, in milliseconds
uint64_t _last_sr_ntp_sys = 0;
};
} // namespace mediakit
#endif // ZLMEDIAKIT_RTCPCONTEXT_H
...@@ -11,21 +11,21 @@
#ifndef ZLMEDIAKIT_RTCPFCI_H
#define ZLMEDIAKIT_RTCPFCI_H
#include "Common/config.h"
#include "Rtcp.h"
namespace mediakit {
/////////////////////////////////////////// PSFB ////////////////////////////////////////////////////
// PSFB fmt = 2
// https://tools.ietf.org/html/rfc4585#section-6.3.2.2
// 0 1 2 3
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | First | Number | PictureID |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// First: 13 bits
// The macroblock (MB) address of the first lost macroblock. The MB
// numbering is done such that the macroblock in the upper left
// corner of the picture is considered macroblock number 1 and the
...@@ -101,8 +101,8 @@ public:
} PACKED;
#endif
// PSFB fmt = 4
// https://tools.ietf.org/html/rfc5104#section-4.3.1.1
// 0 1 2 3
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
...@@ -188,8 +188,8 @@ private:
#endif
// PSFB fmt = 15
// https://tools.ietf.org/html/draft-alvestrand-rmcat-remb-03
// 0 1 2 3
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
...@@ -227,9 +227,9 @@ public:
std::vector<uint32_t> getSSRC();
private:
// Unique identifier 'R' 'E' 'M' 'B'
char magic[4];
// Num SSRC (8 bits)/BR Exp (6 bits)/ BR Mantissa (18 bits)
uint8_t bitrate[4];
// SSRC feedback (32 bits) Consists of one or more SSRC entries which
// this feedback message applies to.
...@@ -238,8 +238,8 @@ private:
/////////////////////////////////////////// RTPFB ////////////////////////////////////////////////////
// RTPFB fmt = 1
// https://tools.ietf.org/html/rfc4585#section-6.2.1
// 0 1 2 3
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
...@@ -255,7 +255,7 @@ public:
void check(size_t size);
uint16_t getPid() const;
uint16_t getBlp() const;
// Returns the packet-loss list, 17 entries in total; the first packet is always lost
// TODO: replace std::bitset
std::vector<bool> getBitArray() const;
std::string dumpString() const;
} PACKED;
#endif
enum class SymbolStatus : uint8_t {
// Packet not received
not_received = 0,
// Packet received, small delta (a "small" delta is a value that can be represented in one byte)
small_delta = 1,
// Packet received, large or negative delta (a "large" delta is a value that needs two bytes)
large_delta = 2,
// Reserved
reserved = 3
};
// RTPFB fmt = 15
// https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions-01#section-3.1
// https://zhuanlan.zhihu.com/p/206656654
// 0 1 2 3
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
...@@ -351,14 +351,15 @@ enum class SymbolStatus : uint8_t{
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | recv delta | recv delta | zero padding |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
class FCI_TWCC {
public:
static size_t constexpr kSize = 8;
using TwccPacketStatus
    = std::map<uint16_t /*rtp ext seq*/, std::pair<SymbolStatus, int16_t /*recv delta, in units of 250us*/>>;
void check(size_t size);
std::string dumpString(size_t total_size) const;
uint16_t getBaseSeq() const;
// in units of 64 ms
uint32_t getReferenceTime() const;
uint16_t getPacketCount() const;
TwccPacketStatus getPacketChunkList(size_t total_size) const;
...@@ -366,15 +367,15 @@ public:
static std::string create(uint32_t ref_time, uint8_t fb_pkt_count, TwccPacketStatus &status);
private:
// base sequence number: the seq of the first packet covered by this feedback, i.e. the sequence number from the RTP extension header
uint16_t base_seq;
// packet status count: how many packets' status this feedback carries, counted from the base sequence number
uint16_t pkt_status_count;
// reference time: an absolute base time; the arrival time of every media packet in this feedback is computed relative to it
uint8_t ref_time[3];
// feedback packet count: index of this transport-cc packet, incremented by 1 each time
uint8_t fb_pkt_count;
} PACKED;
} // namespace mediakit
#endif // ZLMEDIAKIT_RTCPFCI_H
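As the field comments above note, the TWCC reference time is in 64 ms units and each recv delta is in 250 µs units, so packet arrival times are reconstructed by adding the running sum of deltas to reference_time * 64 ms. A small sketch of that conversion (illustration only, not part of this commit):

// Reconstruct arrival times (in milliseconds) from a TWCC feedback block.
// 'deltas' holds the recv delta values in 250 microsecond units, in packet order.
#include <cstdint>
#include <vector>

std::vector<double> arrivalTimesMs(uint32_t reference_time_64ms, const std::vector<int16_t> &deltas) {
    std::vector<double> out;
    double t = reference_time_64ms * 64.0; // base time in milliseconds
    for (auto d : deltas) {
        t += d * 0.25;                     // one delta unit is 250 us = 0.25 ms
        out.push_back(t);
    }
    return out;
}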
...@@ -99,7 +99,11 @@ private:
if (_rtmp_src) {
_rtmp_src->setMetaData(val);
}
if(_demuxer){
    return;
}
_demuxer = std::make_shared<RtmpDemuxer>();
//TraceL<<" _wait_track_ready "<<_wait_track_ready;
_demuxer->setTrackListener(this, _wait_track_ready);
_demuxer->loadMetaData(val);
}
......
...@@ -28,7 +28,7 @@ RtmpSession::~RtmpSession() { ...@@ -28,7 +28,7 @@ RtmpSession::~RtmpSession() {
} }
void RtmpSession::onError(const SockException& err) { void RtmpSession::onError(const SockException& err) {
bool is_player = !_push_src; bool is_player = !_push_src_ownership;
uint64_t duration = _ticker.createdTime() / 1000; uint64_t duration = _ticker.createdTime() / 1000;
WarnP(this) << (is_player ? "RTMP播放器(" : "RTMP推流器(") WarnP(this) << (is_player ? "RTMP播放器(" : "RTMP推流器(")
<< _media_info._vhost << "/" << _media_info._vhost << "/"
...@@ -219,10 +219,11 @@ void RtmpSession::onCmd_publish(AMFDecoder &dec) { ...@@ -219,10 +219,11 @@ void RtmpSession::onCmd_publish(AMFDecoder &dec) {
} }
void RtmpSession::onCmd_deleteStream(AMFDecoder &dec) { void RtmpSession::onCmd_deleteStream(AMFDecoder &dec) {
_push_src = nullptr;
//The reply below may trigger a broken pipe event and thus invoke the onError callback directly, so _push_src must be cleared first to avoid triggering the resume-push-after-disconnect logic
sendStatus({ "level", "status", sendStatus({ "level", "status",
"code", "NetStream.Unpublish.Success", "code", "NetStream.Unpublish.Success",
"description", "Stop publishing." }); "description", "Stop publishing." });
//_push_src = nullptr;
throw std::runtime_error(StrPrinter << "Stop publishing" << endl); throw std::runtime_error(StrPrinter << "Stop publishing" << endl);
} }
......
...@@ -24,6 +24,15 @@ ...@@ -24,6 +24,15 @@
using namespace toolkit; using namespace toolkit;
namespace mediakit { namespace mediakit {
void Decoder::setOnDecode(Decoder::onDecode cb) {
_on_decode = std::move(cb);
}
void Decoder::setOnStream(Decoder::onStream cb) {
_on_stream = std::move(cb);
}
static Decoder::Ptr createDecoder_l(DecoderImp::Type type) { static Decoder::Ptr createDecoder_l(DecoderImp::Type type) {
switch (type){ switch (type){
case DecoderImp::decoder_ps: case DecoderImp::decoder_ps:
......
...@@ -25,12 +25,16 @@ public: ...@@ -25,12 +25,16 @@ public:
typedef std::function<void(int stream, int codecid, const void *extra, size_t bytes, int finish)> onStream; typedef std::function<void(int stream, int codecid, const void *extra, size_t bytes, int finish)> onStream;
virtual ssize_t input(const uint8_t *data, size_t bytes) = 0; virtual ssize_t input(const uint8_t *data, size_t bytes) = 0;
virtual void setOnDecode(onDecode cb) = 0; void setOnDecode(onDecode cb);
virtual void setOnStream(onStream cb) = 0; void setOnStream(onStream cb);
protected: protected:
Decoder() = default; Decoder() = default;
virtual ~Decoder() = default; virtual ~Decoder() = default;
protected:
onDecode _on_decode;
onStream _on_stream;
}; };
class DecoderImp{ class DecoderImp{
......
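With the callbacks and their _on_decode / _on_stream members hoisted into the Decoder base class, a concrete demuxer only needs to override input() and fire the inherited handlers. A minimal sketch of that shape, using only the onStream signature shown above; the class is hypothetical, not part of ZLMediaKit, and assumes the Decoder declaration above is visible:

class DemoDecoder : public mediakit::Decoder {
public:
    ssize_t input(const uint8_t *data, size_t bytes) override {
        // ... parse "data" here; once a new elementary stream is found,
        // report it through the callback inherited from Decoder:
        if (_on_stream) {
            _on_stream(0 /*stream*/, 0 /*codecid*/, nullptr /*extra*/, 0 /*bytes*/, 1 /*finish*/);
        }
        return static_cast<ssize_t>(bytes);
    }
};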
...@@ -53,14 +53,6 @@ ssize_t PSDecoder::input(const uint8_t *data, size_t bytes) { ...@@ -53,14 +53,6 @@ ssize_t PSDecoder::input(const uint8_t *data, size_t bytes) {
return bytes; return bytes;
} }
void PSDecoder::setOnDecode(Decoder::onDecode cb) {
_on_decode = std::move(cb);
}
void PSDecoder::setOnStream(Decoder::onStream cb) {
_on_stream = std::move(cb);
}
const char *PSDecoder::onSearchPacketTail(const char *data, size_t len) { const char *PSDecoder::onSearchPacketTail(const char *data, size_t len) {
try { try {
auto ret = ps_demuxer_input(static_cast<struct ps_demuxer_t *>(_ps_demuxer), reinterpret_cast<const uint8_t *>(data), len); auto ret = ps_demuxer_input(static_cast<struct ps_demuxer_t *>(_ps_demuxer), reinterpret_cast<const uint8_t *>(data), len);
......
...@@ -25,8 +25,6 @@ public: ...@@ -25,8 +25,6 @@ public:
~PSDecoder(); ~PSDecoder();
ssize_t input(const uint8_t* data, size_t bytes) override; ssize_t input(const uint8_t* data, size_t bytes) override;
void setOnDecode(onDecode cb) override;
void setOnStream(onStream cb) override;
// HttpRequestSplitter interface // HttpRequestSplitter interface
private: private:
...@@ -36,8 +34,6 @@ private: ...@@ -36,8 +34,6 @@ private:
private: private:
void *_ps_demuxer = nullptr; void *_ps_demuxer = nullptr;
onDecode _on_decode;
onStream _on_stream;
}; };
}//namespace mediakit }//namespace mediakit
......
...@@ -23,7 +23,7 @@ PSEncoderImp::PSEncoderImp(uint32_t ssrc, uint8_t payload_type) : MpegMuxer(true ...@@ -23,7 +23,7 @@ PSEncoderImp::PSEncoderImp(uint32_t ssrc, uint8_t payload_type) : MpegMuxer(true
_rtp_encoder = std::make_shared<CommonRtpEncoder>(CodecInvalid, ssrc, video_mtu, 90000, payload_type, 0); _rtp_encoder = std::make_shared<CommonRtpEncoder>(CodecInvalid, ssrc, video_mtu, 90000, payload_type, 0);
_rtp_encoder->setRtpRing(std::make_shared<RtpRing::RingType>()); _rtp_encoder->setRtpRing(std::make_shared<RtpRing::RingType>());
_rtp_encoder->getRtpRing()->setDelegate(std::make_shared<RingDelegateHelper>([this](RtpPacket::Ptr rtp, bool is_key){ _rtp_encoder->getRtpRing()->setDelegate(std::make_shared<RingDelegateHelper>([this](RtpPacket::Ptr rtp, bool is_key){
onRTP(std::move(rtp)); onRTP(std::move(rtp),is_key);
})); }));
InfoL << this << " " << printSSRC(_rtp_encoder->getSsrc()); InfoL << this << " " << printSSRC(_rtp_encoder->getSsrc());
} }
...@@ -36,7 +36,7 @@ void PSEncoderImp::onWrite(std::shared_ptr<Buffer> buffer, uint32_t stamp, bool ...@@ -36,7 +36,7 @@ void PSEncoderImp::onWrite(std::shared_ptr<Buffer> buffer, uint32_t stamp, bool
if (!buffer) { if (!buffer) {
return; return;
} }
_rtp_encoder->inputFrame(std::make_shared<FrameFromPtr>(buffer->data(), buffer->size(), stamp, stamp)); _rtp_encoder->inputFrame(std::make_shared<FrameFromPtr>(buffer->data(), buffer->size(), stamp, stamp,0,key_pos));
} }
}//namespace mediakit }//namespace mediakit
......
...@@ -27,7 +27,7 @@ public: ...@@ -27,7 +27,7 @@ public:
protected: protected:
//rtp打包后回调 //rtp打包后回调
virtual void onRTP(toolkit::Buffer::Ptr rtp) = 0; virtual void onRTP(toolkit::Buffer::Ptr rtp,bool is_key = false) = 0;
protected: protected:
void onWrite(std::shared_ptr<toolkit::Buffer> buffer, uint32_t stamp, bool key_pos) override; void onWrite(std::shared_ptr<toolkit::Buffer> buffer, uint32_t stamp, bool key_pos) override;
......
...@@ -35,7 +35,7 @@ bool RawEncoderImp::addTrack(const Track::Ptr &track){ ...@@ -35,7 +35,7 @@ bool RawEncoderImp::addTrack(const Track::Ptr &track){
_rtp_encoder = createRtpEncoder(track); _rtp_encoder = createRtpEncoder(track);
_rtp_encoder->setRtpRing(std::make_shared<RtpRing::RingType>()); _rtp_encoder->setRtpRing(std::make_shared<RtpRing::RingType>());
_rtp_encoder->getRtpRing()->setDelegate(std::make_shared<RingDelegateHelper>([this](RtpPacket::Ptr rtp, bool is_key){ _rtp_encoder->getRtpRing()->setDelegate(std::make_shared<RingDelegateHelper>([this](RtpPacket::Ptr rtp, bool is_key){
onRTP(std::move(rtp)); onRTP(std::move(rtp),true);
})); }));
return true; return true;
} }
...@@ -44,7 +44,7 @@ bool RawEncoderImp::addTrack(const Track::Ptr &track){ ...@@ -44,7 +44,7 @@ bool RawEncoderImp::addTrack(const Track::Ptr &track){
_rtp_encoder = createRtpEncoder(track); _rtp_encoder = createRtpEncoder(track);
_rtp_encoder->setRtpRing(std::make_shared<RtpRing::RingType>()); _rtp_encoder->setRtpRing(std::make_shared<RtpRing::RingType>());
_rtp_encoder->getRtpRing()->setDelegate(std::make_shared<RingDelegateHelper>([this](RtpPacket::Ptr rtp, bool is_key){ _rtp_encoder->getRtpRing()->setDelegate(std::make_shared<RingDelegateHelper>([this](RtpPacket::Ptr rtp, bool is_key){
onRTP(std::move(rtp)); onRTP(std::move(rtp),is_key);
})); }));
return true; return true;
} }
......
...@@ -43,10 +43,9 @@ public: ...@@ -43,10 +43,9 @@ public:
protected: protected:
//rtp打包后回调 //rtp打包后回调
virtual void onRTP(toolkit::Buffer::Ptr rtp) = 0; virtual void onRTP(toolkit::Buffer::Ptr rtp, bool is_key = false) = 0;
private: private:
RtpCodec::Ptr createRtpEncoder(const Track::Ptr &track); RtpCodec::Ptr createRtpEncoder(const Track::Ptr &track);
uint32_t _ssrc; uint32_t _ssrc;
uint8_t _payload_type; uint8_t _payload_type;
bool _sendAudio; bool _sendAudio;
......
...@@ -19,25 +19,37 @@ namespace mediakit{ ...@@ -19,25 +19,37 @@ namespace mediakit{
RtpCache::RtpCache(onFlushed cb) { RtpCache::RtpCache(onFlushed cb) {
_cb = std::move(cb); _cb = std::move(cb);
} }
bool RtpCache::firstKeyReady(bool in) {
if(_first_key){
return _first_key;
}
_first_key = in;
return _first_key;
}
void RtpCache::onFlush(std::shared_ptr<List<Buffer::Ptr> > rtp_list, bool) { void RtpCache::onFlush(std::shared_ptr<List<Buffer::Ptr> > rtp_list, bool) {
_cb(std::move(rtp_list)); _cb(std::move(rtp_list));
} }
void RtpCache::input(uint64_t stamp, Buffer::Ptr buffer) { void RtpCache::input(uint64_t stamp, Buffer::Ptr buffer,bool is_key ) {
inputPacket(stamp, true, std::move(buffer), false); inputPacket(stamp, true, std::move(buffer), is_key);
} }
void RtpCachePS::onRTP(Buffer::Ptr buffer) { void RtpCachePS::onRTP(Buffer::Ptr buffer,bool is_key) {
if(!firstKeyReady(is_key)){
return;
}
auto rtp = std::static_pointer_cast<RtpPacket>(buffer); auto rtp = std::static_pointer_cast<RtpPacket>(buffer);
auto stamp = rtp->getStampMS(); auto stamp = rtp->getStampMS();
input(stamp, std::move(buffer)); input(stamp, std::move(buffer),is_key);
} }
void RtpCacheRaw::onRTP(Buffer::Ptr buffer) { void RtpCacheRaw::onRTP(Buffer::Ptr buffer,bool is_key) {
if(!firstKeyReady(is_key)){
return;
}
auto rtp = std::static_pointer_cast<RtpPacket>(buffer); auto rtp = std::static_pointer_cast<RtpPacket>(buffer);
auto stamp = rtp->getStampMS(); auto stamp = rtp->getStampMS();
input(stamp, std::move(buffer)); input(stamp, std::move(buffer),is_key);
} }
}//namespace mediakit }//namespace mediakit
......
...@@ -30,13 +30,15 @@ protected: ...@@ -30,13 +30,15 @@ protected:
* 输入rtp(目的是为了合并写) * 输入rtp(目的是为了合并写)
* @param buffer rtp数据 * @param buffer rtp数据
*/ */
void input(uint64_t stamp, toolkit::Buffer::Ptr buffer); void input(uint64_t stamp, toolkit::Buffer::Ptr buffer,bool is_key = false);
bool firstKeyReady(bool in);
protected: protected:
void onFlush(std::shared_ptr<toolkit::List<toolkit::Buffer::Ptr> > rtp_list, bool) override; void onFlush(std::shared_ptr<toolkit::List<toolkit::Buffer::Ptr> > rtp_list, bool) override;
private: private:
onFlushed _cb; onFlushed _cb;
bool _first_key = false;
}; };
class RtpCachePS : public RtpCache, public PSEncoderImp{ class RtpCachePS : public RtpCache, public PSEncoderImp{
...@@ -45,7 +47,7 @@ public: ...@@ -45,7 +47,7 @@ public:
~RtpCachePS() override = default; ~RtpCachePS() override = default;
protected: protected:
void onRTP(toolkit::Buffer::Ptr rtp) override; void onRTP(toolkit::Buffer::Ptr rtp,bool is_key = false) override;
}; };
...@@ -55,7 +57,7 @@ public: ...@@ -55,7 +57,7 @@ public:
~RtpCacheRaw() override = default; ~RtpCacheRaw() override = default;
protected: protected:
void onRTP(toolkit::Buffer::Ptr rtp) override; void onRTP(toolkit::Buffer::Ptr rtp,bool is_key = false) override;
}; };
}//namespace mediakit }//namespace mediakit
......
...@@ -101,14 +101,6 @@ ssize_t TSDecoder::input(const uint8_t *data, size_t bytes) { ...@@ -101,14 +101,6 @@ ssize_t TSDecoder::input(const uint8_t *data, size_t bytes) {
return bytes; return bytes;
} }
void TSDecoder::setOnDecode(Decoder::onDecode cb) {
_on_decode = std::move(cb);
}
void TSDecoder::setOnStream(Decoder::onStream cb) {
_on_stream = std::move(cb);
}
#endif//defined(ENABLE_HLS) #endif//defined(ENABLE_HLS)
}//namespace mediakit }//namespace mediakit
...@@ -45,14 +45,10 @@ public: ...@@ -45,14 +45,10 @@ public:
TSDecoder(); TSDecoder();
~TSDecoder(); ~TSDecoder();
ssize_t input(const uint8_t* data, size_t bytes) override ; ssize_t input(const uint8_t* data, size_t bytes) override ;
void setOnDecode(onDecode cb) override;
void setOnStream(onStream cb) override;
private: private:
TSSegment _ts_segment; TSSegment _ts_segment;
struct ts_demuxer_t* _demuxer_ctx = nullptr; struct ts_demuxer_t* _demuxer_ctx = nullptr;
onDecode _on_decode;
onStream _on_stream;
}; };
#endif//defined(ENABLE_HLS) #endif//defined(ENABLE_HLS)
......
...@@ -60,7 +60,7 @@ RtspSession::~RtspSession() { ...@@ -60,7 +60,7 @@ RtspSession::~RtspSession() {
} }
void RtspSession::onError(const SockException &err) { void RtspSession::onError(const SockException &err) {
bool is_player = !_push_src; bool is_player = !_push_src_ownership;
uint64_t duration = _alive_ticker.createdTime() / 1000; uint64_t duration = _alive_ticker.createdTime() / 1000;
WarnP(this) << (is_player ? "RTSP播放器(" : "RTSP推流器(") WarnP(this) << (is_player ? "RTSP播放器(" : "RTSP推流器(")
<< _media_info._vhost << "/" << _media_info._vhost << "/"
...@@ -867,8 +867,9 @@ void RtspSession::handleReq_Pause(const Parser &parser) { ...@@ -867,8 +867,9 @@ void RtspSession::handleReq_Pause(const Parser &parser) {
} }
void RtspSession::handleReq_Teardown(const Parser &parser) { void RtspSession::handleReq_Teardown(const Parser &parser) {
sendRtspResponse("200 OK");
_push_src = nullptr; _push_src = nullptr;
//The reply below may trigger a broken pipe event and thus invoke the onError callback directly, so _push_src must be cleared first to avoid triggering the resume-push-after-disconnect logic
sendRtspResponse("200 OK");
throw SockException(Err_shutdown,"recv teardown request"); throw SockException(Err_shutdown,"recv teardown request");
} }
......
#include "Util/MD5.h" #include <atomic>
#include "Util/MD5.h"
#include "Util/logger.h" #include "Util/logger.h"
#include <atomic>
#include "Packet.hpp" #include "Packet.hpp"
...@@ -225,7 +225,35 @@ size_t ControlPacket::size() const { ...@@ -225,7 +225,35 @@ size_t ControlPacket::size() const {
uint32_t ControlPacket::getSocketID(uint8_t *buf, size_t len) { uint32_t ControlPacket::getSocketID(uint8_t *buf, size_t len) {
return loadUint32(buf + 12); return loadUint32(buf + 12);
} }
std::string HandshakePacket::dump(){
_StrPrinter printer;
printer <<"flag:"<< (int)f<<"\r\n";
printer <<"control_type:"<< (int)control_type<<"\r\n";
printer <<"sub_type:"<< (int)sub_type<<"\r\n";
printer <<"type_specific_info:"<< (int)type_specific_info[0]<<":"<<(int)type_specific_info[1]<<":"<<(int)type_specific_info[2]<<":"<<(int)type_specific_info[3]<<"\r\n";
printer <<"timestamp:"<< timestamp<<"\r\n";
printer <<"dst_socket_id:"<< dst_socket_id<<"\r\n";
printer <<"version:"<< version<<"\r\n";
printer <<"encryption_field:"<< encryption_field<<"\r\n";
printer <<"extension_field:"<< extension_field<<"\r\n";
printer <<"initial_packet_sequence_number:"<< initial_packet_sequence_number<<"\r\n";
printer <<"mtu:"<< mtu<<"\r\n";
printer <<"max_flow_window_size:"<< max_flow_window_size<<"\r\n";
printer <<"handshake_type:"<< handshake_type<<"\r\n";
printer <<"srt_socket_id:"<< srt_socket_id<<"\r\n";
printer <<"syn_cookie:"<< syn_cookie<<"\r\n";
printer <<"peer_ip_addr:";
for(size_t i=0;i<sizeof(peer_ip_addr);++i){
printer<<(int)peer_ip_addr[i]<<":";
}
printer<<"\r\n";
for(size_t i=0;i<ext_list.size();++i){
printer<<ext_list[i]->dump()<<"\r\n";
}
return std::move(printer);
}
bool HandshakePacket::loadFromData(uint8_t *buf, size_t len) { bool HandshakePacket::loadFromData(uint8_t *buf, size_t len) {
if (HEADER_SIZE + HS_CONTENT_MIN_SIZE > len) { if (HEADER_SIZE + HS_CONTENT_MIN_SIZE > len) {
ErrorL << "size too smalle " << encryption_field; ErrorL << "size too smalle " << encryption_field;
...@@ -435,15 +463,11 @@ uint32_t HandshakePacket::generateSynCookie( ...@@ -435,15 +463,11 @@ uint32_t HandshakePacket::generateSynCookie(
while (true) { while (true) {
// SYN cookie // SYN cookie
char clienthost[NI_MAXHOST];
char clientport[NI_MAXSERV];
getnameinfo(
(struct sockaddr *)addr, sizeof(struct sockaddr_storage), clienthost, sizeof(clienthost), clientport,
sizeof(clientport), NI_NUMERICHOST | NI_NUMERICSERV);
int64_t timestamp = (DurationCountMicroseconds(SteadyClock::now() - ts) / 60000000) + distractor.load() int64_t timestamp = (DurationCountMicroseconds(SteadyClock::now() - ts) / 60000000) + distractor.load()
+ correction; // secret changes every one minute + correction; // secret changes every one minute
std::stringstream cookiestr; std::stringstream cookiestr;
cookiestr << clienthost << ":" << clientport << ":" << timestamp; cookiestr << SockUtil::inet_ntoa((struct sockaddr *)addr) << ":" << SockUtil::inet_port((struct sockaddr *)addr)
<< ":" << timestamp;
union { union {
unsigned char cookie[16]; unsigned char cookie[16];
uint32_t cookie_val; uint32_t cookie_val;
......
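Replacing getnameinfo() with SockUtil::inet_ntoa / inet_port leaves the cookie recipe itself unchanged: hash the client's address, port and a minute-granularity counter, so the listener stays stateless and old cookies expire on their own. A rough sketch of that recipe; std::hash stands in here for the MD5-based digest used in the real code, and the helper name is illustrative:

#include <cstdint>
#include <functional>
#include <sstream>
#include <string>

// Illustrative SYN-cookie recipe: the value depends only on the peer address
// and the current minute, so it can be re-derived and checked without state.
inline uint32_t make_syn_cookie(const std::string &peer_ip, uint16_t peer_port,
                                int64_t minute_counter) {
    std::stringstream ss;
    ss << peer_ip << ":" << peer_port << ":" << minute_counter;
    return static_cast<uint32_t>(std::hash<std::string>{}(ss.str()));
}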
...@@ -118,9 +118,9 @@ public: ...@@ -118,9 +118,9 @@ public:
USERDEFINEDTYPE = 0x7FFF USERDEFINEDTYPE = 0x7FFF
}; };
uint32_t sub_type : 16; uint16_t sub_type;
uint32_t control_type : 15; uint16_t control_type;
uint32_t f : 1; uint8_t f;
uint8_t type_specific_info[4]; uint8_t type_specific_info[4];
uint32_t timestamp; uint32_t timestamp;
uint32_t dst_socket_id; uint32_t dst_socket_id;
...@@ -189,7 +189,7 @@ public: ...@@ -189,7 +189,7 @@ public:
static uint32_t getSynCookie(uint8_t *buf, size_t len); static uint32_t getSynCookie(uint8_t *buf, size_t len);
static uint32_t static uint32_t
generateSynCookie(struct sockaddr_storage *addr, TimePoint ts, uint32_t current_cookie = 0, int correction = 0); generateSynCookie(struct sockaddr_storage *addr, TimePoint ts, uint32_t current_cookie = 0, int correction = 0);
std::string dump();
void assignPeerIP(struct sockaddr_storage *addr); void assignPeerIP(struct sockaddr_storage *addr);
///////ControlPacket override/////// ///////ControlPacket override///////
bool loadFromData(uint8_t *buf, size_t len) override; bool loadFromData(uint8_t *buf, size_t len) override;
......
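The switch from a uint32_t bit-field to plain uint16_t/uint8_t members matches how the header reads off the wire: the packet-type flag is the most significant bit of the first 16-bit word, the remaining 15 bits carry the control type, and the following 16 bits the subtype. A small standalone sketch of that extraction, assuming the big-endian wire order of the SRT control header:

#include <cstdint>

inline void parse_srt_control_header(const uint8_t *buf, uint8_t &f,
                                     uint16_t &control_type, uint16_t &sub_type) {
    const uint16_t first = uint16_t(buf[0]) << 8 | buf[1];
    f            = first >> 15;       // 1 marks a control packet
    control_type = first & 0x7FFF;    // HANDSHAKE, KEEPALIVE, ACK, NAK, ...
    sub_type     = uint16_t(buf[2]) << 8 | buf[3];
}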
...@@ -86,6 +86,7 @@ void SrtTransport::inputSockData(uint8_t *buf, int len, struct sockaddr_storage ...@@ -86,6 +86,7 @@ void SrtTransport::inputSockData(uint8_t *buf, int len, struct sockaddr_storage
handleDataPacket(buf, len, addr); handleDataPacket(buf, len, addr);
} else { } else {
WarnL<<"DataPacket switch to other transport: "<<socketId;
switchToOtherTransport(buf, len, socketId, addr); switchToOtherTransport(buf, len, socketId, addr);
} }
} else { } else {
...@@ -94,6 +95,7 @@ void SrtTransport::inputSockData(uint8_t *buf, int len, struct sockaddr_storage ...@@ -94,6 +95,7 @@ void SrtTransport::inputSockData(uint8_t *buf, int len, struct sockaddr_storage
uint16_t type = ControlPacket::getControlType(buf, len); uint16_t type = ControlPacket::getControlType(buf, len);
if (type != ControlPacket::HANDSHAKE && socketId != _socket_id && _socket_id != 0) { if (type != ControlPacket::HANDSHAKE && socketId != _socket_id && _socket_id != 0) {
// socket id not same // socket id not same
WarnL<<"ControlPacket: "<< (int)type <<" switch to other transport: "<<socketId;
switchToOtherTransport(buf, len, socketId, addr); switchToOtherTransport(buf, len, socketId, addr);
return; return;
} }
...@@ -168,7 +170,7 @@ void SrtTransport::handleHandshakeConclusion(HandshakePacket &pkt, struct sockad ...@@ -168,7 +170,7 @@ void SrtTransport::handleHandshakeConclusion(HandshakePacket &pkt, struct sockad
if (delay <= 120) { if (delay <= 120) {
delay = 120; delay = 120;
} }
for (auto ext : pkt.ext_list) { for (auto& ext : pkt.ext_list) {
// TraceL << getIdentifier() << " ext " << ext->dump(); // TraceL << getIdentifier() << " ext " << ext->dump();
if (!req) { if (!req) {
req = std::dynamic_pointer_cast<HSExtMessage>(ext); req = std::dynamic_pointer_cast<HSExtMessage>(ext);
...@@ -228,7 +230,10 @@ void SrtTransport::handleHandshakeConclusion(HandshakePacket &pkt, struct sockad ...@@ -228,7 +230,10 @@ void SrtTransport::handleHandshakeConclusion(HandshakePacket &pkt, struct sockad
void SrtTransport::handleHandshake(uint8_t *buf, int len, struct sockaddr_storage *addr) { void SrtTransport::handleHandshake(uint8_t *buf, int len, struct sockaddr_storage *addr) {
HandshakePacket pkt; HandshakePacket pkt;
assert(pkt.loadFromData(buf, len)); if(!pkt.loadFromData(buf, len)){
WarnL<<"is not vaild HandshakePacket";
return;
}
if (pkt.handshake_type == HandshakePacket::HS_TYPE_INDUCTION) { if (pkt.handshake_type == HandshakePacket::HS_TYPE_INDUCTION) {
handleHandshakeInduction(pkt, addr); handleHandshakeInduction(pkt, addr);
...@@ -236,6 +241,7 @@ void SrtTransport::handleHandshake(uint8_t *buf, int len, struct sockaddr_storag ...@@ -236,6 +241,7 @@ void SrtTransport::handleHandshake(uint8_t *buf, int len, struct sockaddr_storag
handleHandshakeConclusion(pkt, addr); handleHandshakeConclusion(pkt, addr);
} else { } else {
WarnL << " not support handshake type = " << pkt.handshake_type; WarnL << " not support handshake type = " << pkt.handshake_type;
WarnL <<pkt.dump();
} }
_ack_ticker.resetTime(_now); _ack_ticker.resetTime(_now);
_nak_ticker.resetTime(_now); _nak_ticker.resetTime(_now);
...@@ -288,13 +294,13 @@ void SrtTransport::handleNAK(uint8_t *buf, int len, struct sockaddr_storage *add ...@@ -288,13 +294,13 @@ void SrtTransport::handleNAK(uint8_t *buf, int len, struct sockaddr_storage *add
bool empty = false; bool empty = false;
bool flush = false; bool flush = false;
for (auto it : pkt.lost_list) { for (auto& it : pkt.lost_list) {
if (pkt.lost_list.back() == it) { if (pkt.lost_list.back() == it) {
flush = true; flush = true;
} }
empty = true; empty = true;
auto re_list = _send_buf->findPacketBySeq(it.first, it.second - 1); auto re_list = _send_buf->findPacketBySeq(it.first, it.second - 1);
for (auto pkt : re_list) { for (auto& pkt : re_list) {
pkt->R = 1; pkt->R = 1;
pkt->storeToHeader(); pkt->storeToHeader();
sendPacket(pkt, flush); sendPacket(pkt, flush);
...@@ -325,7 +331,7 @@ void SrtTransport::handleDropReq(uint8_t *buf, int len, struct sockaddr_storage ...@@ -325,7 +331,7 @@ void SrtTransport::handleDropReq(uint8_t *buf, int len, struct sockaddr_storage
return; return;
} }
uint32_t max_seq = 0; uint32_t max_seq = 0;
for (auto data : list) { for (auto& data : list) {
max_seq = data->packet_seq_number; max_seq = data->packet_seq_number;
if (_last_pkt_seq + 1 != data->packet_seq_number) { if (_last_pkt_seq + 1 != data->packet_seq_number) {
TraceL << "pkt lost " << _last_pkt_seq + 1 << "->" << data->packet_seq_number; TraceL << "pkt lost " << _last_pkt_seq + 1 << "->" << data->packet_seq_number;
...@@ -495,7 +501,7 @@ void SrtTransport::handleDataPacket(uint8_t *buf, int len, struct sockaddr_stora ...@@ -495,7 +501,7 @@ void SrtTransport::handleDataPacket(uint8_t *buf, int len, struct sockaddr_stora
// when no data ok send nack to sender immediately // when no data ok send nack to sender immediately
} else { } else {
uint32_t last_seq; uint32_t last_seq;
for (auto data : list) { for (auto& data : list) {
last_seq = data->packet_seq_number; last_seq = data->packet_seq_number;
if (_last_pkt_seq + 1 != data->packet_seq_number) { if (_last_pkt_seq + 1 != data->packet_seq_number) {
TraceL << "pkt lost " << _last_pkt_seq + 1 << "->" << data->packet_seq_number; TraceL << "pkt lost " << _last_pkt_seq + 1 << "->" << data->packet_seq_number;
......
...@@ -17,7 +17,7 @@ SrtTransportImp::~SrtTransportImp() { ...@@ -17,7 +17,7 @@ SrtTransportImp::~SrtTransportImp() {
GET_CONFIG(uint32_t, iFlowThreshold, General::kFlowThreshold); GET_CONFIG(uint32_t, iFlowThreshold, General::kFlowThreshold);
if (_total_bytes >= iFlowThreshold * 1024) { if (_total_bytes >= iFlowThreshold * 1024) {
NoticeCenter::Instance().emitEvent( NoticeCenter::Instance().emitEvent(
Broadcast::kBroadcastFlowReport, _media_info, _total_bytes, duration, false, Broadcast::kBroadcastFlowReport, _media_info, _total_bytes, duration, !_is_pusher,
static_cast<SockInfo &>(*this)); static_cast<SockInfo &>(*this));
} }
} }
...@@ -149,6 +149,11 @@ std::shared_ptr<SockInfo> SrtTransportImp::getOriginSock(mediakit::MediaSource & ...@@ -149,6 +149,11 @@ std::shared_ptr<SockInfo> SrtTransportImp::getOriginSock(mediakit::MediaSource &
return static_pointer_cast<SockInfo>(getSession()); return static_pointer_cast<SockInfo>(getSession());
} }
toolkit::EventPoller::Ptr SrtTransportImp::getOwnerPoller(MediaSource &sender){
auto session = getSession();
return session ? session->getPoller() : nullptr;
}
void SrtTransportImp::emitOnPublish() { void SrtTransportImp::emitOnPublish() {
std::weak_ptr<SrtTransportImp> weak_self = static_pointer_cast<SrtTransportImp>(shared_from_this()); std::weak_ptr<SrtTransportImp> weak_self = static_pointer_cast<SrtTransportImp>(shared_from_this());
Broadcast::PublishAuthInvoker invoker = [weak_self](const std::string &err, const ProtocolOption &option) { Broadcast::PublishAuthInvoker invoker = [weak_self](const std::string &err, const ProtocolOption &option) {
...@@ -282,7 +287,10 @@ std::string SrtTransportImp::getIdentifier() const { ...@@ -282,7 +287,10 @@ std::string SrtTransportImp::getIdentifier() const {
bool SrtTransportImp::inputFrame(const Frame::Ptr &frame) { bool SrtTransportImp::inputFrame(const Frame::Ptr &frame) {
if (_muxer) { if (_muxer) {
return _muxer->inputFrame(frame); //TraceL<<"before type "<<frame->getCodecName()<<" dts "<<frame->dts()<<" pts "<<frame->pts();
auto frame_tmp = std::make_shared<FrameStamp>(frame, _type_to_stamp[frame->getTrackType()],false);
//TraceL<<"after type "<<frame_tmp->getCodecName()<<" dts "<<frame_tmp->dts()<<" pts "<<frame_tmp->pts();
return _muxer->inputFrame(frame_tmp);
} }
if (_cached_func.size() > 200) { if (_cached_func.size() > 200) {
WarnL << "cached frame of track(" << frame->getCodecName() << ") is too much, now dropped"; WarnL << "cached frame of track(" << frame->getCodecName() << ") is too much, now dropped";
...@@ -290,11 +298,17 @@ bool SrtTransportImp::inputFrame(const Frame::Ptr &frame) { ...@@ -290,11 +298,17 @@ bool SrtTransportImp::inputFrame(const Frame::Ptr &frame) {
} }
auto frame_cached = Frame::getCacheAbleFrame(frame); auto frame_cached = Frame::getCacheAbleFrame(frame);
lock_guard<recursive_mutex> lck(_func_mtx); lock_guard<recursive_mutex> lck(_func_mtx);
_cached_func.emplace_back([this, frame_cached]() { _muxer->inputFrame(frame_cached); }); _cached_func.emplace_back([this, frame_cached]() {
//TraceL<<"before type "<<frame_cached->getCodecName()<<" dts "<<frame_cached->dts()<<" pts "<<frame_cached->pts();
auto frame_tmp = std::make_shared<FrameStamp>(frame_cached, _type_to_stamp[frame_cached->getTrackType()],false);
//TraceL<<"after type "<<frame_tmp->getCodecName()<<" dts "<<frame_tmp->dts()<<" pts "<<frame_tmp->pts();
_muxer->inputFrame(frame_tmp);
});
return true; return true;
} }
bool SrtTransportImp::addTrack(const Track::Ptr &track) { bool SrtTransportImp::addTrack(const Track::Ptr &track) {
_type_to_stamp.emplace(track->getTrackType(),Stamp());
if (_muxer) { if (_muxer) {
return _muxer->addTrack(track); return _muxer->addTrack(track);
} }
...@@ -311,6 +325,9 @@ void SrtTransportImp::addTrackCompleted() { ...@@ -311,6 +325,9 @@ void SrtTransportImp::addTrackCompleted() {
lock_guard<recursive_mutex> lck(_func_mtx); lock_guard<recursive_mutex> lck(_func_mtx);
_cached_func.emplace_back([this]() { _muxer->addTrackCompleted(); }); _cached_func.emplace_back([this]() { _muxer->addTrackCompleted(); });
} }
if(_type_to_stamp.size() >1){
_type_to_stamp[TrackType::TrackAudio].syncTo(_type_to_stamp[TrackType::TrackVideo]);
}
} }
void SrtTransportImp::doCachedFunc() { void SrtTransportImp::doCachedFunc() {
......
...@@ -59,6 +59,8 @@ protected: ...@@ -59,6 +59,8 @@ protected:
std::string getOriginUrl(mediakit::MediaSource &sender) const override; std::string getOriginUrl(mediakit::MediaSource &sender) const override;
// 获取媒体源客户端相关信息 // 获取媒体源客户端相关信息
std::shared_ptr<SockInfo> getOriginSock(mediakit::MediaSource &sender) const override; std::shared_ptr<SockInfo> getOriginSock(mediakit::MediaSource &sender) const override;
// get poller
toolkit::EventPoller::Ptr getOwnerPoller(MediaSource &sender) override;
///////MediaSinkInterface override/////// ///////MediaSinkInterface override///////
void resetTracks() override {}; void resetTracks() override {};
...@@ -87,6 +89,8 @@ private: ...@@ -87,6 +89,8 @@ private:
DecoderImp::Ptr _decoder; DecoderImp::Ptr _decoder;
std::recursive_mutex _func_mtx; std::recursive_mutex _func_mtx;
std::deque<std::function<void()>> _cached_func; std::deque<std::function<void()>> _cached_func;
std::unordered_map<int, Stamp> _type_to_stamp;
}; };
} // namespace SRT } // namespace SRT
......
...@@ -771,7 +771,6 @@ void RtcSession::loadFrom(const string &str) { ...@@ -771,7 +771,6 @@ void RtcSession::loadFrom(const string &str) {
session_name = sdp.getSessionName(); session_name = sdp.getSessionName();
session_info = sdp.getSessionInfo(); session_info = sdp.getSessionInfo();
connection = sdp.getConnection(); connection = sdp.getConnection();
bandwidth = sdp.getBandwidth();
time = sdp.getSessionTime(); time = sdp.getSessionTime();
msid_semantic = sdp.getItemClass<SdpAttrMsidSemantic>('a', "msid-semantic"); msid_semantic = sdp.getItemClass<SdpAttrMsidSemantic>('a', "msid-semantic");
for (auto &media : sdp.medias) { for (auto &media : sdp.medias) {
...@@ -783,6 +782,7 @@ void RtcSession::loadFrom(const string &str) { ...@@ -783,6 +782,7 @@ void RtcSession::loadFrom(const string &str) {
rtc_media.type = mline.type; rtc_media.type = mline.type;
rtc_media.port = mline.port; rtc_media.port = mline.port;
rtc_media.addr = media.getItemClass<SdpConnection>('c'); rtc_media.addr = media.getItemClass<SdpConnection>('c');
rtc_media.bandwidth = media.getItemClass<SdpBandwidth>('b');
rtc_media.ice_ufrag = media.getStringItem('a', "ice-ufrag"); rtc_media.ice_ufrag = media.getStringItem('a', "ice-ufrag");
rtc_media.ice_pwd = media.getStringItem('a', "ice-pwd"); rtc_media.ice_pwd = media.getStringItem('a', "ice-pwd");
rtc_media.role = media.getItemClass<SdpAttrSetup>('a', "setup").role; rtc_media.role = media.getItemClass<SdpAttrSetup>('a', "setup").role;
...@@ -1060,9 +1060,6 @@ RtcSessionSdp::Ptr RtcSession::toRtcSessionSdp() const{ ...@@ -1060,9 +1060,6 @@ RtcSessionSdp::Ptr RtcSession::toRtcSessionSdp() const{
if(connection.empty()){ if(connection.empty()){
sdp.addItem(std::make_shared<SdpConnection>(connection)); sdp.addItem(std::make_shared<SdpConnection>(connection));
} }
if (!bandwidth.empty()) {
sdp.addItem(std::make_shared<SdpBandwidth>(bandwidth));
}
sdp.addAttr(std::make_shared<SdpAttrGroup>(group)); sdp.addAttr(std::make_shared<SdpAttrGroup>(group));
sdp.addAttr(std::make_shared<SdpAttrMsidSemantic>(msid_semantic)); sdp.addAttr(std::make_shared<SdpAttrMsidSemantic>(msid_semantic));
for (auto &m : media) { for (auto &m : media) {
...@@ -1080,6 +1077,9 @@ RtcSessionSdp::Ptr RtcSession::toRtcSessionSdp() const{ ...@@ -1080,6 +1077,9 @@ RtcSessionSdp::Ptr RtcSession::toRtcSessionSdp() const{
} }
sdp_media.addItem(std::move(mline)); sdp_media.addItem(std::move(mline));
sdp_media.addItem(std::make_shared<SdpConnection>(m.addr)); sdp_media.addItem(std::make_shared<SdpConnection>(m.addr));
if (!m.bandwidth.empty() && m.type != TrackAudio) {
sdp_media.addItem(std::make_shared<SdpBandwidth>(m.bandwidth));
}
if (!m.rtcp_addr.empty()) { if (!m.rtcp_addr.empty()) {
sdp_media.addAttr(std::make_shared<SdpAttrRtcp>(m.rtcp_addr)); sdp_media.addAttr(std::make_shared<SdpAttrRtcp>(m.rtcp_addr));
} }
...@@ -1631,6 +1631,7 @@ RETRY: ...@@ -1631,6 +1631,7 @@ RETRY:
answer_media.proto = offer_media.proto; answer_media.proto = offer_media.proto;
answer_media.port = offer_media.port; answer_media.port = offer_media.port;
answer_media.addr = offer_media.addr; answer_media.addr = offer_media.addr;
answer_media.bandwidth = offer_media.bandwidth;
answer_media.rtcp_addr = offer_media.rtcp_addr; answer_media.rtcp_addr = offer_media.rtcp_addr;
answer_media.rtcp_mux = offer_media.rtcp_mux && configure.rtcp_mux; answer_media.rtcp_mux = offer_media.rtcp_mux && configure.rtcp_mux;
answer_media.rtcp_rsize = offer_media.rtcp_rsize && configure.rtcp_rsize; answer_media.rtcp_rsize = offer_media.rtcp_rsize && configure.rtcp_rsize;
......
...@@ -612,6 +612,7 @@ public: ...@@ -612,6 +612,7 @@ public:
std::string mid; std::string mid;
uint16_t port{0}; uint16_t port{0};
SdpConnection addr; SdpConnection addr;
SdpBandwidth bandwidth;
std::string proto; std::string proto;
RtpDirection direction{RtpDirection::invalid}; RtpDirection direction{RtpDirection::invalid};
std::vector<RtcCodecPlan> plan; std::vector<RtcCodecPlan> plan;
...@@ -666,7 +667,6 @@ public: ...@@ -666,7 +667,6 @@ public:
std::string session_info; std::string session_info;
SdpTime time; SdpTime time;
SdpConnection connection; SdpConnection connection;
SdpBandwidth bandwidth;
SdpAttrMsidSemantic msid_semantic; SdpAttrMsidSemantic msid_semantic;
std::vector<RtcMedia> media; std::vector<RtcMedia> media;
SdpAttrGroup group; SdpAttrGroup group;
......
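Effect of moving the bandwidth attribute: instead of a single session-level b= line, each non-audio m= section of the generated answer now carries its own, mirroring the offer. An illustrative media-description fragment with hypothetical values:

m=video 9 UDP/TLS/RTP/SAVPF 96
c=IN IP4 0.0.0.0
b=AS:2048
a=rtcp-mux
a=mid:1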
(Two further source diffs omitted: too large to display.)