Commit a4621896 by baiyfcu Committed by GitHub

Merge pull request #17 from xiongziliang/master

update
parents 05a65d49 f881108d
ZLToolKit @ 5030af90
Subproject commit 4ede70fc435eb0a4d3a752b521170d86440b3935 Subproject commit 5030af90126ea8f01ded6744ae8abdf549d00a81
/*
 * Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
 *
 * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
 *
 * Use of this source code is governed by MIT license that can be found in the
 * LICENSE file in the root of the source tree. All contributing project authors
 * may be found in the AUTHORS file in the root of the source tree.
 */
#ifndef ZLMEDIAKIT_ASSERT_H
#define ZLMEDIAKIT_ASSERT_H
#include <stdio.h>
#ifndef NDEBUG
// Debug build: replace the standard assert with a throwing variant so that a
// failed assertion raises a catchable error instead of aborting the process.
#ifdef assert
#undef assert
#endif//assert
#ifdef __cplusplus
extern "C" {
#endif
/**
 * Invoked by the assert macro below.
 * @param failed non-zero when the asserted expression evaluated to false
 * @param exp    stringified expression text
 * @param func   name of the function containing the assertion
 * @param file   source file of the assertion
 * @param line   source line of the assertion
 */
extern void Assert_Throw(int failed, const char *exp, const char *func, const char *file, int line);
#ifdef __cplusplus
}
#endif
// NOTE: no trailing semicolon — with one, `if (x) assert(y); else ...` would
// not compile and the macro could not be used inside larger expressions.
#define assert(exp) Assert_Throw(!(exp), #exp, __FUNCTION__, __FILE__, __LINE__)
#else
// Release build: assertions compile away to a no-op expression.
#define assert(e) ((void)0)
#endif//NDEBUG
#endif //ZLMEDIAKIT_ASSERT_H
media-server @ 576216c6
Subproject commit abc08f61bb1250b94d252cfeaea249527912dd3b Subproject commit 576216c64bf3bcdc5e787da2adb3e169bdd97118
...@@ -39,6 +39,7 @@ set(MediaServer_Root ${CMAKE_CURRENT_SOURCE_DIR}/3rdpart/media-server) ...@@ -39,6 +39,7 @@ set(MediaServer_Root ${CMAKE_CURRENT_SOURCE_DIR}/3rdpart/media-server)
#设置头文件目录 #设置头文件目录
INCLUDE_DIRECTORIES(${ToolKit_Root}) INCLUDE_DIRECTORIES(${ToolKit_Root})
INCLUDE_DIRECTORIES(${MediaKit_Root}) INCLUDE_DIRECTORIES(${MediaKit_Root})
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/3rdpart)
set(ENABLE_HLS true) set(ENABLE_HLS true)
set(ENABLE_OPENSSL true) set(ENABLE_OPENSSL true)
...@@ -57,6 +58,8 @@ if (OPENSSL_FOUND AND ENABLE_OPENSSL) ...@@ -57,6 +58,8 @@ if (OPENSSL_FOUND AND ENABLE_OPENSSL)
include_directories(${OPENSSL_INCLUDE_DIR}) include_directories(${OPENSSL_INCLUDE_DIR})
add_definitions(-DENABLE_OPENSSL) add_definitions(-DENABLE_OPENSSL)
list(APPEND LINK_LIB_LIST ${OPENSSL_LIBRARIES}) list(APPEND LINK_LIB_LIST ${OPENSSL_LIBRARIES})
else()
message(WARNING "openssl未找到,rtmp将不支持flash播放器,https/wss/rtsps/rtmps也将失效")
endif () endif ()
#查找mysql是否安装 #查找mysql是否安装
...@@ -104,9 +107,9 @@ if(ENABLE_HLS) ...@@ -104,9 +107,9 @@ if(ENABLE_HLS)
message(STATUS "ENABLE_HLS defined") message(STATUS "ENABLE_HLS defined")
add_definitions(-DENABLE_HLS) add_definitions(-DENABLE_HLS)
include_directories(${MediaServer_Root}/libmpeg/include)
aux_source_directory(${MediaServer_Root}/libmpeg/include src_mpeg) aux_source_directory(${MediaServer_Root}/libmpeg/include src_mpeg)
aux_source_directory(${MediaServer_Root}/libmpeg/source src_mpeg) aux_source_directory(${MediaServer_Root}/libmpeg/source src_mpeg)
include_directories(${MediaServer_Root}/libmpeg/include)
add_library(mpeg STATIC ${src_mpeg}) add_library(mpeg STATIC ${src_mpeg})
list(APPEND LINK_LIB_LIST mpeg) list(APPEND LINK_LIB_LIST mpeg)
...@@ -121,13 +124,14 @@ if(ENABLE_MP4) ...@@ -121,13 +124,14 @@ if(ENABLE_MP4)
message(STATUS "ENABLE_MP4 defined") message(STATUS "ENABLE_MP4 defined")
add_definitions(-DENABLE_MP4) add_definitions(-DENABLE_MP4)
include_directories(${MediaServer_Root}/libmov/include)
include_directories(${MediaServer_Root}/libflv/include)
aux_source_directory(${MediaServer_Root}/libmov/include src_mov) aux_source_directory(${MediaServer_Root}/libmov/include src_mov)
aux_source_directory(${MediaServer_Root}/libmov/source src_mov) aux_source_directory(${MediaServer_Root}/libmov/source src_mov)
include_directories(${MediaServer_Root}/libmov/include)
aux_source_directory(${MediaServer_Root}/libflv/include src_flv) aux_source_directory(${MediaServer_Root}/libflv/include src_flv)
aux_source_directory(${MediaServer_Root}/libflv/source src_flv) aux_source_directory(${MediaServer_Root}/libflv/source src_flv)
include_directories(${MediaServer_Root}/libflv/include)
add_library(mov STATIC ${src_mov}) add_library(mov STATIC ${src_mov})
add_library(flv STATIC ${src_flv}) add_library(flv STATIC ${src_flv})
...@@ -141,10 +145,11 @@ endif() ...@@ -141,10 +145,11 @@ endif()
#添加rtp库用于rtp转ps/ts #添加rtp库用于rtp转ps/ts
if(ENABLE_RTPPROXY AND ENABLE_HLS) if(ENABLE_RTPPROXY AND ENABLE_HLS)
message(STATUS "ENABLE_RTPPROXY defined") message(STATUS "ENABLE_RTPPROXY defined")
include_directories(${MediaServer_Root}/librtp/include)
aux_source_directory(${MediaServer_Root}/librtp/include src_rtp) aux_source_directory(${MediaServer_Root}/librtp/include src_rtp)
aux_source_directory(${MediaServer_Root}/librtp/source src_rtp) aux_source_directory(${MediaServer_Root}/librtp/source src_rtp)
aux_source_directory(${MediaServer_Root}/librtp/payload src_rtp) aux_source_directory(${MediaServer_Root}/librtp/payload src_rtp)
include_directories(${MediaServer_Root}/librtp/include)
add_library(rtp STATIC ${src_rtp}) add_library(rtp STATIC ${src_rtp})
add_definitions(-DENABLE_RTPPROXY) add_definitions(-DENABLE_RTPPROXY)
list(APPEND LINK_LIB_LIST rtp) list(APPEND LINK_LIB_LIST rtp)
......
![logo](https://raw.githubusercontent.com/zlmediakit/ZLMediaKit/master/logo.png) ![logo](https://raw.githubusercontent.com/zlmediakit/ZLMediaKit/master/www/logo.png)
[english readme](https://github.com/xiongziliang/ZLMediaKit/blob/master/README_en.md) [english readme](https://github.com/xiongziliang/ZLMediaKit/blob/master/README_en.md)
...@@ -30,22 +30,21 @@ ...@@ -30,22 +30,21 @@
## 功能清单 ## 功能清单
- RTSP - RTSP[S]
- RTSP 服务器,支持RTMP/MP4转RTSP - RTSP[S] 服务器,支持RTMP/MP4/HLS转RTSP[S],支持亚马逊echo show这样的设备
- RTSPS 服务器,支持亚马逊echo show这样的设备 - RTSP[S] 播放器,支持RTSP代理,支持生成静音音频
- RTSP 播放器,支持RTSP代理,支持生成静音音频 - RTSP[S] 推流客户端与服务器
- RTSP 推流客户端与服务器
- 支持 `rtp over udp` `rtp over tcp` `rtp over http` `rtp组播` 四种RTP传输方式 - 支持 `rtp over udp` `rtp over tcp` `rtp over http` `rtp组播` 四种RTP传输方式
- 服务器/客户端完整支持Basic/Digest方式的登录鉴权,全异步可配置化的鉴权接口 - 服务器/客户端完整支持Basic/Digest方式的登录鉴权,全异步可配置化的鉴权接口
- 支持H265编码 - 支持H265编码
- 服务器支持RTSP推流(包括`rtp over udp` `rtp over tcp`方式) - 服务器支持RTSP推流(包括`rtp over udp` `rtp over tcp`方式)
- 支持任意编码格式的rtsp推流,只是除H264/H265/AAC/G711外无法转协议 - 支持任意编码格式的rtsp推流,只是除H264/H265/AAC/G711外无法转协议
- RTMP - RTMP[S]
- RTMP 播放服务器,支持RTSP/MP4转RTMP - RTMP[S] 播放服务器,支持RTSP/MP4/HLS转RTMP
- RTMP 发布服务器,支持录制发布流 - RTMP[S] 发布服务器,支持录制发布流
- RTMP 播放器,支持RTMP代理,支持生成静音音频 - RTMP[S] 播放器,支持RTMP代理,支持生成静音音频
- RTMP 推流客户端 - RTMP[S] 推流客户端
- 支持http[s]-flv直播 - 支持http[s]-flv直播
- 支持websocket-flv直播 - 支持websocket-flv直播
- 支持任意编码格式的rtmp推流,只是除H264/H265/AAC/G711外无法转协议 - 支持任意编码格式的rtmp推流,只是除H264/H265/AAC/G711外无法转协议
...@@ -55,6 +54,7 @@ ...@@ -55,6 +54,7 @@
- 支持HLS文件生成,自带HTTP文件服务器 - 支持HLS文件生成,自带HTTP文件服务器
- 通过cookie追踪技术,可以模拟HLS播放为长连接,实现丰富的业务逻辑 - 通过cookie追踪技术,可以模拟HLS播放为长连接,实现丰富的业务逻辑
- 支持完备的HLS用户追踪、播放统计等业务功能,可以实现HLS按需拉流等业务 - 支持完备的HLS用户追踪、播放统计等业务功能,可以实现HLS按需拉流等业务
- 支持HLS播发器,支持拉流HLS转rtsp/rtmp/mp4
- HTTP[S] - HTTP[S]
- 服务器支持`目录索引生成`,`文件下载`,`表单提交请求` - 服务器支持`目录索引生成`,`文件下载`,`表单提交请求`
...@@ -81,11 +81,15 @@ ...@@ -81,11 +81,15 @@
- 支持按需拉流,无人观看自动关断拉流 - 支持按需拉流,无人观看自动关断拉流
- 支持先拉流后推流,提高及时推流画面打开率 - 支持先拉流后推流,提高及时推流画面打开率
- 提供c api sdk - 提供c api sdk
- 支持FFmpeg拉流代理任意格式的流
- 支持http api生成并返回实时截图
## 更新日志
- 2020/5/17 新增支持hls播发器,支持hls拉流代理
## 编译以及测试
请参考wiki:[快速开始](https://github.com/xiongziliang/ZLMediaKit/wiki/%E5%BF%AB%E9%80%9F%E5%BC%80%E5%A7%8B) ## 编译以及测试
**编译前务必仔细参考wiki:[快速开始](https://github.com/xiongziliang/ZLMediaKit/wiki/%E5%BF%AB%E9%80%9F%E5%BC%80%E5%A7%8B)操作!!!**
## 怎么使用 ## 怎么使用
...@@ -114,8 +118,12 @@ bash build_docker_images.sh ...@@ -114,8 +118,12 @@ bash build_docker_images.sh
- [IOS摄像头实时录制,生成rtsp/rtmp/hls/http-flv](https://gitee.com/xiahcu/IOSMedia) - [IOS摄像头实时录制,生成rtsp/rtmp/hls/http-flv](https://gitee.com/xiahcu/IOSMedia)
- [IOS rtmp/rtsp播放器,视频推流器](https://gitee.com/xiahcu/IOSPlayer) - [IOS rtmp/rtsp播放器,视频推流器](https://gitee.com/xiahcu/IOSPlayer)
- [支持linux、windows、mac的rtmp/rtsp播放器](https://github.com/xiongziliang/ZLMediaPlayer) - [支持linux、windows、mac的rtmp/rtsp播放器](https://github.com/xiongziliang/ZLMediaPlayer)
- [配套的管理WEB网站](https://github.com/chenxiaolei/ZLMediaKit_NVR_UI) - [基于ZLMediaKit分支的管理WEB网站](https://github.com/chenxiaolei/ZLMediaKit_NVR_UI)
- [基于ZLMediaKit主线的管理WEB网站](https://gitee.com/kkkkk5G/MediaServerUI)
- [DotNetCore的RESTful客户端](https://github.com/MingZhuLiu/ZLMediaKit.DotNetCore.Sdk) - [DotNetCore的RESTful客户端](https://github.com/MingZhuLiu/ZLMediaKit.DotNetCore.Sdk)
- [GB28181-2016网络视频平台](https://github.com/swwheihei/wvp)
- [node-js版本的GB28181平台](https://gitee.com/hfwudao/GB28181_Node_Http)
## 授权协议 ## 授权协议
......
![logo](https://raw.githubusercontent.com/zlmediakit/ZLMediaKit/master/logo.png) ![logo](https://raw.githubusercontent.com/zlmediakit/ZLMediaKit/master/www/logo.png)
# A lightweight ,high performance and stable stream server and client framework based on C++11. # A lightweight ,high performance and stable stream server and client framework based on C++11.
...@@ -15,18 +15,18 @@ ...@@ -15,18 +15,18 @@
## Features ## Features
- RTSP - RTSP[S]
- RTSP[S] server,support rtsp push. - RTSP[S] server,support rtsp push.
- RTSP player and pusher. - RTSP[S] player and pusher.
- RTP Transport : `rtp over udp` `rtp over tcp` `rtp over http` `rtp udp multicast` . - RTP Transport : `rtp over udp` `rtp over tcp` `rtp over http` `rtp udp multicast` .
- Basic/Digest/Url Authentication. - Basic/Digest/Url Authentication.
- H264/H265/AAC/G711 codec. - H264/H265/AAC/G711 codec.
- Recorded as mp4. - Recorded as mp4.
- Vod of mp4. - Vod of mp4.
- RTMP - RTMP[S]
- RTMP server,support player and pusher. - RTMP[S] server,support player and pusher.
- RTMP player and pusher. - RTMP[S] player and pusher.
- Support HTTP-FLV player. - Support HTTP-FLV player.
- H264/H265/AAC/G711 codec. - H264/H265/AAC/G711 codec.
- Recorded as flv or mp4. - Recorded as flv or mp4.
...@@ -36,6 +36,7 @@ ...@@ -36,6 +36,7 @@
- HLS - HLS
- RTSP RTMP can be converted into HLS,built-in HTTP server. - RTSP RTMP can be converted into HLS,built-in HTTP server.
- Play authentication based on cookie. - Play authentication based on cookie.
- Support HLS player, support streaming HLS proxy to RTSP / RTMP / MP4.
- HTTP[S] - HTTP[S]
- HTTP server,support directory menu、RESTful http api. - HTTP server,support directory menu、RESTful http api.
...@@ -53,6 +54,7 @@ ...@@ -53,6 +54,7 @@
- Play and push authentication. - Play and push authentication.
- Pull stream on Demand. - Pull stream on Demand.
- Support TS / PS streaming push through RTP,and it can be converted to RTSP / RTMP / HLS / FLV. - Support TS / PS streaming push through RTP,and it can be converted to RTSP / RTMP / HLS / FLV.
- Support real-time online screenshot http api.
- Protocol conversion: - Protocol conversion:
...@@ -67,6 +69,7 @@ ...@@ -67,6 +69,7 @@
| RTMP --> MP4 | Y | Y | Y | N | | RTMP --> MP4 | Y | Y | Y | N |
| MP4 --> RTSP[S] | Y | Y | Y | N | | MP4 --> RTSP[S] | Y | Y | Y | N |
| MP4 --> RTMP | Y | Y | Y | N | | MP4 --> RTMP | Y | Y | Y | N |
| HLS --> RTSP/RTMP/MP4 | Y | Y | Y | N |
- Stream generation: - Stream generation:
...@@ -106,7 +109,7 @@ ...@@ -106,7 +109,7 @@
| RTMP Pusher | Y | | RTMP Pusher | Y |
| HTTP[S] | Y | | HTTP[S] | Y |
| WebSocket[S] | Y | | WebSocket[S] | Y |
| HLS player | Y |
## System Requirements ## System Requirements
......
...@@ -36,6 +36,22 @@ API_EXPORT mk_thread API_CALL mk_thread_from_tcp_session(mk_tcp_session ctx); ...@@ -36,6 +36,22 @@ API_EXPORT mk_thread API_CALL mk_thread_from_tcp_session(mk_tcp_session ctx);
*/ */
API_EXPORT mk_thread API_CALL mk_thread_from_tcp_client(mk_tcp_client ctx); API_EXPORT mk_thread API_CALL mk_thread_from_tcp_client(mk_tcp_client ctx);
/**
 * Pick an event thread from the event-thread pool according to the
 * load-balancing algorithm; if called from within an event thread,
 * that same event thread is returned.
 * Event threads are the threads driving timers and network I/O events.
 * @return the selected event thread
 */
API_EXPORT mk_thread API_CALL mk_thread_from_pool();
/**
 * Pick a thread from the background (work) thread pool according to the
 * load-balancing algorithm.
 * Background threads are essentially the same as event threads but run at a
 * lower priority and may execute briefly blocking tasks; ZLMediaKit uses them
 * for DNS resolution and file demuxing during MP4 video-on-demand.
 * @return the selected background thread
 */
API_EXPORT mk_thread API_CALL mk_thread_from_pool_work();
///////////////////////////////////////////线程切换///////////////////////////////////////////// ///////////////////////////////////////////线程切换/////////////////////////////////////////////
typedef void (API_CALL *on_mk_async)(void *user_data); typedef void (API_CALL *on_mk_async)(void *user_data);
......
...@@ -144,10 +144,12 @@ API_EXPORT uint16_t API_CALL mk_tcp_server_start(uint16_t port, mk_tcp_type type ...@@ -144,10 +144,12 @@ API_EXPORT uint16_t API_CALL mk_tcp_server_start(uint16_t port, mk_tcp_type type
s_tcp_server[type]->start<TcpSessionWithSSL<TcpSessionForC> >(port); s_tcp_server[type]->start<TcpSessionWithSSL<TcpSessionForC> >(port);
break; break;
case mk_type_ws: case mk_type_ws:
s_tcp_server[type]->start<WebSocketSession<TcpSessionForC, HttpSession>>(port); //此处你也可以修改WebSocketHeader::BINARY
s_tcp_server[type]->start<WebSocketSession<TcpSessionForC, HttpSession, WebSocketHeader::TEXT> >(port);
break; break;
case mk_type_wss: case mk_type_wss:
s_tcp_server[type]->start<WebSocketSession<TcpSessionForC, HttpsSession>>(port); //此处你也可以修改WebSocketHeader::BINARY
s_tcp_server[type]->start<WebSocketSession<TcpSessionForC, HttpsSession, WebSocketHeader::TEXT> >(port);
break; break;
default: default:
return 0; return 0;
...@@ -208,8 +210,10 @@ TcpClientForC::Ptr *mk_tcp_client_create_l(mk_tcp_client_events *events, mk_tcp_ ...@@ -208,8 +210,10 @@ TcpClientForC::Ptr *mk_tcp_client_create_l(mk_tcp_client_events *events, mk_tcp_
case mk_type_ssl: case mk_type_ssl:
return (TcpClientForC::Ptr *)new shared_ptr<TcpSessionWithSSL<TcpClientForC> >(new TcpSessionWithSSL<TcpClientForC>(events)); return (TcpClientForC::Ptr *)new shared_ptr<TcpSessionWithSSL<TcpClientForC> >(new TcpSessionWithSSL<TcpClientForC>(events));
case mk_type_ws: case mk_type_ws:
//此处你也可以修改WebSocketHeader::BINARY
return (TcpClientForC::Ptr *)new shared_ptr<WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, false> >(new WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, false>(events)); return (TcpClientForC::Ptr *)new shared_ptr<WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, false> >(new WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, false>(events));
case mk_type_wss: case mk_type_wss:
//此处你也可以修改WebSocketHeader::BINARY
return (TcpClientForC::Ptr *)new shared_ptr<WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, true> >(new WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, true>(events)); return (TcpClientForC::Ptr *)new shared_ptr<WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, true> >(new WebSocketClient<TcpClientForC, WebSocketHeader::TEXT, true>(events));
default: default:
return nullptr; return nullptr;
......
...@@ -12,6 +12,7 @@ ...@@ -12,6 +12,7 @@
#include "mk_tcp_private.h" #include "mk_tcp_private.h"
#include "Util/logger.h" #include "Util/logger.h"
#include "Poller/EventPoller.h" #include "Poller/EventPoller.h"
#include "Thread/WorkThreadPool.h"
using namespace std; using namespace std;
using namespace toolkit; using namespace toolkit;
...@@ -27,6 +28,14 @@ API_EXPORT mk_thread API_CALL mk_thread_from_tcp_client(mk_tcp_client ctx){ ...@@ -27,6 +28,14 @@ API_EXPORT mk_thread API_CALL mk_thread_from_tcp_client(mk_tcp_client ctx){
return (*client)->getPoller().get(); return (*client)->getPoller().get();
} }
// Fetch an event-loop thread from the global event poller pool (load-balanced);
// the returned pointer is owned by the pool, not the caller.
API_EXPORT mk_thread API_CALL mk_thread_from_pool(){
return EventPollerPool::Instance().getPoller().get();
}
// Fetch a lower-priority background thread from the global work thread pool;
// suited for short blocking tasks (e.g. DNS lookup). Pool retains ownership.
API_EXPORT mk_thread API_CALL mk_thread_from_pool_work(){
return WorkThreadPool::Instance().getPoller().get();
}
API_EXPORT void API_CALL mk_async_do(mk_thread ctx,on_mk_async cb, void *user_data){ API_EXPORT void API_CALL mk_async_do(mk_thread ctx,on_mk_async cb, void *user_data){
assert(ctx && cb); assert(ctx && cb);
EventPoller *poller = (EventPoller *)ctx; EventPoller *poller = (EventPoller *)ctx;
......
...@@ -4,12 +4,18 @@ apiDebug=1 ...@@ -4,12 +4,18 @@ apiDebug=1
#一些比较敏感的http api在访问时需要提供secret,否则无权限调用 #一些比较敏感的http api在访问时需要提供secret,否则无权限调用
#如果是通过127.0.0.1访问,那么可以不提供secret #如果是通过127.0.0.1访问,那么可以不提供secret
secret=035c73f7-bb6b-4889-a715-d9eb2d1925cc secret=035c73f7-bb6b-4889-a715-d9eb2d1925cc
#截图保存路径根目录,截图通过http api(/index/api/getSnap)生成和获取
snapRoot=./www/snap/
#默认截图图片,在启动FFmpeg截图后但是截图还未生成时,可以返回默认的预设图片
defaultSnap=./www/logo.png
[ffmpeg] [ffmpeg]
#FFmpeg可执行程序绝对路径 #FFmpeg可执行程序绝对路径
bin=/usr/local/bin/ffmpeg bin=/usr/local/bin/ffmpeg
#FFmpeg拉流再推流的命令模板,通过该模板可以设置再编码的一些参数 #FFmpeg拉流再推流的命令模板,通过该模板可以设置再编码的一些参数
cmd=%s -re -i %s -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s cmd=%s -re -i %s -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s
#FFmpeg生成截图的命令,可以通过修改该配置改变截图分辨率或质量
snap=%s -i %s -y -f mjpeg -t 0.001 %s
#FFmpeg日志的路径,如果置空则不生成FFmpeg日志 #FFmpeg日志的路径,如果置空则不生成FFmpeg日志
#可以为相对(相对于本可执行程序目录)或绝对路径 #可以为相对(相对于本可执行程序目录)或绝对路径
log=./ffmpeg/ffmpeg.log log=./ffmpeg/ffmpeg.log
...@@ -43,6 +49,11 @@ publishToMP4=0 ...@@ -43,6 +49,11 @@ publishToMP4=0
#合并写缓存大小(单位毫秒),合并写指服务器缓存一定的数据后才会一次性写入socket,这样能提高性能,但是会提高延时 #合并写缓存大小(单位毫秒),合并写指服务器缓存一定的数据后才会一次性写入socket,这样能提高性能,但是会提高延时
#开启后会同时关闭TCP_NODELAY并开启MSG_MORE #开启后会同时关闭TCP_NODELAY并开启MSG_MORE
mergeWriteMS=0 mergeWriteMS=0
#全局的时间戳覆盖开关,在转协议时,对frame进行时间戳覆盖
#该开关对rtsp/rtmp/rtp推流、rtsp/rtmp/hls拉流代理转协议时生效
#会直接影响rtsp/rtmp/hls/mp4/flv等协议的时间戳
#同协议情况下不影响(例如rtsp/rtmp推流,那么播放rtsp/rtmp时不会影响时间戳)
modifyStamp=0
[hls] [hls]
#hls写文件的buf大小,调整参数可以提高文件io性能 #hls写文件的buf大小,调整参数可以提高文件io性能
...@@ -76,8 +87,9 @@ on_publish=https://127.0.0.1/index/hook/on_publish ...@@ -76,8 +87,9 @@ on_publish=https://127.0.0.1/index/hook/on_publish
on_record_mp4=https://127.0.0.1/index/hook/on_record_mp4 on_record_mp4=https://127.0.0.1/index/hook/on_record_mp4
#rtsp播放鉴权事件,此事件中比对rtsp的用户名密码 #rtsp播放鉴权事件,此事件中比对rtsp的用户名密码
on_rtsp_auth=https://127.0.0.1/index/hook/on_rtsp_auth on_rtsp_auth=https://127.0.0.1/index/hook/on_rtsp_auth
#rtsp播放是否开启鉴权事件,置空则关闭rtsp鉴权。rtsp播放鉴权还支持url方式鉴权 #rtsp播放是否开启专属鉴权事件,置空则关闭rtsp鉴权。rtsp播放鉴权还支持url方式鉴权
#建议开发者统一采用url参数方式鉴权,rtsp用户名密码鉴权一般在设备上用的比较多 #建议开发者统一采用url参数方式鉴权,rtsp用户名密码鉴权一般在设备上用的比较多
#开启rtsp专属鉴权后,将不再触发on_play鉴权事件
on_rtsp_realm=https://127.0.0.1/index/hook/on_rtsp_realm on_rtsp_realm=https://127.0.0.1/index/hook/on_rtsp_realm
#远程telnet调试鉴权事件 #远程telnet调试鉴权事件
on_shell_login=https://127.0.0.1/index/hook/on_shell_login on_shell_login=https://127.0.0.1/index/hook/on_shell_login
......
...@@ -13,26 +13,27 @@ ...@@ -13,26 +13,27 @@
#include "Common/MediaSource.h" #include "Common/MediaSource.h"
#include "Util/File.h" #include "Util/File.h"
#include "System.h" #include "System.h"
#include "Thread/WorkThreadPool.h"
namespace FFmpeg { namespace FFmpeg {
#define FFmpeg_FIELD "ffmpeg." #define FFmpeg_FIELD "ffmpeg."
const string kBin = FFmpeg_FIELD"bin"; const string kBin = FFmpeg_FIELD"bin";
const string kCmd = FFmpeg_FIELD"cmd"; const string kCmd = FFmpeg_FIELD"cmd";
const string kLog = FFmpeg_FIELD"log"; const string kLog = FFmpeg_FIELD"log";
const string kSnap = FFmpeg_FIELD"snap";
onceToken token([]() { onceToken token([]() {
#ifdef _WIN32 #ifdef _WIN32
string ffmpeg_bin = System::execute("where ffmpeg"); string ffmpeg_bin = trim(System::execute("where ffmpeg"));
//windows下先关闭FFmpeg日志(目前不支持日志重定向)
mINI::Instance()[kCmd] = "%s -re -i \"%s\" -loglevel quiet -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s ";
#else #else
string ffmpeg_bin = System::execute("which ffmpeg"); string ffmpeg_bin = trim(System::execute("which ffmpeg"));
mINI::Instance()[kCmd] = "%s -re -i \"%s\" -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s ";
#endif #endif
//默认ffmpeg命令路径为环境变量中路径 //默认ffmpeg命令路径为环境变量中路径
mINI::Instance()[kBin] = ffmpeg_bin.empty() ? "ffmpeg" : ffmpeg_bin; mINI::Instance()[kBin] = ffmpeg_bin.empty() ? "ffmpeg" : ffmpeg_bin;
//ffmpeg日志保存路径 //ffmpeg日志保存路径
mINI::Instance()[kLog] = "./ffmpeg/ffmpeg.log"; mINI::Instance()[kLog] = "./ffmpeg/ffmpeg.log";
mINI::Instance()[kCmd] = "%s -re -i %s -c:a aac -strict -2 -ar 44100 -ab 48k -c:v libx264 -f flv %s";
mINI::Instance()[kSnap] = "%s -i %s -y -f mjpeg -t 0.001 %s";
}); });
} }
...@@ -114,8 +115,7 @@ void FFmpegSource::findAsync(int maxWaitMS, const function<void(const MediaSourc ...@@ -114,8 +115,7 @@ void FFmpegSource::findAsync(int maxWaitMS, const function<void(const MediaSourc
auto src = MediaSource::find(_media_info._schema, auto src = MediaSource::find(_media_info._schema,
_media_info._vhost, _media_info._vhost,
_media_info._app, _media_info._app,
_media_info._streamid, _media_info._streamid);
false);
if(src || !maxWaitMS){ if(src || !maxWaitMS){
cb(src); cb(src);
return; return;
...@@ -196,7 +196,19 @@ void FFmpegSource::startTimer(int timeout_ms) { ...@@ -196,7 +196,19 @@ void FFmpegSource::startTimer(int timeout_ms) {
//推流给其他服务器的,我们通过判断FFmpeg进程是否在线,如果FFmpeg推流中断,那么它应该会自动退出 //推流给其他服务器的,我们通过判断FFmpeg进程是否在线,如果FFmpeg推流中断,那么它应该会自动退出
if (!strongSelf->_process.wait(false)) { if (!strongSelf->_process.wait(false)) {
//ffmpeg不在线,重新拉流 //ffmpeg不在线,重新拉流
strongSelf->play(strongSelf->_src_url, strongSelf->_dst_url, timeout_ms, [](const SockException &) {}); strongSelf->play(strongSelf->_src_url, strongSelf->_dst_url, timeout_ms, [weakSelf](const SockException &ex) {
if(!ex){
//没有错误
return;
}
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
//自身已经销毁
return;
}
//上次重试时间超过10秒,那么再重试FFmpeg拉流
strongSelf->startTimer(10 * 1000);
});
} }
} }
return true; return true;
...@@ -232,3 +244,31 @@ void FFmpegSource::onGetMediaSource(const MediaSource::Ptr &src) { ...@@ -232,3 +244,31 @@ void FFmpegSource::onGetMediaSource(const MediaSource::Ptr &src) {
_listener = src->getListener(); _listener = src->getListener();
src->setListener(shared_from_this()); src->setListener(shared_from_this());
} }
// Take a snapshot of a stream by spawning an FFmpeg child process.
// A delayed task on an event poller kills the process if it exceeds
// timeout_sec; the actual blocking run/wait happens on a background
// work thread so no event loop is stalled. cb(true) is invoked only
// when FFmpeg exits with code 0 (i.e. the snapshot file was written).
void FFmpegSnap::makeSnap(const string &play_url, const string &save_path, float timeout_sec, const function<void(bool)> &cb) {
GET_CONFIG(string,ffmpeg_bin,FFmpeg::kBin);
GET_CONFIG(string,ffmpeg_snap,FFmpeg::kSnap);
GET_CONFIG(string,ffmpeg_log,FFmpeg::kLog);
// shared_ptr keeps the Process alive across both the timeout lambda and the worker lambda
std::shared_ptr<Process> process = std::make_shared<Process>();
auto delayTask = EventPollerPool::Instance().getPoller()->doDelayTask(timeout_sec * 1000,[process,cb](){
if(process->wait(false)){
//FFmpeg process is still running after the timeout, so kill it
process->kill(2000);
}
return 0;
});
// NOTE(review): ffmpeg_snap/ffmpeg_bin/ffmpeg_log are used inside this lambda
// without being captured — presumably GET_CONFIG declares them with static
// storage duration; confirm against the GET_CONFIG macro definition.
WorkThreadPool::Instance().getPoller()->async([process,play_url,save_path,delayTask,cb](){
char cmd[1024] = {0};
// Expand the configured snapshot command template: %s -> bin, input url, output path
snprintf(cmd, sizeof(cmd),ffmpeg_snap.data(),ffmpeg_bin.data(),play_url.data(),save_path.data());
process->run(cmd,ffmpeg_log.empty() ? "" : File::absolutePath("",ffmpeg_log));
//block until the FFmpeg process exits (we are on a work thread, so this is allowed)
process->wait(true);
//FFmpeg exited; the timeout watchdog is no longer needed
delayTask->cancel();
//report success only on a clean (zero) exit code
cb(process->exit_code() == 0);
});
}
...@@ -23,6 +23,23 @@ using namespace std; ...@@ -23,6 +23,23 @@ using namespace std;
using namespace toolkit; using namespace toolkit;
using namespace mediakit; using namespace mediakit;
namespace FFmpeg {
// Config key for the FFmpeg snapshot command template (defined in the .cpp)
extern const string kSnap;
}
// Static-only utility for generating stream snapshots via FFmpeg;
// not instantiable (constructor and destructor are deleted).
class FFmpegSnap {
public:
/// Create a snapshot.
/// \param play_url    playback url; anything FFmpeg can open is accepted
/// \param save_path   path where the jpeg snapshot file is saved
/// \param timeout_sec timeout for generating the snapshot (prevents blocking too long)
/// \param cb          callback reporting whether the snapshot succeeded
static void makeSnap(const string &play_url, const string &save_path, float timeout_sec, const function<void(bool)> &cb);
private:
FFmpegSnap() = delete;
~FFmpegSnap() = delete;
};
class FFmpegSource : public std::enable_shared_from_this<FFmpegSource> , public MediaSourceEvent{ class FFmpegSource : public std::enable_shared_from_this<FFmpegSource> , public MediaSourceEvent{
public: public:
typedef shared_ptr<FFmpegSource> Ptr; typedef shared_ptr<FFmpegSource> Ptr;
......
...@@ -10,13 +10,13 @@ ...@@ -10,13 +10,13 @@
#include <limits.h> #include <limits.h>
#include <sys/stat.h> #include <sys/stat.h>
#ifndef _WIN32 #ifndef _WIN32
#include <sys/resource.h> #include <sys/resource.h>
#include <unistd.h> #include <unistd.h>
#else #else
//#include <TlHelp32.h> //#include <TlHelp32.h>
#include <windows.h> #include <windows.h>
#include <io.h>
#endif #endif
#include <stdexcept> #include <stdexcept>
...@@ -32,68 +32,83 @@ using namespace toolkit; ...@@ -32,68 +32,83 @@ using namespace toolkit;
void Process::run(const string &cmd, const string &log_file_tmp) { void Process::run(const string &cmd, const string &log_file_tmp) {
kill(2000); kill(2000);
#ifdef _WIN32 #ifdef _WIN32
STARTUPINFO si; STARTUPINFO si = {0};
PROCESS_INFORMATION pi; PROCESS_INFORMATION pi = {0};
ZeroMemory(&si, sizeof(si)); //结构体初始化; string log_file;
ZeroMemory(&pi, sizeof(pi)); if (log_file_tmp.empty()) {
//未指定子进程日志文件时,重定向至/dev/null
log_file = "NUL";
} else {
log_file = StrPrinter << log_file_tmp << "." << getCurrentMillisecond();
}
LPTSTR lpDir = const_cast<char*>(cmd.data()); //重定向shell日志至文件
auto fp = File::create_file(log_file.data(), "ab");
if (!fp) {
fprintf(stderr, "open log file %s failed:%d(%s)\r\n", log_file.data(), get_uv_error(), get_uv_errmsg());
} else {
auto log_fd = (HANDLE)(_get_osfhandle(fileno(fp)));
// dup to stdout and stderr.
si.wShowWindow = SW_HIDE;
// STARTF_USESHOWWINDOW:The wShowWindow member contains additional information.
// STARTF_USESTDHANDLES:The hStdInput, hStdOutput, and hStdError members contain additional information.
si.dwFlags = STARTF_USESHOWWINDOW | STARTF_USESTDHANDLES;
si.hStdError = log_fd;
si.hStdOutput = log_fd;
}
if (CreateProcess(NULL, lpDir, NULL, NULL, FALSE, 0, NULL, NULL, &si, &pi)){ LPTSTR lpDir = const_cast<char*>(cmd.data());
if (CreateProcess(NULL, lpDir, NULL, NULL, TRUE, 0, NULL, NULL, &si, &pi)){
//下面两行关闭句柄,解除本进程和新进程的关系,不然有可能 不小心调用TerminateProcess函数关掉子进程 //下面两行关闭句柄,解除本进程和新进程的关系,不然有可能 不小心调用TerminateProcess函数关掉子进程
CloseHandle(pi.hProcess);
CloseHandle(pi.hThread); CloseHandle(pi.hThread);
_pid = pi.dwProcessId; _pid = pi.dwProcessId;
InfoL << "start child proces " << _pid; _handle = pi.hProcess;
fprintf(fp, "\r\n\r\n#### pid=%d,cmd=%s #####\r\n\r\n", _pid, cmd.data());
InfoL << "start child process " << _pid << ", log file:" << log_file;
} else { } else {
WarnL << "start child proces fail: " << GetLastError(); WarnL << "start child process fail: " << get_uv_errmsg();
} }
fclose(fp);
#else #else
_pid = fork(); _pid = fork();
if (_pid < 0) { if (_pid < 0) {
throw std::runtime_error(StrPrinter << "fork child process falied,err:" << get_uv_errmsg()); throw std::runtime_error(StrPrinter << "fork child process failed,err:" << get_uv_errmsg());
} }
if (_pid == 0) { if (_pid == 0) {
string log_file;
if (log_file_tmp.empty()) {
//未指定子进程日志文件时,重定向至/dev/null
log_file = "/dev/null";
} else {
log_file = StrPrinter << log_file_tmp << "." << getpid();
}
//子进程关闭core文件生成 //子进程关闭core文件生成
struct rlimit rlim = { 0,0 }; struct rlimit rlim = {0, 0};
setrlimit(RLIMIT_CORE, &rlim); setrlimit(RLIMIT_CORE, &rlim);
//在启动子进程时,暂时禁用SIGINT、SIGTERM信号 //在启动子进程时,暂时禁用SIGINT、SIGTERM信号
// ignore the SIGINT and SIGTERM
signal(SIGINT, SIG_IGN); signal(SIGINT, SIG_IGN);
signal(SIGTERM, SIG_IGN); signal(SIGTERM, SIG_IGN);
string log_file; //重定向shell日志至文件
if (log_file_tmp.empty()) { auto fp = File::create_file(log_file.data(), "ab");
log_file = "/dev/null"; if (!fp) {
} fprintf(stderr, "open log file %s failed:%d(%s)\r\n", log_file.data(), get_uv_error(), get_uv_errmsg());
else { } else {
log_file = StrPrinter << log_file_tmp << "." << getpid(); auto log_fd = fileno(fp);
}
int log_fd = -1;
int flags = O_CREAT | O_WRONLY | O_APPEND;
mode_t mode = S_IRWXO | S_IRWXG | S_IRWXU;// S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH;
File::create_path(log_file.data(), mode);
if ((log_fd = ::open(log_file.c_str(), flags, mode)) < 0) {
fprintf(stderr, "open log file %s failed:%d(%s)\r\n", log_file.data(), errno, strerror(errno));
}
else {
// dup to stdout and stderr. // dup to stdout and stderr.
if (dup2(log_fd, STDOUT_FILENO) < 0) { if (dup2(log_fd, STDOUT_FILENO) < 0) {
fprintf(stderr, "dup2 stdout file %s failed:%d(%s)\r\n", log_file.data(), errno, strerror(errno)); fprintf(stderr, "dup2 stdout file %s failed:%d(%s)\r\n", log_file.data(), get_uv_error(), get_uv_errmsg());
} }
if (dup2(log_fd, STDERR_FILENO) < 0) { if (dup2(log_fd, STDERR_FILENO) < 0) {
fprintf(stderr, "dup2 stderr file %s failed:%d(%s)\r\n", log_file.data(), errno, strerror(errno)); fprintf(stderr, "dup2 stderr file %s failed:%d(%s)\r\n", log_file.data(), get_uv_error(), get_uv_errmsg());
} }
// close log fd // 关闭日志文件
::close(log_fd); ::fclose(fp);
} }
fprintf(stderr, "\r\n\r\n#### pid=%d,cmd=%s #####\r\n\r\n", getpid(), cmd.data()); fprintf(stderr, "\r\n\r\n#### pid=%d,cmd=%s #####\r\n\r\n", getpid(), cmd.data());
// close other fds //关闭父进程继承的fd
// TODO: do in right way.
for (int i = 3; i < 1024; i++) { for (int i = 3; i < 1024; i++) {
::close(i); ::close(i);
} }
...@@ -101,9 +116,9 @@ void Process::run(const string &cmd, const string &log_file_tmp) { ...@@ -101,9 +116,9 @@ void Process::run(const string &cmd, const string &log_file_tmp) {
auto params = split(cmd, " "); auto params = split(cmd, " ");
// memory leak in child process, it's ok. // memory leak in child process, it's ok.
char **charpv_params = new char *[params.size() + 1]; char **charpv_params = new char *[params.size() + 1];
for (int i = 0; i < (int)params.size(); i++) { for (int i = 0; i < (int) params.size(); i++) {
std::string &p = params[i]; std::string &p = params[i];
charpv_params[i] = (char *)p.data(); charpv_params[i] = (char *) p.data();
} }
// EOF: NULL // EOF: NULL
charpv_params[params.size()] = NULL; charpv_params[params.size()] = NULL;
...@@ -111,11 +126,19 @@ void Process::run(const string &cmd, const string &log_file_tmp) { ...@@ -111,11 +126,19 @@ void Process::run(const string &cmd, const string &log_file_tmp) {
// TODO: execv or execvp // TODO: execv or execvp
auto ret = execv(params[0].c_str(), charpv_params); auto ret = execv(params[0].c_str(), charpv_params);
if (ret < 0) { if (ret < 0) {
fprintf(stderr, "fork process failed, errno=%d(%s)\r\n", errno, strerror(errno)); fprintf(stderr, "fork process failed:%d(%s)\r\n", get_uv_error(), get_uv_errmsg());
} }
exit(ret); exit(ret);
} }
InfoL << "start child proces " << _pid;
string log_file;
if (log_file_tmp.empty()) {
//未指定子进程日志文件时,重定向至/dev/null
log_file = "/dev/null";
} else {
log_file = StrPrinter << log_file_tmp << "." << _pid;
}
InfoL << "start child process " << _pid << ", log file:" << log_file;
#endif // _WIN32 #endif // _WIN32
} }
...@@ -126,24 +149,41 @@ void Process::run(const string &cmd, const string &log_file_tmp) { ...@@ -126,24 +149,41 @@ void Process::run(const string &cmd, const string &log_file_tmp) {
* @param block 是否阻塞等待 * @param block 是否阻塞等待
* @return 进程是否还在运行 * @return 进程是否还在运行
*/ */
static bool s_wait(pid_t pid,int *exit_code_ptr,bool block) { static bool s_wait(pid_t pid, void *handle, int *exit_code_ptr, bool block) {
if (pid <= 0) { if (pid <= 0) {
return false; return false;
} }
int status = 0;
#ifdef _WIN32 #ifdef _WIN32
HANDLE hProcess = NULL; DWORD code = 0;
hProcess = OpenProcess(PROCESS_TERMINATE, FALSE, pid); //打开目标进程 if (block) {
if (hProcess == NULL) { //一直等待
code = WaitForSingleObject(handle, INFINITE);
} else {
code = WaitForSingleObject(handle, 0);
}
if(code == WAIT_FAILED || code == WAIT_OBJECT_0){
//子进程已经退出了,获取子进程退出代码
DWORD exitCode = 0;
if(exit_code_ptr && GetExitCodeProcess(handle, &exitCode)){
*exit_code_ptr = exitCode;
}
return false; return false;
} }
CloseHandle(hProcess); if(code == WAIT_TIMEOUT){
//子进程还在线
return true;
}
//不太可能运行到此处
WarnL << "WaitForSingleObject ret:" << code;
return false;
#else #else
int status = 0;
pid_t p = waitpid(pid, &status, block ? 0 : WNOHANG); pid_t p = waitpid(pid, &status, block ? 0 : WNOHANG);
int exit_code = (status & 0xFF00) >> 8; int exit_code = (status & 0xFF00) >> 8;
if (exit_code_ptr) { if (exit_code_ptr) {
*exit_code_ptr = (status & 0xFF00) >> 8; *exit_code_ptr = exit_code;
} }
if (p < 0) { if (p < 0) {
WarnL << "waitpid failed, pid=" << pid << ", err=" << get_uv_errmsg(); WarnL << "waitpid failed, pid=" << pid << ", err=" << get_uv_errmsg();
...@@ -153,26 +193,57 @@ static bool s_wait(pid_t pid,int *exit_code_ptr,bool block) { ...@@ -153,26 +193,57 @@ static bool s_wait(pid_t pid,int *exit_code_ptr,bool block) {
InfoL << "process terminated, pid=" << pid << ", exit code=" << exit_code; InfoL << "process terminated, pid=" << pid << ", exit code=" << exit_code;
return false; return false;
} }
return true;
#endif // _WIN32 #endif // _WIN32
}
return true; #ifdef _WIN32
// Inspired from http://stackoverflow.com/a/15281070/1529139
// and http://stackoverflow.com/q/40059902/1529139
bool signalCtrl(DWORD dwProcessId, DWORD dwCtrlEvent){
bool success = false;
DWORD thisConsoleId = GetCurrentProcessId();
// Leave current console if it exists
// (otherwise AttachConsole will return ERROR_ACCESS_DENIED)
bool consoleDetached = (FreeConsole() != FALSE);
if (AttachConsole(dwProcessId) != FALSE){
// Add a fake Ctrl-C handler for avoid instant kill is this console
// WARNING: do not revert it or current program will be also killed
SetConsoleCtrlHandler(nullptr, true);
success = (GenerateConsoleCtrlEvent(dwCtrlEvent, 0) != FALSE);
FreeConsole();
}
if (consoleDetached){
// Create a new console if previous was deleted by OS
if (AttachConsole(thisConsoleId) == FALSE){
int errorCode = GetLastError();
if (errorCode == 31){
// 31=ERROR_GEN_FAILURE
AllocConsole();
}
}
}
return success;
} }
#endif // _WIN32
static void s_kill(pid_t pid,int max_delay,bool force){ static void s_kill(pid_t pid, void *handle, int max_delay, bool force) {
if (pid <= 0) { if (pid <= 0) {
//pid无效 //pid无效
return; return;
} }
#ifdef _WIN32 #ifdef _WIN32
HANDLE hProcess = NULL; //windows下目前没有比较好的手段往子进程发送SIGTERM或信号
hProcess = OpenProcess(PROCESS_TERMINATE, FALSE, pid); //打开目标进程 //所以杀死子进程的方式全部强制为立即关闭
if (hProcess == NULL) { force = true;
WarnL << "\nOpen Process fAiled: " << GetLastError(); if(force){
return; //强制关闭子进程
} TerminateProcess(handle, 0);
DWORD ret = TerminateProcess(hProcess, 0); //结束目标进程 }else{
if (ret == 0) { //非强制关闭,发送Ctr+C信号
WarnL << GetLastError; signalCtrl(pid, CTRL_C_EVENT);
} }
#else #else
if (::kill(pid, force ? SIGKILL : SIGTERM) == -1) { if (::kill(pid, force ? SIGKILL : SIGTERM) == -1) {
...@@ -182,33 +253,38 @@ static void s_kill(pid_t pid,int max_delay,bool force){ ...@@ -182,33 +253,38 @@ static void s_kill(pid_t pid,int max_delay,bool force){
} }
#endif // _WIN32 #endif // _WIN32
if (force) {
if(force){
//发送SIGKILL信号后,阻塞等待退出 //发送SIGKILL信号后,阻塞等待退出
s_wait(pid, NULL, true); s_wait(pid, handle, nullptr, true);
DebugL << "force kill " << pid << " success!"; DebugL << "force kill " << pid << " success!";
return; return;
} }
//发送SIGTERM信号后,2秒后检查子进程是否已经退出 //发送SIGTERM信号后,2秒后检查子进程是否已经退出
WorkThreadPool::Instance().getPoller()->doDelayTask(max_delay,[pid](){ WorkThreadPool::Instance().getPoller()->doDelayTask(max_delay, [pid, handle]() {
if (!s_wait(pid, nullptr, false)) { if (!s_wait(pid, handle, nullptr, false)) {
//进程已经退出了 //进程已经退出了
return 0; return 0;
} }
//进程还在运行 //进程还在运行
WarnL << "process still working,force kill it:" << pid; WarnL << "process still working,force kill it:" << pid;
s_kill(pid,0, true); s_kill(pid, handle, 0, true);
return 0; return 0;
}); });
} }
void Process::kill(int max_delay,bool force) { void Process::kill(int max_delay, bool force) {
if (_pid <= 0) { if (_pid <= 0) {
return; return;
} }
s_kill(_pid,max_delay,force); s_kill(_pid, _handle, max_delay, force);
_pid = -1; _pid = -1;
#ifdef _WIN32
if(_handle){
CloseHandle(_handle);
_handle = nullptr;
}
#endif
} }
Process::~Process() { Process::~Process() {
...@@ -218,7 +294,7 @@ Process::~Process() { ...@@ -218,7 +294,7 @@ Process::~Process() {
Process::Process() {} Process::Process() {}
bool Process::wait(bool block) { bool Process::wait(bool block) {
return s_wait(_pid,&_exit_code,block); return s_wait(_pid, _handle, &_exit_code, block);
} }
int Process::exit_code() { int Process::exit_code() {
......
...@@ -31,6 +31,7 @@ public: ...@@ -31,6 +31,7 @@ public:
int exit_code(); int exit_code();
private: private:
pid_t _pid = -1; pid_t _pid = -1;
void *_handle = nullptr;
int _exit_code = 0; int _exit_code = 0;
}; };
......
...@@ -52,7 +52,7 @@ string System::execute(const string &cmd) { ...@@ -52,7 +52,7 @@ string System::execute(const string &cmd) {
#if !defined(ANDROID) && !defined(_WIN32) #if !defined(ANDROID) && !defined(_WIN32)
static string addr2line(const string &address) { static string addr2line(const string &address) {
string cmd = StrPrinter << "addr2line -e " << exePath() << " " << address; string cmd = StrPrinter << "addr2line -C -f -e " << exePath() << " " << address;
return System::execute(cmd); return System::execute(cmd);
} }
......
...@@ -8,11 +8,12 @@ ...@@ -8,11 +8,12 @@
* may be found in the AUTHORS file in the root of the source tree. * may be found in the AUTHORS file in the root of the source tree.
*/ */
#include <sys/stat.h>
#include <math.h>
#include <signal.h> #include <signal.h>
#include <functional> #include <functional>
#include <sstream> #include <sstream>
#include <unordered_map> #include <unordered_map>
#include <math.h>
#include "jsoncpp/json.h" #include "jsoncpp/json.h"
#include "Util/util.h" #include "Util/util.h"
#include "Util/logger.h" #include "Util/logger.h"
...@@ -50,10 +51,14 @@ typedef enum { ...@@ -50,10 +51,14 @@ typedef enum {
#define API_FIELD "api." #define API_FIELD "api."
const string kApiDebug = API_FIELD"apiDebug"; const string kApiDebug = API_FIELD"apiDebug";
const string kSecret = API_FIELD"secret"; const string kSecret = API_FIELD"secret";
const string kSnapRoot = API_FIELD"snapRoot";
const string kDefaultSnap = API_FIELD"defaultSnap";
static onceToken token([]() { static onceToken token([]() {
mINI::Instance()[kApiDebug] = "1"; mINI::Instance()[kApiDebug] = "1";
mINI::Instance()[kSecret] = "035c73f7-bb6b-4889-a715-d9eb2d1925cc"; mINI::Instance()[kSecret] = "035c73f7-bb6b-4889-a715-d9eb2d1925cc";
mINI::Instance()[kSnapRoot] = "./www/snap/";
mINI::Instance()[kDefaultSnap] = "./www/logo.png";
}); });
}//namespace API }//namespace API
...@@ -145,7 +150,6 @@ static inline void addHttpListener(){ ...@@ -145,7 +150,6 @@ static inline void addHttpListener(){
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastHttpRequest, [](BroadcastHttpRequestArgs) { NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastHttpRequest, [](BroadcastHttpRequestArgs) {
auto it = s_map_api.find(parser.Url()); auto it = s_map_api.find(parser.Url());
if (it == s_map_api.end()) { if (it == s_map_api.end()) {
consumed = false;
return; return;
} }
//该api已被消费 //该api已被消费
...@@ -174,7 +178,7 @@ static inline void addHttpListener(){ ...@@ -174,7 +178,7 @@ static inline void addHttpListener(){
size = body->remainSize(); size = body->remainSize();
} }
if(size < 4 * 1024){ if(size && size < 4 * 1024){
string contentOut = body->readData(size)->toString(); string contentOut = body->readData(size)->toString();
DebugL << "\r\n# request:\r\n" << parser.Method() << " " << parser.FullUrl() << "\r\n" DebugL << "\r\n# request:\r\n" << parser.Method() << " " << parser.FullUrl() << "\r\n"
<< "# content:\r\n" << parser.Content() << "\r\n" << "# content:\r\n" << parser.Content() << "\r\n"
...@@ -436,14 +440,14 @@ void installWebApi() { ...@@ -436,14 +440,14 @@ void installWebApi() {
api_regist1("/index/api/isMediaOnline",[](API_ARGS1){ api_regist1("/index/api/isMediaOnline",[](API_ARGS1){
CHECK_SECRET(); CHECK_SECRET();
CHECK_ARGS("schema","vhost","app","stream"); CHECK_ARGS("schema","vhost","app","stream");
val["online"] = (bool) (MediaSource::find(allArgs["schema"],allArgs["vhost"],allArgs["app"],allArgs["stream"],false)); val["online"] = (bool) (MediaSource::find(allArgs["schema"],allArgs["vhost"],allArgs["app"],allArgs["stream"]));
}); });
//测试url http://127.0.0.1/index/api/getMediaInfo?schema=rtsp&vhost=__defaultVhost__&app=live&stream=obs //测试url http://127.0.0.1/index/api/getMediaInfo?schema=rtsp&vhost=__defaultVhost__&app=live&stream=obs
api_regist1("/index/api/getMediaInfo",[](API_ARGS1){ api_regist1("/index/api/getMediaInfo",[](API_ARGS1){
CHECK_SECRET(); CHECK_SECRET();
CHECK_ARGS("schema","vhost","app","stream"); CHECK_ARGS("schema","vhost","app","stream");
auto src = MediaSource::find(allArgs["schema"],allArgs["vhost"],allArgs["app"],allArgs["stream"],false); auto src = MediaSource::find(allArgs["schema"],allArgs["vhost"],allArgs["app"],allArgs["stream"]);
if(!src){ if(!src){
val["online"] = false; val["online"] = false;
return; return;
...@@ -817,6 +821,78 @@ void installWebApi() { ...@@ -817,6 +821,78 @@ void installWebApi() {
val["data"]["paths"] = paths; val["data"]["paths"] = paths;
}); });
static auto responseSnap = [](const string &snap_path,
const HttpSession::KeyValue &headerIn,
const HttpSession::HttpResponseInvoker &invoker) {
StrCaseMap headerOut;
struct stat statbuf = {0};
GET_CONFIG(string, defaultSnap, API::kDefaultSnap);
if (!(stat(snap_path.data(), &statbuf) == 0 && statbuf.st_size != 0) && !defaultSnap.empty()) {
//空文件且设置了预设图,则返回预设图片(也就是FFmpeg生成截图中空档期的默认图片)
const_cast<string&>(snap_path) = File::absolutePath(defaultSnap, "");
headerOut["Content-Type"] = HttpFileManager::getContentType(snap_path.data());
} else {
//之前生成的截图文件,我们默认为jpeg格式
headerOut["Content-Type"] = HttpFileManager::getContentType(".jpeg");
}
//返回图片给http客户端
invoker.responseFile(headerIn, headerOut, snap_path);
};
//获取截图缓存或者实时截图
//http://127.0.0.1/index/api/getSnap?url=rtmp://127.0.0.1/record/robot.mp4&timeout_sec=10&expire_sec=3
api_regist2("/index/api/getSnap", [](API_ARGS2){
CHECK_SECRET();
CHECK_ARGS("url", "timeout_sec", "expire_sec");
GET_CONFIG(string, snap_root, API::kSnapRoot);
int expire_sec = allArgs["expire_sec"];
auto scan_path = File::absolutePath(MD5(allArgs["url"]).hexdigest(), snap_root) + "/";
string snap_path;
File::scanDir(scan_path, [&](const string &path, bool isDir) {
if (isDir) {
//忽略文件夹
return true;
}
//找到截图
auto tm = FindField(path.data() + scan_path.size(), nullptr, ".jpeg");
if (atoll(tm.data()) + expire_sec < time(NULL)) {
//截图已经过期,删除之,后面重新生成
File::delete_file(path.data());
return true;
}
//截图未过期,中断遍历,返回上次生成的截图
snap_path = path;
return false;
});
if(!snap_path.empty()){
responseSnap(snap_path, headerIn, invoker);
return;
}
//无截图或者截图已经过期
snap_path = StrPrinter << scan_path << time(NULL) << ".jpeg";
//生成一个空文件,目的是顺便创建文件夹路径,
//同时防止在FFmpeg生成截图途中不停的尝试调用该api启动FFmpeg生成相同的截图
auto file = File::create_file(snap_path.data(), "wb");
if (file) {
fclose(file);
}
//启动FFmpeg进程,开始截图
FFmpegSnap::makeSnap(allArgs["url"],snap_path,allArgs["timeout_sec"],[invoker,headerIn,snap_path](bool success){
if(!success){
//生成截图失败,可能残留空文件
File::delete_file(snap_path.data());
}
responseSnap(snap_path, headerIn, invoker);
});
});
////////////以下是注册的Hook API//////////// ////////////以下是注册的Hook API////////////
api_regist1("/index/hook/on_publish",[](API_ARGS1){ api_regist1("/index/hook/on_publish",[](API_ARGS1){
//开始推流事件 //开始推流事件
......
...@@ -78,14 +78,6 @@ void DevChannel::inputH264(const char *data, int len, uint32_t dts, uint32_t pts ...@@ -78,14 +78,6 @@ void DevChannel::inputH264(const char *data, int len, uint32_t dts, uint32_t pts
if(pts == 0){ if(pts == 0){
pts = dts; pts = dts;
} }
int prefixeSize;
if (memcmp("\x00\x00\x00\x01", data, 4) == 0) {
prefixeSize = 4;
} else if (memcmp("\x00\x00\x01", data, 3) == 0) {
prefixeSize = 3;
} else {
prefixeSize = 0;
}
//由于rtmp/hls/mp4需要缓存时间戳相同的帧, //由于rtmp/hls/mp4需要缓存时间戳相同的帧,
//所以使用FrameNoCacheAble类型的帧反而会在转换成FrameCacheAble时多次内存拷贝 //所以使用FrameNoCacheAble类型的帧反而会在转换成FrameCacheAble时多次内存拷贝
...@@ -93,9 +85,8 @@ void DevChannel::inputH264(const char *data, int len, uint32_t dts, uint32_t pts ...@@ -93,9 +85,8 @@ void DevChannel::inputH264(const char *data, int len, uint32_t dts, uint32_t pts
H264Frame::Ptr frame = std::make_shared<H264Frame>(); H264Frame::Ptr frame = std::make_shared<H264Frame>();
frame->_dts = dts; frame->_dts = dts;
frame->_pts = pts; frame->_pts = pts;
frame->_buffer.assign("\x00\x00\x00\x01",4); frame->_buffer.assign(data, len);
frame->_buffer.append(data + prefixeSize, len - prefixeSize); frame->_prefix_size = prefixSize(data,len);
frame->_prefix_size = 4;
inputFrame(frame); inputFrame(frame);
} }
...@@ -106,14 +97,6 @@ void DevChannel::inputH265(const char *data, int len, uint32_t dts, uint32_t pts ...@@ -106,14 +97,6 @@ void DevChannel::inputH265(const char *data, int len, uint32_t dts, uint32_t pts
if(pts == 0){ if(pts == 0){
pts = dts; pts = dts;
} }
int prefixeSize;
if (memcmp("\x00\x00\x00\x01", data, 4) == 0) {
prefixeSize = 4;
} else if (memcmp("\x00\x00\x01", data, 3) == 0) {
prefixeSize = 3;
} else {
prefixeSize = 0;
}
//由于rtmp/hls/mp4需要缓存时间戳相同的帧, //由于rtmp/hls/mp4需要缓存时间戳相同的帧,
//所以使用FrameNoCacheAble类型的帧反而会在转换成FrameCacheAble时多次内存拷贝 //所以使用FrameNoCacheAble类型的帧反而会在转换成FrameCacheAble时多次内存拷贝
...@@ -121,9 +104,8 @@ void DevChannel::inputH265(const char *data, int len, uint32_t dts, uint32_t pts ...@@ -121,9 +104,8 @@ void DevChannel::inputH265(const char *data, int len, uint32_t dts, uint32_t pts
H265Frame::Ptr frame = std::make_shared<H265Frame>(); H265Frame::Ptr frame = std::make_shared<H265Frame>();
frame->_dts = dts; frame->_dts = dts;
frame->_pts = pts; frame->_pts = pts;
frame->_buffer.assign("\x00\x00\x00\x01",4); frame->_buffer.assign(data, len);
frame->_buffer.append(data + prefixeSize, len - prefixeSize); frame->_prefix_size = prefixSize(data,len);
frame->_prefix_size = 4;
inputFrame(frame); inputFrame(frame);
} }
...@@ -163,7 +145,9 @@ void DevChannel::inputG711(const char *data, int len, uint32_t dts){ ...@@ -163,7 +145,9 @@ void DevChannel::inputG711(const char *data, int len, uint32_t dts){
if (dts == 0) { if (dts == 0) {
dts = (uint32_t)_aTicker[1].elapsedTime(); dts = (uint32_t)_aTicker[1].elapsedTime();
} }
inputFrame(std::make_shared<G711FrameNoCacheAble>(_audio->codecId, (char*)data, len, dts, 0)); auto frame = std::make_shared<G711FrameNoCacheAble>((char*)data, len, dts, 0);
frame->setCodec(_audio->codecId);
inputFrame(frame);
} }
void DevChannel::initVideo(const VideoInfo &info) { void DevChannel::initVideo(const VideoInfo &info) {
......
...@@ -180,9 +180,8 @@ static void eraseIfEmpty(MAP &map, IT0 it0, IT1 it1, IT2 it2) { ...@@ -180,9 +180,8 @@ static void eraseIfEmpty(MAP &map, IT0 it0, IT1 it1, IT2 it2) {
} }
}; };
void findAsync_l(const MediaInfo &info, const std::shared_ptr<TcpSession> &session, bool retry, void MediaSource::findAsync_l(const MediaInfo &info, const std::shared_ptr<TcpSession> &session, bool retry, const function<void(const MediaSource::Ptr &src)> &cb){
const function<void(const MediaSource::Ptr &src)> &cb){ auto src = MediaSource::find_l(info._schema, info._vhost, info._app, info._streamid, true);
auto src = MediaSource::find(info._schema, info._vhost, info._app, info._streamid, true);
if(src || !retry){ if(src || !retry){
cb(src); cb(src);
return; return;
...@@ -248,7 +247,11 @@ void MediaSource::findAsync(const MediaInfo &info, const std::shared_ptr<TcpSess ...@@ -248,7 +247,11 @@ void MediaSource::findAsync(const MediaInfo &info, const std::shared_ptr<TcpSess
return findAsync_l(info, session, true, cb); return findAsync_l(info, session, true, cb);
} }
MediaSource::Ptr MediaSource::find(const string &schema, const string &vhost_tmp, const string &app, const string &id, bool bMake) { MediaSource::Ptr MediaSource::find(const string &schema, const string &vhost, const string &app, const string &id) {
return find_l(schema, vhost, app, id, false);
}
MediaSource::Ptr MediaSource::find_l(const string &schema, const string &vhost_tmp, const string &app, const string &id, bool bMake) {
string vhost = vhost_tmp; string vhost = vhost_tmp;
if(vhost.empty()){ if(vhost.empty()){
vhost = DEFAULT_VHOST; vhost = DEFAULT_VHOST;
...@@ -419,12 +422,10 @@ void MediaSourceEvent::onNoneReader(MediaSource &sender){ ...@@ -419,12 +422,10 @@ void MediaSourceEvent::onNoneReader(MediaSource &sender){
//如果mp4点播, 无人观看时我们强制关闭点播 //如果mp4点播, 无人观看时我们强制关闭点播
bool is_mp4_vod = sender.getApp() == recordApp; bool is_mp4_vod = sender.getApp() == recordApp;
//无人观看mp4点播时,3秒后自动关闭
auto close_delay = is_mp4_vod ? 3.0 : stream_none_reader_delay / 1000.0;
//没有任何人观看该视频源,表明该源可以关闭了 //没有任何人观看该视频源,表明该源可以关闭了
weak_ptr<MediaSource> weakSender = sender.shared_from_this(); weak_ptr<MediaSource> weakSender = sender.shared_from_this();
_async_close_timer = std::make_shared<Timer>(close_delay, [weakSender,is_mp4_vod]() { _async_close_timer = std::make_shared<Timer>(stream_none_reader_delay / 1000.0, [weakSender,is_mp4_vod]() {
auto strongSender = weakSender.lock(); auto strongSender = weakSender.lock();
if (!strongSender) { if (!strongSender) {
//对象已经销毁 //对象已经销毁
...@@ -467,7 +468,7 @@ MediaSource::Ptr MediaSource::createFromMP4(const string &schema, const string & ...@@ -467,7 +468,7 @@ MediaSource::Ptr MediaSource::createFromMP4(const string &schema, const string &
try { try {
MP4Reader::Ptr pReader(new MP4Reader(vhost, app, stream, filePath)); MP4Reader::Ptr pReader(new MP4Reader(vhost, app, stream, filePath));
pReader->startReadMP4(); pReader->startReadMP4();
return MediaSource::find(schema, vhost, app, stream, false); return MediaSource::find(schema, vhost, app, stream);
} catch (std::exception &ex) { } catch (std::exception &ex) {
WarnL << ex.what(); WarnL << ex.what();
return nullptr; return nullptr;
...@@ -478,57 +479,51 @@ MediaSource::Ptr MediaSource::createFromMP4(const string &schema, const string & ...@@ -478,57 +479,51 @@ MediaSource::Ptr MediaSource::createFromMP4(const string &schema, const string &
#endif //ENABLE_MP4 #endif //ENABLE_MP4
} }
static bool isFlushAble_default(bool is_audio, uint32_t last_stamp, uint32_t new_stamp, int cache_size) { static bool isFlushAble_default(bool is_video, uint32_t last_stamp, uint32_t new_stamp, int cache_size) {
if (new_stamp < last_stamp) { if (new_stamp + 500 < last_stamp) {
//时间戳回退(可能seek中) //时间戳回退比较大(可能seek中),由于rtp中时间戳是pts,是可能存在一定程度的回退的
return true; return true;
} }
if (!is_audio) { //时间戳发送变化或者缓存超过1024个,sendmsg接口一般最多只能发送1024个数据包
//这是视频,时间戳发送变化或者缓存超过1024个
return last_stamp != new_stamp || cache_size >= 1024; return last_stamp != new_stamp || cache_size >= 1024;
}
//这是音频,缓存超过100ms或者缓存个数超过10个
return new_stamp > last_stamp + 100 || cache_size > 10;
} }
static bool isFlushAble_merge(bool is_audio, uint32_t last_stamp, uint32_t new_stamp, int cache_size, int merge_ms) { static bool isFlushAble_merge(bool is_video, uint32_t last_stamp, uint32_t new_stamp, int cache_size, int merge_ms) {
if (new_stamp < last_stamp) { if (new_stamp + 500 < last_stamp) {
//时间戳回退(可能seek中) //时间戳回退比较大(可能seek中),由于rtp中时间戳是pts,是可能存在一定程度的回退的
return true; return true;
} }
if(new_stamp > last_stamp + merge_ms){ if (new_stamp > last_stamp + merge_ms) {
//时间戳增量超过合并写阈值 //时间戳增量超过合并写阈值
return true; return true;
} }
if (!is_audio) { //缓存数超过1024个,这个逻辑用于避免时间戳异常的流导致的内存暴增问题
//这是视频,缓存数超过1024个,这个逻辑用于避免时间戳异常的流导致的内存暴增问题
//而且sendmsg接口一般最多只能发送1024个数据包 //而且sendmsg接口一般最多只能发送1024个数据包
return cache_size >= 1024; return cache_size >= 1024;
}
//这是音频,音频缓存超过20个
return cache_size > 20;
} }
bool FlushPolicy::isFlushAble(uint32_t new_stamp, int cache_size) { bool FlushPolicy::isFlushAble(bool is_video, bool is_key, uint32_t new_stamp, int cache_size) {
bool ret = false; bool flush_flag = false;
if (is_key && is_video) {
//遇到关键帧flush掉前面的数据,确保关键帧为该组数据的第一帧,确保GOP缓存有效
flush_flag = true;
} else {
GET_CONFIG(int, mergeWriteMS, General::kMergeWriteMS); GET_CONFIG(int, mergeWriteMS, General::kMergeWriteMS);
if (mergeWriteMS <= 0) { if (mergeWriteMS <= 0) {
//关闭了合并写或者合并写阈值小于等于0 //关闭了合并写或者合并写阈值小于等于0
ret = isFlushAble_default(_is_audio, _last_stamp, new_stamp, cache_size); flush_flag = isFlushAble_default(is_video, _last_stamp[is_video], new_stamp, cache_size);
} else { } else {
ret = isFlushAble_merge(_is_audio, _last_stamp, new_stamp, cache_size, mergeWriteMS); flush_flag = isFlushAble_merge(is_video, _last_stamp[is_video], new_stamp, cache_size, mergeWriteMS);
}
} }
if (ret) { if (flush_flag) {
// DebugL << _is_audio << " " << _last_stamp << " " << new_stamp; _last_stamp[is_video] = new_stamp;
_last_stamp = new_stamp;
} }
return ret; return flush_flag;
} }
} /* namespace mediakit */ } /* namespace mediakit */
\ No newline at end of file
...@@ -134,7 +134,7 @@ public: ...@@ -134,7 +134,7 @@ public:
virtual bool isRecording(Recorder::type type); virtual bool isRecording(Recorder::type type);
// 同步查找流 // 同步查找流
static Ptr find(const string &schema, const string &vhost, const string &app, const string &id, bool bMake = true) ; static Ptr find(const string &schema, const string &vhost, const string &app, const string &id);
// 异步查找流 // 异步查找流
static void findAsync(const MediaInfo &info, const std::shared_ptr<TcpSession> &session, const function<void(const Ptr &src)> &cb); static void findAsync(const MediaInfo &info, const std::shared_ptr<TcpSession> &session, const function<void(const Ptr &src)> &cb);
// 遍历所有流 // 遍历所有流
...@@ -142,9 +142,14 @@ public: ...@@ -142,9 +142,14 @@ public:
// 从mp4文件生成MediaSource // 从mp4文件生成MediaSource
static MediaSource::Ptr createFromMP4(const string &schema, const string &vhost, const string &app, const string &stream, const string &filePath = "", bool checkApp = true); static MediaSource::Ptr createFromMP4(const string &schema, const string &vhost, const string &app, const string &stream, const string &filePath = "", bool checkApp = true);
protected: protected:
void regist() ; void regist() ;
bool unregist() ; bool unregist();
private:
static Ptr find_l(const string &schema, const string &vhost, const string &app, const string &id, bool bMake);
static void findAsync_l(const MediaInfo &info, const std::shared_ptr<TcpSession> &session, bool retry, const function<void(const MediaSource::Ptr &src)> &cb);
private: private:
string _strSchema; string _strSchema;
string _strVhost; string _strVhost;
...@@ -159,10 +164,7 @@ private: ...@@ -159,10 +164,7 @@ private:
///缓存刷新策略类 ///缓存刷新策略类
class FlushPolicy { class FlushPolicy {
public: public:
FlushPolicy(bool is_audio) { FlushPolicy() = default;
_is_audio = is_audio;
};
~FlushPolicy() = default; ~FlushPolicy() = default;
uint32_t getStamp(const RtpPacket::Ptr &packet) { uint32_t getStamp(const RtpPacket::Ptr &packet) {
...@@ -173,45 +175,45 @@ public: ...@@ -173,45 +175,45 @@ public:
return packet->timeStamp; return packet->timeStamp;
} }
bool isFlushAble(uint32_t new_stamp, int cache_size); bool isFlushAble(bool is_video, bool is_key, uint32_t new_stamp, int cache_size);
private: private:
bool _is_audio; uint32_t _last_stamp[2] = {0, 0};
uint32_t _last_stamp= 0;
}; };
/// 视频合并写缓存模板 /// 合并写缓存模板
/// \tparam packet 包类型 /// \tparam packet 包类型
/// \tparam policy 刷新缓存策略 /// \tparam policy 刷新缓存策略
/// \tparam packet_list 包缓存类型 /// \tparam packet_list 包缓存类型
template<typename packet, typename policy = FlushPolicy, typename packet_list = List<std::shared_ptr<packet> > > template<typename packet, typename policy = FlushPolicy, typename packet_list = List<std::shared_ptr<packet> > >
class VideoPacketCache { class PacketCache {
public: public:
VideoPacketCache() : _policy(false) { PacketCache(){
_cache = std::make_shared<packet_list>(); _cache = std::make_shared<packet_list>();
} }
virtual ~VideoPacketCache() = default; virtual ~PacketCache() = default;
void inputVideo(const std::shared_ptr<packet> &rtp, bool key_pos) { void inputPacket(bool is_video, const std::shared_ptr<packet> &pkt, bool key_pos) {
if (_policy.isFlushAble(_policy.getStamp(rtp), _cache->size())) { if (_policy.isFlushAble(is_video, key_pos, _policy.getStamp(pkt), _cache->size())) {
flushAll(); flushAll();
} }
//追加数据到最后 //追加数据到最后
_cache->emplace_back(rtp); _cache->emplace_back(pkt);
if (key_pos) { if (key_pos) {
_key_pos = key_pos; _key_pos = key_pos;
} }
} }
virtual void onFlushVideo(std::shared_ptr<packet_list> &, bool key_pos) = 0; virtual void onFlush(std::shared_ptr<packet_list> &, bool key_pos) = 0;
private: private:
void flushAll() { void flushAll() {
if (_cache->empty()) { if (_cache->empty()) {
return; return;
} }
onFlushVideo(_cache, _key_pos); onFlush(_cache, _key_pos);
_cache = std::make_shared<packet_list>(); _cache = std::make_shared<packet_list>();
_key_pos = false; _key_pos = false;
} }
...@@ -222,44 +224,5 @@ private: ...@@ -222,44 +224,5 @@ private:
bool _key_pos = false; bool _key_pos = false;
}; };
/// 音频频合并写缓存模板
/// \tparam packet 包类型
/// \tparam policy 刷新缓存策略
/// \tparam packet_list 包缓存类型
template<typename packet, typename policy = FlushPolicy, typename packet_list = List<std::shared_ptr<packet> > >
class AudioPacketCache {
public:
AudioPacketCache() : _policy(true) {
_cache = std::make_shared<packet_list>();
}
virtual ~AudioPacketCache() = default;
void inputAudio(const std::shared_ptr<packet> &rtp) {
if (_policy.isFlushAble(_policy.getStamp(rtp), _cache->size())) {
flushAll();
}
//追加数据到最后
_cache->emplace_back(rtp);
}
virtual void onFlushAudio(std::shared_ptr<packet_list> &) = 0;
private:
void flushAll() {
if (_cache->empty()) {
return;
}
onFlushAudio(_cache);
_cache = std::make_shared<packet_list>();
}
private:
policy _policy;
std::shared_ptr<packet_list> _cache;
};
} /* namespace mediakit */ } /* namespace mediakit */
#endif //ZLMEDIAKIT_MEDIASOURCE_H #endif //ZLMEDIAKIT_MEDIASOURCE_H
\ No newline at end of file
...@@ -298,8 +298,69 @@ void MultiMediaSourceMuxer::resetTracks() { ...@@ -298,8 +298,69 @@ void MultiMediaSourceMuxer::resetTracks() {
_muxer->resetTracks(); _muxer->resetTracks();
} }
//该类实现frame级别的时间戳覆盖
class FrameModifyStamp : public Frame{
public:
typedef std::shared_ptr<FrameModifyStamp> Ptr;
FrameModifyStamp(const Frame::Ptr &frame, Stamp &stamp){
_frame = frame;
//覆盖时间戳
stamp.revise(frame->dts(), frame->pts(), _dts, _pts, true);
}
~FrameModifyStamp() override {}
uint32_t dts() const override{
return _dts;
}
uint32_t pts() const override{
return _pts;
}
uint32_t prefixSize() const override {
return _frame->prefixSize();
}
bool keyFrame() const override {
return _frame->keyFrame();
}
bool configFrame() const override {
return _frame->configFrame();
}
bool cacheAble() const override {
return _frame->cacheAble();
}
char *data() const override {
return _frame->data();
}
uint32_t size() const override {
return _frame->size();
}
CodecId getCodecId() const override {
return _frame->getCodecId();
}
private:
Frame::Ptr _frame;
int64_t _dts;
int64_t _pts;
};
void MultiMediaSourceMuxer::inputFrame(const Frame::Ptr &frame) { void MultiMediaSourceMuxer::inputFrame(const Frame::Ptr &frame) {
GET_CONFIG(bool,modify_stamp,General::kModifyStamp);
if(!modify_stamp){
//未开启时间戳覆盖
_muxer->inputFrame(frame); _muxer->inputFrame(frame);
}else{
//开启了时间戳覆盖
FrameModifyStamp::Ptr new_frame = std::make_shared<FrameModifyStamp>(frame,_stamp[frame->getTrackType()]);
//输入时间戳覆盖后的帧
_muxer->inputFrame(new_frame);
}
} }
bool MultiMediaSourceMuxer::isEnabled(){ bool MultiMediaSourceMuxer::isEnabled(){
......
...@@ -178,6 +178,7 @@ public: ...@@ -178,6 +178,7 @@ public:
private: private:
MultiMuxerPrivate::Ptr _muxer; MultiMuxerPrivate::Ptr _muxer;
std::weak_ptr<MediaSourceEvent> _listener; std::weak_ptr<MediaSourceEvent> _listener;
Stamp _stamp[2];
}; };
}//namespace mediakit }//namespace mediakit
......
...@@ -67,6 +67,7 @@ const string kPublishToRtxp = GENERAL_FIELD"publishToRtxp"; ...@@ -67,6 +67,7 @@ const string kPublishToRtxp = GENERAL_FIELD"publishToRtxp";
const string kPublishToHls = GENERAL_FIELD"publishToHls"; const string kPublishToHls = GENERAL_FIELD"publishToHls";
const string kPublishToMP4 = GENERAL_FIELD"publishToMP4"; const string kPublishToMP4 = GENERAL_FIELD"publishToMP4";
const string kMergeWriteMS = GENERAL_FIELD"mergeWriteMS"; const string kMergeWriteMS = GENERAL_FIELD"mergeWriteMS";
const string kModifyStamp = GENERAL_FIELD"modifyStamp";
onceToken token([](){ onceToken token([](){
mINI::Instance()[kFlowThreshold] = 1024; mINI::Instance()[kFlowThreshold] = 1024;
...@@ -79,6 +80,7 @@ onceToken token([](){ ...@@ -79,6 +80,7 @@ onceToken token([](){
mINI::Instance()[kPublishToHls] = 1; mINI::Instance()[kPublishToHls] = 1;
mINI::Instance()[kPublishToMP4] = 0; mINI::Instance()[kPublishToMP4] = 0;
mINI::Instance()[kMergeWriteMS] = 0; mINI::Instance()[kMergeWriteMS] = 0;
mINI::Instance()[kModifyStamp] = 0;
},nullptr); },nullptr);
}//namespace General }//namespace General
...@@ -293,3 +295,10 @@ const string kBenchmarkMode = "benchmark_mode"; ...@@ -293,3 +295,10 @@ const string kBenchmarkMode = "benchmark_mode";
} // namespace mediakit } // namespace mediakit
void Assert_Throw(int failed, const char *exp, const char *func, const char *file, int line){
if(failed) {
_StrPrinter printer;
printer << "Assertion failed: (" << exp << "), function " << func << ", file " << file << ", line " << line << ".";
throw std::runtime_error(printer);
}
}
...@@ -174,6 +174,8 @@ extern const string kPublishToMP4 ; ...@@ -174,6 +174,8 @@ extern const string kPublishToMP4 ;
//合并写缓存大小(单位毫秒),合并写指服务器缓存一定的数据后才会一次性写入socket,这样能提高性能,但是会提高延时 //合并写缓存大小(单位毫秒),合并写指服务器缓存一定的数据后才会一次性写入socket,这样能提高性能,但是会提高延时
//开启后会同时关闭TCP_NODELAY并开启MSG_MORE //开启后会同时关闭TCP_NODELAY并开启MSG_MORE
extern const string kMergeWriteMS ; extern const string kMergeWriteMS ;
//全局的时间戳覆盖开关,在转协议时,对frame进行时间戳覆盖
extern const string kModifyStamp;
}//namespace General }//namespace General
...@@ -217,6 +219,7 @@ extern const string kDirectProxy; ...@@ -217,6 +219,7 @@ extern const string kDirectProxy;
////////////RTMP服务器配置/////////// ////////////RTMP服务器配置///////////
namespace Rtmp { namespace Rtmp {
//rtmp推流时间戳覆盖开关
extern const string kModifyStamp; extern const string kModifyStamp;
//握手超时时间,默认15秒 //握手超时时间,默认15秒
extern const string kHandshakeSecond; extern const string kHandshakeSecond;
......
...@@ -9,65 +9,63 @@ ...@@ -9,65 +9,63 @@
*/ */
#include "AAC.h" #include "AAC.h"
#ifdef ENABLE_MP4
#include "mpeg4-aac.h"
#endif
namespace mediakit{ namespace mediakit{
void writeAdtsHeader(const AACFrame &hed, uint8_t *pcAdts) { unsigned const samplingFrequencyTable[16] = { 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000, 7350, 0, 0, 0 };
pcAdts[0] = (hed.syncword >> 4 & 0xFF); //8bit
pcAdts[1] = (hed.syncword << 4 & 0xF0); //4 bit class AdtsHeader{
pcAdts[1] |= (hed.id << 3 & 0x08); //1 bit public:
pcAdts[1] |= (hed.layer << 1 & 0x06); //2bit unsigned int syncword = 0; //12 bslbf 同步字The bit string ‘1111 1111 1111’,说明一个ADTS帧的开始
pcAdts[1] |= (hed.protection_absent & 0x01); //1 bit unsigned int id; //1 bslbf MPEG 标示符, 设置为1
unsigned int layer; //2 uimsbf Indicates which layer is used. Set to ‘00’
pcAdts[2] = (hed.profile << 6 & 0xC0); // 2 bit unsigned int protection_absent; //1 bslbf 表示是否误码校验
pcAdts[2] |= (hed.sf_index << 2 & 0x3C); //4bit unsigned int profile; //2 uimsbf 表示使用哪个级别的AAC,如01 Low Complexity(LC)--- AACLC
pcAdts[2] |= (hed.private_bit << 1 & 0x02); //1 bit unsigned int sf_index; //4 uimsbf 表示使用的采样率下标
pcAdts[2] |= (hed.channel_configuration >> 2 & 0x03); //1 bit unsigned int private_bit; //1 bslbf
unsigned int channel_configuration; //3 uimsbf 表示声道数
pcAdts[3] = (hed.channel_configuration << 6 & 0xC0); // 2 bit unsigned int original; //1 bslbf
pcAdts[3] |= (hed.original << 5 & 0x20); //1 bit unsigned int home; //1 bslbf
pcAdts[3] |= (hed.home << 4 & 0x10); //1 bit //下面的为改变的参数即每一帧都不同
pcAdts[3] |= (hed.copyright_identification_bit << 3 & 0x08); //1 bit unsigned int copyright_identification_bit; //1 bslbf
pcAdts[3] |= (hed.copyright_identification_start << 2 & 0x04); //1 bit unsigned int copyright_identification_start; //1 bslbf
pcAdts[3] |= (hed.aac_frame_length >> 11 & 0x03); //2 bit unsigned int aac_frame_length; // 13 bslbf 一个ADTS帧的长度包括ADTS头和raw data block
unsigned int adts_buffer_fullness; //11 bslbf 0x7FF 说明是码率可变的码流
pcAdts[4] = (hed.aac_frame_length >> 3 & 0xFF); //8 bit //no_raw_data_blocks_in_frame 表示ADTS帧中有number_of_raw_data_blocks_in_frame + 1个AAC原始帧.
//所以说number_of_raw_data_blocks_in_frame == 0
pcAdts[5] = (hed.aac_frame_length << 5 & 0xE0); //3 bit //表示说ADTS帧中有一个AAC数据块并不是说没有。(一个AAC原始帧包含一段时间内1024个采样及相关数据)
pcAdts[5] |= (hed.adts_buffer_fullness >> 6 & 0x1F); //5 bit unsigned int no_raw_data_blocks_in_frame; //2 uimsfb
};
pcAdts[6] = (hed.adts_buffer_fullness << 2 & 0xFC); //6 bit
pcAdts[6] |= (hed.no_raw_data_blocks_in_frame & 0x03); //2 bit static void dumpAdtsHeader(const AdtsHeader &hed, uint8_t *out) {
out[0] = (hed.syncword >> 4 & 0xFF); //8bit
out[1] = (hed.syncword << 4 & 0xF0); //4 bit
out[1] |= (hed.id << 3 & 0x08); //1 bit
out[1] |= (hed.layer << 1 & 0x06); //2bit
out[1] |= (hed.protection_absent & 0x01); //1 bit
out[2] = (hed.profile << 6 & 0xC0); // 2 bit
out[2] |= (hed.sf_index << 2 & 0x3C); //4bit
out[2] |= (hed.private_bit << 1 & 0x02); //1 bit
out[2] |= (hed.channel_configuration >> 2 & 0x03); //1 bit
out[3] = (hed.channel_configuration << 6 & 0xC0); // 2 bit
out[3] |= (hed.original << 5 & 0x20); //1 bit
out[3] |= (hed.home << 4 & 0x10); //1 bit
out[3] |= (hed.copyright_identification_bit << 3 & 0x08); //1 bit
out[3] |= (hed.copyright_identification_start << 2 & 0x04); //1 bit
out[3] |= (hed.aac_frame_length >> 11 & 0x03); //2 bit
out[4] = (hed.aac_frame_length >> 3 & 0xFF); //8 bit
out[5] = (hed.aac_frame_length << 5 & 0xE0); //3 bit
out[5] |= (hed.adts_buffer_fullness >> 6 & 0x1F); //5 bit
out[6] = (hed.adts_buffer_fullness << 2 & 0xFC); //6 bit
out[6] |= (hed.no_raw_data_blocks_in_frame & 0x03); //2 bit
} }
string makeAdtsConfig(const uint8_t *pcAdts){
if (!(pcAdts[0] == 0xFF && (pcAdts[1] & 0xF0) == 0xF0)) {
return "";
}
// Get and check the 'profile':
unsigned char profile = (pcAdts[2] & 0xC0) >> 6; // 2 bits
if (profile == 3) {
return "";
}
// Get and check the 'sampling_frequency_index':
unsigned char sampling_frequency_index = (pcAdts[2] & 0x3C) >> 2; // 4 bits
if (samplingFrequencyTable[sampling_frequency_index] == 0) {
return "";
}
// Get and check the 'channel_configuration':
unsigned char channel_configuration = ((pcAdts[2] & 0x01) << 2)
| ((pcAdts[3] & 0xC0) >> 6); // 3 bits
unsigned char audioSpecificConfig[2]; static void parseAacConfig(const string &config, AdtsHeader &adts) {
unsigned char const audioObjectType = profile + 1; uint8_t cfg1 = config[0];
audioSpecificConfig[0] = (audioObjectType << 3) | (sampling_frequency_index >> 1); uint8_t cfg2 = config[1];
audioSpecificConfig[1] = (sampling_frequency_index << 7) | (channel_configuration << 3);
return string((char *)audioSpecificConfig,2);
}
void makeAdtsHeader(const string &strAudioCfg,AACFrame &adts) {
uint8_t cfg1 = strAudioCfg[0];
uint8_t cfg2 = strAudioCfg[1];
int audioObjectType; int audioObjectType;
int sampling_frequency_index; int sampling_frequency_index;
...@@ -93,9 +91,83 @@ void makeAdtsHeader(const string &strAudioCfg,AACFrame &adts) { ...@@ -93,9 +91,83 @@ void makeAdtsHeader(const string &strAudioCfg,AACFrame &adts) {
adts.adts_buffer_fullness = 2047; adts.adts_buffer_fullness = 2047;
adts.no_raw_data_blocks_in_frame = 0; adts.no_raw_data_blocks_in_frame = 0;
} }
void getAACInfo(const AACFrame &adts,int &iSampleRate,int &iChannel){
iSampleRate = samplingFrequencyTable[adts.sf_index]; string makeAacConfig(const uint8_t *hex, int length){
iChannel = adts.channel_configuration; #ifndef ENABLE_MP4
if (!(hex[0] == 0xFF && (hex[1] & 0xF0) == 0xF0)) {
return "";
}
// Get and check the 'profile':
unsigned char profile = (hex[2] & 0xC0) >> 6; // 2 bits
if (profile == 3) {
return "";
}
// Get and check the 'sampling_frequency_index':
unsigned char sampling_frequency_index = (hex[2] & 0x3C) >> 2; // 4 bits
if (samplingFrequencyTable[sampling_frequency_index] == 0) {
return "";
}
// Get and check the 'channel_configuration':
unsigned char channel_configuration = ((hex[2] & 0x01) << 2) | ((hex[3] & 0xC0) >> 6); // 3 bits
unsigned char audioSpecificConfig[2];
unsigned char const audioObjectType = profile + 1;
audioSpecificConfig[0] = (audioObjectType << 3) | (sampling_frequency_index >> 1);
audioSpecificConfig[1] = (sampling_frequency_index << 7) | (channel_configuration << 3);
return string((char *)audioSpecificConfig,2);
#else
struct mpeg4_aac_t aac = {0};
if (mpeg4_aac_adts_load(hex, length, &aac) > 0) {
char buf[32] = {0};
int len = mpeg4_aac_audio_specific_config_save(&aac, (uint8_t *) buf, sizeof(buf));
if (len > 0) {
return string(buf, len);
}
}
WarnL << "生成aac config失败, adts header:" << hexdump(hex, length);
return "";
#endif
}
int dumpAacConfig(const string &config, int length, uint8_t *out, int out_size) {
#ifndef ENABLE_MP4
AdtsHeader header;
parseAacConfig(config, header);
header.aac_frame_length = length;
dumpAdtsHeader(header, out);
return ADTS_HEADER_LEN;
#else
struct mpeg4_aac_t aac = {0};
int ret = mpeg4_aac_audio_specific_config_load((uint8_t *) config.data(), config.size(), &aac);
if (ret > 0) {
ret = mpeg4_aac_adts_save(&aac, length, out, out_size);
}
if (ret < 0) {
WarnL << "生成adts头失败:" << ret << ", aac config:" << hexdump(config.data(), config.size());
}
return ret;
#endif
}
bool parseAacConfig(const string &config, int &samplerate, int &channels){
#ifndef ENABLE_MP4
AdtsHeader header;
parseAacConfig(config, header);
samplerate = samplingFrequencyTable[header.sf_index];
channels = header.channel_configuration;
return true;
#else
struct mpeg4_aac_t aac = {0};
int ret = mpeg4_aac_audio_specific_config_load((uint8_t *) config.data(), config.size(), &aac);
if (ret > 0) {
samplerate = aac.sampling_frequency;
channels = aac.channels;
return true;
}
WarnL << "获取aac采样率、声道数失败:" << hexdump(config.data(), config.size());
return false;
#endif
} }
Sdp::Ptr AACTrack::getSdp() { Sdp::Ptr AACTrack::getSdp() {
...@@ -103,9 +175,7 @@ Sdp::Ptr AACTrack::getSdp() { ...@@ -103,9 +175,7 @@ Sdp::Ptr AACTrack::getSdp() {
WarnL << getCodecName() << " Track未准备好"; WarnL << getCodecName() << " Track未准备好";
return nullptr; return nullptr;
} }
return std::make_shared<AACSdp>(getAacCfg(),getAudioSampleRate()); return std::make_shared<AACSdp>(getAacCfg(),getAudioSampleRate(), getAudioChannel());
} }
}//namespace mediakit }//namespace mediakit
\ No newline at end of file
...@@ -13,98 +13,34 @@ ...@@ -13,98 +13,34 @@
#include "Frame.h" #include "Frame.h"
#include "Track.h" #include "Track.h"
#define ADTS_HEADER_LEN 7
namespace mediakit{ namespace mediakit{
class AACFrame; string makeAacConfig(const uint8_t *hex, int length);
int dumpAacConfig(const string &config, int length, uint8_t *out, int out_size);
unsigned const samplingFrequencyTable[16] = { 96000, 88200, bool parseAacConfig(const string &config, int &samplerate, int &channels);
64000, 48000,
44100, 32000,
24000, 22050,
16000, 12000,
11025, 8000,
7350, 0, 0, 0 };
void makeAdtsHeader(const string &strAudioCfg,AACFrame &adts);
void writeAdtsHeader(const AACFrame &adts, uint8_t *pcAdts) ;
string makeAdtsConfig(const uint8_t *pcAdts);
void getAACInfo(const AACFrame &adts,int &iSampleRate,int &iChannel);
/** /**
* aac帧,包含adts头 * aac帧,包含adts头
*/ */
class AACFrame : public Frame { class AACFrame : public FrameImp {
public: public:
typedef std::shared_ptr<AACFrame> Ptr; typedef std::shared_ptr<AACFrame> Ptr;
AACFrame(){
char *data() const override{ _codecid = CodecAAC;
return (char *)buffer;
}
uint32_t size() const override {
return aac_frame_length;
}
uint32_t dts() const override {
return timeStamp;
}
uint32_t prefixSize() const override{
return iPrefixSize;
}
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{
return CodecAAC;
} }
};
bool keyFrame() const override { class AACFrameNoCacheAble : public FrameFromPtr {
return false;
}
bool configFrame() const override{
return false;
}
public:
unsigned int syncword = 0; //12 bslbf 同步字The bit string ‘1111 1111 1111’,说明一个ADTS帧的开始
unsigned int id; //1 bslbf MPEG 标示符, 设置为1
unsigned int layer; //2 uimsbf Indicates which layer is used. Set to ‘00’
unsigned int protection_absent; //1 bslbf 表示是否误码校验
unsigned int profile; //2 uimsbf 表示使用哪个级别的AAC,如01 Low Complexity(LC)--- AACLC
unsigned int sf_index; //4 uimsbf 表示使用的采样率下标
unsigned int private_bit; //1 bslbf
unsigned int channel_configuration; //3 uimsbf 表示声道数
unsigned int original; //1 bslbf
unsigned int home; //1 bslbf
//下面的为改变的参数即每一帧都不同
unsigned int copyright_identification_bit; //1 bslbf
unsigned int copyright_identification_start; //1 bslbf
unsigned int aac_frame_length; // 13 bslbf 一个ADTS帧的长度包括ADTS头和raw data block
unsigned int adts_buffer_fullness; //11 bslbf 0x7FF 说明是码率可变的码流
//no_raw_data_blocks_in_frame 表示ADTS帧中有number_of_raw_data_blocks_in_frame + 1个AAC原始帧.
//所以说number_of_raw_data_blocks_in_frame == 0
//表示说ADTS帧中有一个AAC数据块并不是说没有。(一个AAC原始帧包含一段时间内1024个采样及相关数据)
unsigned int no_raw_data_blocks_in_frame; //2 uimsfb
unsigned char buffer[2 * 1024 + 7];
uint32_t timeStamp;
uint32_t iPrefixSize = 7;
} ;
class AACFrameNoCacheAble : public FrameNoCacheAble {
public: public:
typedef std::shared_ptr<AACFrameNoCacheAble> Ptr; typedef std::shared_ptr<AACFrameNoCacheAble> Ptr;
AACFrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts,uint32_t pts = 0,int prefixeSize = 7){ AACFrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts,uint32_t pts = 0,int prefix_size = ADTS_HEADER_LEN){
_ptr = ptr; _ptr = ptr;
_size = size; _size = size;
_dts = dts; _dts = dts;
_prefixSize = prefixeSize; _prefix_size = prefix_size;
}
TrackType getTrackType() const override{
return TrackAudio;
} }
CodecId getCodecId() const override{ CodecId getCodecId() const override{
...@@ -118,8 +54,7 @@ public: ...@@ -118,8 +54,7 @@ public:
bool configFrame() const override{ bool configFrame() const override{
return false; return false;
} }
} ; };
/** /**
* aac音频通道 * aac音频通道
...@@ -136,44 +71,25 @@ public: ...@@ -136,44 +71,25 @@ public:
/** /**
* 构造aac类型的媒体 * 构造aac类型的媒体
* @param aac_cfg aac两个字节的配置信息 * @param aac_cfg aac配置信息
*/ */
AACTrack(const string &aac_cfg){ AACTrack(const string &aac_cfg){
if(aac_cfg.size() < 2){ setAacCfg(aac_cfg);
throw std::invalid_argument("adts配置必须最少2个字节");
}
_cfg = aac_cfg.substr(0,2);
onReady();
} }
/** /**
* 构造aac类型的媒体 * 设置aac 配置信息
* @param adts_header adts头,7个字节
* @param adts_header_len adts头长度,不少于7个字节
*/ */
AACTrack(const char *adts_header,int adts_header_len = 7){ void setAacCfg(const string &aac_cfg){
if(adts_header_len < 7){ if (aac_cfg.size() < 2) {
throw std::invalid_argument("adts头必须不少于7个字节"); throw std::invalid_argument("adts配置必须最少2个字节");
}
_cfg = makeAdtsConfig((uint8_t*)adts_header);
onReady();
}
/**
* 构造aac类型的媒体
* @param aac_frame_with_adts 带adts头的aac帧
*/
AACTrack(const Frame::Ptr &aac_frame_with_adts){
if(aac_frame_with_adts->getCodecId() != CodecAAC || aac_frame_with_adts->prefixSize() < 7){
throw std::invalid_argument("必须输入带adts头的aac帧");
} }
_cfg = makeAdtsConfig((uint8_t*)aac_frame_with_adts->data()); _cfg = aac_cfg;
onReady(); onReady();
} }
/** /**
* 获取aac两个字节的配置 * 获取aac 配置信息
* @return
*/ */
const string &getAacCfg() const{ const string &getAacCfg() const{
return _cfg; return _cfg;
...@@ -181,7 +97,6 @@ public: ...@@ -181,7 +97,6 @@ public:
/** /**
* 返回编码类型 * 返回编码类型
* @return
*/ */
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return CodecAAC; return CodecAAC;
...@@ -189,30 +104,27 @@ public: ...@@ -189,30 +104,27 @@ public:
/** /**
* 在获取aac_cfg前是无效的Track * 在获取aac_cfg前是无效的Track
* @return
*/ */
bool ready() override { bool ready() override {
return !_cfg.empty(); return !_cfg.empty();
} }
/** /**
* 返回音频采样率 * 返回音频采样率
* @return
*/ */
int getAudioSampleRate() const override{ int getAudioSampleRate() const override{
return _sampleRate; return _sampleRate;
} }
/** /**
* 返回音频采样位数,一般为16或8 * 返回音频采样位数,一般为16或8
* @return
*/ */
int getAudioSampleBit() const override{ int getAudioSampleBit() const override{
return _sampleBit; return _sampleBit;
} }
/** /**
* 返回音频通道数 * 返回音频通道数
* @return
*/ */
int getAudioChannel() const override{ int getAudioChannel() const override{
return _channel; return _channel;
...@@ -225,9 +137,9 @@ public: ...@@ -225,9 +137,9 @@ public:
void inputFrame(const Frame::Ptr &frame) override{ void inputFrame(const Frame::Ptr &frame) override{
if (_cfg.empty()) { if (_cfg.empty()) {
//未获取到aac_cfg信息 //未获取到aac_cfg信息
if (frame->prefixSize() >= 7) { if (frame->prefixSize()) {
//7个字节的adts头 //7个字节的adts头
_cfg = makeAdtsConfig((uint8_t *)(frame->data())); _cfg = makeAacConfig((uint8_t *) (frame->data()), frame->prefixSize());
onReady(); onReady();
} else { } else {
WarnL << "无法获取adts头!"; WarnL << "无法获取adts头!";
...@@ -240,13 +152,12 @@ private: ...@@ -240,13 +152,12 @@ private:
* 解析2个字节的aac配置 * 解析2个字节的aac配置
*/ */
void onReady(){ void onReady(){
if(_cfg.size() < 2){ if (_cfg.size() < 2) {
return; return;
} }
AACFrame aacFrame; parseAacConfig(_cfg, _sampleRate, _channel);
makeAdtsHeader(_cfg,aacFrame);
getAACInfo(aacFrame,_sampleRate,_channel);
} }
Track::Ptr clone() override { Track::Ptr clone() override {
return std::make_shared<std::remove_reference<decltype(*this)>::type >(*this); return std::make_shared<std::remove_reference<decltype(*this)>::type >(*this);
} }
...@@ -260,43 +171,42 @@ private: ...@@ -260,43 +171,42 @@ private:
int _channel = 0; int _channel = 0;
}; };
/** /**
* aac类型SDP * aac类型SDP
*/ */
class AACSdp : public Sdp { class AACSdp : public Sdp {
public: public:
/** /**
* * 构造函数
* @param aac_cfg aac两个字节的配置描述 * @param aac_cfg aac两个字节的配置描述
* @param sample_rate 音频采样率 * @param sample_rate 音频采样率
* @param playload_type rtp playload type 默认98 * @param payload_type rtp payload type 默认98
* @param bitrate 比特率 * @param bitrate 比特率
*/ */
AACSdp(const string &aac_cfg, AACSdp(const string &aac_cfg,
int sample_rate, int sample_rate,
int playload_type = 98, int channels,
int bitrate = 128) : Sdp(sample_rate,playload_type){ int payload_type = 98,
_printer << "m=audio 0 RTP/AVP " << playload_type << "\r\n"; int bitrate = 128) : Sdp(sample_rate,payload_type){
_printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
_printer << "b=AS:" << bitrate << "\r\n"; _printer << "b=AS:" << bitrate << "\r\n";
_printer << "a=rtpmap:" << playload_type << " MPEG4-GENERIC/" << sample_rate << "\r\n"; _printer << "a=rtpmap:" << payload_type << " MPEG4-GENERIC/" << sample_rate << "/" << channels << "\r\n";
char configStr[32] = {0}; string configStr;
snprintf(configStr, sizeof(configStr), "%02X%02X", (uint8_t)aac_cfg[0], (uint8_t)aac_cfg[1]); char buf[4] = {0};
_printer << "a=fmtp:" << playload_type << " streamtype=5;profile-level-id=1;mode=AAC-hbr;" for(auto &ch : aac_cfg){
<< "sizelength=13;indexlength=3;indexdeltalength=3;config=" snprintf(buf, sizeof(buf), "%02X", (uint8_t)ch);
<< configStr << "\r\n"; configStr.append(buf);
_printer << "a=control:trackID=" << getTrackType() << "\r\n"; }
_printer << "a=fmtp:" << payload_type << " streamtype=5;profile-level-id=1;mode=AAC-hbr;"
<< "sizelength=13;indexlength=3;indexdeltalength=3;config=" << configStr << "\r\n";
_printer << "a=control:trackID=" << (int)TrackAudio << "\r\n";
} }
string getSdp() const override { string getSdp() const override {
return _printer; return _printer;
} }
TrackType getTrackType() const override {
return TrackAudio;
}
CodecId getCodecId() const override { CodecId getCodecId() const override {
return CodecAAC; return CodecAAC;
} }
...@@ -305,6 +215,4 @@ private: ...@@ -305,6 +215,4 @@ private:
}; };
}//namespace mediakit }//namespace mediakit
#endif //ZLMEDIAKIT_AAC_H #endif //ZLMEDIAKIT_AAC_H
\ No newline at end of file
...@@ -13,18 +13,6 @@ ...@@ -13,18 +13,6 @@
namespace mediakit{ namespace mediakit{
AACRtmpDecoder::AACRtmpDecoder() {
_adts = obtainFrame();
}
AACFrame::Ptr AACRtmpDecoder::obtainFrame() {
//从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象
auto frame = ResourcePoolHelper<AACFrame>::obtainObj();
frame->aac_frame_length = 7;
frame->iPrefixSize = 7;
return frame;
}
static string getAacCfg(const RtmpPacket &thiz) { static string getAacCfg(const RtmpPacket &thiz) {
string ret; string ret;
if (thiz.getMediaType() != FLV_CODEC_AAC) { if (thiz.getMediaType() != FLV_CODEC_AAC) {
...@@ -37,11 +25,11 @@ static string getAacCfg(const RtmpPacket &thiz) { ...@@ -37,11 +25,11 @@ static string getAacCfg(const RtmpPacket &thiz) {
WarnL << "bad aac cfg!"; WarnL << "bad aac cfg!";
return ret; return ret;
} }
ret = thiz.strBuf.substr(2, 2); ret = thiz.strBuf.substr(2);
return ret; return ret;
} }
bool AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool key_pos) { bool AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool) {
if (pkt->isCfgFrame()) { if (pkt->isCfgFrame()) {
_aac_cfg = getAacCfg(*pkt); _aac_cfg = getAacCfg(*pkt);
return false; return false;
...@@ -52,26 +40,28 @@ bool AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool key_pos) { ...@@ -52,26 +40,28 @@ bool AACRtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool key_pos) {
return false; return false;
} }
void AACRtmpDecoder::onGetAAC(const char* pcData, int iLen, uint32_t ui32TimeStamp) { void AACRtmpDecoder::onGetAAC(const char* data, int len, uint32_t stamp) {
if(iLen + 7 > sizeof(_adts->buffer)){ auto frame = ResourcePoolHelper<AACFrame>::obtainObj();
WarnL << "Illegal adts data, exceeding the length limit.";
return;
}
//写adts结构头
makeAdtsHeader(_aac_cfg,*_adts);
//拷贝aac负载 //生成adts头
memcpy(_adts->buffer + 7, pcData, iLen); char adts_header[32] = {0};
_adts->aac_frame_length = 7 + iLen; auto size = dumpAacConfig(_aac_cfg, len, (uint8_t *) adts_header, sizeof(adts_header));
_adts->timeStamp = ui32TimeStamp; if (size > 0) {
frame->_buffer.assign(adts_header, size);
frame->_prefix_size = size;
} else {
frame->_buffer.clear();
frame->_prefix_size = 0;
}
//adts结构头转成头7个字节 //追加负载数据
writeAdtsHeader(*_adts, _adts->buffer); frame->_buffer.append(data, len);
frame->_dts = stamp;
//写入环形缓存 //写入环形缓存
RtmpCodec::inputFrame(_adts); RtmpCodec::inputFrame(frame);
_adts = obtainFrame();
} }
///////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////
AACRtmpEncoder::AACRtmpEncoder(const Track::Ptr &track) { AACRtmpEncoder::AACRtmpEncoder(const Track::Ptr &track) {
...@@ -91,9 +81,9 @@ void AACRtmpEncoder::makeConfigPacket() { ...@@ -91,9 +81,9 @@ void AACRtmpEncoder::makeConfigPacket() {
void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) { void AACRtmpEncoder::inputFrame(const Frame::Ptr &frame) {
if (_aac_cfg.empty()) { if (_aac_cfg.empty()) {
if (frame->prefixSize() >= 7) { if (frame->prefixSize()) {
//包含adts头,从adts头获取aac配置信息 //包含adts头,从adts头获取aac配置信息
_aac_cfg = makeAdtsConfig((uint8_t *)(frame->data())); _aac_cfg = makeAacConfig((uint8_t *) (frame->data()), frame->prefixSize());
} }
makeConfigPacket(); makeConfigPacket();
} }
......
...@@ -23,7 +23,7 @@ class AACRtmpDecoder : public RtmpCodec , public ResourcePoolHelper<AACFrame> { ...@@ -23,7 +23,7 @@ class AACRtmpDecoder : public RtmpCodec , public ResourcePoolHelper<AACFrame> {
public: public:
typedef std::shared_ptr<AACRtmpDecoder> Ptr; typedef std::shared_ptr<AACRtmpDecoder> Ptr;
AACRtmpDecoder(); AACRtmpDecoder() {}
~AACRtmpDecoder() {} ~AACRtmpDecoder() {}
/** /**
...@@ -33,19 +33,14 @@ public: ...@@ -33,19 +33,14 @@ public:
*/ */
bool inputRtmp(const RtmpPacket::Ptr &Rtmp, bool key_pos = false) override; bool inputRtmp(const RtmpPacket::Ptr &Rtmp, bool key_pos = false) override;
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return CodecAAC; return CodecAAC;
} }
protected: private:
void onGetAAC(const char* pcData, int iLen, uint32_t ui32TimeStamp); void onGetAAC(const char *data, int len, uint32_t stamp);
AACFrame::Ptr obtainFrame();
protected: private:
AACFrame::Ptr _adts;
string _aac_cfg; string _aac_cfg;
}; };
...@@ -76,11 +71,14 @@ public: ...@@ -76,11 +71,14 @@ public:
* 生成config包 * 生成config包
*/ */
void makeConfigPacket() override; void makeConfigPacket() override;
private: private:
void makeAudioConfigPkt(); void makeAudioConfigPkt();
private: private:
uint8_t _audio_flv_flags; uint8_t _audio_flv_flags;
AACTrack::Ptr _track; AACTrack::Ptr _track;
string _aac_cfg;
}; };
}//namespace mediakit }//namespace mediakit
......
...@@ -9,19 +9,19 @@ ...@@ -9,19 +9,19 @@
*/ */
#include "AACRtp.h" #include "AACRtp.h"
#define ADTS_HEADER_LEN 7 #define AAC_MAX_FRAME_SIZE (2 * 1024)
namespace mediakit{ namespace mediakit{
AACRtpEncoder::AACRtpEncoder(uint32_t ui32Ssrc, AACRtpEncoder::AACRtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize, uint32_t ui32MtuSize,
uint32_t ui32SampleRate, uint32_t ui32SampleRate,
uint8_t ui8PlayloadType, uint8_t ui8PayloadType,
uint8_t ui8Interleaved) : uint8_t ui8Interleaved) :
RtpInfo(ui32Ssrc, RtpInfo(ui32Ssrc,
ui32MtuSize, ui32MtuSize,
ui32SampleRate, ui32SampleRate,
ui8PlayloadType, ui8PayloadType,
ui8Interleaved){ ui8Interleaved){
} }
...@@ -56,32 +56,30 @@ void AACRtpEncoder::inputFrame(const Frame::Ptr &frame) { ...@@ -56,32 +56,30 @@ void AACRtpEncoder::inputFrame(const Frame::Ptr &frame) {
} }
void AACRtpEncoder::makeAACRtp(const void *data, unsigned int len, bool mark, uint32_t uiStamp) { void AACRtpEncoder::makeAACRtp(const void *data, unsigned int len, bool mark, uint32_t uiStamp) {
RtpCodec::inputRtp(makeRtp(getTrackType(),data,len,mark,uiStamp), false); RtpCodec::inputRtp(makeRtp(getTrackType(), data, len, mark, uiStamp), false);
} }
///////////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////
AACRtpDecoder::AACRtpDecoder(const Track::Ptr &track){ AACRtpDecoder::AACRtpDecoder(const Track::Ptr &track) {
auto aacTrack = dynamic_pointer_cast<AACTrack>(track); auto aacTrack = dynamic_pointer_cast<AACTrack>(track);
if(!aacTrack || !aacTrack->ready()){ if (!aacTrack || !aacTrack->ready()) {
WarnL << "该aac track无效!"; WarnL << "该aac track无效!";
}else{ } else {
_aac_cfg = aacTrack->getAacCfg(); _aac_cfg = aacTrack->getAacCfg();
} }
_adts = obtainFrame(); _frame = obtainFrame();
} }
AACRtpDecoder::AACRtpDecoder() { AACRtpDecoder::AACRtpDecoder() {
_adts = obtainFrame(); _frame = obtainFrame();
} }
AACFrame::Ptr AACRtpDecoder::obtainFrame() { AACFrame::Ptr AACRtpDecoder::obtainFrame() {
//从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象 //从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象
auto frame = ResourcePoolHelper<AACFrame>::obtainObj(); auto frame = ResourcePoolHelper<AACFrame>::obtainObj();
frame->aac_frame_length = ADTS_HEADER_LEN; frame->_prefix_size = 0;
frame->iPrefixSize = ADTS_HEADER_LEN; frame->_buffer.clear();
if(frame->syncword == 0 && !_aac_cfg.empty()) {
makeAdtsHeader(_aac_cfg,*frame);
}
return frame; return frame;
} }
...@@ -96,20 +94,18 @@ bool AACRtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool key_pos) { ...@@ -96,20 +94,18 @@ bool AACRtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool key_pos) {
//忽略Au-Header区 //忽略Au-Header区
ptr += 2 + au_header_count * 2; ptr += 2 + au_header_count * 2;
static const uint32_t max_size = sizeof(AACFrame::buffer) - ADTS_HEADER_LEN;
while (ptr < end) { while (ptr < end) {
auto size = (uint32_t) (end - ptr); auto size = (uint32_t) (end - ptr);
if(size > max_size){ if (size > AAC_MAX_FRAME_SIZE) {
size = max_size; size = AAC_MAX_FRAME_SIZE;
} }
if (_adts->aac_frame_length + size > sizeof(AACFrame::buffer)) { if (_frame->size() + size > AAC_MAX_FRAME_SIZE) {
//数据太多了,先清空 //数据太多了,先清空
flushData(); flushData();
} }
//追加aac数据 //追加aac数据
memcpy(_adts->buffer + _adts->aac_frame_length, ptr, size); _frame->_buffer.append((char *) ptr, size);
_adts->aac_frame_length += size; _frame->_dts = rtppack->timeStamp;
_adts->timeStamp = rtppack->timeStamp;
ptr += size; ptr += size;
} }
...@@ -120,15 +116,22 @@ bool AACRtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool key_pos) { ...@@ -120,15 +116,22 @@ bool AACRtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool key_pos) {
return false; return false;
} }
void AACRtpDecoder::flushData() { void AACRtpDecoder::flushData() {
if(_adts->aac_frame_length == ADTS_HEADER_LEN){ if (_frame->_buffer.empty()) {
//没有有效数据 //没有有效数据
return; return;
} }
writeAdtsHeader(*_adts, _adts->buffer);
RtpCodec::inputFrame(_adts); //插入adts头
_adts = obtainFrame(); char adts_header[32] = {0};
auto size = dumpAacConfig(_aac_cfg, _frame->_buffer.size(), (uint8_t *) adts_header, sizeof(adts_header));
if (size > 0) {
//插入adts头
_frame->_buffer.insert(0, adts_header, size);
_frame->_prefix_size = size;
}
RtpCodec::inputFrame(_frame);
_frame = obtainFrame();
} }
......
...@@ -31,19 +31,19 @@ public: ...@@ -31,19 +31,19 @@ public:
*/ */
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = false) override; bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = false) override;
TrackType getTrackType() const override{ CodecId getCodecId() const override {
return TrackAudio;
}
CodecId getCodecId() const override{
return CodecAAC; return CodecAAC;
} }
protected: protected:
AACRtpDecoder(); AACRtpDecoder();
private: private:
AACFrame::Ptr obtainFrame(); AACFrame::Ptr obtainFrame();
void flushData(); void flushData();
private: private:
AACFrame::Ptr _adts; AACFrame::Ptr _frame;
string _aac_cfg; string _aac_cfg;
}; };
...@@ -59,13 +59,13 @@ public: ...@@ -59,13 +59,13 @@ public:
* @param ui32Ssrc ssrc * @param ui32Ssrc ssrc
* @param ui32MtuSize mtu 大小 * @param ui32MtuSize mtu 大小
* @param ui32SampleRate 采样率 * @param ui32SampleRate 采样率
* @param ui8PlayloadType pt类型 * @param ui8PayloadType pt类型
* @param ui8Interleaved rtsp interleaved 值 * @param ui8Interleaved rtsp interleaved 值
*/ */
AACRtpEncoder(uint32_t ui32Ssrc, AACRtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize, uint32_t ui32MtuSize,
uint32_t ui32SampleRate, uint32_t ui32SampleRate,
uint8_t ui8PlayloadType = 97, uint8_t ui8PayloadType = 97,
uint8_t ui8Interleaved = TrackAudio * 2); uint8_t ui8Interleaved = TrackAudio * 2);
~AACRtpEncoder() {} ~AACRtpEncoder() {}
...@@ -74,8 +74,10 @@ public: ...@@ -74,8 +74,10 @@ public:
* @param frame 带dats头的aac数据 * @param frame 带dats头的aac数据
*/ */
void inputFrame(const Frame::Ptr &frame) override; void inputFrame(const Frame::Ptr &frame) override;
private: private:
void makeAACRtp(const void *pData, unsigned int uiLen, bool bMark, uint32_t uiStamp); void makeAACRtp(const void *pData, unsigned int uiLen, bool bMark, uint32_t uiStamp);
private: private:
unsigned char _aucSectionBuf[1600]; unsigned char _aucSectionBuf[1600];
}; };
......
...@@ -33,17 +33,12 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) { ...@@ -33,17 +33,12 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
return nullptr; return nullptr;
} }
string aac_cfg; string aac_cfg;
for(int i = 0 ; i < aac_cfg_str.size() / 2 ; ++i ){
unsigned int cfg1; unsigned int cfg;
sscanf(aac_cfg_str.substr(0, 2).data(), "%02X", &cfg1); sscanf(aac_cfg_str.substr(i * 2, 2).data(), "%02X", &cfg);
cfg1 &= 0x00FF; cfg &= 0x00FF;
aac_cfg.push_back(cfg1); aac_cfg.push_back((char)cfg);
}
unsigned int cfg2;
sscanf(aac_cfg_str.substr(2, 2).data(), "%02X", &cfg2);
cfg2 &= 0x00FF;
aac_cfg.push_back(cfg2);
return std::make_shared<AACTrack>(aac_cfg); return std::make_shared<AACTrack>(aac_cfg);
} }
...@@ -115,7 +110,7 @@ RtpCodec::Ptr Factory::getRtpEncoderBySdp(const Sdp::Ptr &sdp) { ...@@ -115,7 +110,7 @@ RtpCodec::Ptr Factory::getRtpEncoderBySdp(const Sdp::Ptr &sdp) {
} }
auto mtu = (sdp->getTrackType() == TrackVideo ? video_mtu : audio_mtu); auto mtu = (sdp->getTrackType() == TrackVideo ? video_mtu : audio_mtu);
auto sample_rate = sdp->getSampleRate(); auto sample_rate = sdp->getSampleRate();
auto pt = sdp->getPlayloadType(); auto pt = sdp->getPayloadType();
auto interleaved = sdp->getTrackType() * 2; auto interleaved = sdp->getTrackType() * 2;
auto codec_id = sdp->getCodecId(); auto codec_id = sdp->getCodecId();
switch (codec_id){ switch (codec_id){
...@@ -221,13 +216,27 @@ Track::Ptr Factory::getAudioTrackByAmf(const AMFValue& amf, int sample_rate, int ...@@ -221,13 +216,27 @@ Track::Ptr Factory::getAudioTrackByAmf(const AMFValue& amf, int sample_rate, int
return getTrackByCodecId(codecId, sample_rate, channels, sample_bit); return getTrackByCodecId(codecId, sample_rate, channels, sample_bit);
} }
RtmpCodec::Ptr Factory::getRtmpCodecByTrack(const Track::Ptr &track) { RtmpCodec::Ptr Factory::getRtmpCodecByTrack(const Track::Ptr &track, bool is_encode) {
switch (track->getCodecId()){ switch (track->getCodecId()){
case CodecH264 : return std::make_shared<H264RtmpEncoder>(track); case CodecH264 : return std::make_shared<H264RtmpEncoder>(track);
case CodecAAC : return std::make_shared<AACRtmpEncoder>(track); case CodecAAC : return std::make_shared<AACRtmpEncoder>(track);
case CodecH265 : return std::make_shared<H265RtmpEncoder>(track); case CodecH265 : return std::make_shared<H265RtmpEncoder>(track);
case CodecG711A : case CodecG711A :
case CodecG711U : return std::make_shared<G711RtmpEncoder>(track); case CodecG711U : {
auto audio_track = dynamic_pointer_cast<AudioTrack>(track);
if (is_encode && (audio_track->getAudioSampleRate() != 8000 ||
audio_track->getAudioChannel() != 1 ||
audio_track->getAudioSampleBit() != 16)) {
//rtmp对g711只支持8000/1/16规格,但是ZLMediaKit可以解析其他规格的G711
WarnL << "RTMP只支持8000/1/16规格的G711,目前规格是:"
<< audio_track->getAudioSampleRate() << "/"
<< audio_track->getAudioChannel() << "/"
<< audio_track->getAudioSampleBit()
<< ",该音频已被忽略";
return nullptr;
}
return std::make_shared<G711RtmpEncoder>(track);
}
default : WarnL << "暂不支持该CodecId:" << track->getCodecName(); return nullptr; default : WarnL << "暂不支持该CodecId:" << track->getCodecName(); return nullptr;
} }
} }
......
...@@ -59,8 +59,9 @@ public: ...@@ -59,8 +59,9 @@ public:
/** /**
* 根据Track获取Rtmp的编解码器 * 根据Track获取Rtmp的编解码器
* @param track 媒体描述对象 * @param track 媒体描述对象
* @param is_encode 是否为编码器还是解码器
*/ */
static RtmpCodec::Ptr getRtmpCodecByTrack(const Track::Ptr &track); static RtmpCodec::Ptr getRtmpCodecByTrack(const Track::Ptr &track, bool is_encode);
/** /**
* 根据codecId获取rtmp的codec描述 * 根据codecId获取rtmp的codec描述
......
...@@ -15,6 +15,59 @@ using namespace toolkit; ...@@ -15,6 +15,59 @@ using namespace toolkit;
namespace mediakit{ namespace mediakit{
/**
* 该对象的功能是把一个不可缓存的帧转换成可缓存的帧
*/
class FrameCacheAble : public FrameFromPtr {
public:
typedef std::shared_ptr<FrameCacheAble> Ptr;
FrameCacheAble(const Frame::Ptr &frame){
if(frame->cacheAble()){
_frame = frame;
_ptr = frame->data();
}else{
_buffer = std::make_shared<BufferRaw>();
_buffer->assign(frame->data(),frame->size());
_ptr = _buffer->data();
}
_size = frame->size();
_dts = frame->dts();
_pts = frame->pts();
_prefix_size = frame->prefixSize();
_codecid = frame->getCodecId();
_key = frame->keyFrame();
_config = frame->configFrame();
}
virtual ~FrameCacheAble() = default;
/**
* 可以被缓存
*/
bool cacheAble() const override {
return true;
}
CodecId getCodecId() const override{
return _codecid;
}
bool keyFrame() const override{
return _key;
}
bool configFrame() const override{
return _config;
}
private:
Frame::Ptr _frame;
BufferRaw::Ptr _buffer;
CodecId _codecid;
bool _key;
bool _config;
};
Frame::Ptr Frame::getCacheAbleFrame(const Frame::Ptr &frame){ Frame::Ptr Frame::getCacheAbleFrame(const Frame::Ptr &frame){
if(frame->cacheAble()){ if(frame->cacheAble()){
return frame; return frame;
...@@ -23,17 +76,35 @@ Frame::Ptr Frame::getCacheAbleFrame(const Frame::Ptr &frame){ ...@@ -23,17 +76,35 @@ Frame::Ptr Frame::getCacheAbleFrame(const Frame::Ptr &frame){
} }
#define SWITCH_CASE(codec_id) case codec_id : return #codec_id #define SWITCH_CASE(codec_id) case codec_id : return #codec_id
const char *CodecInfo::getCodecName() { const char *getCodecName(CodecId codecId) {
switch (getCodecId()) { switch (codecId) {
SWITCH_CASE(CodecH264); SWITCH_CASE(CodecH264);
SWITCH_CASE(CodecH265); SWITCH_CASE(CodecH265);
SWITCH_CASE(CodecAAC); SWITCH_CASE(CodecAAC);
SWITCH_CASE(CodecG711A); SWITCH_CASE(CodecG711A);
SWITCH_CASE(CodecG711U); SWITCH_CASE(CodecG711U);
default: SWITCH_CASE(CodecOpus);
return "unknown codec"; default : return "unknown codec";
} }
} }
}//namespace mediakit TrackType getTrackType(CodecId codecId){
switch (codecId){
case CodecH264:
case CodecH265: return TrackVideo;
case CodecAAC:
case CodecG711A:
case CodecG711U:
case CodecOpus: return TrackAudio;
default: return TrackInvalid;
}
}
const char *CodecInfo::getCodecName() {
return mediakit::getCodecName(getCodecId());
}
TrackType CodecInfo::getTrackType() {
return mediakit::getTrackType(getCodecId());
}
}//namespace mediakit
...@@ -28,6 +28,7 @@ typedef enum { ...@@ -28,6 +28,7 @@ typedef enum {
CodecAAC, CodecAAC,
CodecG711A, CodecG711A,
CodecG711U, CodecG711U,
CodecOpus,
CodecMax = 0x7FFF CodecMax = 0x7FFF
} CodecId; } CodecId;
...@@ -40,6 +41,16 @@ typedef enum { ...@@ -40,6 +41,16 @@ typedef enum {
} TrackType; } TrackType;
/** /**
* 获取编码器名称
*/
const char *getCodecName(CodecId codecId);
/**
* 获取音视频类型
*/
TrackType getTrackType(CodecId codecId);
/**
* 编码信息的抽象接口 * 编码信息的抽象接口
*/ */
class CodecInfo { class CodecInfo {
...@@ -50,20 +61,19 @@ public: ...@@ -50,20 +61,19 @@ public:
virtual ~CodecInfo(){} virtual ~CodecInfo(){}
/** /**
* 获取音视频类型
*/
virtual TrackType getTrackType() const = 0;
/**
* 获取编解码器类型 * 获取编解码器类型
*/ */
virtual CodecId getCodecId() const = 0; virtual CodecId getCodecId() const = 0;
/** /**
* 获取编码器名称 * 获取编码器名称
* @return 编码器名称
*/ */
const char *getCodecName(); const char *getCodecName();
/**
* 获取音视频类型
*/
TrackType getTrackType();
}; };
/** /**
...@@ -76,15 +86,11 @@ public: ...@@ -76,15 +86,11 @@ public:
/** /**
* 返回解码时间戳,单位毫秒 * 返回解码时间戳,单位毫秒
* @return
*/ */
virtual uint32_t dts() const = 0; virtual uint32_t dts() const = 0;
/** /**
* 返回显示时间戳,单位毫秒 * 返回显示时间戳,单位毫秒
* @return
*/ */
virtual uint32_t pts() const { virtual uint32_t pts() const {
return dts(); return dts();
...@@ -98,13 +104,11 @@ public: ...@@ -98,13 +104,11 @@ public:
/** /**
* 返回是否为关键帧 * 返回是否为关键帧
* @return
*/ */
virtual bool keyFrame() const = 0; virtual bool keyFrame() const = 0;
/** /**
* 是否为配置帧,譬如sps pps vps * 是否为配置帧,譬如sps pps vps
* @return
*/ */
virtual bool configFrame() const = 0; virtual bool configFrame() const = 0;
...@@ -115,14 +119,77 @@ public: ...@@ -115,14 +119,77 @@ public:
/** /**
* 返回可缓存的frame * 返回可缓存的frame
* @return
*/ */
static Ptr getCacheAbleFrame(const Ptr &frame); static Ptr getCacheAbleFrame(const Ptr &frame);
}; };
/**
 * Generic frame implementation that owns its payload in a string buffer.
 * Codec-specific frame types (H264/H265/G711/...) derive from this class
 * and set _codecid in their constructors.
 */
class FrameImp : public Frame {
public:
    typedef std::shared_ptr<FrameImp> Ptr;

    // Payload accessors backed by the owned buffer.
    char *data() const override {
        return const_cast<char *>(_buffer.data());
    }

    uint32_t size() const override {
        return _buffer.size();
    }

    // Decode timestamp in milliseconds.
    uint32_t dts() const override {
        return _dts;
    }

    // Presentation timestamp in milliseconds; falls back to dts when unset (0).
    uint32_t pts() const override {
        if (_pts) {
            return _pts;
        }
        return _dts;
    }

    uint32_t prefixSize() const override {
        return _prefix_size;
    }

    CodecId getCodecId() const override {
        return _codecid;
    }

    // By default a frame is neither a key frame nor a config frame;
    // codec-specific subclasses override these where applicable.
    bool keyFrame() const override {
        return false;
    }

    bool configFrame() const override {
        return false;
    }

public:
    CodecId _codecid = CodecInvalid;
    string _buffer;
    uint32_t _dts = 0;
    uint32_t _pts = 0;
    uint32_t _prefix_size = 0;
};
/**
 * A single Frame object may carry several sub-frames separated by 0x00 00 01.
 * ZLMediaKit splits such composite frames into individual frames before
 * further processing. FrameInternal wraps one slice of the parent frame
 * without copying memory, holding a reference that keeps the parent alive
 * for as long as the slice is in use.
 */
template<typename Parent>
class FrameInternal : public Parent {
public:
    typedef std::shared_ptr<FrameInternal> Ptr;

    FrameInternal(const Frame::Ptr &parent_frame, char *ptr, uint32_t size, int prefix_size)
        : Parent(ptr, size, parent_frame->dts(), parent_frame->pts(), prefix_size)
        , _parent_frame(parent_frame) {}

    // Cacheability is inherited from the parent frame that owns the memory.
    bool cacheAble() const override {
        return _parent_frame->cacheAble();
    }

private:
    Frame::Ptr _parent_frame;
};
/** /**
* 循环池辅助类 * 循环池辅助类
* @tparam T
*/ */
template <typename T> template <typename T>
class ResourcePoolHelper{ class ResourcePoolHelper{
...@@ -140,17 +207,16 @@ private: ...@@ -140,17 +207,16 @@ private:
}; };
/** /**
* 写帧接口的抽闲接口 * 写帧接口的抽象接口类
*/ */
class FrameWriterInterface { class FrameWriterInterface {
public: public:
typedef std::shared_ptr<FrameWriterInterface> Ptr; typedef std::shared_ptr<FrameWriterInterface> Ptr;
FrameWriterInterface(){} FrameWriterInterface(){}
virtual ~FrameWriterInterface(){} virtual ~FrameWriterInterface(){}
/** /**
* 写入帧数据 * 写入帧数据
* @param frame 帧
*/ */
virtual void inputFrame(const Frame::Ptr &frame) = 0; virtual void inputFrame(const Frame::Ptr &frame) = 0;
}; };
...@@ -165,15 +231,15 @@ public: ...@@ -165,15 +231,15 @@ public:
/** /**
* inputFrame后触发onWriteFrame回调 * inputFrame后触发onWriteFrame回调
* @param cb
*/ */
FrameWriterInterfaceHelper(const onWriteFrame& cb){ FrameWriterInterfaceHelper(const onWriteFrame& cb){
_writeCallback = cb; _writeCallback = cb;
} }
virtual ~FrameWriterInterfaceHelper(){} virtual ~FrameWriterInterfaceHelper(){}
/** /**
* 写入帧数据 * 写入帧数据
* @param frame 帧
*/ */
void inputFrame(const Frame::Ptr &frame) override { void inputFrame(const Frame::Ptr &frame) override {
_writeCallback(frame); _writeCallback(frame);
...@@ -182,7 +248,6 @@ private: ...@@ -182,7 +248,6 @@ private:
onWriteFrame _writeCallback; onWriteFrame _writeCallback;
}; };
/** /**
* 支持代理转发的帧环形缓存 * 支持代理转发的帧环形缓存
*/ */
...@@ -193,6 +258,9 @@ public: ...@@ -193,6 +258,9 @@ public:
FrameDispatcher(){} FrameDispatcher(){}
virtual ~FrameDispatcher(){} virtual ~FrameDispatcher(){}
/**
* 添加代理
*/
void addDelegate(const FrameWriterInterface::Ptr &delegate){ void addDelegate(const FrameWriterInterface::Ptr &delegate){
//_delegates_write可能多线程同时操作 //_delegates_write可能多线程同时操作
lock_guard<mutex> lck(_mtx); lock_guard<mutex> lck(_mtx);
...@@ -200,7 +268,10 @@ public: ...@@ -200,7 +268,10 @@ public:
_need_update = true; _need_update = true;
} }
void delDelegate(void *ptr){ /**
* 删除代理
*/
void delDelegate(FrameWriterInterface *ptr){
//_delegates_write可能多线程同时操作 //_delegates_write可能多线程同时操作
lock_guard<mutex> lck(_mtx); lock_guard<mutex> lck(_mtx);
_delegates_write.erase(ptr); _delegates_write.erase(ptr);
...@@ -208,8 +279,7 @@ public: ...@@ -208,8 +279,7 @@ public:
} }
/** /**
* 写入帧数据 * 写入帧并派发
* @param frame 帧
*/ */
void inputFrame(const Frame::Ptr &frame) override{ void inputFrame(const Frame::Ptr &frame) override{
if(_need_update){ if(_need_update){
...@@ -223,7 +293,13 @@ public: ...@@ -223,7 +293,13 @@ public:
for(auto &pr : _delegates_read){ for(auto &pr : _delegates_read){
pr.second->inputFrame(frame); pr.second->inputFrame(frame);
} }
}
/**
* 返回代理个数
*/
int size() const {
return _delegates_write.size();
} }
private: private:
mutex _mtx; mutex _mtx;
...@@ -250,105 +326,23 @@ public: ...@@ -250,105 +326,23 @@ public:
} }
uint32_t pts() const override{ uint32_t pts() const override{
if(_pts){ return _pts ? _pts : dts();
return _pts;
}
return dts();
} }
uint32_t prefixSize() const override{ uint32_t prefixSize() const override{
return _prefixSize; return _prefix_size;
}
bool cacheAble() const override {
return false;
} }
protected: protected:
char *_ptr; char *_ptr;
uint32_t _size; uint32_t _size;
uint32_t _dts; uint32_t _dts;
uint32_t _pts = 0; uint32_t _pts = 0;
uint32_t _prefixSize; uint32_t _prefix_size;
}; };
/**
 * A frame that must NOT be cached; used by the DevChannel class.
 * This frame type avoids memory copies by carrying a raw pointer to
 * externally owned data. In most cases ZLMediaKit consumes frame data
 * synchronously, so this is safe; when a frame does need to be retained
 * for later processing, copy it into a cacheable frame via
 * Frame::getCacheAbleFrame.
 */
class FrameNoCacheAble : public FrameFromPtr{
public:
    typedef std::shared_ptr<FrameNoCacheAble> Ptr;

    /**
     * Never cacheable: the underlying pointer may dangle after the
     * synchronous call chain returns.
     */
    bool cacheAble() const override {
        return false;
    }
};
/**
 * Converts a non-cacheable frame into a cacheable one.
 * If the source frame is already cacheable it is simply retained;
 * otherwise its payload is copied into an owned buffer.
 * @see FrameNoCacheAble
 */
class FrameCacheAble : public FrameFromPtr {
public:
    typedef std::shared_ptr<FrameCacheAble> Ptr;

    FrameCacheAble(const Frame::Ptr &frame){
        if (!frame->cacheAble()) {
            // Source memory may dangle later: take a private copy now.
            _buffer = std::make_shared<BufferRaw>();
            _buffer->assign(frame->data(), frame->size());
            _ptr = _buffer->data();
        } else {
            // Already safe to keep: just hold a reference to the frame.
            _frame = frame;
            _ptr = frame->data();
        }
        // Snapshot all metadata from the source frame.
        _size = frame->size();
        _dts = frame->dts();
        _pts = frame->pts();
        _prefixSize = frame->prefixSize();
        _trackType = frame->getTrackType();
        _codec = frame->getCodecId();
        _key = frame->keyFrame();
        _config = frame->configFrame();
    }

    virtual ~FrameCacheAble() = default;

    /**
     * Always cacheable: the payload is owned by (or reference-held through)
     * this object.
     */
    bool cacheAble() const override {
        return true;
    }

    TrackType getTrackType() const override {
        return _trackType;
    }

    CodecId getCodecId() const override {
        return _codec;
    }

    bool keyFrame() const override {
        return _key;
    }

    bool configFrame() const override {
        return _config;
    }

private:
    Frame::Ptr _frame;
    BufferRaw::Ptr _buffer;
    TrackType _trackType;
    CodecId _codec;
    bool _key;
    bool _config;
};
}//namespace mediakit }//namespace mediakit
#endif //ZLMEDIAKIT_FRAME_H #endif //ZLMEDIAKIT_FRAME_H
\ No newline at end of file
...@@ -19,76 +19,28 @@ namespace mediakit{ ...@@ -19,76 +19,28 @@ namespace mediakit{
/** /**
* G711帧 * G711帧
*/ */
class G711Frame : public Frame { class G711Frame : public FrameImp {
public: public:
typedef std::shared_ptr<G711Frame> Ptr; G711Frame(){
_codecid = CodecG711A;
char *data() const override{
return (char *)buffer.data();
}
uint32_t size() const override {
return buffer.size();
}
uint32_t dts() const override {
return timeStamp;
}
uint32_t prefixSize() const override{
return 0;
}
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{
return _codecId;
}
bool keyFrame() const override {
return false;
}
bool configFrame() const override{
return false;
} }
public: };
CodecId _codecId = CodecG711A;
string buffer;
uint32_t timeStamp;
} ;
class G711FrameNoCacheAble : public FrameNoCacheAble { class G711FrameNoCacheAble : public FrameFromPtr {
public: public:
typedef std::shared_ptr<G711FrameNoCacheAble> Ptr; typedef std::shared_ptr<G711FrameNoCacheAble> Ptr;
//兼容通用接口 G711FrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts, uint32_t pts = 0,int prefix_size = 0){
G711FrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts, uint32_t pts = 0,int prefixeSize = 0){
_ptr = ptr; _ptr = ptr;
_size = size; _size = size;
_dts = dts; _dts = dts;
_prefixSize = prefixeSize; _prefix_size = prefix_size;
} }
//兼容通用接口
void setCodec(CodecId codecId){ void setCodec(CodecId codecId){
_codecId = codecId; _codecId = codecId;
} }
G711FrameNoCacheAble(CodecId codecId, char *ptr,uint32_t size,uint32_t dts,int prefixeSize = 0){
_codecId = codecId;
_ptr = ptr;
_size = size;
_dts = dts;
_prefixSize = prefixeSize;
}
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return _codecId; return _codecId;
} }
...@@ -108,67 +60,18 @@ private: ...@@ -108,67 +60,18 @@ private:
/** /**
* G711音频通道 * G711音频通道
*/ */
class G711Track : public AudioTrack{ class G711Track : public AudioTrackImp{
public: public:
typedef std::shared_ptr<G711Track> Ptr; typedef std::shared_ptr<G711Track> Ptr;
G711Track(CodecId codecId,int sample_rate, int channels, int sample_bit) : AudioTrackImp(codecId,sample_rate,channels,sample_bit){}
/**
* G711A G711U
*/
G711Track(CodecId codecId,int sample_rate, int channels, int sample_bit){
_codecid = codecId;
_sample_rate = sample_rate;
_channels = channels;
_sample_bit = sample_bit;
}
/**
* 返回编码类型
*/
CodecId getCodecId() const override{
return _codecid;
}
/**
* 是否已经初始化
*/
bool ready() override {
return true;
}
/**
* 返回音频采样率
*/
int getAudioSampleRate() const override{
return _sample_rate;
}
/**
* 返回音频采样位数,一般为16或8
*/
int getAudioSampleBit() const override{
return _sample_bit;
}
/**
* 返回音频通道数
*/
int getAudioChannel() const override{
return _channels;
}
private: private:
//克隆该Track
Track::Ptr clone() override { Track::Ptr clone() override {
return std::make_shared<std::remove_reference<decltype(*this)>::type >(*this); return std::make_shared<std::remove_reference<decltype(*this)>::type >(*this);
} }
//生成sdp //生成sdp
Sdp::Ptr getSdp() override ; Sdp::Ptr getSdp() override ;
private:
CodecId _codecid;
int _sample_rate;
int _channels;
int _sample_bit;
}; };
/** /**
...@@ -180,37 +83,30 @@ public: ...@@ -180,37 +83,30 @@ public:
* G711采样率固定为8000 * G711采样率固定为8000
* @param codecId G711A G711U * @param codecId G711A G711U
* @param sample_rate 音频采样率 * @param sample_rate 音频采样率
* @param playload_type rtp playload * @param payload_type rtp payload
* @param bitrate 比特率 * @param bitrate 比特率
*/ */
G711Sdp(CodecId codecId, G711Sdp(CodecId codecId,
int sample_rate, int sample_rate,
int channels, int channels,
int playload_type = 98, int payload_type = 98,
int bitrate = 128) : Sdp(sample_rate,playload_type), _codecId(codecId){ int bitrate = 128) : Sdp(sample_rate,payload_type), _codecId(codecId){
_printer << "m=audio 0 RTP/AVP " << playload_type << "\r\n"; _printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n";
_printer << "a=rtpmap:" << playload_type << (codecId == CodecG711A ? " PCMA/" : " PCMU/") << sample_rate << "/" << channels << "\r\n"; _printer << "a=rtpmap:" << payload_type << (codecId == CodecG711A ? " PCMA/" : " PCMU/") << sample_rate << "/" << channels << "\r\n";
_printer << "a=control:trackID=" << getTrackType() << "\r\n"; _printer << "a=control:trackID=" << (int)TrackAudio << "\r\n";
} }
string getSdp() const override { string getSdp() const override {
return _printer; return _printer;
} }
TrackType getTrackType() const override {
return TrackAudio;
}
CodecId getCodecId() const override { CodecId getCodecId() const override {
return _codecId; return _codecId;
} }
private: private:
_StrPrinter _printer; _StrPrinter _printer;
CodecId _codecId; CodecId _codecId;
}; };
}//namespace mediakit }//namespace mediakit
#endif //ZLMEDIAKIT_G711_H
\ No newline at end of file
#endif //ZLMEDIAKIT_AAC_H
...@@ -20,15 +20,15 @@ G711RtmpDecoder::G711RtmpDecoder(CodecId codecId) { ...@@ -20,15 +20,15 @@ G711RtmpDecoder::G711RtmpDecoder(CodecId codecId) {
G711Frame::Ptr G711RtmpDecoder::obtainFrame() { G711Frame::Ptr G711RtmpDecoder::obtainFrame() {
//从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象 //从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象
auto frame = ResourcePoolHelper<G711Frame>::obtainObj(); auto frame = ResourcePoolHelper<G711Frame>::obtainObj();
frame->buffer.clear(); frame->_buffer.clear();
frame->_codecId = _codecId; frame->_codecid = _codecId;
return frame; return frame;
} }
bool G711RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool) { bool G711RtmpDecoder::inputRtmp(const RtmpPacket::Ptr &pkt, bool) {
//拷贝G711负载 //拷贝G711负载
_frame->buffer.assign(pkt->strBuf.data() + 1, pkt->strBuf.size() - 1); _frame->_buffer.assign(pkt->strBuf.data() + 1, pkt->strBuf.size() - 1);
_frame->timeStamp = pkt->timeStamp; _frame->_dts = pkt->timeStamp;
//写入环形缓存 //写入环形缓存
RtmpCodec::inputFrame(_frame); RtmpCodec::inputFrame(_frame);
_frame = obtainFrame(); _frame = obtainFrame();
......
...@@ -33,10 +33,6 @@ public: ...@@ -33,10 +33,6 @@ public:
*/ */
bool inputRtmp(const RtmpPacket::Ptr &Rtmp, bool key_pos = false) override; bool inputRtmp(const RtmpPacket::Ptr &Rtmp, bool key_pos = false) override;
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return _codecId; return _codecId;
} }
......
...@@ -20,9 +20,9 @@ G711RtpDecoder::G711RtpDecoder(const Track::Ptr &track){ ...@@ -20,9 +20,9 @@ G711RtpDecoder::G711RtpDecoder(const Track::Ptr &track){
G711Frame::Ptr G711RtpDecoder::obtainFrame() { G711Frame::Ptr G711RtpDecoder::obtainFrame() {
//从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象 //从缓存池重新申请对象,防止覆盖已经写入环形缓存的对象
auto frame = ResourcePoolHelper<G711Frame>::obtainObj(); auto frame = ResourcePoolHelper<G711Frame>::obtainObj();
frame->buffer.clear(); frame->_buffer.clear();
frame->_codecId = _codecid; frame->_codecid = _codecid;
frame->timeStamp = 0; frame->_dts = 0;
return frame; return frame;
} }
...@@ -32,17 +32,17 @@ bool G711RtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool) { ...@@ -32,17 +32,17 @@ bool G711RtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool) {
// 获取rtp数据 // 获取rtp数据
const char *rtp_packet_buf = rtppack->data() + rtppack->offset; const char *rtp_packet_buf = rtppack->data() + rtppack->offset;
if (rtppack->timeStamp != _frame->timeStamp) { if (rtppack->timeStamp != _frame->_dts) {
//时间戳变更,清空上一帧 //时间戳变更,清空上一帧
onGetG711(_frame); onGetG711(_frame);
} }
//追加数据 //追加数据
_frame->buffer.append(rtp_packet_buf, length); _frame->_buffer.append(rtp_packet_buf, length);
//赋值时间戳 //赋值时间戳
_frame->timeStamp = rtppack->timeStamp; _frame->_dts = rtppack->timeStamp;
if (rtppack->mark || _frame->buffer.size() > 10 * 1024) { if (rtppack->mark || _frame->_buffer.size() > 10 * 1024) {
//标记为mark时,或者内存快溢出时,我们认为这是该帧最后一个包 //标记为mark时,或者内存快溢出时,我们认为这是该帧最后一个包
onGetG711(_frame); onGetG711(_frame);
} }
...@@ -50,7 +50,7 @@ bool G711RtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool) { ...@@ -50,7 +50,7 @@ bool G711RtpDecoder::inputRtp(const RtpPacket::Ptr &rtppack, bool) {
} }
void G711RtpDecoder::onGetG711(const G711Frame::Ptr &frame) { void G711RtpDecoder::onGetG711(const G711Frame::Ptr &frame) {
if(!frame->buffer.empty()){ if(!frame->_buffer.empty()){
//写入环形缓存 //写入环形缓存
RtpCodec::inputFrame(frame); RtpCodec::inputFrame(frame);
_frame = obtainFrame(); _frame = obtainFrame();
...@@ -62,12 +62,12 @@ void G711RtpDecoder::onGetG711(const G711Frame::Ptr &frame) { ...@@ -62,12 +62,12 @@ void G711RtpDecoder::onGetG711(const G711Frame::Ptr &frame) {
G711RtpEncoder::G711RtpEncoder(uint32_t ui32Ssrc, G711RtpEncoder::G711RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize, uint32_t ui32MtuSize,
uint32_t ui32SampleRate, uint32_t ui32SampleRate,
uint8_t ui8PlayloadType, uint8_t ui8PayloadType,
uint8_t ui8Interleaved) : uint8_t ui8Interleaved) :
RtpInfo(ui32Ssrc, RtpInfo(ui32Ssrc,
ui32MtuSize, ui32MtuSize,
ui32SampleRate, ui32SampleRate,
ui8PlayloadType, ui8PayloadType,
ui8Interleaved) { ui8Interleaved) {
} }
...@@ -96,6 +96,3 @@ void G711RtpEncoder::makeG711Rtp(const void *data, unsigned int len, bool mark, ...@@ -96,6 +96,3 @@ void G711RtpEncoder::makeG711Rtp(const void *data, unsigned int len, bool mark,
} }
}//namespace mediakit }//namespace mediakit
...@@ -31,10 +31,6 @@ public: ...@@ -31,10 +31,6 @@ public:
*/ */
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = false) override; bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = false) override;
TrackType getTrackType() const override{
return TrackAudio;
}
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return _codecid; return _codecid;
} }
...@@ -62,13 +58,13 @@ public: ...@@ -62,13 +58,13 @@ public:
* @param ui32Ssrc ssrc * @param ui32Ssrc ssrc
* @param ui32MtuSize mtu 大小 * @param ui32MtuSize mtu 大小
* @param ui32SampleRate 采样率 * @param ui32SampleRate 采样率
* @param ui8PlayloadType pt类型 * @param ui8PayloadType pt类型
* @param ui8Interleaved rtsp interleaved 值 * @param ui8Interleaved rtsp interleaved 值
*/ */
G711RtpEncoder(uint32_t ui32Ssrc, G711RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize, uint32_t ui32MtuSize,
uint32_t ui32SampleRate, uint32_t ui32SampleRate,
uint8_t ui8PlayloadType = 0, uint8_t ui8PayloadType = 0,
uint8_t ui8Interleaved = TrackAudio * 2); uint8_t ui8Interleaved = TrackAudio * 2);
~G711RtpEncoder() {} ~G711RtpEncoder() {}
......
...@@ -44,34 +44,77 @@ const char *memfind(const char *buf, int len, const char *subbuf, int sublen) { ...@@ -44,34 +44,77 @@ const char *memfind(const char *buf, int len, const char *subbuf, int sublen) {
return NULL; return NULL;
} }
void splitH264(const char *ptr, int len, const std::function<void(const char *, int)> &cb) { void splitH264(const char *ptr, int len, int prefix, const std::function<void(const char *, int, int)> &cb) {
auto nal = ptr; auto start = ptr + prefix;
auto end = ptr + len; auto end = ptr + len;
while(true) { int next_prefix;
auto next_nal = memfind(nal + 3,end - nal - 3,"\x0\x0\x1",3); while (true) {
if(next_nal){ auto next_start = memfind(start, end - start, "\x00\x00\x01", 3);
if(*(next_nal - 1) == 0x00){ if (next_start) {
next_nal -= 1; //找到下一帧
if (*(next_start - 1) == 0x00) {
//这个是00 00 00 01开头
next_start -= 1;
next_prefix = 4;
} else {
//这个是00 00 01开头
next_prefix = 3;
} }
cb(nal,next_nal - nal); //记得加上本帧prefix长度
nal = next_nal; cb(start - prefix, next_start - start + prefix, prefix);
//搜索下一帧末尾的起始位置
start = next_start + next_prefix;
//记录下一帧的prefix长度
prefix = next_prefix;
continue; continue;
} }
cb(nal,end - nal); //未找到下一帧,这是最后一帧
cb(start - prefix, end - start + prefix, prefix);
break; break;
} }
} }
/**
 * Returns the length of the Annex-B start code at the beginning of the buffer.
 * @param ptr buffer start
 * @param len buffer length in bytes
 * @return 4 for a 00 00 00 01 prefix, 3 for 00 00 01, 0 when the buffer does
 *         not begin with a start code
 */
int prefixSize(const char *ptr, int len){
    // A start code needs at least 3 bytes and must begin with 00 00.
    // (The original unconditional `len < 4` check wrongly rejected a
    // buffer that is exactly a 3-byte 00 00 01 start code.)
    if (len < 3 || ptr[0] != 0x00 || ptr[1] != 0x00) {
        return 0;
    }
    if (ptr[2] == 0x01) {
        //00 00 01
        return 3;
    }
    if (len >= 4 && ptr[2] == 0x00 && ptr[3] == 0x01) {
        //00 00 00 01
        return 4;
    }
    return 0;
}
#if 0 #if 0
//splitH264函数测试程序 //splitH264函数测试程序
static onceToken s_token([](){ static onceToken s_token([](){
{
char buf[] = "\x00\x00\x00\x01\x12\x23\x34\x45\x56" char buf[] = "\x00\x00\x00\x01\x12\x23\x34\x45\x56"
"\x00\x00\x00\x01\x12\x23\x34\x45\x56" "\x00\x00\x00\x01\x23\x34\x45\x56"
"\x00\x00\x00\x01\x12\x23\x34\x45\x56" "\x00\x00\x00\x01x34\x45\x56"
"\x00\x00\x01\x12\x23\x34\x45\x56"; "\x00\x00\x01\x12\x23\x34\x45\x56";
splitH264(buf, sizeof(buf) - 1, [](const char *ptr, int len){ splitH264(buf, sizeof(buf) - 1, 4, [](const char *ptr, int len, int prefix) {
cout << hexdump(ptr, len) << endl; cout << prefix << " " << hexdump(ptr, len) << endl;
});
}
{
char buf[] = "\x00\x00\x00\x01\x12\x23\x34\x45\x56";
splitH264(buf, sizeof(buf) - 1, 4, [](const char *ptr, int len, int prefix) {
cout << prefix << " " << hexdump(ptr, len) << endl;
}); });
}
}); });
#endif //0 #endif //0
......
...@@ -20,12 +20,12 @@ using namespace toolkit; ...@@ -20,12 +20,12 @@ using namespace toolkit;
namespace mediakit{ namespace mediakit{
bool getAVCInfo(const string &strSps,int &iVideoWidth, int &iVideoHeight, float &iVideoFps); bool getAVCInfo(const string &strSps,int &iVideoWidth, int &iVideoHeight, float &iVideoFps);
void splitH264(const char *ptr, int len, const std::function<void(const char *, int)> &cb); void splitH264(const char *ptr, int len, int prefix, const std::function<void(const char *, int, int)> &cb);
int prefixSize(const char *ptr, int len);
/** /**
* 264帧类 * 264帧类
*/ */
class H264Frame : public Frame { class H264Frame : public FrameImp {
public: public:
typedef std::shared_ptr<H264Frame> Ptr; typedef std::shared_ptr<H264Frame> Ptr;
...@@ -36,30 +36,8 @@ public: ...@@ -36,30 +36,8 @@ public:
NAL_SEI = 6, NAL_SEI = 6,
} NalType; } NalType;
char *data() const override{ H264Frame(){
return (char *)_buffer.data(); _codecid = CodecH264;
}
uint32_t size() const override {
return _buffer.size();
}
uint32_t dts() const override {
return _dts;
}
uint32_t pts() const override {
return _pts ? _pts : _dts;
}
uint32_t prefixSize() const override{
return _prefix_size;
}
TrackType getTrackType() const override{
return TrackVideo;
}
CodecId getCodecId() const override{
return CodecH264;
} }
bool keyFrame() const override { bool keyFrame() const override {
...@@ -69,39 +47,27 @@ public: ...@@ -69,39 +47,27 @@ public:
bool configFrame() const override{ bool configFrame() const override{
switch(H264_TYPE(_buffer[_prefix_size]) ){ switch(H264_TYPE(_buffer[_prefix_size]) ){
case H264Frame::NAL_SPS: case H264Frame::NAL_SPS:
case H264Frame::NAL_PPS: case H264Frame::NAL_PPS:return true;
return true; default:return false;
default:
return false;
} }
} }
public:
uint32_t _dts = 0;
uint32_t _pts = 0;
uint32_t _prefix_size = 4;
string _buffer;
}; };
/** /**
* 防止内存拷贝的H264类 * 防止内存拷贝的H264类
* 用户可以通过该类型快速把一个指针无拷贝的包装成Frame类 * 用户可以通过该类型快速把一个指针无拷贝的包装成Frame类
* 该类型在DevChannel中有使用 * 该类型在DevChannel中有使用
*/ */
class H264FrameNoCacheAble : public FrameNoCacheAble { class H264FrameNoCacheAble : public FrameFromPtr {
public: public:
typedef std::shared_ptr<H264FrameNoCacheAble> Ptr; typedef std::shared_ptr<H264FrameNoCacheAble> Ptr;
H264FrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts , uint32_t pts ,int prefixeSize = 4){ H264FrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts , uint32_t pts ,int prefix_size = 4){
_ptr = ptr; _ptr = ptr;
_size = size; _size = size;
_dts = dts; _dts = dts;
_pts = pts; _pts = pts;
_prefixSize = prefixeSize; _prefix_size = prefix_size;
}
TrackType getTrackType() const override{
return TrackVideo;
} }
CodecId getCodecId() const override{ CodecId getCodecId() const override{
...@@ -109,43 +75,18 @@ public: ...@@ -109,43 +75,18 @@ public:
} }
bool keyFrame() const override { bool keyFrame() const override {
return H264_TYPE(_ptr[_prefixSize]) == H264Frame::NAL_IDR; return H264_TYPE(_ptr[_prefix_size]) == H264Frame::NAL_IDR;
} }
bool configFrame() const override{ bool configFrame() const override{
switch(H264_TYPE(_ptr[_prefixSize])){ switch(H264_TYPE(_ptr[_prefix_size])){
case H264Frame::NAL_SPS: case H264Frame::NAL_SPS:
case H264Frame::NAL_PPS: case H264Frame::NAL_PPS:return true;
return true; default:return false;
default:
return false;
} }
} }
}; };
/**
* 一个H264Frame类中可以有多个帧,他们通过 0x 00 00 01 分隔
* ZLMediaKit会先把这种复合帧split成单个帧然后再处理
* 一个复合帧可以通过无内存拷贝的方式切割成多个H264FrameSubFrame
* 提供该类的目的是切换复合帧时防止内存拷贝,提高性能
*/
template<typename Parent>
class FrameInternal : public Parent{
public:
typedef std::shared_ptr<FrameInternal> Ptr;
FrameInternal(const Frame::Ptr &parent_frame,
char *ptr,
uint32_t size,
int prefixeSize) : Parent(ptr,size,parent_frame->dts(),parent_frame->pts(),prefixeSize){
_parent_frame = parent_frame;
}
bool cacheAble() const override {
return _parent_frame->cacheAble();
}
private:
Frame::Ptr _parent_frame;
};
typedef FrameInternal<H264FrameNoCacheAble> H264FrameInternal; typedef FrameInternal<H264FrameNoCacheAble> H264FrameInternal;
/** /**
...@@ -243,24 +184,9 @@ public: ...@@ -243,24 +184,9 @@ public:
int type = H264_TYPE(*((uint8_t *)frame->data() + frame->prefixSize())); int type = H264_TYPE(*((uint8_t *)frame->data() + frame->prefixSize()));
if(type == H264Frame::NAL_SPS || type == H264Frame::NAL_SEI){ if(type == H264Frame::NAL_SPS || type == H264Frame::NAL_SEI){
//有些设备会把SPS PPS IDR帧当做一个帧打包,所以我们要split一下 //有些设备会把SPS PPS IDR帧当做一个帧打包,所以我们要split一下
bool first_frame = true; splitH264(frame->data(), frame->size(), frame->prefixSize(), [&](const char *ptr, int len, int prefix) {
splitH264(frame->data() + frame->prefixSize(), H264FrameInternal::Ptr sub_frame = std::make_shared<H264FrameInternal>(frame, (char *)ptr, len, prefix);
frame->size() - frame->prefixSize(),
[&](const char *ptr, int len){
if(first_frame){
H264FrameInternal::Ptr sub_frame = std::make_shared<H264FrameInternal>(frame,
frame->data(),
len + frame->prefixSize(),
frame->prefixSize());
inputFrame_l(sub_frame);
first_frame = false;
}else{
H264FrameInternal::Ptr sub_frame = std::make_shared<H264FrameInternal>(frame,
(char *)ptr,
len ,
3);
inputFrame_l(sub_frame); inputFrame_l(sub_frame);
}
}); });
} else{ } else{
inputFrame_l(frame); inputFrame_l(frame);
...@@ -302,6 +228,11 @@ private: ...@@ -302,6 +228,11 @@ private:
} }
break; break;
case H264Frame::NAL_SEI:{
//忽略SEI
break;
}
default: default:
VideoTrack::inputFrame(frame); VideoTrack::inputFrame(frame);
break; break;
...@@ -349,29 +280,27 @@ private: ...@@ -349,29 +280,27 @@ private:
bool _last_frame_is_idr = false; bool _last_frame_is_idr = false;
}; };
/** /**
* h264类型sdp * h264类型sdp
*/ */
class H264Sdp : public Sdp { class H264Sdp : public Sdp {
public: public:
/** /**
* *
* @param sps 264 sps,不带0x00000001头 * @param sps 264 sps,不带0x00000001头
* @param pps 264 pps,不带0x00000001头 * @param pps 264 pps,不带0x00000001头
* @param playload_type rtp playload type 默认96 * @param payload_type rtp payload type 默认96
* @param bitrate 比特率 * @param bitrate 比特率
*/ */
H264Sdp(const string &strSPS, H264Sdp(const string &strSPS,
const string &strPPS, const string &strPPS,
int playload_type = 96, int payload_type = 96,
int bitrate = 4000) : Sdp(90000,playload_type) { int bitrate = 4000) : Sdp(90000,payload_type) {
//视频通道 //视频通道
_printer << "m=video 0 RTP/AVP " << playload_type << "\r\n"; _printer << "m=video 0 RTP/AVP " << payload_type << "\r\n";
_printer << "b=AS:" << bitrate << "\r\n"; _printer << "b=AS:" << bitrate << "\r\n";
_printer << "a=rtpmap:" << playload_type << " H264/" << 90000 << "\r\n"; _printer << "a=rtpmap:" << payload_type << " H264/" << 90000 << "\r\n";
_printer << "a=fmtp:" << playload_type << " packetization-mode=1; profile-level-id="; _printer << "a=fmtp:" << payload_type << " packetization-mode=1; profile-level-id=";
char strTemp[100]; char strTemp[100];
uint32_t profile_level_id = 0; uint32_t profile_level_id = 0;
...@@ -390,17 +319,13 @@ public: ...@@ -390,17 +319,13 @@ public:
memset(strTemp, 0, 100); memset(strTemp, 0, 100);
av_base64_encode(strTemp, 100, (uint8_t *) strPPS.data(), strPPS.size()); av_base64_encode(strTemp, 100, (uint8_t *) strPPS.data(), strPPS.size());
_printer << strTemp << "\r\n"; _printer << strTemp << "\r\n";
_printer << "a=control:trackID=" << getTrackType() << "\r\n"; _printer << "a=control:trackID=" << (int)TrackVideo << "\r\n";
} }
string getSdp() const override { string getSdp() const override {
return _printer; return _printer;
} }
TrackType getTrackType() const override {
return TrackVideo;
}
CodecId getCodecId() const override { CodecId getCodecId() const override {
return CodecH264; return CodecH264;
} }
...@@ -408,8 +333,5 @@ private: ...@@ -408,8 +333,5 @@ private:
_StrPrinter _printer; _StrPrinter _printer;
}; };
}//namespace mediakit }//namespace mediakit
#endif //ZLMEDIAKIT_H264_H #endif //ZLMEDIAKIT_H264_H
\ No newline at end of file
...@@ -36,10 +36,6 @@ public: ...@@ -36,10 +36,6 @@ public:
*/ */
bool inputRtmp(const RtmpPacket::Ptr &rtmp, bool key_pos = true) override; bool inputRtmp(const RtmpPacket::Ptr &rtmp, bool key_pos = true) override;
TrackType getTrackType() const override{
return TrackVideo;
}
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return CodecH264; return CodecH264;
} }
......
...@@ -157,7 +157,7 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) { ...@@ -157,7 +157,7 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) { if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) {
//中间的或末尾的rtp包,其seq必须连续(如果回环了则判定为连续),否则说明rtp丢包,那么该帧不完整,必须得丢弃 //中间的或末尾的rtp包,其seq必须连续(如果回环了则判定为连续),否则说明rtp丢包,那么该帧不完整,必须得丢弃
_h264frame->_buffer.clear(); _h264frame->_buffer.clear();
WarnL << "rtp sequence不连续: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃"; WarnL << "rtp丢包: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
return false; return false;
} }
...@@ -204,12 +204,12 @@ void H264RtpDecoder::onGetH264(const H264Frame::Ptr &frame) { ...@@ -204,12 +204,12 @@ void H264RtpDecoder::onGetH264(const H264Frame::Ptr &frame) {
H264RtpEncoder::H264RtpEncoder(uint32_t ui32Ssrc, H264RtpEncoder::H264RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize, uint32_t ui32MtuSize,
uint32_t ui32SampleRate, uint32_t ui32SampleRate,
uint8_t ui8PlayloadType, uint8_t ui8PayloadType,
uint8_t ui8Interleaved) : uint8_t ui8Interleaved) :
RtpInfo(ui32Ssrc, RtpInfo(ui32Ssrc,
ui32MtuSize, ui32MtuSize,
ui32SampleRate, ui32SampleRate,
ui8PlayloadType, ui8PayloadType,
ui8Interleaved) { ui8Interleaved) {
} }
......
...@@ -38,10 +38,6 @@ public: ...@@ -38,10 +38,6 @@ public:
*/ */
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = true) override; bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = true) override;
TrackType getTrackType() const override{
return TrackVideo;
}
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return CodecH264; return CodecH264;
} }
...@@ -66,13 +62,13 @@ public: ...@@ -66,13 +62,13 @@ public:
* @param ui32Ssrc ssrc * @param ui32Ssrc ssrc
* @param ui32MtuSize mtu大小 * @param ui32MtuSize mtu大小
* @param ui32SampleRate 采样率,强制为90000 * @param ui32SampleRate 采样率,强制为90000
* @param ui8PlayloadType pt类型 * @param ui8PayloadType pt类型
* @param ui8Interleaved rtsp interleaved * @param ui8Interleaved rtsp interleaved
*/ */
H264RtpEncoder(uint32_t ui32Ssrc, H264RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize = 1400, uint32_t ui32MtuSize = 1400,
uint32_t ui32SampleRate = 90000, uint32_t ui32SampleRate = 90000,
uint8_t ui8PlayloadType = 96, uint8_t ui8PayloadType = 96,
uint8_t ui8Interleaved = TrackVideo * 2); uint8_t ui8Interleaved = TrackVideo * 2);
~H264RtpEncoder() {} ~H264RtpEncoder() {}
......
...@@ -23,9 +23,9 @@ namespace mediakit { ...@@ -23,9 +23,9 @@ namespace mediakit {
bool getHEVCInfo(const string &strVps, const string &strSps, int &iVideoWidth, int &iVideoHeight, float &iVideoFps); bool getHEVCInfo(const string &strVps, const string &strSps, int &iVideoWidth, int &iVideoHeight, float &iVideoFps);
/** /**
* 265帧类 * 265帧类
*/ */
class H265Frame : public Frame { class H265Frame : public FrameImp {
public: public:
typedef std::shared_ptr<H265Frame> Ptr; typedef std::shared_ptr<H265Frame> Ptr;
...@@ -60,32 +60,8 @@ public: ...@@ -60,32 +60,8 @@ public:
NAL_SEI_SUFFIX = 40, NAL_SEI_SUFFIX = 40,
} NaleType; } NaleType;
char *data() const override { H265Frame(){
return (char *) _buffer.data(); _codecid = CodecH265;
}
uint32_t size() const override {
return _buffer.size();
}
uint32_t dts() const override {
return _dts;
}
uint32_t pts() const override {
return _pts ? _pts : _dts;
}
uint32_t prefixSize() const override {
return _prefix_size;
}
TrackType getTrackType() const override {
return TrackVideo;
}
CodecId getCodecId() const override {
return CodecH265;
} }
bool keyFrame() const override { bool keyFrame() const override {
...@@ -96,39 +72,26 @@ public: ...@@ -96,39 +72,26 @@ public:
switch(H265_TYPE(_buffer[_prefix_size])){ switch(H265_TYPE(_buffer[_prefix_size])){
case H265Frame::NAL_VPS: case H265Frame::NAL_VPS:
case H265Frame::NAL_SPS: case H265Frame::NAL_SPS:
case H265Frame::NAL_PPS: case H265Frame::NAL_PPS : return true;
return true; default : return false;
default:
return false;
} }
} }
static bool isKeyFrame(int type) { static bool isKeyFrame(int type) {
return type >= NAL_BLA_W_LP && type <= NAL_RSV_IRAP_VCL23; return type >= NAL_BLA_W_LP && type <= NAL_RSV_IRAP_VCL23;
} }
public:
uint32_t _dts = 0;
uint32_t _pts = 0;
uint32_t _prefix_size = 4;
string _buffer;
}; };
class H265FrameNoCacheAble : public FrameFromPtr {
class H265FrameNoCacheAble : public FrameNoCacheAble {
public: public:
typedef std::shared_ptr<H265FrameNoCacheAble> Ptr; typedef std::shared_ptr<H265FrameNoCacheAble> Ptr;
H265FrameNoCacheAble(char *ptr, uint32_t size, uint32_t dts,uint32_t pts, int prefixeSize = 4) { H265FrameNoCacheAble(char *ptr, uint32_t size, uint32_t dts,uint32_t pts, int prefix_size = 4) {
_ptr = ptr; _ptr = ptr;
_size = size; _size = size;
_dts = dts; _dts = dts;
_pts = pts; _pts = pts;
_prefixSize = prefixeSize; _prefix_size = prefix_size;
}
TrackType getTrackType() const override {
return TrackVideo;
} }
CodecId getCodecId() const override { CodecId getCodecId() const override {
...@@ -136,17 +99,15 @@ public: ...@@ -136,17 +99,15 @@ public:
} }
bool keyFrame() const override { bool keyFrame() const override {
return H265Frame::isKeyFrame(H265_TYPE(((uint8_t *) _ptr)[_prefixSize])); return H265Frame::isKeyFrame(H265_TYPE(((uint8_t *) _ptr)[_prefix_size]));
} }
bool configFrame() const override{ bool configFrame() const override{
switch(H265_TYPE(((uint8_t *) _ptr)[_prefixSize])){ switch(H265_TYPE(((uint8_t *) _ptr)[_prefix_size])){
case H265Frame::NAL_VPS: case H265Frame::NAL_VPS:
case H265Frame::NAL_SPS: case H265Frame::NAL_SPS:
case H265Frame::NAL_PPS: case H265Frame::NAL_PPS:return true;
return true; default:return false;
default:
return false;
} }
} }
}; };
...@@ -184,7 +145,6 @@ public: ...@@ -184,7 +145,6 @@ public:
/** /**
* 返回不带0x00 00 00 01头的vps * 返回不带0x00 00 00 01头的vps
* @return
*/ */
const string &getVps() const { const string &getVps() const {
return _vps; return _vps;
...@@ -192,7 +152,6 @@ public: ...@@ -192,7 +152,6 @@ public:
/** /**
* 返回不带0x00 00 00 01头的sps * 返回不带0x00 00 00 01头的sps
* @return
*/ */
const string &getSps() const { const string &getSps() const {
return _sps; return _sps;
...@@ -200,7 +159,6 @@ public: ...@@ -200,7 +159,6 @@ public:
/** /**
* 返回不带0x00 00 00 01头的pps * 返回不带0x00 00 00 01头的pps
* @return
*/ */
const string &getPps() const { const string &getPps() const {
return _pps; return _pps;
...@@ -212,7 +170,6 @@ public: ...@@ -212,7 +170,6 @@ public:
/** /**
* 返回视频高度 * 返回视频高度
* @return
*/ */
int getVideoHeight() const override{ int getVideoHeight() const override{
return _height ; return _height ;
...@@ -220,7 +177,6 @@ public: ...@@ -220,7 +177,6 @@ public:
/** /**
* 返回视频宽度 * 返回视频宽度
* @return
*/ */
int getVideoWidth() const override{ int getVideoWidth() const override{
return _width; return _width;
...@@ -228,7 +184,6 @@ public: ...@@ -228,7 +184,6 @@ public:
/** /**
* 返回视频fps * 返回视频fps
* @return
*/ */
float getVideoFps() const override{ float getVideoFps() const override{
return _fps; return _fps;
...@@ -238,34 +193,18 @@ public: ...@@ -238,34 +193,18 @@ public:
return !_vps.empty() && !_sps.empty() && !_pps.empty(); return !_vps.empty() && !_sps.empty() && !_pps.empty();
} }
/** /**
* 输入数据帧,并获取sps pps * 输入数据帧,并获取sps pps
* @param frame 数据帧 * @param frame 数据帧
*/ */
void inputFrame(const Frame::Ptr &frame) override{ void inputFrame(const Frame::Ptr &frame) override{
int type = H265_TYPE(*((uint8_t *)frame->data() + frame->prefixSize())); int type = H265_TYPE(*((uint8_t *)frame->data() + frame->prefixSize()));
if(frame->configFrame()){ if(frame->configFrame() || type == H265Frame::NAL_SEI_PREFIX){
bool first_frame = true; splitH264(frame->data(), frame->size(), frame->prefixSize(), [&](const char *ptr, int len, int prefix){
splitH264(frame->data() + frame->prefixSize(), H265FrameInternal::Ptr sub_frame = std::make_shared<H265FrameInternal>(frame, (char*)ptr, len, prefix);
frame->size() - frame->prefixSize(),
[&](const char *ptr, int len){
if(first_frame){
H265FrameInternal::Ptr sub_frame = std::make_shared<H265FrameInternal>(frame,
frame->data(),
len + frame->prefixSize(),
frame->prefixSize());
inputFrame_l(sub_frame);
first_frame = false;
}else{
H265FrameInternal::Ptr sub_frame = std::make_shared<H265FrameInternal>(frame,
(char *)ptr,
len ,
3);
inputFrame_l(sub_frame); inputFrame_l(sub_frame);
}
}); });
}else{ } else {
inputFrame_l(frame); inputFrame_l(frame);
} }
} }
...@@ -367,47 +306,41 @@ private: ...@@ -367,47 +306,41 @@ private:
bool _last_frame_is_idr = false; bool _last_frame_is_idr = false;
}; };
/** /**
* h265类型sdp * h265类型sdp
*/ */
class H265Sdp : public Sdp { class H265Sdp : public Sdp {
public: public:
/** /**
* * 构造函数
* @param sps 265 sps,不带0x00000001头 * @param sps 265 sps,不带0x00000001头
* @param pps 265 pps,不带0x00000001头 * @param pps 265 pps,不带0x00000001头
* @param playload_type rtp playload type 默认96 * @param payload_type rtp payload type 默认96
* @param bitrate 比特率 * @param bitrate 比特率
*/ */
H265Sdp(const string &strVPS, H265Sdp(const string &strVPS,
const string &strSPS, const string &strSPS,
const string &strPPS, const string &strPPS,
int playload_type = 96, int payload_type = 96,
int bitrate = 4000) : Sdp(90000,playload_type) { int bitrate = 4000) : Sdp(90000,payload_type) {
//视频通道 //视频通道
_printer << "m=video 0 RTP/AVP " << playload_type << "\r\n"; _printer << "m=video 0 RTP/AVP " << payload_type << "\r\n";
_printer << "b=AS:" << bitrate << "\r\n"; _printer << "b=AS:" << bitrate << "\r\n";
_printer << "a=rtpmap:" << playload_type << " H265/" << 90000 << "\r\n"; _printer << "a=rtpmap:" << payload_type << " H265/" << 90000 << "\r\n";
_printer << "a=fmtp:" << playload_type << " "; _printer << "a=fmtp:" << payload_type << " ";
_printer << "sprop-vps="; _printer << "sprop-vps=";
_printer << encodeBase64(strVPS) << "; "; _printer << encodeBase64(strVPS) << "; ";
_printer << "sprop-sps="; _printer << "sprop-sps=";
_printer << encodeBase64(strSPS) << "; "; _printer << encodeBase64(strSPS) << "; ";
_printer << "sprop-pps="; _printer << "sprop-pps=";
_printer << encodeBase64(strPPS) << "\r\n"; _printer << encodeBase64(strPPS) << "\r\n";
_printer << "a=control:trackID=" << getTrackType() << "\r\n"; _printer << "a=control:trackID=" << (int)TrackVideo << "\r\n";
} }
string getSdp() const override { string getSdp() const override {
return _printer; return _printer;
} }
TrackType getTrackType() const override {
return TrackVideo;
}
CodecId getCodecId() const override { CodecId getCodecId() const override {
return CodecH265; return CodecH265;
} }
...@@ -415,9 +348,5 @@ private: ...@@ -415,9 +348,5 @@ private:
_StrPrinter _printer; _StrPrinter _printer;
}; };
}//namespace mediakit }//namespace mediakit
#endif //ZLMEDIAKIT_H265_H #endif //ZLMEDIAKIT_H265_H
\ No newline at end of file
...@@ -36,10 +36,6 @@ public: ...@@ -36,10 +36,6 @@ public:
*/ */
bool inputRtmp(const RtmpPacket::Ptr &rtmp, bool key_pos = true) override; bool inputRtmp(const RtmpPacket::Ptr &rtmp, bool key_pos = true) override;
TrackType getTrackType() const override{
return TrackVideo;
}
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return CodecH265; return CodecH265;
} }
......
...@@ -96,7 +96,7 @@ bool H265RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) { ...@@ -96,7 +96,7 @@ bool H265RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) { if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) {
//中间的或末尾的rtp包,其seq必须连续(如果回环了则判定为连续),否则说明rtp丢包,那么该帧不完整,必须得丢弃 //中间的或末尾的rtp包,其seq必须连续(如果回环了则判定为连续),否则说明rtp丢包,那么该帧不完整,必须得丢弃
_h265frame->_buffer.clear(); _h265frame->_buffer.clear();
WarnL << "rtp sequence不连续: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃"; WarnL << "rtp丢包: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
return false; return false;
} }
...@@ -140,12 +140,12 @@ void H265RtpDecoder::onGetH265(const H265Frame::Ptr &frame) { ...@@ -140,12 +140,12 @@ void H265RtpDecoder::onGetH265(const H265Frame::Ptr &frame) {
H265RtpEncoder::H265RtpEncoder(uint32_t ui32Ssrc, H265RtpEncoder::H265RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize, uint32_t ui32MtuSize,
uint32_t ui32SampleRate, uint32_t ui32SampleRate,
uint8_t ui8PlayloadType, uint8_t ui8PayloadType,
uint8_t ui8Interleaved) : uint8_t ui8Interleaved) :
RtpInfo(ui32Ssrc, RtpInfo(ui32Ssrc,
ui32MtuSize, ui32MtuSize,
ui32SampleRate, ui32SampleRate,
ui8PlayloadType, ui8PayloadType,
ui8Interleaved) { ui8Interleaved) {
} }
......
...@@ -39,10 +39,6 @@ public: ...@@ -39,10 +39,6 @@ public:
*/ */
bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = true) override; bool inputRtp(const RtpPacket::Ptr &rtp, bool key_pos = true) override;
TrackType getTrackType() const override{
return TrackVideo;
}
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return CodecH265; return CodecH265;
} }
...@@ -67,13 +63,13 @@ public: ...@@ -67,13 +63,13 @@ public:
* @param ui32Ssrc ssrc * @param ui32Ssrc ssrc
* @param ui32MtuSize mtu大小 * @param ui32MtuSize mtu大小
* @param ui32SampleRate 采样率,强制为90000 * @param ui32SampleRate 采样率,强制为90000
* @param ui8PlayloadType pt类型 * @param ui8PayloadType pt类型
* @param ui8Interleaved rtsp interleaved * @param ui8Interleaved rtsp interleaved
*/ */
H265RtpEncoder(uint32_t ui32Ssrc, H265RtpEncoder(uint32_t ui32Ssrc,
uint32_t ui32MtuSize = 1400, uint32_t ui32MtuSize = 1400,
uint32_t ui32SampleRate = 90000, uint32_t ui32SampleRate = 90000,
uint8_t ui8PlayloadType = 96, uint8_t ui8PayloadType = 96,
uint8_t ui8Interleaved = TrackVideo * 2); uint8_t ui8Interleaved = TrackVideo * 2);
~H265RtpEncoder() {} ~H265RtpEncoder() {}
......
/*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include "Opus.h"
namespace mediakit{
Sdp::Ptr OpusTrack::getSdp() {
    //the track can only describe itself once it is ready
    if (ready()) {
        return std::make_shared<OpusSdp>(getAudioSampleRate(), getAudioChannel());
    }
    WarnL << getCodecName() << " Track未准备好";
    return nullptr;
}
}//namespace mediakit
\ No newline at end of file
/*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef ZLMEDIAKIT_OPUS_H
#define ZLMEDIAKIT_OPUS_H
#include "Frame.h"
#include "Track.h"
namespace mediakit{
/**
* Opus帧
*/
class OpusFrame : public FrameImp {
public:
    using Ptr = std::shared_ptr<OpusFrame>;

    //tag the frame with the Opus codec id on construction
    OpusFrame() { _codecid = CodecOpus; }
};
/**
* 不可缓存的Opus帧
*/
class OpusFrameNoCacheAble : public FrameFromPtr {
public:
typedef std::shared_ptr<OpusFrameNoCacheAble> Ptr;
OpusFrameNoCacheAble(char *ptr,uint32_t size,uint32_t dts, uint32_t pts = 0,int prefix_size = 0){
_ptr = ptr;
_size = size;
_dts = dts;
_prefix_size = prefix_size;
}
CodecId getCodecId() const override{
return CodecOpus;
}
bool keyFrame() const override {
return false;
}
bool configFrame() const override{
return false;
}
};
/**
* Opus帧音频通道
*/
class OpusTrack : public AudioTrackImp {
public:
    typedef std::shared_ptr<OpusTrack> Ptr;

    OpusTrack(int sample_rate, int channels, int sample_bit)
        : AudioTrackImp(CodecOpus, sample_rate, channels, sample_bit) {}

private:
    //produce a deep copy of this track
    Track::Ptr clone() override {
        return std::make_shared<OpusTrack>(*this);
    }

    //build the SDP description for this track
    Sdp::Ptr getSdp() override;
};
/**
* Opus类型SDP
*/
class OpusSdp : public Sdp {
public:
    /**
     * Build an opus SDP media description.
     * @param sample_rate audio sample rate in Hz
     * @param channels number of audio channels
     * @param payload_type rtp payload type, 98 by default
     * @param bitrate bitrate in kbit/s (not emitted in the generated sdp)
     */
    OpusSdp(int sample_rate, int channels, int payload_type = 98, int bitrate = 128)
        : Sdp(sample_rate, payload_type) {
        _printer << "m=audio 0 RTP/AVP " << payload_type << "\r\n"
                 << "a=rtpmap:" << payload_type << " opus/" << sample_rate << "/" << channels << "\r\n"
                 << "a=control:trackID=" << (int)TrackAudio << "\r\n";
    }

    //the accumulated sdp text
    string getSdp() const override { return _printer; }

    CodecId getCodecId() const override { return CodecOpus; }

private:
    _StrPrinter _printer;
};
}//namespace mediakit
#endif //ZLMEDIAKIT_OPUS_H
...@@ -65,8 +65,6 @@ class VideoTrack : public Track { ...@@ -65,8 +65,6 @@ class VideoTrack : public Track {
public: public:
typedef std::shared_ptr<VideoTrack> Ptr; typedef std::shared_ptr<VideoTrack> Ptr;
TrackType getTrackType() const override { return TrackVideo;};
/** /**
* 返回视频高度 * 返回视频高度
* @return * @return
...@@ -93,8 +91,6 @@ class AudioTrack : public Track { ...@@ -93,8 +91,6 @@ class AudioTrack : public Track {
public: public:
typedef std::shared_ptr<AudioTrack> Ptr; typedef std::shared_ptr<AudioTrack> Ptr;
TrackType getTrackType() const override { return TrackAudio;};
/** /**
* 返回音频采样率 * 返回音频采样率
* @return * @return
...@@ -114,6 +110,64 @@ public: ...@@ -114,6 +110,64 @@ public:
virtual int getAudioChannel() const {return 0;}; virtual int getAudioChannel() const {return 0;};
}; };
class AudioTrackImp : public AudioTrack {
public:
    typedef std::shared_ptr<AudioTrackImp> Ptr;

    /**
     * Constructor
     * @param codecId codec type
     * @param sample_rate sample rate in Hz
     * @param channels number of audio channels
     * @param sample_bit bits per sample, usually 16
     */
    AudioTrackImp(CodecId codecId, int sample_rate, int channels, int sample_bit)
        : _codecid(codecId)
        , _sample_rate(sample_rate)
        , _channels(channels)
        , _sample_bit(sample_bit) {}

    //codec type of this track
    CodecId getCodecId() const override { return _codecid; }

    //this track is ready as soon as it is constructed
    bool ready() override { return true; }

    //sample rate in Hz
    int getAudioSampleRate() const override { return _sample_rate; }

    //bits per sample, typically 16 or 8
    int getAudioSampleBit() const override { return _sample_bit; }

    //number of audio channels
    int getAudioChannel() const override { return _channels; }

private:
    CodecId _codecid;
    int _sample_rate;
    int _channels;
    int _sample_bit;
};
class TrackSource{ class TrackSource{
public: public:
...@@ -123,7 +177,6 @@ public: ...@@ -123,7 +177,6 @@ public:
/** /**
* 获取全部的Track * 获取全部的Track
* @param trackReady 是否获取全部已经准备好的Track * @param trackReady 是否获取全部已经准备好的Track
* @return
*/ */
virtual vector<Track::Ptr> getTracks(bool trackReady = true) const = 0; virtual vector<Track::Ptr> getTracks(bool trackReady = true) const = 0;
...@@ -131,7 +184,6 @@ public: ...@@ -131,7 +184,6 @@ public:
* 获取特定Track * 获取特定Track
* @param type track类型 * @param type track类型
* @param trackReady 是否获取全部已经准备好的Track * @param trackReady 是否获取全部已经准备好的Track
* @return
*/ */
Track::Ptr getTrack(TrackType type , bool trackReady = true) const { Track::Ptr getTrack(TrackType type , bool trackReady = true) const {
auto tracks = getTracks(trackReady); auto tracks = getTracks(trackReady);
...@@ -145,5 +197,4 @@ public: ...@@ -145,5 +197,4 @@ public:
}; };
}//namespace mediakit }//namespace mediakit
#endif //ZLMEDIAKIT_TRACK_H #endif //ZLMEDIAKIT_TRACK_H
\ No newline at end of file
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include <cstdlib>
#include "HlsParser.h"
#include "Util/util.h"
#include "Common/Parser.h"
using namespace toolkit;
namespace mediakit {
bool HlsParser::parse(const string &http_url, const string &m3u8) {
float extinf_dur = 0;
ts_segment segment;
map<int, ts_segment> ts_map;
_total_dur = 0;
_is_live = true;
_is_m3u8_inner = false;
int index = 0;
auto lines = split(m3u8, "\n");
for (auto &line : lines) {
trim(line);
if (line.size() < 2) {
continue;
}
if ((_is_m3u8_inner || extinf_dur != 0) && line[0] != '#') {
segment.duration = extinf_dur;
if (line.find("http://") == 0 || line.find("https://") == 0) {
segment.url = line;
} else {
if (line.find("/") == 0) {
segment.url = http_url.substr(0, http_url.find("/", 8)) + line;
} else {
segment.url = http_url.substr(0, http_url.rfind("/") + 1) + line;
}
}
if (!_is_m3u8_inner) {
//ts按照先后顺序排序
ts_map.emplace(index++, segment);
} else {
//子m3u8按照带宽排序
ts_map.emplace(segment.bandwidth, segment);
}
extinf_dur = 0;
continue;
}
_is_m3u8_inner = false;
if (line.find("#EXTINF:") == 0) {
sscanf(line.data(), "#EXTINF:%f,", &extinf_dur);
_total_dur += extinf_dur;
continue;
}
static const string s_stream_inf = "#EXT-X-STREAM-INF:";
if (line.find(s_stream_inf) == 0) {
_is_m3u8_inner = true;
auto key_val = Parser::parseArgs(line.substr(s_stream_inf.size()), ",", "=");
segment.program_id = atoi(key_val["PROGRAM-ID"].data());
segment.bandwidth = atoi(key_val["BANDWIDTH"].data());
sscanf(key_val["RESOLUTION"].data(), "%dx%d", &segment.width, &segment.height);
continue;
}
if (line == "#EXTM3U") {
_is_m3u8 = true;
continue;
}
if (line.find("#EXT-X-ALLOW-CACHE:") == 0) {
_allow_cache = (line.find(":YES") != string::npos);
continue;
}
if (line.find("#EXT-X-VERSION:") == 0) {
sscanf(line.data(), "#EXT-X-VERSION:%d", &_version);
continue;
}
if (line.find("#EXT-X-TARGETDURATION:") == 0) {
sscanf(line.data(), "#EXT-X-TARGETDURATION:%d", &_target_dur);
continue;
}
if (line.find("#EXT-X-MEDIA-SEQUENCE:") == 0) {
sscanf(line.data(), "#EXT-X-MEDIA-SEQUENCE:%lld", &_sequence);
continue;
}
if (line.find("#EXT-X-ENDLIST") == 0) {
//点播
_is_live = false;
continue;
}
continue;
}
if (_is_m3u8) {
onParsed(_is_m3u8_inner, _sequence, ts_map);
}
return _is_m3u8;
}
//trivial accessors for the state captured by the last parse()
bool HlsParser::isM3u8() const { return _is_m3u8; }

bool HlsParser::isLive() const { return _is_live; }

bool HlsParser::allowCache() const { return _allow_cache; }

int HlsParser::getVersion() const { return _version; }

int HlsParser::getTargetDur() const { return _target_dur; }

//NOTE(review): _sequence is int64_t; the int return may truncate very large values — confirm
int HlsParser::getSequence() const { return _sequence; }

bool HlsParser::isM3u8Inner() const { return _is_m3u8_inner; }
}//namespace mediakit
\ No newline at end of file
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef HTTP_HLSPARSER_H
#define HTTP_HLSPARSER_H
#include <string>
#include <list>
#include <map>
using namespace std;
namespace mediakit {
typedef struct {
    //segment (or sub playlist) url
    std::string url;
    //segment duration in seconds, from #EXTINF
    float duration = 0;
    //////fields below are only filled for nested m3u8 entries//////
    //program id (PROGRAM-ID attribute)
    int program_id = 0;
    //bandwidth in bits per second (BANDWIDTH attribute)
    int bandwidth = 0;
    //video width in pixels (RESOLUTION attribute)
    int width = 0;
    //video height in pixels (RESOLUTION attribute)
    int height = 0;
} ts_segment;
class HlsParser {
public:
    HlsParser(){}
    ~HlsParser(){}

    /**
     * Parse an m3u8 document.
     * @param http_url url the document was fetched from, used to resolve relative urls
     * @param m3u8 the document body
     * @return whether the document contained #EXTM3U
     */
    bool parse(const string &http_url,const string &m3u8);

    /**
     * Whether #EXTM3U was present, i.e. this is an m3u8 file.
     */
    bool isM3u8() const;

    /**
     * #EXT-X-ALLOW-CACHE value: whether caching is allowed.
     */
    bool allowCache() const;

    /**
     * Whether #EXT-X-ENDLIST is absent, i.e. this is a live stream.
     */
    bool isLive() const ;

    /**
     * #EXT-X-VERSION value.
     */
    int getVersion() const;

    /**
     * #EXT-X-TARGETDURATION value.
     */
    int getTargetDur() const;

    /**
     * #EXT-X-MEDIA-SEQUENCE value: sequence number of this m3u8.
     */
    int getSequence() const;

    /**
     * Whether the document nests sub m3u8 playlists (#EXT-X-STREAM-INF).
     */
    bool isM3u8Inner() const;

protected:
    //callback fired with the parsed ts segments (or sub playlist entries)
    virtual void onParsed(bool is_m3u8_inner,int64_t sequence,const map<int,ts_segment> &ts_list) {};

private:
    bool _is_m3u8 = false;
    bool _allow_cache = false;
    bool _is_live = true;
    int _version = 0;
    int _target_dur = 0;
    float _total_dur = 0;
    int64_t _sequence = 0;
    //whether the document contains nested m3u8 playlists
    bool _is_m3u8_inner = false;
};
}//namespace mediakit
#endif //HTTP_HLSPARSER_H
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include "HlsPlayer.h"
namespace mediakit {
HlsPlayer::HlsPlayer(const EventPoller::Ptr &poller) {
    //forward every demuxed ts chunk from the segment splitter to the subclass
    _segment.setOnSegment([this](const char *data, uint64_t len) {
        onPacket(data, len);
    });
    if (poller) {
        _poller = poller;
    } else {
        _poller = EventPollerPool::Instance().getPoller();
    }
}

HlsPlayer::~HlsPlayer() {}
void HlsPlayer::play(const string &strUrl) {
    //remember the url so failures can fall back through the list
    _m3u8_list.push_back(strUrl);
    play_l();
}
void HlsPlayer::play_l() {
    if (_m3u8_list.empty()) {
        //every candidate url has been tried and failed
        teardown_l(SockException(Err_shutdown, "所有hls url都尝试播放失败!"));
        return;
    }
    setMethod("GET");
    if (!(*this)[kNetAdapter].empty()) {
        setNetAdapter((*this)[kNetAdapter]);
    }
    //request the most recent url with the configured timeout
    float playTimeOutSec = (*this)[Client::kTimeoutMS].as<int>() / 1000.0;
    sendRequest(_m3u8_list.back(), playTimeOutSec);
}
void HlsPlayer::teardown_l(const SockException &ex) {
    //drop all pending timers and the ts downloader before closing
    _timer = nullptr;
    _timer_ts = nullptr;
    _http_ts_player = nullptr;
    shutdown(ex);
}
void HlsPlayer::teardown() {
    //user-initiated stop
    teardown_l(SockException(Err_shutdown, "teardown"));
}
void HlsPlayer::playNextTs(bool force){
    if (_ts_list.empty()) {
        //the segment queue is empty: re-download the m3u8 immediately
        _timer.reset();
        play_l();
        return;
    }
    if (!force && _http_ts_player && _http_ts_player->alive()) {
        //the downloader is still alive: a segment download is in progress
        return;
    }
    //segment duration in milliseconds, used to pace the next download
    auto ts_duration = _ts_list.front().duration * 1000;
    weak_ptr<HlsPlayer> weakSelf = dynamic_pointer_cast<HlsPlayer>(shared_from_this());
    //ticker measures how long this segment takes to download/play
    std::shared_ptr<Ticker> ticker(new Ticker);
    _http_ts_player = std::make_shared<HttpTSPlayer>(getPoller(), false);
    _http_ts_player->setOnDisconnect([weakSelf, ticker, ts_duration](const SockException &err) {
        auto strongSelf = weakSelf.lock();
        if (!strongSelf) {
            return;
        }
        //schedule the next segment 500ms before this one's duration elapses
        auto delay = ts_duration - 500 - ticker->elapsedTime();
        if (delay <= 0) {
            //this segment took too long: start the next one immediately
            strongSelf->playNextTs(true);
        } else {
            //otherwise wait a bit before playing the next segment
            strongSelf->_timer_ts.reset(new Timer(delay / 1000.0, [weakSelf, delay]() {
                auto strongSelf = weakSelf.lock();
                if (!strongSelf) {
                    return false;
                }
                strongSelf->playNextTs(true);
                //one-shot timer
                return false;
            }, strongSelf->getPoller()));
        }
    });
    _http_ts_player->setOnPacket([weakSelf](const char *data, uint64_t len) {
        auto strongSelf = weakSelf.lock();
        if (!strongSelf) {
            return;
        }
        //received a chunk of ts data
        strongSelf->onPacket_l(data, len);
    });
    _http_ts_player->setMethod("GET");
    if(!(*this)[kNetAdapter].empty()) {
        _http_ts_player->setNetAdapter((*this)[Client::kNetAdapter]);
    }
    //allow up to twice the segment duration for the download
    _http_ts_player->sendRequest(_ts_list.front().url, 2 * _ts_list.front().duration);
    _ts_list.pop_front();
}
void HlsPlayer::onParsed(bool is_m3u8_inner,int64_t sequence,const map<int,ts_segment> &ts_map){
    if (!is_m3u8_inner) {
        //this is a ts playlist
        if (_last_sequence == sequence) {
            //same sequence number as last time: nothing new
            return;
        }
        _last_sequence = sequence;
        for (auto &pr : ts_map) {
            auto &ts = pr.second;
            if (_ts_url_cache.emplace(ts.url).second) {
                //first time we see this ts url: queue it for download
                _ts_list.emplace_back(ts);
                //track urls in arrival order so the cache can be trimmed
                _ts_url_sort.emplace_back(ts.url);
            }
        }
        if (_ts_url_sort.size() > 2 * ts_map.size()) {
            //trim the oldest entries from the de-duplication cache
            _ts_url_cache.erase(_ts_url_sort.front());
            _ts_url_sort.pop_front();
        }
        playNextTs();
    } else {
        //this is a master playlist: play the sub hls with the highest bandwidth
        if (ts_map.empty()) {
            teardown_l(SockException(Err_shutdown, StrPrinter << "empty sub hls list:" + getUrl()));
            return;
        }
        _timer.reset();
        weak_ptr<HlsPlayer> weakSelf = dynamic_pointer_cast<HlsPlayer>(shared_from_this());
        //ts_map is keyed by bandwidth, so rbegin() is the highest-bandwidth entry
        auto url = ts_map.rbegin()->second.url;
        getPoller()->async([weakSelf, url]() {
            auto strongSelf = weakSelf.lock();
            if (strongSelf) {
                strongSelf->play(url);
            }
        }, false);
    }
}
int64_t HlsPlayer::onResponseHeader(const string &status, const HttpClient::HttpHeader &headers) {
    if (status != "200" && status != "206") {
        //unexpected http status: give up on this url
        teardown_l(SockException(Err_shutdown, StrPrinter << "bad http status code:" + status));
        return 0;
    }
    //fix: look the header up with find() instead of const_cast + operator[],
    //which could mutate a map the caller handed us as const
    auto it = headers.find("Content-Type");
    _is_m3u8 = (it != headers.end() && it->second.find("application/vnd.apple.mpegurl") == 0);
    //content length unknown up front: consume the body until completion
    return -1;
}
void HlsPlayer::onResponseBody(const char *buf, int64_t size, int64_t recvedSize, int64_t totalSize) {
    if (recvedSize == size) {
        //first chunk of a new response: reset the playlist buffer
        _m3u8.clear();
    }
    _m3u8.append(buf, size);
}
void HlsPlayer::onResponseCompleted() {
    if (!HlsParser::parse(getUrl(), _m3u8)) {
        //the body is not a valid m3u8 document
        teardown_l(SockException(Err_shutdown, "解析m3u8文件失败"));
        return;
    }
    //schedule the next playlist refresh
    playDelay();
    if (_first) {
        _first = false;
        onPlayResult(SockException(Err_success, "play success"));
    }
}
float HlsPlayer::delaySecond() {
    //refresh once per target duration when known, otherwise once per second
    auto target_dur = HlsParser::getTargetDur();
    if (HlsParser::isM3u8() && target_dur > 0) {
        return target_dur;
    }
    return 1;
}
void HlsPlayer::onDisconnect(const SockException &ex) {
    if (_first) {
        //the very first request failed: report play failure
        _first = false;
        onPlayResult(ex);
        return;
    }
    //deliberate shutdown
    if (ex.getErrCode() == Err_shutdown) {
        if (_m3u8_list.size() <= 1) {
            //every url has failed to play
            onShutdown(ex);
        } else {
            _m3u8_list.pop_back();
            //a previous url remains: retry with it
            play_l();
        }
        return;
    }
    //eof or similar transient failure: retry the m3u8 after a delay
    playDelay();
}
bool HlsPlayer::onRedirectUrl(const string &url, bool temporary) {
    //treat the redirect target as another candidate playlist url
    _m3u8_list.push_back(url);
    //always follow redirects
    return true;
}
void HlsPlayer::playDelay(){
weak_ptr<HlsPlayer> weakSelf = dynamic_pointer_cast<HlsPlayer>(shared_from_this());
_timer.reset(new Timer(delaySecond(), [weakSelf]() {
auto strongSelf = weakSelf.lock();
if (strongSelf) {
strongSelf->play_l();
}
return false;
}, getPoller()));
}
void HlsPlayer::onPacket_l(const char *data, uint64_t len) {
    //feed raw bytes into the splitter; it invokes onPacket per ts chunk
    _segment.input(data, len);
}
//////////////////////////////////////////////////////////////////////////
//forward the poller to the wrapped HlsPlayer
HlsPlayerImp::HlsPlayerImp(const EventPoller::Ptr &poller) : PlayerImp<HlsPlayer, PlayerBase>(poller) {
}
void HlsPlayerImp::setOnPacket(const TSSegment::onSegment &cb) {
    //callback invoked with every raw ts chunk, before decoding
    _on_ts = cb;
}
void HlsPlayerImp::onPacket(const char *data,uint64_t len) {
    if (_on_ts) {
        //forward the raw ts chunk to the user callback first
        _on_ts(data, len);
    }

    if (!_decoder) {
        //create the ts decoder lazily on the first packet
        _decoder = DecoderImp::createDecoder(DecoderImp::decoder_ts, this);
    }

    if (_decoder) {
        _decoder->input((uint8_t *) data, len);
    }
}
void HlsPlayerImp::onAllTrackReady() {
    //all tracks probed: report success to the outer player
    PlayerImp<HlsPlayer, PlayerBase>::onPlayResult(SockException(Err_success,"play hls success"));
}
void HlsPlayerImp::onPlayResult(const SockException &ex) {
    if(ex){
        //failure: report directly
        PlayerImp<HlsPlayer, PlayerBase>::onPlayResult(ex);
    }else{
        //success: align audio to video timestamps and start the pacing tick
        _stamp[TrackAudio].syncTo(_stamp[TrackVideo]);
        _ticker.resetTime();

        weak_ptr<HlsPlayerImp> weakSelf = dynamic_pointer_cast<HlsPlayerImp>(shared_from_this());
        //tick every 50 milliseconds
        _timer = std::make_shared<Timer>(0.05, [weakSelf]() {
            auto strongSelf = weakSelf.lock();
            if (!strongSelf) {
                return false;
            }
            strongSelf->onTick();
            return true;
        }, getPoller());
    }
}
void HlsPlayerImp::onShutdown(const SockException &ex) {
    //propagate the shutdown, then stop the playback tick
    PlayerImp<HlsPlayer, PlayerBase>::onShutdown(ex);
    _timer.reset();
}
vector<Track::Ptr> HlsPlayerImp::getTracks(bool trackReady) const {
    //tracks are managed by the MediaSink side of this object
    return MediaSink::getTracks(trackReady);
}
void HlsPlayerImp::inputFrame(const Frame::Ptr &frame) {
    //compute the relative timestamps for this track
    int64_t dts, pts;
    _stamp[frame->getTrackType()].revise(frame->dts(), frame->pts(), dts, pts);
    //cache the frame keyed by its relative dts
    _frame_cache.emplace(dts, Frame::getCacheAbleFrame(frame));

    while (!_frame_cache.empty()) {
        if (_frame_cache.rbegin()->first - _frame_cache.begin()->first > 30 * 1000) {
            //more than 30 seconds buffered: force-consume the oldest frame
            MediaSink::inputFrame(_frame_cache.begin()->second);
            _frame_cache.erase(_frame_cache.begin());
            continue;
        }
        //buffer spans less than 30 seconds: stop draining
        break;
    }
}
void HlsPlayerImp::onTick() {
    //deliver every cached frame whose relative timestamp is already due
    for (auto it = _frame_cache.begin(); it != _frame_cache.end(); it = _frame_cache.erase(it)) {
        if (it->first > _ticker.elapsedTime()) {
            //the rest of the cache is not due yet (multimap is ordered by dts)
            return;
        }
        MediaSink::inputFrame(it->second);
    }
}
}//namespace mediakit
\ No newline at end of file
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef HTTP_HLSPLAYER_H
#define HTTP_HLSPLAYER_H
#include <unordered_set>
#include "Util/util.h"
#include "Poller/Timer.h"
#include "Http/HttpDownloader.h"
#include "Player/MediaPlayer.h"
#include "HlsParser.h"
#include "HttpTSPlayer.h"
#include "Rtp/Decoder.h"
#include "Rtp/TSDecoder.h"
using namespace toolkit;
namespace mediakit {
class HlsPlayer : public HttpClientImp , public PlayerBase , public HlsParser{
public:
    HlsPlayer(const EventPoller::Ptr &poller);
    ~HlsPlayer() override;

    /**
     * Start playing the given hls url.
     * @param strUrl m3u8 url
     */
    void play(const string &strUrl) override;

    /**
     * Stop playing, releasing timers and the segment downloader.
     */
    void teardown() override;

protected:
    /**
     * Called for every ts chunk demuxed from the downloaded segments.
     * @param data ts payload
     * @param len ts payload length
     */
    virtual void onPacket(const char *data, uint64_t len) = 0;

private:
    /**
     * m3u8 parsed successfully.
     * @param is_m3u8_inner whether this is a master playlist (list of sub m3u8)
     * @param sequence ts playlist sequence number
     * @param ts_map ts segments, or sub m3u8 entries keyed by bandwidth
     */
    void onParsed(bool is_m3u8_inner,int64_t sequence,const map<int,ts_segment> &ts_map) override;

    /**
     * Received the http response header.
     * @param status status code, e.g. "200"
     * @param headers http headers
     * @return expected content length; -1 means "read until completion";
     *         ignored when a Content-Length header is present
     */
    int64_t onResponseHeader(const string &status,const HttpHeader &headers) override;

    /**
     * Received a chunk of the http body.
     * @param buf data pointer
     * @param size chunk size
     * @param recvedSize bytes received so far (including this chunk); equals
     *        totalSize when the response completes
     * @param totalSize total body size
     */
    void onResponseBody(const char *buf,int64_t size,int64_t recvedSize,int64_t totalSize) override;

    /**
     * The whole http response has been received.
     */
    void onResponseCompleted() override;

    /**
     * The http connection was closed.
     * @param ex close reason
     */
    void onDisconnect(const SockException &ex) override;

    /**
     * Redirect event.
     * @param url redirect target url
     * @param temporary whether the redirect is temporary
     * @return whether to follow the redirect
     */
    bool onRedirectUrl(const string &url,bool temporary) override;

private:
    void playDelay();
    float delaySecond();
    void playNextTs(bool force = false);
    void teardown_l(const SockException &ex);
    void play_l();
    void onPacket_l(const char *data, uint64_t len);

private:
    struct UrlComp {
        //compare urls ignoring everything after '?'
        bool operator()(const string& __x, const string& __y) const {
            return split(__x,"?")[0] < split(__y,"?")[0];
        }
    };

private:
    //whether the last response looked like m3u8 (by Content-Type)
    bool _is_m3u8 = false;
    //true until the first play result has been reported
    bool _first = true;
    //sequence number of the last ts playlist processed
    int64_t _last_sequence = -1;
    //accumulated m3u8 response body
    string _m3u8;
    //playlist refresh timer
    Timer::Ptr _timer;
    //delay timer between ts segment downloads
    Timer::Ptr _timer_ts;
    //pending ts segments to download
    list<ts_segment> _ts_list;
    //ts urls in arrival order, used to trim _ts_url_cache
    list<string> _ts_url_sort;
    //candidate m3u8 urls (original, redirects, sub playlists)
    list<string> _m3u8_list;
    //already-seen ts urls, for de-duplication
    set<string, UrlComp> _ts_url_cache;
    //downloader for the current ts segment
    HttpTSPlayer::Ptr _http_ts_player;
    //splits the raw http body into ts chunks
    TSSegment _segment;
};
class HlsPlayerImp : public PlayerImp<HlsPlayer, PlayerBase> , public MediaSink{
public:
    typedef std::shared_ptr<HlsPlayerImp> Ptr;
    HlsPlayerImp(const EventPoller::Ptr &poller = nullptr);
    ~HlsPlayerImp() override {};
    //set a callback that receives every raw ts chunk
    void setOnPacket(const TSSegment::onSegment &cb);

private:
    void onPacket(const char *data, uint64_t len) override;
    void onAllTrackReady() override;
    void onPlayResult(const SockException &ex) override;
    vector<Track::Ptr> getTracks(bool trackReady = true) const override;
    void inputFrame(const Frame::Ptr &frame) override;
    void onShutdown(const SockException &ex) override;
    //invoked every 50ms to deliver cached frames whose timestamp is due
    void onTick();

private:
    //user callback for raw ts data
    TSSegment::onSegment _on_ts;
    //ts demuxer, created lazily on the first packet
    DecoderImp::Ptr _decoder;
    //frames buffered by relative dts, consumed by onTick
    multimap<int64_t, Frame::Ptr> _frame_cache;
    //pacing tick timer
    Timer::Ptr _timer;
    //measures elapsed play time for pacing
    Ticker _ticker;
    //timestamp normalizers, indexed by track type (audio/video)
    Stamp _stamp[2];
};
}//namespace mediakit
#endif //HTTP_HLSPLAYER_H
...@@ -48,8 +48,7 @@ public: ...@@ -48,8 +48,7 @@ public:
} }
}; };
class HttpClient : public TcpClient , public HttpRequestSplitter class HttpClient : public TcpClient , public HttpRequestSplitter{
{
public: public:
typedef StrCaseMap HttpHeader; typedef StrCaseMap HttpHeader;
typedef std::shared_ptr<HttpClient> Ptr; typedef std::shared_ptr<HttpClient> Ptr;
......
...@@ -13,9 +13,7 @@ ...@@ -13,9 +13,7 @@
#include "HttpClient.h" #include "HttpClient.h"
#include "Util/SSLBox.h" #include "Util/SSLBox.h"
using namespace toolkit; using namespace toolkit;
namespace mediakit { namespace mediakit {
class HttpClientImp: public TcpClientWithSSL<HttpClient> { class HttpClientImp: public TcpClientWithSSL<HttpClient> {
...@@ -28,5 +26,4 @@ protected: ...@@ -28,5 +26,4 @@ protected:
}; };
} /* namespace mediakit */ } /* namespace mediakit */
#endif /* SRC_HTTP_HTTPCLIENTIMP_H_ */ #endif /* SRC_HTTP_HTTPCLIENTIMP_H_ */
...@@ -188,65 +188,65 @@ bool HttpSession::checkLiveFlvStream(const function<void()> &cb){ ...@@ -188,65 +188,65 @@ bool HttpSession::checkLiveFlvStream(const function<void()> &cb){
bool bClose = !strcasecmp(_parser["Connection"].data(),"close"); bool bClose = !strcasecmp(_parser["Connection"].data(),"close");
weak_ptr<HttpSession> weakSelf = dynamic_pointer_cast<HttpSession>(shared_from_this()); weak_ptr<HttpSession> weakSelf = dynamic_pointer_cast<HttpSession>(shared_from_this());
MediaSource::findAsync(_mediaInfo,weakSelf.lock(),[weakSelf,bClose,this,cb](const MediaSource::Ptr &src){
//鉴权结果回调
auto onRes = [cb, weakSelf, bClose](const string &err){
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
//本对象已经销毁
return;
}
if(!err.empty()){
//播放鉴权失败
strongSelf->sendResponse("401 Unauthorized", bClose, nullptr, KeyValue(), std::make_shared<HttpStringBody>(err));
return;
}
//异步查找rtmp流
MediaSource::findAsync(strongSelf->_mediaInfo, strongSelf, [weakSelf, bClose, cb](const MediaSource::Ptr &src) {
auto strongSelf = weakSelf.lock(); auto strongSelf = weakSelf.lock();
if(!strongSelf){ if (!strongSelf) {
//本对象已经销毁 //本对象已经销毁
return; return;
} }
auto rtmp_src = dynamic_pointer_cast<RtmpMediaSource>(src); auto rtmp_src = dynamic_pointer_cast<RtmpMediaSource>(src);
if(!rtmp_src){ if (!rtmp_src) {
//未找到该流 //未找到该流
sendNotFound(bClose); strongSelf->sendNotFound(bClose);
return; return;
} }
//找到流了
auto onRes = [this,rtmp_src,cb](const string &err){
bool authSuccess = err.empty();
if(!authSuccess){
sendResponse("401 Unauthorized", true, nullptr, KeyValue(), std::make_shared<HttpStringBody>(err));
return ;
}
if(!cb) { if (!cb) {
//找到rtmp源,发送http头,负载后续发送 //找到rtmp源,发送http头,负载后续发送
sendResponse("200 OK", false, "video/x-flv",KeyValue(),nullptr,true); strongSelf->sendResponse("200 OK", false, "video/x-flv", KeyValue(), nullptr, true);
}else{ } else {
//自定义发送http头
cb(); cb();
} }
//http-flv直播牺牲延时提升发送性能 //http-flv直播牺牲延时提升发送性能
setSocketFlags(); strongSelf->setSocketFlags();
strongSelf->start(strongSelf->getPoller(), rtmp_src);
try{ strongSelf->_is_flv_stream = true;
start(getPoller(),rtmp_src); });
_is_flv_stream = true;
}catch (std::exception &ex){
//该rtmp源不存在
shutdown(SockException(Err_shutdown,"rtmp mediasource released"));
}
}; };
weak_ptr<HttpSession> weakSelf = dynamic_pointer_cast<HttpSession>(shared_from_this()); Broadcast::AuthInvoker invoker = [weakSelf, onRes](const string &err) {
Broadcast::AuthInvoker invoker = [weakSelf,onRes](const string &err){
auto strongSelf = weakSelf.lock(); auto strongSelf = weakSelf.lock();
if(!strongSelf){ if (!strongSelf) {
return;
}
strongSelf->async([weakSelf,onRes,err](){
auto strongSelf = weakSelf.lock();
if(!strongSelf){
return; return;
} }
strongSelf->async([onRes, err]() {
onRes(err); onRes(err);
}); });
}; };
auto flag = NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastMediaPlayed,_mediaInfo,invoker,static_cast<SockInfo &>(*this));
if(!flag){ auto flag = NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastMediaPlayed, _mediaInfo, invoker, static_cast<SockInfo &>(*this));
if (!flag) {
//该事件无人监听,默认不鉴权 //该事件无人监听,默认不鉴权
onRes(""); onRes("");
} }
});
return true; return true;
} }
......
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include "HttpTSPlayer.h"
namespace mediakit {
/**
 * Construct the http-ts player.
 * @param poller   event poller to run on; when null, one is taken from the pool
 * @param split_ts whether to split the received byte stream into ts packets
 */
HttpTSPlayer::HttpTSPlayer(const EventPoller::Ptr &poller, bool split_ts) {
    _split_ts = split_ts;
    _poller = poller ? poller : EventPollerPool::Instance().getPoller();
    //every segment produced by the splitter is forwarded to onPacket()
    _segment.setOnSegment([this](const char *buf, uint64_t size) { onPacket(buf, size); });
}
//Nothing to release explicitly; members clean up via their own destructors.
HttpTSPlayer::~HttpTSPlayer() = default;
/**
 * Validate the http response header before receiving the body.
 * @param status  http status code as a string
 * @param headers response headers (case-insensitive map)
 * @return -1 to signal a body of indeterminate length; 0 on rejection
 */
int64_t HttpTSPlayer::onResponseHeader(const string &status, const HttpClient::HttpHeader &headers) {
    if (status != "200" && status != "206") {
        //unexpected http status code; abort the session
        shutdown(SockException(Err_other, StrPrinter << "bad http status code:" + status));
        return 0;
    }
    //look the header up without mutating the (const) map; the original
    //const_cast + operator[] could insert an empty "Content-Type" entry
    auto it = headers.find("Content-Type");
    if (it != headers.end() &&
        (it->second.find("video/mp2t") == 0 || it->second.find("video/mpeg") == 0)) {
        _is_ts_content = true;
    }
    //the body that follows has no fixed length
    return -1;
}
/**
 * Body data callback.
 * @param buf        received chunk
 * @param size       chunk length in bytes
 * @param recvedSize total bytes received so far (including this chunk)
 * @param totalSize  total body size (unknown/unfixed for this stream — see onResponseHeader)
 */
void HttpTSPlayer::onResponseBody(const char *buf, int64_t size, int64_t recvedSize, int64_t totalSize) {
    //recvedSize == size means this is the very first chunk of the body;
    //guard size > 0 so we never dereference buf[0] of an empty chunk
    if (recvedSize == size && size > 0) {
        if (buf[0] == TS_SYNC_BYTE) {
            //stream starts with the ts sync byte
            _is_first_packet_ts = true;
        } else {
            WarnL << "可能不是http-ts流";
        }
    }
    if (_split_ts) {
        //feed the splitter; it invokes onPacket() once per ts segment
        _segment.input(buf, size);
    } else {
        //no splitting requested: forward the raw chunk as-is
        onPacket(buf, size);
    }
}
void HttpTSPlayer::onResponseCompleted() {
    //the whole http body has been received; report a successful end of playback
    shutdown(SockException(Err_success, "play completed"));
}
/**
 * Connection-closed callback; fires the user disconnect handler exactly once.
 * @param ex reason for the disconnect
 */
void HttpTSPlayer::onDisconnect(const SockException &ex) {
    if (_on_disconnect) {
        //detach the callback before invoking it so that a handler which
        //re-registers itself is not clobbered by the reset afterwards
        auto cb = _on_disconnect;
        _on_disconnect = nullptr;
        cb(ex);
    }
}
//Deliver one ts packet (or raw chunk when splitting is disabled) to the
//registered callback; silently dropped when no callback is set.
void HttpTSPlayer::onPacket(const char *data, uint64_t len) {
    if (_on_segment) {
        _on_segment(data, len);
    }
}
void HttpTSPlayer::setOnDisconnect(const HttpTSPlayer::onShutdown &cb) {
_on_disconnect = cb;
}
//Register the callback that receives each demuxed ts packet.
void HttpTSPlayer::setOnPacket(const TSSegment::onSegment &cb) {
    _on_segment = cb;
}
}//namespace mediakit
\ No newline at end of file
/*
* Copyright (c) 2020 The ZLMediaKit project authors. All Rights Reserved.
*
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef HTTP_HTTPTSPLAYER_H
#define HTTP_HTTPTSPLAYER_H
#include "Http/HttpDownloader.h"
#include "Player/MediaPlayer.h"
#include "Rtp/TSDecoder.h"
using namespace toolkit;
namespace mediakit {
//HTTP-TS player: pulls a ts stream over http; ts demultiplexing is NOT implemented here
class HttpTSPlayer : public HttpClientImp{
public:
    //callback type for abnormal-disconnect notification
    typedef function<void(const SockException &)> onShutdown;
    typedef std::shared_ptr<HttpTSPlayer> Ptr;

    //poller: event poller to run on (pool default when null);
    //split_ts: whether to split the byte stream into ts packets via TSSegment
    HttpTSPlayer(const EventPoller::Ptr &poller = nullptr, bool split_ts = true);
    ~HttpTSPlayer() override ;

    //set the callback fired on abnormal disconnect
    void setOnDisconnect(const onShutdown &cb);
    //set the callback invoked for every received ts packet
    void setOnPacket(const TSSegment::onSegment &cb);

protected:
    ///HttpClient override///
    int64_t onResponseHeader(const string &status,const HttpHeader &headers) override;
    void onResponseBody(const char *buf,int64_t size,int64_t recvedSize,int64_t totalSize) override;
    void onResponseCompleted() override;
    void onDisconnect(const SockException &ex) override ;
    //invoked for each ts packet (or raw chunk when _split_ts is false)
    virtual void onPacket(const char *data, uint64_t len);

private:
    //whether the Content-Type header indicated an mpeg-ts payload
    bool _is_ts_content = false;
    //whether the first received byte was the ts sync byte
    bool _is_first_packet_ts = false;
    //whether to run the stream through the TSSegment splitter
    bool _split_ts;
    TSSegment _segment;
    onShutdown _on_disconnect;
    TSSegment::onSegment _on_segment;
};
}//namespace mediakit
#endif //HTTP_HTTPTSPLAYER_H
...@@ -94,6 +94,20 @@ public: ...@@ -94,6 +94,20 @@ public:
_onRecv = nullptr; _onRecv = nullptr;
sendRequest(http_url,fTimeOutSec); sendRequest(http_url,fTimeOutSec);
} }
//Send a websocket CLOSE frame to end the session gracefully.
void closeWsClient(){
    if(!_onRecv){
        //handshake not completed yet — nothing to close
        return;
    }
    WebSocketHeader header;
    header._fin = true;
    header._reserved = 0;
    header._opcode = CLOSE;
    //frames sent by a client must be masked (RFC 6455)
    header._mask_flag = true;
    //NOTE(review): header._payload_len is left unset here — presumably encode()
    //derives the length from the (null) buffer; confirm in WebSocketSplitter::encode
    WebSocketSplitter::encode(header, nullptr);
}
protected: protected:
//HttpClientImp override //HttpClientImp override
...@@ -110,7 +124,8 @@ protected: ...@@ -110,7 +124,8 @@ protected:
if(Sec_WebSocket_Accept == const_cast<HttpHeader &>(headers)["Sec-WebSocket-Accept"]){ if(Sec_WebSocket_Accept == const_cast<HttpHeader &>(headers)["Sec-WebSocket-Accept"]){
//success //success
onWebSocketException(SockException()); onWebSocketException(SockException());
return 0; //后续全是websocket负载数据
return -1;
} }
shutdown(SockException(Err_shutdown,StrPrinter << "Sec-WebSocket-Accept mismatch")); shutdown(SockException(Err_shutdown,StrPrinter << "Sec-WebSocket-Accept mismatch"));
return 0; return 0;
...@@ -125,6 +140,16 @@ protected: ...@@ -125,6 +140,16 @@ protected:
*/ */
void onResponseCompleted() override {} void onResponseCompleted() override {}
/**
 * Receive websocket payload data (the http response body after a successful upgrade).
 */
void onResponseBody(const char *buf,int64_t size,int64_t recvedSize,int64_t totalSize) override{
    if(_onRecv){
        //handshake completed: intercept the raw bytes and decode them as websocket frames
        _onRecv(buf, size);
    }
};
//TcpClient override //TcpClient override
/** /**
...@@ -168,20 +193,6 @@ protected: ...@@ -168,20 +193,6 @@ protected:
} }
/** /**
* tcp收到数据
* @param pBuf
*/
void onRecv(const Buffer::Ptr &pBuf) override{
if(_onRecv){
//完成websocket握手后,拦截websocket数据并解析
_onRecv(pBuf);
}else{
//websocket握手数据
HttpClientImp::onRecv(pBuf);
}
}
/**
* tcp连接断开 * tcp连接断开
* @param ex * @param ex
*/ */
...@@ -193,7 +204,7 @@ protected: ...@@ -193,7 +204,7 @@ protected:
//WebSocketSplitter override //WebSocketSplitter override
/** /**
* 收到一个webSocket数据包包头,后续将继续触发onWebSocketDecodePlayload回调 * 收到一个webSocket数据包包头,后续将继续触发onWebSocketDecodePayload回调
* @param header 数据包头 * @param header 数据包头
*/ */
void onWebSocketDecodeHeader(const WebSocketHeader &header) override{ void onWebSocketDecodeHeader(const WebSocketHeader &header) override{
...@@ -205,9 +216,9 @@ protected: ...@@ -205,9 +216,9 @@ protected:
* @param header 数据包包头 * @param header 数据包包头
* @param ptr 负载数据指针 * @param ptr 负载数据指针
* @param len 负载数据长度 * @param len 负载数据长度
* @param recved 已接收数据长度(包含本次数据长度),等于header._playload_len时则接受完毕 * @param recved 已接收数据长度(包含本次数据长度),等于header._payload_len时则接受完毕
*/ */
void onWebSocketDecodePlayload(const WebSocketHeader &header, const uint8_t *ptr, uint64_t len, uint64_t recved) override{ void onWebSocketDecodePayload(const WebSocketHeader &header, const uint8_t *ptr, uint64_t len, uint64_t recved) override{
_payload.append((char *)ptr,len); _payload.append((char *)ptr,len);
} }
...@@ -285,9 +296,9 @@ private: ...@@ -285,9 +296,9 @@ private:
//触发连接成功事件 //触发连接成功事件
_delegate.onConnect(ex); _delegate.onConnect(ex);
//拦截websocket数据接收 //拦截websocket数据接收
_onRecv = [this](const Buffer::Ptr &pBuf){ _onRecv = [this](const char *data, int len){
//解析websocket数据包 //解析websocket数据包
this->WebSocketSplitter::decode((uint8_t*)pBuf->data(),pBuf->size()); this->WebSocketSplitter::decode((uint8_t *)data, len);
}; };
return; return;
} }
...@@ -306,7 +317,7 @@ private: ...@@ -306,7 +317,7 @@ private:
private: private:
string _Sec_WebSocket_Key; string _Sec_WebSocket_Key;
function<void(const Buffer::Ptr &pBuf)> _onRecv; function<void(const char *data, int len)> _onRecv;
ClientTypeImp<ClientType,DataType> &_delegate; ClientTypeImp<ClientType,DataType> &_delegate;
string _payload; string _payload;
}; };
...@@ -328,7 +339,9 @@ public: ...@@ -328,7 +339,9 @@ public:
WebSocketClient(ArgsType &&...args) : ClientTypeImp<ClientType,DataType>(std::forward<ArgsType>(args)...){ WebSocketClient(ArgsType &&...args) : ClientTypeImp<ClientType,DataType>(std::forward<ArgsType>(args)...){
_wsClient.reset(new HttpWsClient<ClientType,DataType>(*this)); _wsClient.reset(new HttpWsClient<ClientType,DataType>(*this));
} }
~WebSocketClient() override {} ~WebSocketClient() override {
_wsClient->closeWsClient();
}
/** /**
* 重载startConnect方法, * 重载startConnect方法,
......
...@@ -161,7 +161,7 @@ protected: ...@@ -161,7 +161,7 @@ protected:
* @param len * @param len
* @param recved * @param recved
*/ */
void onWebSocketDecodePlayload(const WebSocketHeader &packet,const uint8_t *ptr,uint64_t len,uint64_t recved) override { void onWebSocketDecodePayload(const WebSocketHeader &packet,const uint8_t *ptr,uint64_t len,uint64_t recved) override {
_remian_data.append((char *)ptr,len); _remian_data.append((char *)ptr,len);
} }
...@@ -205,7 +205,7 @@ protected: ...@@ -205,7 +205,7 @@ protected:
* @param buffer * @param buffer
*/ */
void onWebSocketEncodeData(const Buffer::Ptr &buffer) override{ void onWebSocketEncodeData(const Buffer::Ptr &buffer) override{
SocketHelper::send(buffer); HttpSessionType::send(buffer);
} }
private: private:
string _remian_data; string _remian_data;
......
...@@ -72,16 +72,16 @@ begin_decode: ...@@ -72,16 +72,16 @@ begin_decode:
CHECK_LEN(1); CHECK_LEN(1);
_mask_flag = (*ptr & 0x80) >> 7; _mask_flag = (*ptr & 0x80) >> 7;
_playload_len = (*ptr & 0x7F); _payload_len = (*ptr & 0x7F);
ptr += 1; ptr += 1;
if (_playload_len == 126) { if (_payload_len == 126) {
CHECK_LEN(2); CHECK_LEN(2);
_playload_len = (*ptr << 8) | *(ptr + 1); _payload_len = (*ptr << 8) | *(ptr + 1);
ptr += 2; ptr += 2;
} else if (_playload_len == 127) { } else if (_payload_len == 127) {
CHECK_LEN(8); CHECK_LEN(8);
_playload_len = ((uint64_t) ptr[0] << (8 * 7)) | _payload_len = ((uint64_t) ptr[0] << (8 * 7)) |
((uint64_t) ptr[1] << (8 * 6)) | ((uint64_t) ptr[1] << (8 * 6)) |
((uint64_t) ptr[2] << (8 * 5)) | ((uint64_t) ptr[2] << (8 * 5)) |
((uint64_t) ptr[3] << (8 * 4)) | ((uint64_t) ptr[3] << (8 * 4)) |
...@@ -98,9 +98,9 @@ begin_decode: ...@@ -98,9 +98,9 @@ begin_decode:
} }
_got_header = true; _got_header = true;
_mask_offset = 0; _mask_offset = 0;
_playload_offset = 0; _payload_offset = 0;
onWebSocketDecodeHeader(*this); onWebSocketDecodeHeader(*this);
if(_playload_len == 0){ if(_payload_len == 0){
onWebSocketDecodeComplete(*this); onWebSocketDecodeComplete(*this);
} }
} }
...@@ -109,19 +109,19 @@ begin_decode: ...@@ -109,19 +109,19 @@ begin_decode:
uint64_t remain = len - (ptr - data); uint64_t remain = len - (ptr - data);
if(remain > 0){ if(remain > 0){
uint64_t playload_slice_len = remain; uint64_t payload_slice_len = remain;
if(playload_slice_len + _playload_offset > _playload_len){ if(payload_slice_len + _payload_offset > _payload_len){
playload_slice_len = _playload_len - _playload_offset; payload_slice_len = _payload_len - _payload_offset;
} }
_playload_offset += playload_slice_len; _payload_offset += payload_slice_len;
onPlayloadData(ptr,playload_slice_len); onPayloadData(ptr, payload_slice_len);
if(_playload_offset == _playload_len){ if(_payload_offset == _payload_len){
onWebSocketDecodeComplete(*this); onWebSocketDecodeComplete(*this);
//这是下一个包 //这是下一个包
remain -= playload_slice_len; remain -= payload_slice_len;
ptr += playload_slice_len; ptr += payload_slice_len;
_got_header = false; _got_header = false;
if(remain > 0){ if(remain > 0){
...@@ -138,14 +138,14 @@ begin_decode: ...@@ -138,14 +138,14 @@ begin_decode:
_remain_data.clear(); _remain_data.clear();
} }
void WebSocketSplitter::onPlayloadData(uint8_t *ptr, uint64_t len) { void WebSocketSplitter::onPayloadData(uint8_t *data, uint64_t len) {
if(_mask_flag){ if(_mask_flag){
for(int i = 0; i < len ; ++i,++ptr){ for(int i = 0; i < len ; ++i,++data){
*(ptr) ^= _mask[(i + _mask_offset) % 4]; *(data) ^= _mask[(i + _mask_offset) % 4];
} }
_mask_offset = (_mask_offset + len) % 4; _mask_offset = (_mask_offset + len) % 4;
} }
onWebSocketDecodePlayload(*this, _mask_flag ? ptr - len : ptr, len, _playload_offset); onWebSocketDecodePayload(*this, _mask_flag ? data - len : data, len, _payload_offset);
} }
void WebSocketSplitter::encode(const WebSocketHeader &header,const Buffer::Ptr &buffer) { void WebSocketSplitter::encode(const WebSocketHeader &header,const Buffer::Ptr &buffer) {
......
...@@ -44,14 +44,19 @@ public: ...@@ -44,14 +44,19 @@ public:
CONTROL_RSVF = 0xF CONTROL_RSVF = 0xF
} Type; } Type;
public: public:
WebSocketHeader() : _mask(4){} WebSocketHeader() : _mask(4){
//获取_mask内部buffer的内存地址,该内存是malloc开辟的,地址为随机
uint64_t ptr = (uint64_t)(&_mask[0]);
//根据内存地址设置掩码随机数
_mask.assign((uint8_t*)(&ptr), (uint8_t*)(&ptr) + 4);
}
virtual ~WebSocketHeader(){} virtual ~WebSocketHeader(){}
public: public:
bool _fin; bool _fin;
uint8_t _reserved; uint8_t _reserved;
Type _opcode; Type _opcode;
bool _mask_flag; bool _mask_flag;
uint64_t _playload_len; uint64_t _payload_len;
vector<uint8_t > _mask; vector<uint8_t > _mask;
}; };
...@@ -62,7 +67,7 @@ public: ...@@ -62,7 +67,7 @@ public:
/** /**
* 输入数据以便解包webSocket数据以及处理粘包问题 * 输入数据以便解包webSocket数据以及处理粘包问题
* 可能触发onWebSocketDecodeHeader和onWebSocketDecodePlayload回调 * 可能触发onWebSocketDecodeHeader和onWebSocketDecodePayload回调
* @param data 需要解包的数据,可能是不完整的包或多个包 * @param data 需要解包的数据,可能是不完整的包或多个包
* @param len 数据长度 * @param len 数据长度
*/ */
...@@ -77,7 +82,7 @@ public: ...@@ -77,7 +82,7 @@ public:
void encode(const WebSocketHeader &header,const Buffer::Ptr &buffer); void encode(const WebSocketHeader &header,const Buffer::Ptr &buffer);
protected: protected:
/** /**
* 收到一个webSocket数据包包头,后续将继续触发onWebSocketDecodePlayload回调 * 收到一个webSocket数据包包头,后续将继续触发onWebSocketDecodePayload回调
* @param header 数据包头 * @param header 数据包头
*/ */
virtual void onWebSocketDecodeHeader(const WebSocketHeader &header) {}; virtual void onWebSocketDecodeHeader(const WebSocketHeader &header) {};
...@@ -87,9 +92,9 @@ protected: ...@@ -87,9 +92,9 @@ protected:
* @param header 数据包包头 * @param header 数据包包头
* @param ptr 负载数据指针 * @param ptr 负载数据指针
* @param len 负载数据长度 * @param len 负载数据长度
* @param recved 已接收数据长度(包含本次数据长度),等于header._playload_len时则接受完毕 * @param recved 已接收数据长度(包含本次数据长度),等于header._payload_len时则接受完毕
*/ */
virtual void onWebSocketDecodePlayload(const WebSocketHeader &header, const uint8_t *ptr, uint64_t len, uint64_t recved) {}; virtual void onWebSocketDecodePayload(const WebSocketHeader &header, const uint8_t *ptr, uint64_t len, uint64_t recved) {};
/** /**
...@@ -105,12 +110,12 @@ protected: ...@@ -105,12 +110,12 @@ protected:
*/ */
virtual void onWebSocketEncodeData(const Buffer::Ptr &buffer){}; virtual void onWebSocketEncodeData(const Buffer::Ptr &buffer){};
private: private:
void onPlayloadData(uint8_t *data,uint64_t len); void onPayloadData(uint8_t *data, uint64_t len);
private: private:
string _remain_data; string _remain_data;
int _mask_offset = 0; int _mask_offset = 0;
bool _got_header = false; bool _got_header = false;
uint64_t _playload_offset = 0; uint64_t _payload_offset = 0;
}; };
} /* namespace mediakit */ } /* namespace mediakit */
......
...@@ -12,18 +12,31 @@ ...@@ -12,18 +12,31 @@
#include "PlayerBase.h" #include "PlayerBase.h"
#include "Rtsp/RtspPlayerImp.h" #include "Rtsp/RtspPlayerImp.h"
#include "Rtmp/RtmpPlayerImp.h" #include "Rtmp/RtmpPlayerImp.h"
#include "Http/HlsPlayer.h"
using namespace toolkit; using namespace toolkit;
namespace mediakit { namespace mediakit {
PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &poller,const string &strUrl) { //字符串是否以xx结尾
//Return true when |str| ends with |substr| (an empty suffix always matches).
static bool end_of(const string &str, const string &substr){
    auto tail = substr.size();
    return str.size() >= tail && str.compare(str.size() - tail, tail, substr) == 0;
}
PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &poller,const string &url_in) {
static auto releasePlayer = [](PlayerBase *ptr){ static auto releasePlayer = [](PlayerBase *ptr){
onceToken token(nullptr,[&](){ onceToken token(nullptr,[&](){
delete ptr; delete ptr;
}); });
ptr->teardown(); ptr->teardown();
}; };
string prefix = FindField(strUrl.data(), NULL, "://"); string url = url_in;
string prefix = FindField(url.data(), NULL, "://");
auto pos = url.find('?');
if (pos != string::npos) {
//去除?后面的字符串
url = url.substr(0, pos);
}
if (strcasecmp("rtsps",prefix.data()) == 0) { if (strcasecmp("rtsps",prefix.data()) == 0) {
return PlayerBase::Ptr(new TcpClientWithSSL<RtspPlayerImp>(poller),releasePlayer); return PlayerBase::Ptr(new TcpClientWithSSL<RtspPlayerImp>(poller),releasePlayer);
...@@ -41,6 +54,10 @@ PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &poller,const st ...@@ -41,6 +54,10 @@ PlayerBase::Ptr PlayerBase::createPlayer(const EventPoller::Ptr &poller,const st
return PlayerBase::Ptr(new RtmpPlayerImp(poller),releasePlayer); return PlayerBase::Ptr(new RtmpPlayerImp(poller),releasePlayer);
} }
if ((strcasecmp("http",prefix.data()) == 0 || strcasecmp("https",prefix.data()) == 0) && end_of(url, ".m3u8")) {
return PlayerBase::Ptr(new HlsPlayerImp(poller),releasePlayer);
}
return PlayerBase::Ptr(new RtspPlayerImp(poller),releasePlayer); return PlayerBase::Ptr(new RtspPlayerImp(poller),releasePlayer);
} }
......
...@@ -13,8 +13,6 @@ namespace mediakit { ...@@ -13,8 +13,6 @@ namespace mediakit {
HlsMaker::HlsMaker(float seg_duration, uint32_t seg_number) { HlsMaker::HlsMaker(float seg_duration, uint32_t seg_number) {
//最小允许设置为0,0个切片代表点播 //最小允许设置为0,0个切片代表点播
seg_number = MAX(0,seg_number);
seg_duration = MAX(1,seg_duration);
_seg_number = seg_number; _seg_number = seg_number;
_seg_duration = seg_duration; _seg_duration = seg_duration;
} }
...@@ -34,6 +32,8 @@ void HlsMaker::makeIndexFile(bool eof) { ...@@ -34,6 +32,8 @@ void HlsMaker::makeIndexFile(bool eof) {
} }
} }
auto sequence = _seg_number ? (_file_index > _seg_number ? _file_index - _seg_number : 0LL) : 0LL;
string m3u8; string m3u8;
snprintf(file_content,sizeof(file_content), snprintf(file_content,sizeof(file_content),
"#EXTM3U\n" "#EXTM3U\n"
...@@ -42,7 +42,7 @@ void HlsMaker::makeIndexFile(bool eof) { ...@@ -42,7 +42,7 @@ void HlsMaker::makeIndexFile(bool eof) {
"#EXT-X-TARGETDURATION:%u\n" "#EXT-X-TARGETDURATION:%u\n"
"#EXT-X-MEDIA-SEQUENCE:%llu\n", "#EXT-X-MEDIA-SEQUENCE:%llu\n",
(maxSegmentDuration + 999) / 1000, (maxSegmentDuration + 999) / 1000,
_seg_number ? _file_index : 0); sequence);
m3u8.assign(file_content); m3u8.assign(file_content);
......
/* /*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved. * Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
* *
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit). * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
......
/* /*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved. * Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
* *
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit). * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
......
/* /*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved. * Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
* *
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit). * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
......
/* /*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved. * Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
* *
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit). * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
......
...@@ -122,27 +122,68 @@ void MP4Muxer::inputFrame(const Frame::Ptr &frame) { ...@@ -122,27 +122,68 @@ void MP4Muxer::inputFrame(const Frame::Ptr &frame) {
} }
} }
//Map a codec id to the corresponding MOV object type; 0 means "not supported by mp4".
static uint8_t getObject(CodecId codecId){
    switch (codecId){
        case CodecH264 : return MOV_OBJECT_H264;
        case CodecH265 : return MOV_OBJECT_HEVC;
        case CodecAAC  : return MOV_OBJECT_AAC;
        case CodecG711A: return MOV_OBJECT_G711a;
        case CodecG711U: return MOV_OBJECT_G711u;
        case CodecOpus : return MOV_OBJECT_OPUS;
        default        : return 0;
    }
}
void MP4Muxer::stampSync(){
if(_codec_to_trackid.size() < 2){
return;
}
Stamp *audio = nullptr, *video = nullptr;
for(auto &pr : _codec_to_trackid){
switch (getTrackType((CodecId) pr.first)){
case TrackAudio : audio = &pr.second.stamp; break;
case TrackVideo : video = &pr.second.stamp; break;
default : break;
}
}
if(audio && video){
//音频时间戳同步于视频,因为音频时间戳被修改后不影响播放
audio->syncTo(*video);
}
}
void MP4Muxer::addTrack(const Track::Ptr &track) { void MP4Muxer::addTrack(const Track::Ptr &track) {
auto mp4_object = getObject(track->getCodecId());
if (!mp4_object) {
WarnL << "MP4录制不支持该编码格式:" << track->getCodecName();
return;
}
if (!track->ready()) {
WarnL << "Track[" << track->getCodecName() << "]未就绪";
return;
}
switch (track->getCodecId()) { switch (track->getCodecId()) {
case CodecG711A: case CodecG711A:
case CodecG711U: { case CodecG711U:
auto audio_track = dynamic_pointer_cast<G711Track>(track); case CodecOpus: {
auto audio_track = dynamic_pointer_cast<AudioTrack>(track);
if (!audio_track) { if (!audio_track) {
WarnL << "不是G711 Track"; WarnL << "不是音频Track:" << track->getCodecName();
return;
}
if (!audio_track->ready()) {
WarnL << "G711 Track未就绪";
return; return;
} }
auto track_id = mov_writer_add_audio(_mov_writter.get(), auto track_id = mov_writer_add_audio(_mov_writter.get(),
track->getCodecId() == CodecG711A ? MOV_OBJECT_G711a : MOV_OBJECT_G711u, mp4_object,
audio_track->getAudioChannel(), audio_track->getAudioChannel(),
audio_track->getAudioSampleBit() * audio_track->getAudioChannel(), audio_track->getAudioSampleBit() * audio_track->getAudioChannel(),
audio_track->getAudioSampleRate(), audio_track->getAudioSampleRate(),
nullptr, 0); nullptr, 0);
if (track_id < 0) { if (track_id < 0) {
WarnL << "添加G711 Track失败:" << track_id; WarnL << "添加Track[" << track->getCodecName() << "]失败:" << track_id;
return; return;
} }
_codec_to_trackid[track->getCodecId()].track_id = track_id; _codec_to_trackid[track->getCodecId()].track_id = track_id;
...@@ -155,16 +196,14 @@ void MP4Muxer::addTrack(const Track::Ptr &track) { ...@@ -155,16 +196,14 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
WarnL << "不是AAC Track"; WarnL << "不是AAC Track";
return; return;
} }
if(!audio_track->ready()){
WarnL << "AAC Track未就绪";
return;
}
auto track_id = mov_writer_add_audio(_mov_writter.get(), auto track_id = mov_writer_add_audio(_mov_writter.get(),
MOV_OBJECT_AAC, mp4_object,
audio_track->getAudioChannel(), audio_track->getAudioChannel(),
audio_track->getAudioSampleBit() * audio_track->getAudioChannel(), audio_track->getAudioSampleBit() * audio_track->getAudioChannel(),
audio_track->getAudioSampleRate(), audio_track->getAudioSampleRate(),
audio_track->getAacCfg().data(), 2); audio_track->getAacCfg().data(),
audio_track->getAacCfg().size());
if(track_id < 0){ if(track_id < 0){
WarnL << "添加AAC Track失败:" << track_id; WarnL << "添加AAC Track失败:" << track_id;
return; return;
...@@ -178,10 +217,6 @@ void MP4Muxer::addTrack(const Track::Ptr &track) { ...@@ -178,10 +217,6 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
WarnL << "不是H264 Track"; WarnL << "不是H264 Track";
return; return;
} }
if(!h264_track->ready()){
WarnL << "H264 Track未就绪";
return;
}
struct mpeg4_avc_t avc = {0}; struct mpeg4_avc_t avc = {0};
string sps_pps = string("\x00\x00\x00\x01", 4) + h264_track->getSps() + string sps_pps = string("\x00\x00\x00\x01", 4) + h264_track->getSps() +
...@@ -196,7 +231,7 @@ void MP4Muxer::addTrack(const Track::Ptr &track) { ...@@ -196,7 +231,7 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
} }
auto track_id = mov_writer_add_video(_mov_writter.get(), auto track_id = mov_writer_add_video(_mov_writter.get(),
MOV_OBJECT_H264, mp4_object,
h264_track->getVideoWidth(), h264_track->getVideoWidth(),
h264_track->getVideoHeight(), h264_track->getVideoHeight(),
extra_data, extra_data,
...@@ -216,10 +251,6 @@ void MP4Muxer::addTrack(const Track::Ptr &track) { ...@@ -216,10 +251,6 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
WarnL << "不是H265 Track"; WarnL << "不是H265 Track";
return; return;
} }
if(!h265_track->ready()){
WarnL << "H265 Track未就绪";
return;
}
struct mpeg4_hevc_t hevc = {0}; struct mpeg4_hevc_t hevc = {0};
string vps_sps_pps = string("\x00\x00\x00\x01", 4) + h265_track->getVps() + string vps_sps_pps = string("\x00\x00\x00\x01", 4) + h265_track->getVps() +
...@@ -235,7 +266,7 @@ void MP4Muxer::addTrack(const Track::Ptr &track) { ...@@ -235,7 +266,7 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
} }
auto track_id = mov_writer_add_video(_mov_writter.get(), auto track_id = mov_writer_add_video(_mov_writter.get(),
MOV_OBJECT_HEVC, mp4_object,
h265_track->getVideoWidth(), h265_track->getVideoWidth(),
h265_track->getVideoHeight(), h265_track->getVideoHeight(),
extra_data, extra_data,
...@@ -248,10 +279,12 @@ void MP4Muxer::addTrack(const Track::Ptr &track) { ...@@ -248,10 +279,12 @@ void MP4Muxer::addTrack(const Track::Ptr &track) {
_have_video = true; _have_video = true;
} }
break; break;
default:
WarnL << "MP4录制不支持该编码格式:" << track->getCodecName(); default: WarnL << "MP4录制不支持该编码格式:" << track->getCodecName(); break;
break;
} }
//尝试音视频同步
stampSync();
} }
}//namespace mediakit }//namespace mediakit
......
...@@ -45,13 +45,14 @@ public: ...@@ -45,13 +45,14 @@ public:
private: private:
void openMP4(); void openMP4();
void closeMP4(); void closeMP4();
void stampSync();
private: private:
struct track_info{ struct track_info {
int track_id = -1; int track_id = -1;
Stamp stamp; Stamp stamp;
}; };
unordered_map<int,track_info> _codec_to_trackid; unordered_map<int, track_info> _codec_to_trackid;
List<Frame::Ptr> _frameCached; List<Frame::Ptr> _frameCached;
bool _started = false; bool _started = false;
bool _have_video = false; bool _have_video = false;
......
...@@ -80,11 +80,11 @@ std::shared_ptr<MediaSinkInterface> Recorder::createRecorder(type type, const st ...@@ -80,11 +80,11 @@ std::shared_ptr<MediaSinkInterface> Recorder::createRecorder(type type, const st
} }
static MediaSource::Ptr getMediaSource(const string &vhost, const string &app, const string &stream_id){ static MediaSource::Ptr getMediaSource(const string &vhost, const string &app, const string &stream_id){
auto src = MediaSource::find(RTMP_SCHEMA, vhost, app, stream_id, false); auto src = MediaSource::find(RTMP_SCHEMA, vhost, app, stream_id);
if(src){ if(src){
return src; return src;
} }
return MediaSource::find(RTSP_SCHEMA, vhost, app, stream_id, false); return MediaSource::find(RTSP_SCHEMA, vhost, app, stream_id);
} }
bool Recorder::isRecording(type type, const string &vhost, const string &app, const string &stream_id){ bool Recorder::isRecording(type type, const string &vhost, const string &app, const string &stream_id){
......
...@@ -23,6 +23,26 @@ TsMuxer::~TsMuxer() { ...@@ -23,6 +23,26 @@ TsMuxer::~TsMuxer() {
uninit(); uninit();
} }
void TsMuxer::stampSync(){
if(_codec_to_trackid.size() < 2){
return;
}
Stamp *audio = nullptr, *video = nullptr;
for(auto &pr : _codec_to_trackid){
switch (getTrackType((CodecId) pr.first)){
case TrackAudio : audio = &pr.second.stamp; break;
case TrackVideo : video = &pr.second.stamp; break;
default : break;
}
}
if(audio && video){
//音频时间戳同步于视频,因为音频时间戳被修改后不影响播放
audio->syncTo(*video);
}
}
void TsMuxer::addTrack(const Track::Ptr &track) { void TsMuxer::addTrack(const Track::Ptr &track) {
switch (track->getCodecId()) { switch (track->getCodecId()) {
case CodecH264: { case CodecH264: {
...@@ -52,9 +72,11 @@ void TsMuxer::addTrack(const Track::Ptr &track) { ...@@ -52,9 +72,11 @@ void TsMuxer::addTrack(const Track::Ptr &track) {
break; break;
} }
default: default: WarnL << "mpeg-ts 不支持该编码格式,已忽略:" << track->getCodecName(); break;
break;
} }
//尝试音视频同步
stampSync();
} }
void TsMuxer::inputFrame(const Frame::Ptr &frame) { void TsMuxer::inputFrame(const Frame::Ptr &frame) {
......
...@@ -17,33 +17,55 @@ ...@@ -17,33 +17,55 @@
#include "Util/File.h" #include "Util/File.h"
#include "Common/MediaSink.h" #include "Common/MediaSink.h"
#include "Common/Stamp.h" #include "Common/Stamp.h"
using namespace toolkit; using namespace toolkit;
namespace mediakit { namespace mediakit {
//该类用于产生MPEG-TS
class TsMuxer : public MediaSinkInterface { class TsMuxer : public MediaSinkInterface {
public: public:
TsMuxer(); TsMuxer();
virtual ~TsMuxer(); virtual ~TsMuxer();
/**
* 添加音视频轨道
*/
void addTrack(const Track::Ptr &track) override; void addTrack(const Track::Ptr &track) override;
/**
* 重置音视频轨道
*/
void resetTracks() override; void resetTracks() override;
/**
* 输入帧数据
*/
void inputFrame(const Frame::Ptr &frame) override; void inputFrame(const Frame::Ptr &frame) override;
protected: protected:
/**
* 输出mpegts数据回调
* @param packet mpegts数据
* @param bytes mpegts数据长度
* @param timestamp 时间戳,单位毫秒
* @param is_idr_fast_packet 是否为关键帧的第一个TS包,用于确保ts切片第一帧为关键帧
*/
virtual void onTs(const void *packet, int bytes,uint32_t timestamp,bool is_idr_fast_packet) = 0; virtual void onTs(const void *packet, int bytes,uint32_t timestamp,bool is_idr_fast_packet) = 0;
private: private:
void init(); void init();
void uninit(); void uninit();
//音视频时间戳同步用
void stampSync();
private: private:
void *_context = nullptr; void *_context = nullptr;
char *_tsbuf[188]; char _tsbuf[188];
uint32_t _timestamp = 0; uint32_t _timestamp = 0;
struct track_info {
struct track_info{
int track_id = -1; int track_id = -1;
Stamp stamp; Stamp stamp;
}; };
unordered_map<int,track_info> _codec_to_trackid; unordered_map<int, track_info> _codec_to_trackid;
List<Frame::Ptr> _frameCached; List<Frame::Ptr> _frameCached;
bool _is_idr_fast_packet = false; bool _is_idr_fast_packet = false;
bool _have_video = false; bool _have_video = false;
......
...@@ -50,6 +50,9 @@ void FlvMuxer::start(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr & ...@@ -50,6 +50,9 @@ void FlvMuxer::start(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr &
} }
strongSelf->onDetach(); strongSelf->onDetach();
}); });
//音频同步于视频
_stamp[0].syncTo(_stamp[1]);
_ring_reader->setReadCB([weakSelf](const RtmpMediaSource::RingDataType &pkt){ _ring_reader->setReadCB([weakSelf](const RtmpMediaSource::RingDataType &pkt){
auto strongSelf = weakSelf.lock(); auto strongSelf = weakSelf.lock();
if(!strongSelf){ if(!strongSelf){
...@@ -164,7 +167,7 @@ void FlvMuxer::stop() { ...@@ -164,7 +167,7 @@ void FlvMuxer::stop() {
///////////////////////////////////////////////////////FlvRecorder///////////////////////////////////////////////////// ///////////////////////////////////////////////////////FlvRecorder/////////////////////////////////////////////////////
void FlvRecorder::startRecord(const EventPoller::Ptr &poller,const string &vhost, const string &app, const string &stream,const string &file_path) { void FlvRecorder::startRecord(const EventPoller::Ptr &poller,const string &vhost, const string &app, const string &stream,const string &file_path) {
startRecord(poller,dynamic_pointer_cast<RtmpMediaSource>(MediaSource::find(RTMP_SCHEMA,vhost,app,stream,false)),file_path); startRecord(poller,dynamic_pointer_cast<RtmpMediaSource>(MediaSource::find(RTMP_SCHEMA,vhost,app,stream)),file_path);
} }
void FlvRecorder::startRecord(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr &media, const string &file_path) { void FlvRecorder::startRecord(const EventPoller::Ptr &poller,const RtmpMediaSource::Ptr &media, const string &file_path) {
......
...@@ -100,4 +100,23 @@ uint8_t getAudioRtmpFlags(const Track::Ptr &track){ ...@@ -100,4 +100,23 @@ uint8_t getAudioRtmpFlags(const Track::Ptr &track){
} }
void Metadata::addTrack(AMFValue &metadata, const Track::Ptr &track) {
Metadata::Ptr new_metadata;
switch (track->getTrackType()) {
case TrackVideo: {
new_metadata = std::make_shared<VideoMeta>(dynamic_pointer_cast<VideoTrack>(track));
}
break;
case TrackAudio: {
new_metadata = std::make_shared<AudioMeta>(dynamic_pointer_cast<AudioTrack>(track));
}
break;
default:
return;
}
new_metadata->getMetadata().object_for_each([&](const std::string &key, const AMFValue &value) {
metadata.set(key, value);
});
}
}//namespace mediakit }//namespace mediakit
\ No newline at end of file
...@@ -220,6 +220,8 @@ public: ...@@ -220,6 +220,8 @@ public:
const AMFValue &getMetadata() const{ const AMFValue &getMetadata() const{
return _metadata; return _metadata;
} }
static void addTrack(AMFValue &metadata, const Track::Ptr &track);
protected: protected:
AMFValue _metadata; AMFValue _metadata;
}; };
...@@ -242,18 +244,6 @@ public: ...@@ -242,18 +244,6 @@ public:
} }
} }
/**
* 返回音频或视频类型
* @return
*/
TrackType getTrackType() const override {
return TrackTitle;
}
/**
* 返回编码器id
* @return
*/
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return CodecInvalid; return CodecInvalid;
} }
...@@ -266,18 +256,6 @@ public: ...@@ -266,18 +256,6 @@ public:
VideoMeta(const VideoTrack::Ptr &video,int datarate = 5000); VideoMeta(const VideoTrack::Ptr &video,int datarate = 5000);
virtual ~VideoMeta(){} virtual ~VideoMeta(){}
/**
* 返回音频或视频类型
* @return
*/
TrackType getTrackType() const override {
return TrackVideo;
}
/**
* 返回编码器id
* @return
*/
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return _codecId; return _codecId;
} }
...@@ -285,7 +263,6 @@ private: ...@@ -285,7 +263,6 @@ private:
CodecId _codecId; CodecId _codecId;
}; };
class AudioMeta : public Metadata{ class AudioMeta : public Metadata{
public: public:
typedef std::shared_ptr<AudioMeta> Ptr; typedef std::shared_ptr<AudioMeta> Ptr;
...@@ -294,18 +271,6 @@ public: ...@@ -294,18 +271,6 @@ public:
virtual ~AudioMeta(){} virtual ~AudioMeta(){}
/**
* 返回音频或视频类型
* @return
*/
TrackType getTrackType() const override {
return TrackAudio;
}
/**
* 返回编码器id
* @return
*/
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return _codecId; return _codecId;
} }
...@@ -317,7 +282,4 @@ private: ...@@ -317,7 +282,4 @@ private:
uint8_t getAudioRtmpFlags(const Track::Ptr &track); uint8_t getAudioRtmpFlags(const Track::Ptr &track);
}//namespace mediakit }//namespace mediakit
#endif//__rtmp_h
#endif
...@@ -13,60 +13,56 @@ ...@@ -13,60 +13,56 @@
namespace mediakit { namespace mediakit {
void RtmpDemuxer::loadMetaData(const AMFValue &val){ bool RtmpDemuxer::loadMetaData(const AMFValue &val){
bool ret = false;
try { try {
int audiosamplerate = 0; int audiosamplerate = 0;
int audiochannels = 0; int audiochannels = 0;
int audiosamplesize = 0; int audiosamplesize = 0;
const AMFValue *audiocodecid = nullptr; const AMFValue *audiocodecid = nullptr;
const AMFValue *videocodecid = nullptr; const AMFValue *videocodecid = nullptr;
val.object_for_each([&](const string &key, const AMFValue &val) { val.object_for_each([&](const string &key, const AMFValue &val) {
if (key == "duration") { if (key == "duration") {
_fDuration = val.as_number(); _fDuration = val.as_number();
return; return;
} }
if (key == "audiosamplerate") {
if(key == "audiosamplerate"){
audiosamplerate = val.as_integer(); audiosamplerate = val.as_integer();
return; return;
} }
if (key == "audiosamplesize") {
if(key == "audiosamplesize"){
audiosamplesize = val.as_integer(); audiosamplesize = val.as_integer();
return; return;
} }
if (key == "stereo") {
if(key == "stereo"){
audiochannels = val.as_boolean() ? 2 : 1; audiochannels = val.as_boolean() ? 2 : 1;
return; return;
} }
if (key == "videocodecid") {
if(key == "videocodecid"){
//找到视频 //找到视频
videocodecid = &val; videocodecid = &val;
return; return;
} }
if (key == "audiocodecid") {
if(key == "audiocodecid"){
//找到音频 //找到音频
audiocodecid = &val; audiocodecid = &val;
return; return;
} }
}); });
if (videocodecid) {
if(videocodecid){
//有视频 //有视频
ret = true;
makeVideoTrack(*videocodecid); makeVideoTrack(*videocodecid);
} }
if (audiocodecid) {
if(audiocodecid){
//有音频 //有音频
ret = true;
makeAudioTrack(*audiocodecid, audiosamplerate, audiochannels, audiosamplesize); makeAudioTrack(*audiocodecid, audiosamplerate, audiochannels, audiosamplesize);
} }
}catch (std::exception &ex){ } catch (std::exception &ex) {
WarnL << ex.what(); WarnL << ex.what();
} }
return ret;
} }
bool RtmpDemuxer::inputRtmp(const RtmpPacket::Ptr &pkt) { bool RtmpDemuxer::inputRtmp(const RtmpPacket::Ptr &pkt) {
...@@ -105,12 +101,11 @@ void RtmpDemuxer::makeVideoTrack(const AMFValue &videoCodec) { ...@@ -105,12 +101,11 @@ void RtmpDemuxer::makeVideoTrack(const AMFValue &videoCodec) {
_videoTrack = dynamic_pointer_cast<VideoTrack>(Factory::getVideoTrackByAmf(videoCodec)); _videoTrack = dynamic_pointer_cast<VideoTrack>(Factory::getVideoTrackByAmf(videoCodec));
if (_videoTrack) { if (_videoTrack) {
//生成rtmpCodec对象以便解码rtmp //生成rtmpCodec对象以便解码rtmp
_videoRtmpDecoder = Factory::getRtmpCodecByTrack(_videoTrack); _videoRtmpDecoder = Factory::getRtmpCodecByTrack(_videoTrack, false);
if (_videoRtmpDecoder) { if (_videoRtmpDecoder) {
//设置rtmp解码器代理,生成的frame写入该Track //设置rtmp解码器代理,生成的frame写入该Track
_videoRtmpDecoder->addDelegate(_videoTrack); _videoRtmpDecoder->addDelegate(_videoTrack);
onAddTrack(_videoTrack); onAddTrack(_videoTrack);
_tryedGetVideoTrack = true;
} else { } else {
//找不到相应的rtmp解码器,该track无效 //找不到相应的rtmp解码器,该track无效
_videoTrack.reset(); _videoTrack.reset();
...@@ -123,12 +118,11 @@ void RtmpDemuxer::makeAudioTrack(const AMFValue &audioCodec,int sample_rate, int ...@@ -123,12 +118,11 @@ void RtmpDemuxer::makeAudioTrack(const AMFValue &audioCodec,int sample_rate, int
_audioTrack = dynamic_pointer_cast<AudioTrack>(Factory::getAudioTrackByAmf(audioCodec, sample_rate, channels, sample_bit)); _audioTrack = dynamic_pointer_cast<AudioTrack>(Factory::getAudioTrackByAmf(audioCodec, sample_rate, channels, sample_bit));
if (_audioTrack) { if (_audioTrack) {
//生成rtmpCodec对象以便解码rtmp //生成rtmpCodec对象以便解码rtmp
_audioRtmpDecoder = Factory::getRtmpCodecByTrack(_audioTrack); _audioRtmpDecoder = Factory::getRtmpCodecByTrack(_audioTrack, false);
if (_audioRtmpDecoder) { if (_audioRtmpDecoder) {
//设置rtmp解码器代理,生成的frame写入该Track //设置rtmp解码器代理,生成的frame写入该Track
_audioRtmpDecoder->addDelegate(_audioTrack); _audioRtmpDecoder->addDelegate(_audioTrack);
onAddTrack(_audioTrack); onAddTrack(_audioTrack);
_tryedGetAudioTrack = true;
} else { } else {
//找不到相应的rtmp解码器,该track无效 //找不到相应的rtmp解码器,该track无效
_audioTrack.reset(); _audioTrack.reset();
......
...@@ -30,7 +30,7 @@ public: ...@@ -30,7 +30,7 @@ public:
RtmpDemuxer() = default; RtmpDemuxer() = default;
virtual ~RtmpDemuxer() = default; virtual ~RtmpDemuxer() = default;
void loadMetaData(const AMFValue &metadata); bool loadMetaData(const AMFValue &metadata);
/** /**
* 开始解复用 * 开始解复用
......
...@@ -33,9 +33,6 @@ using namespace toolkit; ...@@ -33,9 +33,6 @@ using namespace toolkit;
#define RTMP_GOP_SIZE 512 #define RTMP_GOP_SIZE 512
namespace mediakit { namespace mediakit {
typedef VideoPacketCache<RtmpPacket> RtmpVideoCache;
typedef AudioPacketCache<RtmpPacket> RtmpAudioCache;
/** /**
* rtmp媒体源的数据抽象 * rtmp媒体源的数据抽象
* rtmp有关键的三要素,分别是metadata、config帧,普通帧 * rtmp有关键的三要素,分别是metadata、config帧,普通帧
...@@ -43,7 +40,7 @@ typedef AudioPacketCache<RtmpPacket> RtmpAudioCache; ...@@ -43,7 +40,7 @@ typedef AudioPacketCache<RtmpPacket> RtmpAudioCache;
* 只要生成了这三要素,那么要实现rtmp推流、rtmp服务器就很简单了 * 只要生成了这三要素,那么要实现rtmp推流、rtmp服务器就很简单了
* rtmp推拉流协议中,先传递metadata,然后传递config帧,然后一直传递普通帧 * rtmp推拉流协议中,先传递metadata,然后传递config帧,然后一直传递普通帧
*/ */
class RtmpMediaSource : public MediaSource, public RingDelegate<RtmpPacket::Ptr>, public RtmpVideoCache, public RtmpAudioCache{ class RtmpMediaSource : public MediaSource, public RingDelegate<RtmpPacket::Ptr>, public PacketCache<RtmpPacket>{
public: public:
typedef std::shared_ptr<RtmpMediaSource> Ptr; typedef std::shared_ptr<RtmpMediaSource> Ptr;
typedef std::shared_ptr<List<RtmpPacket::Ptr> > RingDataType; typedef std::shared_ptr<List<RtmpPacket::Ptr> > RingDataType;
...@@ -111,6 +108,14 @@ public: ...@@ -111,6 +108,14 @@ public:
} }
/** /**
* 更新metadata
*/
void updateMetaData(const AMFValue &metadata) {
lock_guard<recursive_mutex> lock(_mtx);
_metadata = metadata;
}
/**
* 输入rtmp包 * 输入rtmp包
* @param pkt rtmp包 * @param pkt rtmp包
* @param key 是否为关键帧 * @param key 是否为关键帧
...@@ -149,12 +154,7 @@ public: ...@@ -149,12 +154,7 @@ public:
regist(); regist();
} }
} }
PacketCache<RtmpPacket>::inputPacket(pkt->typeId == MSG_VIDEO, pkt, key);
if(pkt->typeId == MSG_VIDEO){
RtmpVideoCache::inputVideo(pkt, key);
}else{
RtmpAudioCache::inputAudio(pkt);
}
} }
/** /**
...@@ -175,21 +175,13 @@ public: ...@@ -175,21 +175,13 @@ public:
private: private:
/** /**
* 批量flush时间戳相同的视频rtmp包时触发该函数 * 批量flush rtmp包时触发该函数
* @param rtmp_list 时间戳相同的rtmp包列表
* @param key_pos 是否包含关键帧
*/
void onFlushVideo(std::shared_ptr<List<RtmpPacket::Ptr> > &rtmp_list, bool key_pos) override {
_ring->write(rtmp_list, key_pos);
}
/**
* 批量flush一定数量的音频rtmp包时触发该函数
* @param rtmp_list rtmp包列表 * @param rtmp_list rtmp包列表
* @param key_pos 是否包含关键帧
*/ */
void onFlushAudio(std::shared_ptr<List<RtmpPacket::Ptr> > &rtmp_list) override{ void onFlush(std::shared_ptr<List<RtmpPacket::Ptr> > &rtmp_list, bool key_pos) override {
//只有音频的话,就不存在gop缓存的意义 //如果不存在视频,那么就没有存在GOP缓存的意义,所以is_key一直为true确保一直清空GOP缓存
_ring->write(rtmp_list, !_have_video); _ring->write(rtmp_list, _have_video ? key_pos : true);
} }
/** /**
......
...@@ -49,7 +49,11 @@ public: ...@@ -49,7 +49,11 @@ public:
* 设置metadata * 设置metadata
*/ */
void setMetaData(const AMFValue &metadata) override{ void setMetaData(const AMFValue &metadata) override{
_demuxer->loadMetaData(metadata); if(!_demuxer->loadMetaData(metadata)){
//该metadata无效,需要重新生成
_metadata = metadata;
_recreate_metadata = true;
}
RtmpMediaSource::setMetaData(metadata); RtmpMediaSource::setMetaData(metadata);
} }
...@@ -146,11 +150,22 @@ public: ...@@ -146,11 +150,22 @@ public:
void onAllTrackReady() override{ void onAllTrackReady() override{
setTrackSource(_muxer); setTrackSource(_muxer);
_all_track_ready = true; _all_track_ready = true;
if (_recreate_metadata) {
//更新metadata
for (auto &track : _muxer->getTracks()) {
Metadata::addTrack(_metadata, track);
}
RtmpMediaSource::updateMetaData(_metadata);
} }
}
private: private:
RtmpDemuxer::Ptr _demuxer; RtmpDemuxer::Ptr _demuxer;
MultiMediaSourceMuxer::Ptr _muxer; MultiMediaSourceMuxer::Ptr _muxer;
AMFValue _metadata;
bool _all_track_ready = false; bool _all_track_ready = false;
bool _recreate_metadata = false;
}; };
} /* namespace mediakit */ } /* namespace mediakit */
......
...@@ -23,47 +23,9 @@ RtmpMuxer::RtmpMuxer(const TitleMeta::Ptr &title) { ...@@ -23,47 +23,9 @@ RtmpMuxer::RtmpMuxer(const TitleMeta::Ptr &title) {
} }
void RtmpMuxer::addTrack(const Track::Ptr &track) { void RtmpMuxer::addTrack(const Track::Ptr &track) {
//根据track生产metadata
Metadata::Ptr metadata;
switch (track->getTrackType()){
case TrackVideo:{
metadata = std::make_shared<VideoMeta>(dynamic_pointer_cast<VideoTrack>(track));
}
break;
case TrackAudio:{
metadata = std::make_shared<AudioMeta>(dynamic_pointer_cast<AudioTrack>(track));
}
break;
default:
return;
}
switch (track->getCodecId()){
case CodecG711A:
case CodecG711U:{
auto audio_track = dynamic_pointer_cast<AudioTrack>(track);
if(!audio_track){
return;
}
if (audio_track->getAudioSampleRate() != 8000 ||
audio_track->getAudioChannel() != 1 ||
audio_track->getAudioSampleBit() != 16) {
WarnL << "RTMP只支持8000/1/16规格的G711,目前规格是:"
<< audio_track->getAudioSampleRate() << "/"
<< audio_track->getAudioChannel() << "/"
<< audio_track->getAudioSampleBit()
<< ",该音频已被忽略";
return;
}
break;
}
default : break;
}
auto &encoder = _encoder[track->getTrackType()]; auto &encoder = _encoder[track->getTrackType()];
//生成rtmp编码器,克隆该Track,防止循环引用 //生成rtmp编码器,克隆该Track,防止循环引用
encoder = Factory::getRtmpCodecByTrack(track->clone()); encoder = Factory::getRtmpCodecByTrack(track->clone(), true);
if (!encoder) { if (!encoder) {
return; return;
} }
...@@ -71,10 +33,8 @@ void RtmpMuxer::addTrack(const Track::Ptr &track) { ...@@ -71,10 +33,8 @@ void RtmpMuxer::addTrack(const Track::Ptr &track) {
//设置rtmp输出环形缓存 //设置rtmp输出环形缓存
encoder->setRtmpRing(_rtmpRing); encoder->setRtmpRing(_rtmpRing);
//添加其metadata //添加metadata
metadata->getMetadata().object_for_each([&](const std::string &key, const AMFValue &value){ Metadata::addTrack(_metadata,track);
_metadata.set(key,value);
});
} }
void RtmpMuxer::inputFrame(const Frame::Ptr &frame) { void RtmpMuxer::inputFrame(const Frame::Ptr &frame) {
......
...@@ -130,8 +130,7 @@ void RtmpSession::onCmd_publish(AMFDecoder &dec) { ...@@ -130,8 +130,7 @@ void RtmpSession::onCmd_publish(AMFDecoder &dec) {
auto src = dynamic_pointer_cast<RtmpMediaSource>(MediaSource::find(RTMP_SCHEMA, auto src = dynamic_pointer_cast<RtmpMediaSource>(MediaSource::find(RTMP_SCHEMA,
_mediaInfo._vhost, _mediaInfo._vhost,
_mediaInfo._app, _mediaInfo._app,
_mediaInfo._streamid, _mediaInfo._streamid));
false));
bool authSuccess = err.empty(); bool authSuccess = err.empty();
bool ok = (!src && !_pPublisherSrc && authSuccess); bool ok = (!src && !_pPublisherSrc && authSuccess);
AMFValue status(AMF_OBJECT); AMFValue status(AMF_OBJECT);
...@@ -158,6 +157,12 @@ void RtmpSession::onCmd_publish(AMFDecoder &dec) { ...@@ -158,6 +157,12 @@ void RtmpSession::onCmd_publish(AMFDecoder &dec) {
setSocketFlags(); setSocketFlags();
}; };
if(_mediaInfo._app.empty() || _mediaInfo._streamid.empty()){
//不允许莫名其妙的推流url
onRes("rtmp推流url非法", false, false, false);
return;
}
Broadcast::PublishAuthInvoker invoker = [weakSelf,onRes,pToken](const string &err,bool enableRtxp,bool enableHls,bool enableMP4){ Broadcast::PublishAuthInvoker invoker = [weakSelf,onRes,pToken](const string &err,bool enableRtxp,bool enableHls,bool enableMP4){
auto strongSelf = weakSelf.lock(); auto strongSelf = weakSelf.lock();
if(!strongSelf){ if(!strongSelf){
...@@ -266,6 +271,8 @@ void RtmpSession::sendPlayResponse(const string &err,const RtmpMediaSource::Ptr ...@@ -266,6 +271,8 @@ void RtmpSession::sendPlayResponse(const string &err,const RtmpMediaSource::Ptr
onSendMedia(pkt); onSendMedia(pkt);
}); });
//音频同步于视频
_stamp[0].syncTo(_stamp[1]);
_pRingReader = src->getRing()->attach(getPoller()); _pRingReader = src->getRing()->attach(getPoller());
weak_ptr<RtmpSession> weakSelf = dynamic_pointer_cast<RtmpSession>(shared_from_this()); weak_ptr<RtmpSession> weakSelf = dynamic_pointer_cast<RtmpSession>(shared_from_this());
_pRingReader->setReadCB([weakSelf](const RtmpMediaSource::RingDataType &pkt) { _pRingReader->setReadCB([weakSelf](const RtmpMediaSource::RingDataType &pkt) {
......
...@@ -44,6 +44,7 @@ inline void AMFValue::destroy() { ...@@ -44,6 +44,7 @@ inline void AMFValue::destroy() {
break; break;
} }
} }
inline void AMFValue::init() { inline void AMFValue::init() {
switch (_type) { switch (_type) {
case AMF_OBJECT: case AMF_OBJECT:
...@@ -60,14 +61,13 @@ inline void AMFValue::init() { ...@@ -60,14 +61,13 @@ inline void AMFValue::init() {
default: default:
break; break;
} }
} }
AMFValue::AMFValue(AMFType type) : AMFValue::AMFValue(AMFType type) :
_type(type) { _type(type) {
init(); init();
} }
AMFValue::~AMFValue() { AMFValue::~AMFValue() {
destroy(); destroy();
} }
...@@ -78,7 +78,6 @@ AMFValue::AMFValue(const char *s) : ...@@ -78,7 +78,6 @@ AMFValue::AMFValue(const char *s) :
*_value.string = s; *_value.string = s;
} }
AMFValue::AMFValue(const std::string &s) : AMFValue::AMFValue(const std::string &s) :
_type(AMF_STRING) { _type(AMF_STRING) {
init(); init();
...@@ -108,15 +107,7 @@ AMFValue::AMFValue(const AMFValue &from) : ...@@ -108,15 +107,7 @@ AMFValue::AMFValue(const AMFValue &from) :
*this = from; *this = from;
} }
AMFValue::AMFValue(AMFValue &&from) { AMFValue& AMFValue::operator = (const AMFValue &from) {
*this = std::forward<AMFValue>(from);
}
AMFValue& AMFValue::operator =(const AMFValue &from) {
return *this = const_cast<AMFValue &&>(from);
}
AMFValue& AMFValue::operator =(AMFValue &&from) {
destroy(); destroy();
_type = from._type; _type = from._type;
init(); init();
...@@ -144,7 +135,6 @@ AMFValue& AMFValue::operator =(AMFValue &&from) { ...@@ -144,7 +135,6 @@ AMFValue& AMFValue::operator =(AMFValue &&from) {
break; break;
} }
return *this; return *this;
} }
void AMFValue::clear() { void AMFValue::clear() {
...@@ -236,7 +226,6 @@ string AMFValue::to_string() const{ ...@@ -236,7 +226,6 @@ string AMFValue::to_string() const{
} }
} }
const AMFValue& AMFValue::operator[](const char *str) const { const AMFValue& AMFValue::operator[](const char *str) const {
if (_type != AMF_OBJECT && _type != AMF_ECMA_ARRAY) { if (_type != AMF_OBJECT && _type != AMF_ECMA_ARRAY) {
throw std::runtime_error("AMF not a object"); throw std::runtime_error("AMF not a object");
...@@ -338,6 +327,7 @@ AMFEncoder & AMFEncoder::operator <<(const char *s) { ...@@ -338,6 +327,7 @@ AMFEncoder & AMFEncoder::operator <<(const char *s) {
} }
return *this; return *this;
} }
AMFEncoder & AMFEncoder::operator <<(const std::string &s) { AMFEncoder & AMFEncoder::operator <<(const std::string &s) {
if (!s.empty()) { if (!s.empty()) {
buf += char(AMF0_STRING); buf += char(AMF0_STRING);
...@@ -349,18 +339,22 @@ AMFEncoder & AMFEncoder::operator <<(const std::string &s) { ...@@ -349,18 +339,22 @@ AMFEncoder & AMFEncoder::operator <<(const std::string &s) {
} }
return *this; return *this;
} }
AMFEncoder & AMFEncoder::operator <<(std::nullptr_t) { AMFEncoder & AMFEncoder::operator <<(std::nullptr_t) {
buf += char(AMF0_NULL); buf += char(AMF0_NULL);
return *this; return *this;
} }
AMFEncoder & AMFEncoder::write_undefined() { AMFEncoder & AMFEncoder::write_undefined() {
buf += char(AMF0_UNDEFINED); buf += char(AMF0_UNDEFINED);
return *this; return *this;
} }
AMFEncoder & AMFEncoder::operator <<(const int n){ AMFEncoder & AMFEncoder::operator <<(const int n){
return (*this) << (double)n; return (*this) << (double)n;
} }
AMFEncoder & AMFEncoder::operator <<(const double n) { AMFEncoder & AMFEncoder::operator <<(const double n) {
buf += char(AMF0_NUMBER); buf += char(AMF0_NUMBER);
uint64_t encoded = 0; uint64_t encoded = 0;
......
...@@ -40,6 +40,7 @@ public: ...@@ -40,6 +40,7 @@ public:
typedef std::map<std::string, AMFValue> mapType; typedef std::map<std::string, AMFValue> mapType;
typedef std::vector<AMFValue> arrayType; typedef std::vector<AMFValue> arrayType;
~AMFValue();
AMFValue(AMFType type = AMF_NULL); AMFValue(AMFType type = AMF_NULL);
AMFValue(const char *s); AMFValue(const char *s);
AMFValue(const std::string &s); AMFValue(const std::string &s);
...@@ -47,10 +48,7 @@ public: ...@@ -47,10 +48,7 @@ public:
AMFValue(int i); AMFValue(int i);
AMFValue(bool b); AMFValue(bool b);
AMFValue(const AMFValue &from); AMFValue(const AMFValue &from);
AMFValue(AMFValue &&from); AMFValue &operator = (const AMFValue &from);
AMFValue &operator =(const AMFValue &from);
AMFValue &operator =(AMFValue &&from);
~AMFValue();
void clear(); void clear();
AMFType type() const ; AMFType type() const ;
......
...@@ -8,18 +8,211 @@ ...@@ -8,18 +8,211 @@
* may be found in the AUTHORS file in the root of the source tree. * may be found in the AUTHORS file in the root of the source tree.
*/ */
#if defined(ENABLE_RTPPROXY)
#include "Decoder.h" #include "Decoder.h"
#include "PSDecoder.h" #include "PSDecoder.h"
#include "TSDecoder.h" #include "TSDecoder.h"
#include "Extension/H264.h"
#include "Extension/H265.h"
#include "Extension/AAC.h"
#include "Extension/G711.h"
#if defined(ENABLE_RTPPROXY) || defined(ENABLE_HLS)
#include "mpeg-ts-proto.h"
#endif
namespace mediakit { namespace mediakit {
Decoder::Ptr Decoder::createDecoder(Decoder::Type type) { static Decoder::Ptr createDecoder_l(DecoderImp::Type type) {
switch (type){ switch (type){
case decoder_ps : return std::make_shared<PSDecoder>(); case DecoderImp::decoder_ps:
case decoder_ts : return std::make_shared<TSDecoder>(); #ifdef ENABLE_RTPPROXY
default : return nullptr; return std::make_shared<PSDecoder>();
#else
WarnL << "创建ps解复用器失败,请打开ENABLE_RTPPROXY然后重新编译";
return nullptr;
#endif//ENABLE_RTPPROXY
case DecoderImp::decoder_ts:
#ifdef ENABLE_HLS
return std::make_shared<TSDecoder>();
#else
WarnL << "创建mpegts解复用器失败,请打开ENABLE_HLS然后重新编译";
return nullptr;
#endif//ENABLE_HLS
default: return nullptr;
}
}
/////////////////////////////////////////////////////////////
DecoderImp::Ptr DecoderImp::createDecoder(Type type, MediaSinkInterface *sink){
auto decoder = createDecoder_l(type);
if(!decoder){
return nullptr;
}
return DecoderImp::Ptr(new DecoderImp(decoder, sink));
}
int DecoderImp::input(const uint8_t *data, int bytes){
return _decoder->input(data, bytes);
}
DecoderImp::DecoderImp(const Decoder::Ptr &decoder, MediaSinkInterface *sink){
_decoder = decoder;
_sink = sink;
_decoder->setOnDecode([this](int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes){
onDecode(stream,codecid,flags,pts,dts,data,bytes);
});
}
#if defined(ENABLE_RTPPROXY) || defined(ENABLE_HLS)
#define SWITCH_CASE(codec_id) case codec_id : return #codec_id
static const char *getCodecName(int codec_id) {
switch (codec_id) {
SWITCH_CASE(PSI_STREAM_MPEG1);
SWITCH_CASE(PSI_STREAM_MPEG2);
SWITCH_CASE(PSI_STREAM_AUDIO_MPEG1);
SWITCH_CASE(PSI_STREAM_MP3);
SWITCH_CASE(PSI_STREAM_AAC);
SWITCH_CASE(PSI_STREAM_MPEG4);
SWITCH_CASE(PSI_STREAM_MPEG4_AAC_LATM);
SWITCH_CASE(PSI_STREAM_H264);
SWITCH_CASE(PSI_STREAM_MPEG4_AAC);
SWITCH_CASE(PSI_STREAM_H265);
SWITCH_CASE(PSI_STREAM_AUDIO_AC3);
SWITCH_CASE(PSI_STREAM_AUDIO_EAC3);
SWITCH_CASE(PSI_STREAM_AUDIO_DTS);
SWITCH_CASE(PSI_STREAM_VIDEO_DIRAC);
SWITCH_CASE(PSI_STREAM_VIDEO_VC1);
SWITCH_CASE(PSI_STREAM_VIDEO_SVAC);
SWITCH_CASE(PSI_STREAM_AUDIO_SVAC);
SWITCH_CASE(PSI_STREAM_AUDIO_G711A);
SWITCH_CASE(PSI_STREAM_AUDIO_G711U);
SWITCH_CASE(PSI_STREAM_AUDIO_G722);
SWITCH_CASE(PSI_STREAM_AUDIO_G723);
SWITCH_CASE(PSI_STREAM_AUDIO_G729);
default : return "unknown codec";
}
}
void FrameMerger::inputFrame(const Frame::Ptr &frame,const function<void(uint32_t dts,uint32_t pts,const Buffer::Ptr &buffer)> &cb){
if (!_frameCached.empty() && _frameCached.back()->dts() != frame->dts()) {
Frame::Ptr back = _frameCached.back();
Buffer::Ptr merged_frame = back;
if(_frameCached.size() != 1){
string merged;
_frameCached.for_each([&](const Frame::Ptr &frame){
merged.append(frame->data(),frame->size());
});
merged_frame = std::make_shared<BufferString>(std::move(merged));
}
cb(back->dts(),back->pts(),merged_frame);
_frameCached.clear();
}
_frameCached.emplace_back(Frame::getCacheAbleFrame(frame));
}
void DecoderImp::onDecode(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes) {
pts /= 90;
dts /= 90;
switch (codecid) {
case PSI_STREAM_H264: {
if (!_codecid_video) {
//获取到视频
_codecid_video = codecid;
InfoL<< "got video track: H264";
auto track = std::make_shared<H264Track>();
onTrack(track);
}
if (codecid != _codecid_video) {
WarnL<< "video track change to H264 from codecid:" << getCodecName(_codecid_video);
return;
}
auto frame = std::make_shared<H264FrameNoCacheAble>((char *) data, bytes, dts, pts,0);
_merger.inputFrame(frame,[this](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer) {
onFrame(std::make_shared<H264FrameNoCacheAble>(buffer->data(), buffer->size(), dts, pts, prefixSize(buffer->data(), buffer->size())));
});
break;
}
case PSI_STREAM_H265: {
if (!_codecid_video) {
//获取到视频
_codecid_video = codecid;
InfoL<< "got video track: H265";
auto track = std::make_shared<H265Track>();
onTrack(track);
}
if (codecid != _codecid_video) {
WarnL<< "video track change to H265 from codecid:" << getCodecName(_codecid_video);
return;
}
auto frame = std::make_shared<H265FrameNoCacheAble>((char *) data, bytes, dts, pts, 0);
_merger.inputFrame(frame,[this](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer) {
onFrame(std::make_shared<H265FrameNoCacheAble>(buffer->data(), buffer->size(), dts, pts, prefixSize(buffer->data(), buffer->size())));
});
break;
}
case PSI_STREAM_AAC: {
if (!_codecid_audio) {
//获取到音频
_codecid_audio = codecid;
InfoL<< "got audio track: AAC";
auto track = std::make_shared<AACTrack>();
onTrack(track);
}
if (codecid != _codecid_audio) {
WarnL<< "audio track change to AAC from codecid:" << getCodecName(_codecid_audio);
return;
}
onFrame(std::make_shared<AACFrameNoCacheAble>((char *) data, bytes, dts, 0, 7));
break;
} }
case PSI_STREAM_AUDIO_G711A:
case PSI_STREAM_AUDIO_G711U: {
auto codec = codecid == PSI_STREAM_AUDIO_G711A ? CodecG711A : CodecG711U;
if (!_codecid_audio) {
//获取到音频
_codecid_audio = codecid;
InfoL<< "got audio track: G711";
//G711传统只支持 8000/1/16的规格,FFmpeg貌似做了扩展,但是这里不管它了
auto track = std::make_shared<G711Track>(codec, 8000, 1, 16);
onTrack(track);
}
if (codecid != _codecid_audio) {
WarnL<< "audio track change to G711 from codecid:" << getCodecName(_codecid_audio);
return;
}
auto frame = std::make_shared<G711FrameNoCacheAble>((char *) data, bytes, dts);
frame->setCodec(codec);
onFrame(frame);
break;
}
default:
if(codecid != 0){
WarnL<< "unsupported codec type:" << getCodecName(codecid) << " " << (int)codecid;
}
break;
}
}
#else
void DecoderImp::onDecode(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes) {}
#endif
void DecoderImp::onTrack(const Track::Ptr &track) {
_sink->addTrack(track);
}
void DecoderImp::onFrame(const Frame::Ptr &frame) {
_sink->inputFrame(frame);
} }
}//namespace mediakit }//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)
...@@ -11,31 +11,66 @@ ...@@ -11,31 +11,66 @@
#ifndef ZLMEDIAKIT_DECODER_H #ifndef ZLMEDIAKIT_DECODER_H
#define ZLMEDIAKIT_DECODER_H #define ZLMEDIAKIT_DECODER_H
#if defined(ENABLE_RTPPROXY)
#include <stdint.h> #include <stdint.h>
#include <memory> #include <memory>
#include <functional> #include <functional>
#include "Decoder.h" #include "Decoder.h"
#include "Common/MediaSink.h"
using namespace std; using namespace std;
namespace mediakit { namespace mediakit {
class Decoder { class Decoder {
public: public:
typedef std::shared_ptr<Decoder> Ptr; typedef std::shared_ptr<Decoder> Ptr;
typedef enum {
decoder_ts = 0,
decoder_ps
}Type;
typedef std::function<void(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes)> onDecode; typedef std::function<void(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes)> onDecode;
virtual int input(const uint8_t *data, int bytes) = 0; virtual int input(const uint8_t *data, int bytes) = 0;
virtual void setOnDecode(const onDecode &decode) = 0; virtual void setOnDecode(const onDecode &decode) = 0;
static Ptr createDecoder(Type type);
protected: protected:
Decoder() = default; Decoder() = default;
virtual ~Decoder() = default; virtual ~Decoder() = default;
}; };
/**
* 合并一些时间戳相同的frame
*/
class FrameMerger {
public:
FrameMerger() = default;
~FrameMerger() = default;
void inputFrame(const Frame::Ptr &frame,const function<void(uint32_t dts,uint32_t pts,const Buffer::Ptr &buffer)> &cb);
private:
List<Frame::Ptr> _frameCached;
};
class DecoderImp{
public:
typedef enum {
decoder_ts = 0,
decoder_ps
}Type;
typedef std::shared_ptr<DecoderImp> Ptr;
~DecoderImp() = default;
static Ptr createDecoder(Type type, MediaSinkInterface *sink);
int input(const uint8_t *data, int bytes);
protected:
void onTrack(const Track::Ptr &track);
void onFrame(const Frame::Ptr &frame);
private:
DecoderImp(const Decoder::Ptr &decoder, MediaSinkInterface *sink);
void onDecode(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes);
private:
Decoder::Ptr _decoder;
MediaSinkInterface *_sink;
FrameMerger _merger;
int _codecid_video = 0;
int _codecid_audio = 0;
};
}//namespace mediakit }//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)
#endif //ZLMEDIAKIT_DECODER_H #endif //ZLMEDIAKIT_DECODER_H
...@@ -16,8 +16,9 @@ using namespace toolkit; ...@@ -16,8 +16,9 @@ using namespace toolkit;
namespace mediakit{ namespace mediakit{
RtpDecoder::RtpDecoder() { RtpDecoder::RtpDecoder(const char *codec) {
_buffer = std::make_shared<BufferRaw>(); _buffer = std::make_shared<BufferRaw>();
_codec = codec;
} }
RtpDecoder::~RtpDecoder() { RtpDecoder::~RtpDecoder() {
...@@ -46,7 +47,7 @@ void RtpDecoder::decodeRtp(const void *data, int bytes) { ...@@ -46,7 +47,7 @@ void RtpDecoder::decodeRtp(const void *data, int bytes) {
uint8_t rtp_type = 0x7F & ((uint8_t *) data)[1]; uint8_t rtp_type = 0x7F & ((uint8_t *) data)[1];
InfoL << "rtp type:" << (int) rtp_type; InfoL << "rtp type:" << (int) rtp_type;
_rtp_decoder = rtp_payload_decode_create(rtp_type, "MP2P", &s_func, this); _rtp_decoder = rtp_payload_decode_create(rtp_type, _codec.data(), &s_func, this);
if (!_rtp_decoder) { if (!_rtp_decoder) {
WarnL << "unsupported rtp type:" << (int) rtp_type << ",size:" << bytes << ",hexdump" << hexdump(data, bytes > 16 ? 16 : bytes); WarnL << "unsupported rtp type:" << (int) rtp_type << ",size:" << bytes << ",hexdump" << hexdump(data, bytes > 16 ? 16 : bytes);
} }
......
...@@ -19,14 +19,15 @@ namespace mediakit{ ...@@ -19,14 +19,15 @@ namespace mediakit{
class RtpDecoder { class RtpDecoder {
public: public:
RtpDecoder(); RtpDecoder(const char *codec = "MP2P");
virtual ~RtpDecoder(); virtual ~RtpDecoder();
protected:
void decodeRtp(const void *data, int bytes); void decodeRtp(const void *data, int bytes);
protected:
virtual void onRtpDecode(const uint8_t *packet, int bytes, uint32_t timestamp, int flags) = 0; virtual void onRtpDecode(const uint8_t *packet, int bytes, uint32_t timestamp, int flags) = 0;
private: private:
void *_rtp_decoder = nullptr; void *_rtp_decoder = nullptr;
BufferRaw::Ptr _buffer; BufferRaw::Ptr _buffer;
string _codec;
}; };
}//namespace mediakit }//namespace mediakit
......
...@@ -9,44 +9,13 @@ ...@@ -9,44 +9,13 @@
*/ */
#if defined(ENABLE_RTPPROXY) #if defined(ENABLE_RTPPROXY)
#include "mpeg-ts-proto.h"
#include "RtpProcess.h" #include "RtpProcess.h"
#include "Util/File.h" #include "Util/File.h"
#include "Extension/H265.h" #include "Http/HttpTSPlayer.h"
#include "Extension/AAC.h"
#include "Extension/G711.h"
#define RTP_APP_NAME "rtp" #define RTP_APP_NAME "rtp"
namespace mediakit{ namespace mediakit{
/**
* 合并一些时间戳相同的frame
*/
class FrameMerger {
public:
FrameMerger() = default;
virtual ~FrameMerger() = default;
void inputFrame(const Frame::Ptr &frame,const function<void(uint32_t dts,uint32_t pts,const Buffer::Ptr &buffer)> &cb){
if (!_frameCached.empty() && _frameCached.back()->dts() != frame->dts()) {
Frame::Ptr back = _frameCached.back();
Buffer::Ptr merged_frame = back;
if(_frameCached.size() != 1){
string merged;
_frameCached.for_each([&](const Frame::Ptr &frame){
merged.append(frame->data(),frame->size());
});
merged_frame = std::make_shared<BufferString>(std::move(merged));
}
cb(back->dts(),back->pts(),merged_frame);
_frameCached.clear();
}
_frameCached.emplace_back(Frame::getCacheAbleFrame(frame));
}
private:
List<Frame::Ptr> _frameCached;
};
string printSSRC(uint32_t ui32Ssrc) { string printSSRC(uint32_t ui32Ssrc) {
char tmp[9] = { 0 }; char tmp[9] = { 0 };
ui32Ssrc = htonl(ui32Ssrc); ui32Ssrc = htonl(ui32Ssrc);
...@@ -101,7 +70,6 @@ RtpProcess::RtpProcess(uint32_t ssrc) { ...@@ -101,7 +70,6 @@ RtpProcess::RtpProcess(uint32_t ssrc) {
}); });
} }
} }
_merger = std::make_shared<FrameMerger>();
} }
RtpProcess::~RtpProcess() { RtpProcess::~RtpProcess() {
...@@ -147,7 +115,6 @@ bool RtpProcess::inputRtp(const Socket::Ptr &sock, const char *data, int data_le ...@@ -147,7 +115,6 @@ bool RtpProcess::inputRtp(const Socket::Ptr &sock, const char *data, int data_le
} }
_total_bytes += data_len; _total_bytes += data_len;
_last_rtp_time.resetTime();
bool ret = handleOneRtp(0,_track,(unsigned char *)data,data_len); bool ret = handleOneRtp(0,_track,(unsigned char *)data,data_len);
if(dts_out){ if(dts_out){
*dts_out = _dts; *dts_out = _dts;
...@@ -157,12 +124,12 @@ bool RtpProcess::inputRtp(const Socket::Ptr &sock, const char *data, int data_le ...@@ -157,12 +124,12 @@ bool RtpProcess::inputRtp(const Socket::Ptr &sock, const char *data, int data_le
//判断是否为ts负载 //判断是否为ts负载
static inline bool checkTS(const uint8_t *packet, int bytes){ static inline bool checkTS(const uint8_t *packet, int bytes){
return bytes % 188 == 0 && packet[0] == 0x47; return bytes % TS_PACKET_SIZE == 0 && packet[0] == TS_SYNC_BYTE;
} }
void RtpProcess::onRtpSorted(const RtpPacket::Ptr &rtp, int) { void RtpProcess::onRtpSorted(const RtpPacket::Ptr &rtp, int) {
if(rtp->sequence != _sequence + 1 && rtp->sequence != 0){ if(rtp->sequence != _sequence + 1 && _sequence != 0){
WarnP(this) << rtp->sequence << " != " << _sequence << "+1"; WarnP(this) << "rtp丢包:" << rtp->sequence << " != " << _sequence << "+1" << ",公网环境下请使用tcp方式推流";
} }
_sequence = rtp->sequence; _sequence = rtp->sequence;
if(_save_file_rtp){ if(_save_file_rtp){
...@@ -179,155 +146,38 @@ void RtpProcess::onRtpDecode(const uint8_t *packet, int bytes, uint32_t timestam ...@@ -179,155 +146,38 @@ void RtpProcess::onRtpDecode(const uint8_t *packet, int bytes, uint32_t timestam
fwrite((uint8_t *)packet,bytes, 1, _save_file_ps.get()); fwrite((uint8_t *)packet,bytes, 1, _save_file_ps.get());
} }
if(!_decoder){ if (!_decoder) {
//创建解码器 //创建解码器
if(checkTS(packet, bytes)){ if (checkTS(packet, bytes)) {
//猜测是ts负载 //猜测是ts负载
InfoP(this) << "judged to be TS"; InfoP(this) << "judged to be TS";
_decoder = Decoder::createDecoder(Decoder::decoder_ts); _decoder = DecoderImp::createDecoder(DecoderImp::decoder_ts, this);
}else{ } else {
//猜测是ps负载 //猜测是ps负载
InfoP(this) << "judged to be PS"; InfoP(this) << "judged to be PS";
_decoder = Decoder::createDecoder(Decoder::decoder_ps); _decoder = DecoderImp::createDecoder(DecoderImp::decoder_ps, this);
} }
_decoder->setOnDecode([this](int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes){
onDecode(stream,codecid,flags,pts,dts,data,bytes);
});
} }
auto ret = _decoder->input((uint8_t *)packet,bytes); if (_decoder) {
if(ret != bytes){ auto ret = _decoder->input((uint8_t *) packet, bytes);
if (ret != bytes) {
WarnP(this) << ret << " != " << bytes << " " << flags; WarnP(this) << ret << " != " << bytes << " " << flags;
} }
}
#define SWITCH_CASE(codec_id) case codec_id : return #codec_id
static const char *getCodecName(int codec_id) {
switch (codec_id) {
SWITCH_CASE(PSI_STREAM_MPEG1);
SWITCH_CASE(PSI_STREAM_MPEG2);
SWITCH_CASE(PSI_STREAM_AUDIO_MPEG1);
SWITCH_CASE(PSI_STREAM_MP3);
SWITCH_CASE(PSI_STREAM_AAC);
SWITCH_CASE(PSI_STREAM_MPEG4);
SWITCH_CASE(PSI_STREAM_MPEG4_AAC_LATM);
SWITCH_CASE(PSI_STREAM_H264);
SWITCH_CASE(PSI_STREAM_MPEG4_AAC);
SWITCH_CASE(PSI_STREAM_H265);
SWITCH_CASE(PSI_STREAM_AUDIO_AC3);
SWITCH_CASE(PSI_STREAM_AUDIO_EAC3);
SWITCH_CASE(PSI_STREAM_AUDIO_DTS);
SWITCH_CASE(PSI_STREAM_VIDEO_DIRAC);
SWITCH_CASE(PSI_STREAM_VIDEO_VC1);
SWITCH_CASE(PSI_STREAM_VIDEO_SVAC);
SWITCH_CASE(PSI_STREAM_AUDIO_SVAC);
SWITCH_CASE(PSI_STREAM_AUDIO_G711A);
SWITCH_CASE(PSI_STREAM_AUDIO_G711U);
SWITCH_CASE(PSI_STREAM_AUDIO_G722);
SWITCH_CASE(PSI_STREAM_AUDIO_G723);
SWITCH_CASE(PSI_STREAM_AUDIO_G729);
default : return "unknown codec";
} }
} }
void RtpProcess::onDecode(int stream,int codecid,int flags,int64_t pts,int64_t dts,const void *data,int bytes) { void RtpProcess::inputFrame(const Frame::Ptr &frame){
pts /= 90; _last_rtp_time.resetTime();
dts /= 90; _dts = frame->dts();
_stamps[codecid].revise(dts,pts,dts,pts,false); if (_save_file_video && frame->getTrackType() == TrackVideo) {
fwrite((uint8_t *) frame->data(), frame->size(), 1, _save_file_video.get());
switch (codecid) {
case PSI_STREAM_H264: {
_dts = dts;
if (!_codecid_video) {
//获取到视频
_codecid_video = codecid;
InfoP(this) << "got video track: H264";
auto track = std::make_shared<H264Track>();
_muxer->addTrack(track);
}
if (codecid != _codecid_video) {
WarnP(this) << "video track change to H264 from codecid:" << getCodecName(_codecid_video);
return;
}
if(_save_file_video){
fwrite((uint8_t *)data,bytes, 1, _save_file_video.get());
}
auto frame = std::make_shared<H264FrameNoCacheAble>((char *) data, bytes, dts, pts,0);
_merger->inputFrame(frame,[this](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer) {
_muxer->inputFrame(std::make_shared<H264FrameNoCacheAble>(buffer->data(), buffer->size(), dts, pts,4));
});
break;
}
case PSI_STREAM_H265: {
_dts = dts;
if (!_codecid_video) {
//获取到视频
_codecid_video = codecid;
InfoP(this) << "got video track: H265";
auto track = std::make_shared<H265Track>();
_muxer->addTrack(track);
}
if (codecid != _codecid_video) {
WarnP(this) << "video track change to H265 from codecid:" << getCodecName(_codecid_video);
return;
}
if(_save_file_video){
fwrite((uint8_t *)data,bytes, 1, _save_file_video.get());
}
auto frame = std::make_shared<H265FrameNoCacheAble>((char *) data, bytes, dts, pts, 0);
_merger->inputFrame(frame,[this](uint32_t dts, uint32_t pts, const Buffer::Ptr &buffer) {
_muxer->inputFrame(std::make_shared<H265FrameNoCacheAble>(buffer->data(), buffer->size(), dts, pts, 4));
});
break;
}
case PSI_STREAM_AAC: {
_dts = dts;
if (!_codecid_audio) {
//获取到音频
_codecid_audio = codecid;
InfoP(this) << "got audio track: AAC";
auto track = std::make_shared<AACTrack>();
_muxer->addTrack(track);
}
if (codecid != _codecid_audio) {
WarnP(this) << "audio track change to AAC from codecid:" << getCodecName(_codecid_audio);
return;
}
_muxer->inputFrame(std::make_shared<AACFrameNoCacheAble>((char *) data, bytes, dts, 0, 7));
break;
} }
_muxer->inputFrame(frame);
}
case PSI_STREAM_AUDIO_G711A: void RtpProcess::addTrack(const Track::Ptr & track){
case PSI_STREAM_AUDIO_G711U: {
_dts = dts;
auto codec = codecid == PSI_STREAM_AUDIO_G711A ? CodecG711A : CodecG711U;
if (!_codecid_audio) {
//获取到音频
_codecid_audio = codecid;
InfoP(this) << "got audio track: G711";
//G711传统只支持 8000/1/16的规格,FFmpeg貌似做了扩展,但是这里不管它了
auto track = std::make_shared<G711Track>(codec, 8000, 1, 16);
_muxer->addTrack(track); _muxer->addTrack(track);
}
if (codecid != _codecid_audio) {
WarnP(this) << "audio track change to G711 from codecid:" << getCodecName(_codecid_audio);
return;
}
_muxer->inputFrame(std::make_shared<G711FrameNoCacheAble>(codec, (char *) data, bytes, dts));
break;
}
default:
if(codecid != 0){
WarnP(this) << "unsupported codec type:" << getCodecName(codecid) << " " << (int)codecid;
}
return;
}
} }
bool RtpProcess::alive() { bool RtpProcess::alive() {
...@@ -412,6 +262,5 @@ void RtpProcess::emitOnPublish() { ...@@ -412,6 +262,5 @@ void RtpProcess::emitOnPublish() {
} }
} }
}//namespace mediakit }//namespace mediakit
#endif//defined(ENABLE_RTPPROXY) #endif//defined(ENABLE_RTPPROXY)
\ No newline at end of file
...@@ -23,8 +23,7 @@ using namespace mediakit; ...@@ -23,8 +23,7 @@ using namespace mediakit;
namespace mediakit{ namespace mediakit{
string printSSRC(uint32_t ui32Ssrc); string printSSRC(uint32_t ui32Ssrc);
class FrameMerger; class RtpProcess : public RtpReceiver , public RtpDecoder, public SockInfo, public MediaSinkInterface, public std::enable_shared_from_this<RtpProcess>{
class RtpProcess : public RtpReceiver , public RtpDecoder, public SockInfo, public std::enable_shared_from_this<RtpProcess>{
public: public:
typedef std::shared_ptr<RtpProcess> Ptr; typedef std::shared_ptr<RtpProcess> Ptr;
RtpProcess(uint32_t ssrc); RtpProcess(uint32_t ssrc);
...@@ -44,7 +43,9 @@ public: ...@@ -44,7 +43,9 @@ public:
protected: protected:
void onRtpSorted(const RtpPacket::Ptr &rtp, int track_index) override ; void onRtpSorted(const RtpPacket::Ptr &rtp, int track_index) override ;
void onRtpDecode(const uint8_t *packet, int bytes, uint32_t timestamp, int flags) override; void onRtpDecode(const uint8_t *packet, int bytes, uint32_t timestamp, int flags) override;
void onDecode(int stream,int codecid,int flags,int64_t pts,int64_t dts, const void *data,int bytes); void inputFrame(const Frame::Ptr &frame) override;
void addTrack(const Track::Ptr & track) override;
void resetTracks() override {};
private: private:
void emitOnPublish(); void emitOnPublish();
...@@ -57,14 +58,10 @@ private: ...@@ -57,14 +58,10 @@ private:
SdpTrack::Ptr _track; SdpTrack::Ptr _track;
struct sockaddr *_addr = nullptr; struct sockaddr *_addr = nullptr;
uint16_t _sequence = 0; uint16_t _sequence = 0;
int _codecid_video = 0;
int _codecid_audio = 0;
MultiMediaSourceMuxer::Ptr _muxer; MultiMediaSourceMuxer::Ptr _muxer;
std::shared_ptr<FrameMerger> _merger;
Ticker _last_rtp_time; Ticker _last_rtp_time;
unordered_map<int,Stamp> _stamps;
uint32_t _dts = 0; uint32_t _dts = 0;
Decoder::Ptr _decoder; DecoderImp::Ptr _decoder;
std::weak_ptr<MediaSourceEvent> _listener; std::weak_ptr<MediaSourceEvent> _listener;
MediaInfo _media_info; MediaInfo _media_info;
uint64_t _total_bytes = 0; uint64_t _total_bytes = 0;
......
...@@ -8,34 +8,38 @@ ...@@ -8,34 +8,38 @@
* may be found in the AUTHORS file in the root of the source tree. * may be found in the AUTHORS file in the root of the source tree.
*/ */
#if defined(ENABLE_RTPPROXY)
#include "mpeg-ts.h"
#include "TSDecoder.h" #include "TSDecoder.h"
#define TS_PACKET_SIZE 188
namespace mediakit { namespace mediakit {
bool TSSegment::isTSPacket(const char *data, int len){
return len == TS_PACKET_SIZE && ((uint8_t*)data)[0] == TS_SYNC_BYTE;
}
void TSSegment::setOnSegment(const TSSegment::onSegment &cb) { void TSSegment::setOnSegment(const TSSegment::onSegment &cb) {
_onSegment = cb; _onSegment = cb;
} }
int64_t TSSegment::onRecvHeader(const char *data, uint64_t len) { int64_t TSSegment::onRecvHeader(const char *data, uint64_t len) {
if (!isTSPacket(data, len)) {
WarnL << "不是ts包:" << (int) (data[0]) << " " << len;
return 0;
}
_onSegment(data, len); _onSegment(data, len);
return 0; return 0;
} }
const char *TSSegment::onSearchPacketTail(const char *data, int len) { const char *TSSegment::onSearchPacketTail(const char *data, int len) {
if (len < _size + 1) { if (len < _size + 1) {
if (len == _size && ((uint8_t *) data)[0] == 0x47) { if (len == _size && ((uint8_t *) data)[0] == TS_SYNC_BYTE) {
return data + _size; return data + _size;
} }
return nullptr; return nullptr;
} }
//下一个包头 //下一个包头
if (((uint8_t *) data)[_size] == 0x47) { if (((uint8_t *) data)[_size] == TS_SYNC_BYTE) {
return data + _size; return data + _size;
} }
auto pos = memchr(data + _size, TS_SYNC_BYTE, len - _size);
auto pos = memchr(data + _size, 0x47, len - _size);
if (pos) { if (pos) {
return (char *) pos; return (char *) pos;
} }
...@@ -44,12 +48,10 @@ const char *TSSegment::onSearchPacketTail(const char *data, int len) { ...@@ -44,12 +48,10 @@ const char *TSSegment::onSearchPacketTail(const char *data, int len) {
//////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////
TSDecoder::TSDecoder() : _ts_segment(TS_PACKET_SIZE) { #if defined(ENABLE_HLS)
#include "mpeg-ts.h"
TSDecoder::TSDecoder() : _ts_segment() {
_ts_segment.setOnSegment([this](const char *data,uint64_t len){ _ts_segment.setOnSegment([this](const char *data,uint64_t len){
if(((uint8_t*)data)[0] != 0x47 || len != TS_PACKET_SIZE ){
WarnL << "不是ts包:" << (int)(data[0]) << " " << len;
return;
}
ts_demuxer_input(_demuxer_ctx,(uint8_t*)data,len); ts_demuxer_input(_demuxer_ctx,(uint8_t*)data,len);
}); });
_demuxer_ctx = ts_demuxer_create([](void* param, int program, int stream, int codecid, int flags, int64_t pts, int64_t dts, const void* data, size_t bytes){ _demuxer_ctx = ts_demuxer_create([](void* param, int program, int stream, int codecid, int flags, int64_t pts, int64_t dts, const void* data, size_t bytes){
...@@ -66,8 +68,8 @@ TSDecoder::~TSDecoder() { ...@@ -66,8 +68,8 @@ TSDecoder::~TSDecoder() {
} }
int TSDecoder::input(const uint8_t *data, int bytes) { int TSDecoder::input(const uint8_t *data, int bytes) {
if(bytes == TS_PACKET_SIZE && ((uint8_t*)data)[0] == 0x47){ if (TSSegment::isTSPacket((char *)data, bytes)) {
return ts_demuxer_input(_demuxer_ctx,(uint8_t*)data,bytes); return ts_demuxer_input(_demuxer_ctx, (uint8_t *) data, bytes);
} }
_ts_segment.input((char*)data,bytes); _ts_segment.input((char*)data,bytes);
return bytes; return bytes;
...@@ -76,6 +78,6 @@ int TSDecoder::input(const uint8_t *data, int bytes) { ...@@ -76,6 +78,6 @@ int TSDecoder::input(const uint8_t *data, int bytes) {
void TSDecoder::setOnDecode(const Decoder::onDecode &decode) { void TSDecoder::setOnDecode(const Decoder::onDecode &decode) {
_on_decode = decode; _on_decode = decode;
} }
#endif//defined(ENABLE_HLS)
}//namespace mediakit }//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)
\ No newline at end of file
...@@ -11,7 +11,6 @@ ...@@ -11,7 +11,6 @@
#ifndef ZLMEDIAKIT_TSDECODER_H #ifndef ZLMEDIAKIT_TSDECODER_H
#define ZLMEDIAKIT_TSDECODER_H #define ZLMEDIAKIT_TSDECODER_H
#if defined(ENABLE_RTPPROXY)
#include "Util/logger.h" #include "Util/logger.h"
#include "Http/HttpRequestSplitter.h" #include "Http/HttpRequestSplitter.h"
#include "Decoder.h" #include "Decoder.h"
...@@ -19,13 +18,17 @@ ...@@ -19,13 +18,17 @@
using namespace toolkit; using namespace toolkit;
namespace mediakit { namespace mediakit {
//ts包拆分器 #define TS_PACKET_SIZE 188
#define TS_SYNC_BYTE 0x47
//TS包分割器,用于split一个一个的ts包
class TSSegment : public HttpRequestSplitter { class TSSegment : public HttpRequestSplitter {
public: public:
typedef std::function<void(const char *data,uint64_t len)> onSegment; typedef std::function<void(const char *data,uint64_t len)> onSegment;
TSSegment(int size = 188) : _size(size){} TSSegment(int size = TS_PACKET_SIZE) : _size(size){}
~TSSegment(){} ~TSSegment(){}
void setOnSegment(const onSegment &cb); void setOnSegment(const onSegment &cb);
static bool isTSPacket(const char *data, int len);
protected: protected:
int64_t onRecvHeader(const char *data, uint64_t len) override ; int64_t onRecvHeader(const char *data, uint64_t len) override ;
const char *onSearchPacketTail(const char *data, int len) override ; const char *onSearchPacketTail(const char *data, int len) override ;
...@@ -34,6 +37,7 @@ private: ...@@ -34,6 +37,7 @@ private:
onSegment _onSegment; onSegment _onSegment;
}; };
#if defined(ENABLE_HLS)
//ts解析器 //ts解析器
class TSDecoder : public Decoder { class TSDecoder : public Decoder {
public: public:
...@@ -46,7 +50,7 @@ private: ...@@ -46,7 +50,7 @@ private:
struct ts_demuxer_t* _demuxer_ctx = nullptr; struct ts_demuxer_t* _demuxer_ctx = nullptr;
onDecode _on_decode; onDecode _on_decode;
}; };
#endif//defined(ENABLE_HLS)
}//namespace mediakit }//namespace mediakit
#endif//defined(ENABLE_RTPPROXY)
#endif //ZLMEDIAKIT_TSDECODER_H #endif //ZLMEDIAKIT_TSDECODER_H
...@@ -17,7 +17,9 @@ UdpRecver::UdpRecver() { ...@@ -17,7 +17,9 @@ UdpRecver::UdpRecver() {
} }
UdpRecver::~UdpRecver() { UdpRecver::~UdpRecver() {
if(_sock){
_sock->setOnRead(nullptr); _sock->setOnRead(nullptr);
}
} }
bool UdpRecver::initSock(uint16_t local_port,const char *local_ip) { bool UdpRecver::initSock(uint16_t local_port,const char *local_ip) {
......
...@@ -28,18 +28,18 @@ RtpPacket::Ptr RtpInfo::makeRtp(TrackType type, const void* data, unsigned int l ...@@ -28,18 +28,18 @@ RtpPacket::Ptr RtpInfo::makeRtp(TrackType type, const void* data, unsigned int l
pucRtp[2] = ui16RtpLen >> 8; pucRtp[2] = ui16RtpLen >> 8;
pucRtp[3] = ui16RtpLen & 0x00FF; pucRtp[3] = ui16RtpLen & 0x00FF;
pucRtp[4] = 0x80; pucRtp[4] = 0x80;
pucRtp[5] = (mark << 7) | _ui8PlayloadType; pucRtp[5] = (mark << 7) | _ui8PayloadType;
memcpy(&pucRtp[6], &sq, 2); memcpy(&pucRtp[6], &sq, 2);
memcpy(&pucRtp[8], &ts, 4); memcpy(&pucRtp[8], &ts, 4);
//ssrc //ssrc
memcpy(&pucRtp[12], &sc, 4); memcpy(&pucRtp[12], &sc, 4);
if(data){ if(data){
//playload //payload
memcpy(&pucRtp[16], data, len); memcpy(&pucRtp[16], data, len);
} }
rtppkt->PT = _ui8PlayloadType; rtppkt->PT = _ui8PayloadType;
rtppkt->interleaved = _ui8Interleaved; rtppkt->interleaved = _ui8Interleaved;
rtppkt->mark = mark; rtppkt->mark = mark;
rtppkt->sequence = _ui16Sequence; rtppkt->sequence = _ui16Sequence;
......
...@@ -66,7 +66,7 @@ public: ...@@ -66,7 +66,7 @@ public:
RtpInfo(uint32_t ui32Ssrc, RtpInfo(uint32_t ui32Ssrc,
uint32_t ui32MtuSize, uint32_t ui32MtuSize,
uint32_t ui32SampleRate, uint32_t ui32SampleRate,
uint8_t ui8PlayloadType, uint8_t ui8PayloadType,
uint8_t ui8Interleaved) { uint8_t ui8Interleaved) {
if(ui32Ssrc == 0){ if(ui32Ssrc == 0){
ui32Ssrc = ((uint64_t)this) & 0xFFFFFFFF; ui32Ssrc = ((uint64_t)this) & 0xFFFFFFFF;
...@@ -74,7 +74,7 @@ public: ...@@ -74,7 +74,7 @@ public:
_ui32Ssrc = ui32Ssrc; _ui32Ssrc = ui32Ssrc;
_ui32SampleRate = ui32SampleRate; _ui32SampleRate = ui32SampleRate;
_ui32MtuSize = ui32MtuSize; _ui32MtuSize = ui32MtuSize;
_ui8PlayloadType = ui8PlayloadType; _ui8PayloadType = ui8PayloadType;
_ui8Interleaved = ui8Interleaved; _ui8Interleaved = ui8Interleaved;
} }
...@@ -84,8 +84,8 @@ public: ...@@ -84,8 +84,8 @@ public:
return _ui8Interleaved; return _ui8Interleaved;
} }
int getPlayloadType() const { int getPayloadType() const {
return _ui8PlayloadType; return _ui8PayloadType;
} }
int getSampleRate() const { int getSampleRate() const {
...@@ -110,7 +110,7 @@ protected: ...@@ -110,7 +110,7 @@ protected:
uint32_t _ui32Ssrc; uint32_t _ui32Ssrc;
uint32_t _ui32SampleRate; uint32_t _ui32SampleRate;
uint32_t _ui32MtuSize; uint32_t _ui32MtuSize;
uint8_t _ui8PlayloadType; uint8_t _ui8PayloadType;
uint8_t _ui8Interleaved; uint8_t _ui8Interleaved;
uint16_t _ui16Sequence = 0; uint16_t _ui16Sequence = 0;
uint32_t _ui32TimeStamp = 0; uint32_t _ui32TimeStamp = 0;
......
...@@ -81,6 +81,7 @@ RtpMultiCaster::~RtpMultiCaster() { ...@@ -81,6 +81,7 @@ RtpMultiCaster::~RtpMultiCaster() {
_pReader->setDetachCB(nullptr); _pReader->setDetachCB(nullptr);
DebugL; DebugL;
} }
RtpMultiCaster::RtpMultiCaster(const EventPoller::Ptr &poller,const string &strLocalIp,const string &strVhost,const string &strApp,const string &strStream) { RtpMultiCaster::RtpMultiCaster(const EventPoller::Ptr &poller,const string &strLocalIp,const string &strVhost,const string &strApp,const string &strStream) {
auto src = dynamic_pointer_cast<RtspMediaSource>(MediaSource::find(RTSP_SCHEMA,strVhost,strApp, strStream)); auto src = dynamic_pointer_cast<RtspMediaSource>(MediaSource::find(RTSP_SCHEMA,strVhost,strApp, strStream));
if(!src){ if(!src){
......
...@@ -34,11 +34,11 @@ bool RtpReceiver::handleOneRtp(int track_index,SdpTrack::Ptr &track, unsigned ch ...@@ -34,11 +34,11 @@ bool RtpReceiver::handleOneRtp(int track_index,SdpTrack::Ptr &track, unsigned ch
} }
uint8_t padding = 0; uint8_t padding = 0;
if (rtp_raw_ptr[0] & 0x40) { if (rtp_raw_ptr[0] & 0x20) {
//获取padding大小 //获取padding大小
padding = rtp_raw_ptr[rtp_raw_len - 1]; padding = rtp_raw_ptr[rtp_raw_len - 1];
//移除padding flag //移除padding flag
rtp_raw_ptr[0] &= ~0x40; rtp_raw_ptr[0] &= ~0x20;
//移除padding字节 //移除padding字节
rtp_raw_len -= padding; rtp_raw_len -= padding;
} }
......
...@@ -365,5 +365,43 @@ bool RtspUrl::setup(bool isSSL, const string &strUrl, const string &strUser, con ...@@ -365,5 +365,43 @@ bool RtspUrl::setup(bool isSSL, const string &strUrl, const string &strUser, con
return true; return true;
} }
}//namespace mediakit std::pair<Socket::Ptr, Socket::Ptr> makeSockPair_l(const EventPoller::Ptr &poller, const string &local_ip){
auto pSockRtp = std::make_shared<Socket>(poller);
if (!pSockRtp->bindUdpSock(0, local_ip.data())) {
//分配端口失败
throw runtime_error("open udp socket failed");
}
//是否是偶数
bool even_numbers = pSockRtp->get_local_port() % 2 == 0;
auto pSockRtcp = std::make_shared<Socket>(poller);
if (!pSockRtcp->bindUdpSock(pSockRtp->get_local_port() + (even_numbers ? 1 : -1), local_ip.data())) {
//分配端口失败
throw runtime_error("open udp socket failed");
}
if (!even_numbers) {
//如果rtp端口不是偶数,那么与rtcp端口互换,目的是兼容一些要求严格的播放器或服务器
Socket::Ptr tmp = pSockRtp;
pSockRtp = pSockRtcp;
pSockRtcp = tmp;
}
return std::make_pair(pSockRtp, pSockRtcp);
}
std::pair<Socket::Ptr, Socket::Ptr> makeSockPair(const EventPoller::Ptr &poller, const string &local_ip){
int try_count = 0;
while (true) {
try {
return makeSockPair_l(poller, local_ip);
} catch (...) {
if (++try_count == 3) {
throw;
}
WarnL << "open udp socket failed, retry: " << try_count;
}
}
}
}//namespace mediakit
\ No newline at end of file
...@@ -188,11 +188,11 @@ public: ...@@ -188,11 +188,11 @@ public:
/** /**
* 构造sdp * 构造sdp
* @param sample_rate 采样率 * @param sample_rate 采样率
* @param playload_type pt类型 * @param payload_type pt类型
*/ */
Sdp(uint32_t sample_rate, uint8_t playload_type){ Sdp(uint32_t sample_rate, uint8_t payload_type){
_sample_rate = sample_rate; _sample_rate = sample_rate;
_playload_type = playload_type; _payload_type = payload_type;
} }
virtual ~Sdp(){} virtual ~Sdp(){}
...@@ -207,8 +207,8 @@ public: ...@@ -207,8 +207,8 @@ public:
* 获取pt * 获取pt
* @return * @return
*/ */
uint8_t getPlayloadType() const{ uint8_t getPayloadType() const{
return _playload_type; return _payload_type;
} }
/** /**
...@@ -219,7 +219,7 @@ public: ...@@ -219,7 +219,7 @@ public:
return _sample_rate; return _sample_rate;
} }
private: private:
uint8_t _playload_type; uint8_t _payload_type;
uint32_t _sample_rate; uint32_t _sample_rate;
}; };
...@@ -263,18 +263,7 @@ public: ...@@ -263,18 +263,7 @@ public:
string getSdp() const override { string getSdp() const override {
return _printer; return _printer;
} }
/**
* 返回音频或视频类型
* @return
*/
TrackType getTrackType() const override {
return TrackTitle;
}
/**
* 返回编码器id
* @return
*/
CodecId getCodecId() const override{ CodecId getCodecId() const override{
return CodecInvalid; return CodecInvalid;
} }
...@@ -282,6 +271,7 @@ private: ...@@ -282,6 +271,7 @@ private:
_StrPrinter _printer; _StrPrinter _printer;
}; };
} //namespace mediakit std::pair<Socket::Ptr, Socket::Ptr> makeSockPair(const EventPoller::Ptr &poller, const string &local_ip);
} //namespace mediakit
#endif //RTSP_RTSP_H_ #endif //RTSP_RTSP_H_
...@@ -30,16 +30,13 @@ using namespace toolkit; ...@@ -30,16 +30,13 @@ using namespace toolkit;
#define RTP_GOP_SIZE 512 #define RTP_GOP_SIZE 512
namespace mediakit { namespace mediakit {
typedef VideoPacketCache<RtpPacket> RtpVideoCache; /**
typedef AudioPacketCache<RtpPacket> RtpAudioCache;
/**
* rtsp媒体源的数据抽象 * rtsp媒体源的数据抽象
* rtsp有关键的两要素,分别是sdp、rtp包 * rtsp有关键的两要素,分别是sdp、rtp包
* 只要生成了这两要素,那么要实现rtsp推流、rtsp服务器就很简单了 * 只要生成了这两要素,那么要实现rtsp推流、rtsp服务器就很简单了
* rtsp推拉流协议中,先传递sdp,然后再协商传输方式(tcp/udp/组播),最后一直传递rtp * rtsp推拉流协议中,先传递sdp,然后再协商传输方式(tcp/udp/组播),最后一直传递rtp
*/ */
class RtspMediaSource : public MediaSource, public RingDelegate<RtpPacket::Ptr>, public RtpVideoCache, public RtpAudioCache { class RtspMediaSource : public MediaSource, public RingDelegate<RtpPacket::Ptr>, public PacketCache<RtpPacket> {
public: public:
typedef ResourcePool<RtpPacket> PoolType; typedef ResourcePool<RtpPacket> PoolType;
typedef std::shared_ptr<RtspMediaSource> Ptr; typedef std::shared_ptr<RtspMediaSource> Ptr;
...@@ -175,32 +172,19 @@ public: ...@@ -175,32 +172,19 @@ public:
regist(); regist();
} }
} }
PacketCache<RtpPacket>::inputPacket(rtp->type == TrackVideo, rtp, keyPos);
if(rtp->type == TrackVideo){
RtpVideoCache::inputVideo(rtp, keyPos);
}else{
RtpAudioCache::inputAudio(rtp);
}
} }
private: private:
/** /**
* 批量flush时间戳相同的视频rtp包时触发该函数 * 批量flush rtp包时触发该函数
* @param rtp_list 时间戳相同的rtp包列表
* @param key_pos 是否包含关键帧
*/
void onFlushVideo(std::shared_ptr<List<RtpPacket::Ptr> > &rtp_list, bool key_pos) override {
_ring->write(rtp_list, key_pos);
}
/**
* 批量flush一定数量的音频rtp包时触发该函数
* @param rtp_list rtp包列表 * @param rtp_list rtp包列表
* @param key_pos 是否包含关键帧
*/ */
void onFlushAudio(std::shared_ptr<List<RtpPacket::Ptr> > &rtp_list) override{ void onFlush(std::shared_ptr<List<RtpPacket::Ptr> > &rtp_list, bool key_pos) override {
//只有音频的话,就不存在gop缓存的意义 //如果不存在视频,那么就没有存在GOP缓存的意义,所以is_key一直为true确保一直清空GOP缓存
_ring->write(rtp_list, !_have_video); _ring->write(rtp_list, _have_video ? key_pos : true);
} }
/** /**
......
...@@ -10,14 +10,11 @@ ...@@ -10,14 +10,11 @@
#include <set> #include <set>
#include <cmath> #include <cmath>
#include <stdarg.h>
#include <algorithm> #include <algorithm>
#include <iomanip> #include <iomanip>
#include "Common/config.h" #include "Common/config.h"
#include "RtspPlayer.h" #include "RtspPlayer.h"
#include "Util/MD5.h" #include "Util/MD5.h"
#include "Util/mini.h"
#include "Util/util.h" #include "Util/util.h"
#include "Util/base64.h" #include "Util/base64.h"
#include "Network/sockutil.h" #include "Network/sockutil.h"
...@@ -40,28 +37,28 @@ RtspPlayer::~RtspPlayer(void) { ...@@ -40,28 +37,28 @@ RtspPlayer::~RtspPlayer(void) {
} }
void RtspPlayer::teardown(){ void RtspPlayer::teardown(){
if (alive()) { if (alive()) {
sendRtspRequest("TEARDOWN" ,_strContentBase); sendRtspRequest("TEARDOWN" ,_content_base);
shutdown(SockException(Err_shutdown,"teardown")); shutdown(SockException(Err_shutdown,"teardown"));
} }
_rtspMd5Nonce.clear(); _md5_nonce.clear();
_rtspRealm.clear(); _realm.clear();
_aTrackInfo.clear(); _sdp_track.clear();
_strSession.clear(); _session_id.clear();
_strContentBase.clear(); _content_base.clear();
RtpReceiver::clear(); RtpReceiver::clear();
CLEAR_ARR(_apRtpSock); CLEAR_ARR(_rtp_sock);
CLEAR_ARR(_apRtcpSock); CLEAR_ARR(_rtcp_sock);
CLEAR_ARR(_aui16FirstSeq) CLEAR_ARR(_rtp_seq_start)
CLEAR_ARR(_aui64RtpRecv) CLEAR_ARR(_rtp_recv_count)
CLEAR_ARR(_aui64RtpRecv) CLEAR_ARR(_rtp_recv_count)
CLEAR_ARR(_aui16NowSeq) CLEAR_ARR(_rtp_seq_now)
_pPlayTimer.reset(); _play_check_timer.reset();
_pRtpTimer.reset(); _rtp_check_timer.reset();
_uiCseq = 1; _cseq_send = 1;
_onHandshake = nullptr; _on_response = nullptr;
} }
void RtspPlayer::play(const string &strUrl){ void RtspPlayer::play(const string &strUrl){
...@@ -81,20 +78,20 @@ void RtspPlayer::play(const string &strUrl){ ...@@ -81,20 +78,20 @@ void RtspPlayer::play(const string &strUrl){
(*this)[kRtspPwdIsMD5] = false; (*this)[kRtspPwdIsMD5] = false;
} }
_strUrl = url._url; _play_url = url._url;
_eType = (Rtsp::eRtpType)(int)(*this)[kRtpType]; _rtp_type = (Rtsp::eRtpType)(int)(*this)[kRtpType];
DebugL << url._url << " " << (url._user.size() ? url._user : "null") << " " << (url._passwd.size() ? url._passwd : "null") << " " << _eType; DebugL << url._url << " " << (url._user.size() ? url._user : "null") << " " << (url._passwd.size() ? url._passwd : "null") << " " << _rtp_type;
weak_ptr<RtspPlayer> weakSelf = dynamic_pointer_cast<RtspPlayer>(shared_from_this()); weak_ptr<RtspPlayer> weakSelf = dynamic_pointer_cast<RtspPlayer>(shared_from_this());
float playTimeOutSec = (*this)[kTimeoutMS].as<int>() / 1000.0; float playTimeOutSec = (*this)[kTimeoutMS].as<int>() / 1000.0;
_pPlayTimer.reset( new Timer(playTimeOutSec, [weakSelf]() { _play_check_timer.reset(new Timer(playTimeOutSec, [weakSelf]() {
auto strongSelf=weakSelf.lock(); auto strongSelf=weakSelf.lock();
if(!strongSelf) { if(!strongSelf) {
return false; return false;
} }
strongSelf->onPlayResult_l(SockException(Err_timeout,"play rtsp timeout"),false); strongSelf->onPlayResult_l(SockException(Err_timeout,"play rtsp timeout"),false);
return false; return false;
},getPoller())); }, getPoller()));
if(!(*this)[kNetAdapter].empty()){ if(!(*this)[kNetAdapter].empty()){
setNetAdapter((*this)[kNetAdapter]); setNetAdapter((*this)[kNetAdapter]);
...@@ -107,14 +104,13 @@ void RtspPlayer::onConnect(const SockException &err){ ...@@ -107,14 +104,13 @@ void RtspPlayer::onConnect(const SockException &err){
onPlayResult_l(err,false); onPlayResult_l(err,false);
return; return;
} }
sendOptions();
sendDescribe();
} }
void RtspPlayer::onRecv(const Buffer::Ptr& pBuf) { void RtspPlayer::onRecv(const Buffer::Ptr& pBuf) {
if(_benchmark_mode && !_pPlayTimer){ if(_benchmark_mode && !_play_check_timer){
//在性能测试模式下,如果rtsp握手完毕后,不再解析rtp包 //在性能测试模式下,如果rtsp握手完毕后,不再解析rtp包
_rtpTicker.resetTime(); _rtp_recv_ticker.resetTime();
return; return;
} }
input(pBuf->data(),pBuf->size()); input(pBuf->data(),pBuf->size());
...@@ -122,12 +118,12 @@ void RtspPlayer::onRecv(const Buffer::Ptr& pBuf) { ...@@ -122,12 +118,12 @@ void RtspPlayer::onRecv(const Buffer::Ptr& pBuf) {
void RtspPlayer::onErr(const SockException &ex) { void RtspPlayer::onErr(const SockException &ex) {
//定时器_pPlayTimer为空后表明握手结束了 //定时器_pPlayTimer为空后表明握手结束了
onPlayResult_l(ex,!_pPlayTimer); onPlayResult_l(ex,!_play_check_timer);
} }
// from live555 // from live555
bool RtspPlayer::handleAuthenticationFailure(const string &paramsStr) { bool RtspPlayer::handleAuthenticationFailure(const string &paramsStr) {
if(!_rtspRealm.empty()){ if(!_realm.empty()){
//已经认证过了 //已经认证过了
return false; return false;
} }
...@@ -142,28 +138,28 @@ bool RtspPlayer::handleAuthenticationFailure(const string &paramsStr) { ...@@ -142,28 +138,28 @@ bool RtspPlayer::handleAuthenticationFailure(const string &paramsStr) {
}); });
if (sscanf(paramsStr.data(), "Digest realm=\"%[^\"]\", nonce=\"%[^\"]\", stale=%[a-zA-Z]", realm, nonce, stale) == 3) { if (sscanf(paramsStr.data(), "Digest realm=\"%[^\"]\", nonce=\"%[^\"]\", stale=%[a-zA-Z]", realm, nonce, stale) == 3) {
_rtspRealm = (const char *)realm; _realm = (const char *)realm;
_rtspMd5Nonce = (const char *)nonce; _md5_nonce = (const char *)nonce;
return true; return true;
} }
if (sscanf(paramsStr.data(), "Digest realm=\"%[^\"]\", nonce=\"%[^\"]\"", realm, nonce) == 2) { if (sscanf(paramsStr.data(), "Digest realm=\"%[^\"]\", nonce=\"%[^\"]\"", realm, nonce) == 2) {
_rtspRealm = (const char *)realm; _realm = (const char *)realm;
_rtspMd5Nonce = (const char *)nonce; _md5_nonce = (const char *)nonce;
return true; return true;
} }
if (sscanf(paramsStr.data(), "Basic realm=\"%[^\"]\"", realm) == 1) { if (sscanf(paramsStr.data(), "Basic realm=\"%[^\"]\"", realm) == 1) {
_rtspRealm = (const char *)realm; _realm = (const char *)realm;
return true; return true;
} }
return false; return false;
} }
void RtspPlayer::handleResDESCRIBE(const Parser& parser) { bool RtspPlayer::handleResponse(const string &cmd, const Parser &parser){
string authInfo = parser["WWW-Authenticate"]; string authInfo = parser["WWW-Authenticate"];
//发送DESCRIBE命令后的回复 //发送DESCRIBE命令后的回复
if ((parser.Url() == "401") && handleAuthenticationFailure(authInfo)) { if ((parser.Url() == "401") && handleAuthenticationFailure(authInfo)) {
sendDescribe(); sendOptions();
return; return false;
} }
if(parser.Url() == "302" || parser.Url() == "301"){ if(parser.Url() == "302" || parser.Url() == "301"){
auto newUrl = parser["Location"]; auto newUrl = parser["Location"];
...@@ -171,36 +167,36 @@ void RtspPlayer::handleResDESCRIBE(const Parser& parser) { ...@@ -171,36 +167,36 @@ void RtspPlayer::handleResDESCRIBE(const Parser& parser) {
throw std::runtime_error("未找到Location字段(跳转url)"); throw std::runtime_error("未找到Location字段(跳转url)");
} }
play(newUrl); play(newUrl);
return; return false;
} }
if (parser.Url() != "200") { if (parser.Url() != "200") {
throw std::runtime_error( throw std::runtime_error(StrPrinter << cmd << ":" << parser.Url() << " " << parser.Tail() << endl);
StrPrinter << "DESCRIBE:" << parser.Url() << " " << parser.Tail() << endl);
} }
_strContentBase = parser["Content-Base"]; return true;
}
if(_strContentBase.empty()){ void RtspPlayer::handleResDESCRIBE(const Parser& parser) {
_strContentBase = _strUrl; if (!handleResponse("DESCRIBE", parser)) {
return;
} }
if (_strContentBase.back() == '/') { _content_base = parser["Content-Base"];
_strContentBase.pop_back(); if(_content_base.empty()){
_content_base = _play_url;
}
if (_content_base.back() == '/') {
_content_base.pop_back();
} }
SdpParser sdpParser(parser.Content()); SdpParser sdpParser(parser.Content());
//解析sdp //解析sdp
_aTrackInfo = sdpParser.getAvailableTrack(); _sdp_track = sdpParser.getAvailableTrack();
auto title = sdpParser.getTrack(TrackTitle); auto title = sdpParser.getTrack(TrackTitle);
_is_play_back = false; bool is_play_back = false;
if(title && title->_duration ){ if(title && title->_duration ){
_is_play_back = true; is_play_back = true;
}
for(auto &stamp : _stamp){
stamp.setPlayBack(_is_play_back);
stamp.setRelativeStamp(0);
} }
if (_aTrackInfo.empty()) { if (_sdp_track.empty()) {
throw std::runtime_error("无有效的Sdp Track"); throw std::runtime_error("无有效的Sdp Track");
} }
if (!onCheckSDP(sdpParser.toString())) { if (!onCheckSDP(sdpParser.toString())) {
...@@ -212,40 +208,21 @@ void RtspPlayer::handleResDESCRIBE(const Parser& parser) { ...@@ -212,40 +208,21 @@ void RtspPlayer::handleResDESCRIBE(const Parser& parser) {
//有必要的情况下创建udp端口 //有必要的情况下创建udp端口
void RtspPlayer::createUdpSockIfNecessary(int track_idx){ void RtspPlayer::createUdpSockIfNecessary(int track_idx){
auto &rtpSockRef = _apRtpSock[track_idx]; auto &rtpSockRef = _rtp_sock[track_idx];
auto &rtcpSockRef = _apRtcpSock[track_idx]; auto &rtcpSockRef = _rtcp_sock[track_idx];
if(!rtpSockRef){ if (!rtpSockRef || !rtcpSockRef) {
rtpSockRef.reset(new Socket(getPoller())); auto pr = makeSockPair(getPoller(), get_local_ip());
//rtp随机端口 rtpSockRef = pr.first;
if (!rtpSockRef->bindUdpSock(0, get_local_ip().data())) { rtcpSockRef = pr.second;
rtpSockRef.reset();
throw std::runtime_error("open rtp sock failed");
}
}
if(!rtcpSockRef){
rtcpSockRef.reset(new Socket(getPoller()));
//rtcp端口为rtp端口+1,目的是为了兼容某些服务器,其实更推荐随机端口
if (!rtcpSockRef->bindUdpSock(rtpSockRef->get_local_port() + 1, get_local_ip().data())) {
rtcpSockRef.reset();
throw std::runtime_error("open rtcp sock failed");
}
}
if(rtpSockRef->get_local_port() % 2 != 0){
//如果rtp端口不是偶数,那么与rtcp端口互换,目的是兼容一些要求严格的服务器
Socket::Ptr tmp = rtpSockRef;
rtpSockRef = rtcpSockRef;
rtcpSockRef = tmp;
} }
} }
//发送SETUP命令 //发送SETUP命令
void RtspPlayer::sendSetup(unsigned int trackIndex) { void RtspPlayer::sendSetup(unsigned int trackIndex) {
_onHandshake = std::bind(&RtspPlayer::handleResSETUP,this, placeholders::_1,trackIndex); _on_response = std::bind(&RtspPlayer::handleResSETUP, this, placeholders::_1, trackIndex);
auto &track = _aTrackInfo[trackIndex]; auto &track = _sdp_track[trackIndex];
auto baseUrl = _strContentBase + "/" + track->_control_surffix; auto baseUrl = _content_base + "/" + track->_control_surffix;
switch (_eType) { switch (_rtp_type) {
case Rtsp::RTP_TCP: { case Rtsp::RTP_TCP: {
sendRtspRequest("SETUP",baseUrl,{"Transport",StrPrinter << "RTP/AVP/TCP;unicast;interleaved=" << track->_type * 2 << "-" << track->_type * 2 + 1}); sendRtspRequest("SETUP",baseUrl,{"Transport",StrPrinter << "RTP/AVP/TCP;unicast;interleaved=" << track->_type * 2 << "-" << track->_type * 2 + 1});
} }
...@@ -256,10 +233,10 @@ void RtspPlayer::sendSetup(unsigned int trackIndex) { ...@@ -256,10 +233,10 @@ void RtspPlayer::sendSetup(unsigned int trackIndex) {
break; break;
case Rtsp::RTP_UDP: { case Rtsp::RTP_UDP: {
createUdpSockIfNecessary(trackIndex); createUdpSockIfNecessary(trackIndex);
sendRtspRequest("SETUP",baseUrl,{"Transport", sendRtspRequest("SETUP", baseUrl, {"Transport",
StrPrinter << "RTP/AVP;unicast;client_port=" StrPrinter << "RTP/AVP;unicast;client_port="
<< _apRtpSock[trackIndex]->get_local_port() << "-" << _rtp_sock[trackIndex]->get_local_port() << "-"
<< _apRtcpSock[trackIndex]->get_local_port()}); << _rtcp_sock[trackIndex]->get_local_port()});
} }
break; break;
default: default:
...@@ -273,34 +250,34 @@ void RtspPlayer::handleResSETUP(const Parser &parser, unsigned int uiTrackIndex) ...@@ -273,34 +250,34 @@ void RtspPlayer::handleResSETUP(const Parser &parser, unsigned int uiTrackIndex)
StrPrinter << "SETUP:" << parser.Url() << " " << parser.Tail() << endl); StrPrinter << "SETUP:" << parser.Url() << " " << parser.Tail() << endl);
} }
if (uiTrackIndex == 0) { if (uiTrackIndex == 0) {
_strSession = parser["Session"]; _session_id = parser["Session"];
_strSession.append(";"); _session_id.append(";");
_strSession = FindField(_strSession.data(), nullptr, ";"); _session_id = FindField(_session_id.data(), nullptr, ";");
} }
auto strTransport = parser["Transport"]; auto strTransport = parser["Transport"];
if(strTransport.find("TCP") != string::npos || strTransport.find("interleaved") != string::npos){ if(strTransport.find("TCP") != string::npos || strTransport.find("interleaved") != string::npos){
_eType = Rtsp::RTP_TCP; _rtp_type = Rtsp::RTP_TCP;
}else if(strTransport.find("multicast") != string::npos){ }else if(strTransport.find("multicast") != string::npos){
_eType = Rtsp::RTP_MULTICAST; _rtp_type = Rtsp::RTP_MULTICAST;
}else{ }else{
_eType = Rtsp::RTP_UDP; _rtp_type = Rtsp::RTP_UDP;
} }
RtspSplitter::enableRecvRtp(_eType == Rtsp::RTP_TCP); RtspSplitter::enableRecvRtp(_rtp_type == Rtsp::RTP_TCP);
if(_eType == Rtsp::RTP_TCP) { if(_rtp_type == Rtsp::RTP_TCP) {
string interleaved = FindField( FindField((strTransport + ";").data(), "interleaved=", ";").data(), NULL, "-"); string interleaved = FindField( FindField((strTransport + ";").data(), "interleaved=", ";").data(), NULL, "-");
_aTrackInfo[uiTrackIndex]->_interleaved = atoi(interleaved.data()); _sdp_track[uiTrackIndex]->_interleaved = atoi(interleaved.data());
}else{ }else{
const char *strPos = (_eType == Rtsp::RTP_MULTICAST ? "port=" : "server_port=") ; const char *strPos = (_rtp_type == Rtsp::RTP_MULTICAST ? "port=" : "server_port=") ;
auto port_str = FindField((strTransport + ";").data(), strPos, ";"); auto port_str = FindField((strTransport + ";").data(), strPos, ";");
uint16_t rtp_port = atoi(FindField(port_str.data(), NULL, "-").data()); uint16_t rtp_port = atoi(FindField(port_str.data(), NULL, "-").data());
uint16_t rtcp_port = atoi(FindField(port_str.data(), "-",NULL).data()); uint16_t rtcp_port = atoi(FindField(port_str.data(), "-",NULL).data());
auto &pRtpSockRef = _apRtpSock[uiTrackIndex]; auto &pRtpSockRef = _rtp_sock[uiTrackIndex];
auto &pRtcpSockRef = _apRtcpSock[uiTrackIndex]; auto &pRtcpSockRef = _rtcp_sock[uiTrackIndex];
if (_eType == Rtsp::RTP_MULTICAST) { if (_rtp_type == Rtsp::RTP_MULTICAST) {
//udp组播 //udp组播
auto multiAddr = FindField((strTransport + ";").data(), "destination=", ";"); auto multiAddr = FindField((strTransport + ";").data(), "destination=", ";");
pRtpSockRef.reset(new Socket(getPoller())); pRtpSockRef.reset(new Socket(getPoller()));
...@@ -342,7 +319,7 @@ void RtspPlayer::handleResSETUP(const Parser &parser, unsigned int uiTrackIndex) ...@@ -342,7 +319,7 @@ void RtspPlayer::handleResSETUP(const Parser &parser, unsigned int uiTrackIndex)
WarnL << "收到其他地址的rtp数据:" << SockUtil::inet_ntoa(((struct sockaddr_in *) addr)->sin_addr); WarnL << "收到其他地址的rtp数据:" << SockUtil::inet_ntoa(((struct sockaddr_in *) addr)->sin_addr);
return; return;
} }
strongSelf->handleOneRtp(uiTrackIndex, strongSelf->_aTrackInfo[uiTrackIndex], (unsigned char *) buf->data(), buf->size()); strongSelf->handleOneRtp(uiTrackIndex, strongSelf->_sdp_track[uiTrackIndex], (unsigned char *) buf->data(), buf->size());
}); });
if(pRtcpSockRef) { if(pRtcpSockRef) {
...@@ -356,12 +333,12 @@ void RtspPlayer::handleResSETUP(const Parser &parser, unsigned int uiTrackIndex) ...@@ -356,12 +333,12 @@ void RtspPlayer::handleResSETUP(const Parser &parser, unsigned int uiTrackIndex)
WarnL << "收到其他地址的rtcp数据:" << SockUtil::inet_ntoa(((struct sockaddr_in *) addr)->sin_addr); WarnL << "收到其他地址的rtcp数据:" << SockUtil::inet_ntoa(((struct sockaddr_in *) addr)->sin_addr);
return; return;
} }
strongSelf->onRtcpPacket(uiTrackIndex, strongSelf->_aTrackInfo[uiTrackIndex], (unsigned char *) buf->data(), buf->size()); strongSelf->onRtcpPacket(uiTrackIndex, strongSelf->_sdp_track[uiTrackIndex], (unsigned char *) buf->data(), buf->size());
}); });
} }
} }
if (uiTrackIndex < _aTrackInfo.size() - 1) { if (uiTrackIndex < _sdp_track.size() - 1) {
//需要继续发送SETUP命令 //需要继续发送SETUP命令
sendSetup(uiTrackIndex + 1); sendSetup(uiTrackIndex + 1);
return; return;
...@@ -373,26 +350,55 @@ void RtspPlayer::handleResSETUP(const Parser &parser, unsigned int uiTrackIndex) ...@@ -373,26 +350,55 @@ void RtspPlayer::handleResSETUP(const Parser &parser, unsigned int uiTrackIndex)
void RtspPlayer::sendDescribe() { void RtspPlayer::sendDescribe() {
//发送DESCRIBE命令后处理函数:handleResDESCRIBE //发送DESCRIBE命令后处理函数:handleResDESCRIBE
_onHandshake = std::bind(&RtspPlayer::handleResDESCRIBE,this, placeholders::_1); _on_response = std::bind(&RtspPlayer::handleResDESCRIBE, this, placeholders::_1);
sendRtspRequest("DESCRIBE",_strUrl,{"Accept","application/sdp"}); sendRtspRequest("DESCRIBE", _play_url, {"Accept", "application/sdp"});
}
void RtspPlayer::sendOptions(){
_on_response = [this](const Parser& parser){
if (!handleResponse("OPTIONS", parser)) {
return;
}
//获取服务器支持的命令
_supported_cmd.clear();
auto public_val = split(parser["Public"],",");
for(auto &cmd : public_val){
trim(cmd);
_supported_cmd.emplace(cmd);
}
//发送Describe请求,获取sdp
sendDescribe();
};
sendRtspRequest("OPTIONS", _play_url);
}
void RtspPlayer::sendKeepAlive(){
_on_response = [this](const Parser& parser){};
if(_supported_cmd.find("GET_PARAMETER") != _supported_cmd.end()){
//支持GET_PARAMETER,用此命令保活
sendRtspRequest("GET_PARAMETER", _play_url);
}else{
//不支持GET_PARAMETER,用OPTIONS命令保活
sendRtspRequest("OPTIONS", _play_url);
}
} }
void RtspPlayer::sendPause(int type , uint32_t seekMS){ void RtspPlayer::sendPause(int type , uint32_t seekMS){
_onHandshake = std::bind(&RtspPlayer::handleResPAUSE,this, placeholders::_1,type); _on_response = std::bind(&RtspPlayer::handleResPAUSE, this, placeholders::_1, type);
//开启或暂停rtsp //开启或暂停rtsp
switch (type){ switch (type){
case type_pause: case type_pause:
sendRtspRequest("PAUSE", _strContentBase); sendRtspRequest("PAUSE", _content_base);
break; break;
case type_play: case type_play:
sendRtspRequest("PLAY", _strContentBase); sendRtspRequest("PLAY", _content_base);
break; break;
case type_seek: case type_seek:
sendRtspRequest("PLAY", _strContentBase, {"Range",StrPrinter << "npt=" << setiosflags(ios::fixed) << setprecision(2) << seekMS / 1000.0 << "-"}); sendRtspRequest("PLAY", _content_base, {"Range",StrPrinter << "npt=" << setiosflags(ios::fixed) << setprecision(2) << seekMS / 1000.0 << "-"});
break; break;
default: default:
WarnL << "unknown type : " << type; WarnL << "unknown type : " << type;
_onHandshake = nullptr; _on_response = nullptr;
break; break;
} }
} }
...@@ -419,7 +425,7 @@ void RtspPlayer::handleResPAUSE(const Parser& parser,int type) { ...@@ -419,7 +425,7 @@ void RtspPlayer::handleResPAUSE(const Parser& parser,int type) {
if (type == type_pause) { if (type == type_pause) {
//暂停成功! //暂停成功!
_pRtpTimer.reset(); _rtp_check_timer.reset();
return; return;
} }
...@@ -436,22 +442,20 @@ void RtspPlayer::handleResPAUSE(const Parser& parser,int type) { ...@@ -436,22 +442,20 @@ void RtspPlayer::handleResPAUSE(const Parser& parser,int type) {
DebugL << "seekTo(ms):" << iSeekTo; DebugL << "seekTo(ms):" << iSeekTo;
} }
//设置相对时间戳 //设置相对时间戳
_stamp[0].setRelativeStamp(iSeekTo);
_stamp[1].setRelativeStamp(iSeekTo);
onPlayResult_l(SockException(Err_success, type == type_seek ? "resum rtsp success" : "rtsp play success"), type == type_seek); onPlayResult_l(SockException(Err_success, type == type_seek ? "resum rtsp success" : "rtsp play success"), type == type_seek);
} }
void RtspPlayer::onWholeRtspPacket(Parser &parser) { void RtspPlayer::onWholeRtspPacket(Parser &parser) {
try { try {
decltype(_onHandshake) fun; decltype(_on_response) func;
_onHandshake.swap(fun); _on_response.swap(func);
if(fun){ if(func){
fun(parser); func(parser);
} }
parser.Clear(); parser.Clear();
} catch (std::exception &err) { } catch (std::exception &err) {
//定时器_pPlayTimer为空后表明握手结束了 //定时器_pPlayTimer为空后表明握手结束了
onPlayResult_l(SockException(Err_other, err.what()),!_pPlayTimer); onPlayResult_l(SockException(Err_other, err.what()),!_play_check_timer);
} }
} }
...@@ -461,12 +465,12 @@ void RtspPlayer::onRtpPacket(const char *data, uint64_t len) { ...@@ -461,12 +465,12 @@ void RtspPlayer::onRtpPacket(const char *data, uint64_t len) {
if(interleaved %2 == 0){ if(interleaved %2 == 0){
trackIdx = getTrackIndexByInterleaved(interleaved); trackIdx = getTrackIndexByInterleaved(interleaved);
if (trackIdx != -1) { if (trackIdx != -1) {
handleOneRtp(trackIdx,_aTrackInfo[trackIdx],(unsigned char *)data + 4, len - 4); handleOneRtp(trackIdx, _sdp_track[trackIdx], (unsigned char *)data + 4, len - 4);
} }
}else{ }else{
trackIdx = getTrackIndexByInterleaved(interleaved - 1); trackIdx = getTrackIndexByInterleaved(interleaved - 1);
if (trackIdx != -1) { if (trackIdx != -1) {
onRtcpPacket(trackIdx, _aTrackInfo[trackIdx], (unsigned char *) data + 4, len - 4); onRtcpPacket(trackIdx, _sdp_track[trackIdx], (unsigned char *) data + 4, len - 4);
} }
} }
} }
...@@ -536,8 +540,8 @@ void RtspPlayer::sendReceiverReport(bool overTcp,int iTrackIndex){ ...@@ -536,8 +540,8 @@ void RtspPlayer::sendReceiverReport(bool overTcp,int iTrackIndex){
static const char s_cname[] = "ZLMediaKitRtsp"; static const char s_cname[] = "ZLMediaKitRtsp";
uint8_t aui8Rtcp[4 + 32 + 10 + sizeof(s_cname) + 1] = {0}; uint8_t aui8Rtcp[4 + 32 + 10 + sizeof(s_cname) + 1] = {0};
uint8_t *pui8Rtcp_RR = aui8Rtcp + 4, *pui8Rtcp_SDES = pui8Rtcp_RR + 32; uint8_t *pui8Rtcp_RR = aui8Rtcp + 4, *pui8Rtcp_SDES = pui8Rtcp_RR + 32;
auto &track = _aTrackInfo[iTrackIndex]; auto &track = _sdp_track[iTrackIndex];
auto &counter = _aRtcpCnt[iTrackIndex]; auto &counter = _rtcp_counter[iTrackIndex];
aui8Rtcp[0] = '$'; aui8Rtcp[0] = '$';
aui8Rtcp[1] = track->_interleaved + 1; aui8Rtcp[1] = track->_interleaved + 1;
...@@ -593,25 +597,22 @@ void RtspPlayer::sendReceiverReport(bool overTcp,int iTrackIndex){ ...@@ -593,25 +597,22 @@ void RtspPlayer::sendReceiverReport(bool overTcp,int iTrackIndex){
if(overTcp){ if(overTcp){
send(obtainBuffer((char *) aui8Rtcp, sizeof(aui8Rtcp))); send(obtainBuffer((char *) aui8Rtcp, sizeof(aui8Rtcp)));
}else if(_apRtcpSock[iTrackIndex]) { }else if(_rtcp_sock[iTrackIndex]) {
_apRtcpSock[iTrackIndex]->send((char *) aui8Rtcp + 4, sizeof(aui8Rtcp) - 4); _rtcp_sock[iTrackIndex]->send((char *) aui8Rtcp + 4, sizeof(aui8Rtcp) - 4);
} }
} }
void RtspPlayer::onRtpSorted(const RtpPacket::Ptr &rtppt, int trackidx){ void RtspPlayer::onRtpSorted(const RtpPacket::Ptr &rtppt, int trackidx){
//统计丢包率 //统计丢包率
if (_aui16FirstSeq[trackidx] == 0 || rtppt->sequence < _aui16FirstSeq[trackidx]) { if (_rtp_seq_start[trackidx] == 0 || rtppt->sequence < _rtp_seq_start[trackidx]) {
_aui16FirstSeq[trackidx] = rtppt->sequence; _rtp_seq_start[trackidx] = rtppt->sequence;
_aui64RtpRecv[trackidx] = 0; _rtp_recv_count[trackidx] = 0;
} }
_aui64RtpRecv[trackidx] ++; _rtp_recv_count[trackidx] ++;
_aui16NowSeq[trackidx] = rtppt->sequence; _rtp_seq_now[trackidx] = rtppt->sequence;
_stamp[trackidx] = rtppt->timeStamp;
//计算相对时间戳 //计算相对时间戳
int64_t dts_out; onRecvRTP_l(rtppt, _sdp_track[trackidx]);
_stamp[trackidx].revise(rtppt->timeStamp,rtppt->timeStamp,dts_out,dts_out);
rtppt->timeStamp = dts_out;
onRecvRTP_l(rtppt,_aTrackInfo[trackidx]);
} }
float RtspPlayer::getPacketLossRate(TrackType type) const{ float RtspPlayer::getPacketLossRate(TrackType type) const{
...@@ -619,9 +620,9 @@ float RtspPlayer::getPacketLossRate(TrackType type) const{ ...@@ -619,9 +620,9 @@ float RtspPlayer::getPacketLossRate(TrackType type) const{
if(iTrackIdx == -1){ if(iTrackIdx == -1){
uint64_t totalRecv = 0; uint64_t totalRecv = 0;
uint64_t totalSend = 0; uint64_t totalSend = 0;
for (unsigned int i = 0; i < _aTrackInfo.size(); i++) { for (unsigned int i = 0; i < _sdp_track.size(); i++) {
totalRecv += _aui64RtpRecv[i]; totalRecv += _rtp_recv_count[i];
totalSend += (_aui16NowSeq[i] - _aui16FirstSeq[i] + 1); totalSend += (_rtp_seq_now[i] - _rtp_seq_start[i] + 1);
} }
if(totalSend == 0){ if(totalSend == 0){
return 0; return 0;
...@@ -629,14 +630,14 @@ float RtspPlayer::getPacketLossRate(TrackType type) const{ ...@@ -629,14 +630,14 @@ float RtspPlayer::getPacketLossRate(TrackType type) const{
return 1.0 - (double)totalRecv / totalSend; return 1.0 - (double)totalRecv / totalSend;
} }
if(_aui16NowSeq[iTrackIdx] - _aui16FirstSeq[iTrackIdx] + 1 == 0){ if(_rtp_seq_now[iTrackIdx] - _rtp_seq_start[iTrackIdx] + 1 == 0){
return 0; return 0;
} }
return 1.0 - (double)_aui64RtpRecv[iTrackIdx] / (_aui16NowSeq[iTrackIdx] - _aui16FirstSeq[iTrackIdx] + 1); return 1.0 - (double)_rtp_recv_count[iTrackIdx] / (_rtp_seq_now[iTrackIdx] - _rtp_seq_start[iTrackIdx] + 1);
} }
uint32_t RtspPlayer::getProgressMilliSecond() const{ uint32_t RtspPlayer::getProgressMilliSecond() const{
return MAX(_stamp[0].getRelativeStamp(),_stamp[1].getRelativeStamp()); return MAX(_stamp[0],_stamp[1]);
} }
void RtspPlayer::seekToMilliSecond(uint32_t ms) { void RtspPlayer::seekToMilliSecond(uint32_t ms) {
...@@ -659,15 +660,15 @@ void RtspPlayer::sendRtspRequest(const string &cmd, const string &url, const std ...@@ -659,15 +660,15 @@ void RtspPlayer::sendRtspRequest(const string &cmd, const string &url, const std
void RtspPlayer::sendRtspRequest(const string &cmd, const string &url,const StrCaseMap &header_const) { void RtspPlayer::sendRtspRequest(const string &cmd, const string &url,const StrCaseMap &header_const) {
auto header = header_const; auto header = header_const;
header.emplace("CSeq",StrPrinter << _uiCseq++); header.emplace("CSeq",StrPrinter << _cseq_send++);
header.emplace("User-Agent",SERVER_NAME); header.emplace("User-Agent",SERVER_NAME);
if(!_strSession.empty()){ if(!_session_id.empty()){
header.emplace("Session",_strSession); header.emplace("Session", _session_id);
} }
if(!_rtspRealm.empty() && !(*this)[kRtspUser].empty()){ if(!_realm.empty() && !(*this)[kRtspUser].empty()){
if(!_rtspMd5Nonce.empty()){ if(!_md5_nonce.empty()){
//MD5认证 //MD5认证
/* /*
response计算方法如下: response计算方法如下:
...@@ -679,14 +680,14 @@ void RtspPlayer::sendRtspRequest(const string &cmd, const string &url,const StrC ...@@ -679,14 +680,14 @@ void RtspPlayer::sendRtspRequest(const string &cmd, const string &url,const StrC
*/ */
string encrypted_pwd = (*this)[kRtspPwd]; string encrypted_pwd = (*this)[kRtspPwd];
if(!(*this)[kRtspPwdIsMD5].as<bool>()){ if(!(*this)[kRtspPwdIsMD5].as<bool>()){
encrypted_pwd = MD5((*this)[kRtspUser]+ ":" + _rtspRealm + ":" + encrypted_pwd).hexdigest(); encrypted_pwd = MD5((*this)[kRtspUser] + ":" + _realm + ":" + encrypted_pwd).hexdigest();
} }
auto response = MD5( encrypted_pwd + ":" + _rtspMd5Nonce + ":" + MD5(cmd + ":" + url).hexdigest()).hexdigest(); auto response = MD5(encrypted_pwd + ":" + _md5_nonce + ":" + MD5(cmd + ":" + url).hexdigest()).hexdigest();
_StrPrinter printer; _StrPrinter printer;
printer << "Digest "; printer << "Digest ";
printer << "username=\"" << (*this)[kRtspUser] << "\", "; printer << "username=\"" << (*this)[kRtspUser] << "\", ";
printer << "realm=\"" << _rtspRealm << "\", "; printer << "realm=\"" << _realm << "\", ";
printer << "nonce=\"" << _rtspMd5Nonce << "\", "; printer << "nonce=\"" << _md5_nonce << "\", ";
printer << "uri=\"" << url << "\", "; printer << "uri=\"" << url << "\", ";
printer << "response=\"" << response << "\""; printer << "response=\"" << response << "\"";
header.emplace("Authorization",printer); header.emplace("Authorization",printer);
...@@ -708,25 +709,31 @@ void RtspPlayer::sendRtspRequest(const string &cmd, const string &url,const StrC ...@@ -708,25 +709,31 @@ void RtspPlayer::sendRtspRequest(const string &cmd, const string &url,const StrC
} }
void RtspPlayer::onRecvRTP_l(const RtpPacket::Ptr &pkt, const SdpTrack::Ptr &track) { void RtspPlayer::onRecvRTP_l(const RtpPacket::Ptr &pkt, const SdpTrack::Ptr &track) {
_rtpTicker.resetTime(); _rtp_recv_ticker.resetTime();
onRecvRTP(pkt,track); onRecvRTP(pkt, track);
int iTrackIndex = getTrackIndexByInterleaved(pkt->interleaved); int iTrackIndex = getTrackIndexByInterleaved(pkt->interleaved);
if(iTrackIndex == -1){ if (iTrackIndex == -1) {
return; return;
} }
RtcpCounter &counter = _aRtcpCnt[iTrackIndex]; RtcpCounter &counter = _rtcp_counter[iTrackIndex];
counter.pktCnt = pkt->sequence; counter.pktCnt = pkt->sequence;
auto &ticker = _aRtcpTicker[iTrackIndex]; auto &ticker = _rtcp_send_ticker[iTrackIndex];
if (ticker.elapsedTime() > 5 * 1000) { if (ticker.elapsedTime() > 5 * 1000) {
//send rtcp every 5 second //send rtcp every 5 second
counter.lastTimeStamp = counter.timeStamp; counter.lastTimeStamp = counter.timeStamp;
//直接保存网络字节序 //直接保存网络字节序
memcpy(&counter.timeStamp, pkt->data() + 8 , 4); memcpy(&counter.timeStamp, pkt->data() + 8, 4);
if(counter.lastTimeStamp != 0){ if (counter.lastTimeStamp != 0) {
sendReceiverReport(_eType == Rtsp::RTP_TCP,iTrackIndex); sendReceiverReport(_rtp_type == Rtsp::RTP_TCP, iTrackIndex);
ticker.resetTime(); ticker.resetTime();
} }
//有些rtsp服务器需要rtcp保活,有些需要发送信令保活
if (iTrackIndex == 0) {
//只需要发送一次心跳信令包
sendKeepAlive();
}
} }
} }
...@@ -735,27 +742,27 @@ void RtspPlayer::onPlayResult_l(const SockException &ex , bool handshakeComplete ...@@ -735,27 +742,27 @@ void RtspPlayer::onPlayResult_l(const SockException &ex , bool handshakeComplete
if(!ex){ if(!ex){
//播放成功,恢复rtp接收超时定时器 //播放成功,恢复rtp接收超时定时器
_rtpTicker.resetTime(); _rtp_recv_ticker.resetTime();
weak_ptr<RtspPlayer> weakSelf = dynamic_pointer_cast<RtspPlayer>(shared_from_this()); weak_ptr<RtspPlayer> weakSelf = dynamic_pointer_cast<RtspPlayer>(shared_from_this());
int timeoutMS = (*this)[kMediaTimeoutMS].as<int>(); int timeoutMS = (*this)[kMediaTimeoutMS].as<int>();
//创建rtp数据接收超时检测定时器 //创建rtp数据接收超时检测定时器
_pRtpTimer.reset( new Timer(timeoutMS / 2000.0, [weakSelf,timeoutMS]() { _rtp_check_timer.reset(new Timer(timeoutMS / 2000.0, [weakSelf,timeoutMS]() {
auto strongSelf=weakSelf.lock(); auto strongSelf=weakSelf.lock();
if(!strongSelf) { if(!strongSelf) {
return false; return false;
} }
if(strongSelf->_rtpTicker.elapsedTime()> timeoutMS) { if(strongSelf->_rtp_recv_ticker.elapsedTime() > timeoutMS) {
//接收rtp媒体数据包超时 //接收rtp媒体数据包超时
strongSelf->onPlayResult_l(SockException(Err_timeout,"receive rtp timeout"), true); strongSelf->onPlayResult_l(SockException(Err_timeout,"receive rtp timeout"), true);
return false; return false;
} }
return true; return true;
},getPoller())); }, getPoller()));
} }
if (!handshakeCompleted) { if (!handshakeCompleted) {
//开始播放阶段 //开始播放阶段
_pPlayTimer.reset(); _play_check_timer.reset();
onPlayResult(ex); onPlayResult(ex);
//是否为性能测试模式 //是否为性能测试模式
_benchmark_mode = (*this)[Client::kBenchmarkMode].as<int>(); _benchmark_mode = (*this)[Client::kBenchmarkMode].as<int>();
...@@ -772,25 +779,25 @@ void RtspPlayer::onPlayResult_l(const SockException &ex , bool handshakeComplete ...@@ -772,25 +779,25 @@ void RtspPlayer::onPlayResult_l(const SockException &ex , bool handshakeComplete
} }
} }
int RtspPlayer::getTrackIndexByInterleaved(int interleaved) const{ int RtspPlayer::getTrackIndexByInterleaved(int interleaved) const {
for (unsigned int i = 0; i < _aTrackInfo.size(); i++) { for (unsigned int i = 0; i < _sdp_track.size(); i++) {
if (_aTrackInfo[i]->_interleaved == interleaved) { if (_sdp_track[i]->_interleaved == interleaved) {
return i; return i;
} }
} }
if(_aTrackInfo.size() == 1){ if (_sdp_track.size() == 1) {
return 0; return 0;
} }
return -1; return -1;
} }
int RtspPlayer::getTrackIndexByTrackType(TrackType trackType) const { int RtspPlayer::getTrackIndexByTrackType(TrackType trackType) const {
for (unsigned int i = 0; i < _aTrackInfo.size(); i++) { for (unsigned int i = 0; i < _sdp_track.size(); i++) {
if (_aTrackInfo[i]->_type == trackType) { if (_sdp_track[i]->_type == trackType) {
return i; return i;
} }
} }
if(_aTrackInfo.size() == 1){ if (_sdp_track.size() == 1) {
return 0; return 0;
} }
return -1; return -1;
......
...@@ -94,53 +94,57 @@ private: ...@@ -94,53 +94,57 @@ private:
void handleResDESCRIBE(const Parser &parser); void handleResDESCRIBE(const Parser &parser);
bool handleAuthenticationFailure(const string &wwwAuthenticateParamsStr); bool handleAuthenticationFailure(const string &wwwAuthenticateParamsStr);
void handleResPAUSE(const Parser &parser, int type); void handleResPAUSE(const Parser &parser, int type);
bool handleResponse(const string &cmd, const Parser &parser);
//发送SETUP命令 void sendOptions();
void sendSetup(unsigned int uiTrackIndex); void sendSetup(unsigned int uiTrackIndex);
void sendPause(int type , uint32_t ms); void sendPause(int type , uint32_t ms);
void sendDescribe(); void sendDescribe();
void sendKeepAlive();
void sendRtspRequest(const string &cmd, const string &url ,const StrCaseMap &header = StrCaseMap()); void sendRtspRequest(const string &cmd, const string &url ,const StrCaseMap &header = StrCaseMap());
void sendRtspRequest(const string &cmd, const string &url ,const std::initializer_list<string> &header); void sendRtspRequest(const string &cmd, const string &url ,const std::initializer_list<string> &header);
void sendReceiverReport(bool overTcp,int iTrackIndex); void sendReceiverReport(bool overTcp,int iTrackIndex);
void createUdpSockIfNecessary(int track_idx); void createUdpSockIfNecessary(int track_idx);
private: private:
string _strUrl; string _play_url;
vector<SdpTrack::Ptr> _aTrackInfo; vector<SdpTrack::Ptr> _sdp_track;
function<void(const Parser&)> _onHandshake; function<void(const Parser&)> _on_response;
Socket::Ptr _apRtpSock[2]; //RTP端口,trackid idx 为数组下标 //RTP端口,trackid idx 为数组下标
Socket::Ptr _apRtcpSock[2];//RTCP端口,trackid idx 为数组下标 Socket::Ptr _rtp_sock[2];
//RTCP端口,trackid idx 为数组下标
Socket::Ptr _rtcp_sock[2];
//rtsp鉴权相关 //rtsp鉴权相关
string _rtspMd5Nonce; string _md5_nonce;
string _rtspRealm; string _realm;
//rtsp info //rtsp info
string _strSession; string _session_id;
unsigned int _uiCseq = 1; uint32_t _cseq_send = 1;
string _strContentBase; string _content_base;
Rtsp::eRtpType _eType = Rtsp::RTP_TCP; Rtsp::eRtpType _rtp_type = Rtsp::RTP_TCP;
/* 丢包率统计需要用到的参数 */ /* 丢包率统计需要用到的参数 */
uint16_t _aui16FirstSeq[2] = { 0 , 0}; uint16_t _rtp_seq_start[2] = {0, 0};
uint16_t _aui16NowSeq[2] = { 0 , 0 }; uint16_t _rtp_seq_now[2] = {0, 0};
uint64_t _aui64RtpRecv[2] = { 0 , 0}; uint64_t _rtp_recv_count[2] = {0, 0};
//当前rtp时间戳
uint32_t _stamp[2] = {0, 0};
//超时功能实现 //超时功能实现
Ticker _rtpTicker; Ticker _rtp_recv_ticker;
std::shared_ptr<Timer> _pPlayTimer; std::shared_ptr<Timer> _play_check_timer;
std::shared_ptr<Timer> _pRtpTimer; std::shared_ptr<Timer> _rtp_check_timer;
//时间戳
Stamp _stamp[2];
//rtcp相关 //rtcp统计,trackid idx 为数组下标
RtcpCounter _aRtcpCnt[2]; //rtcp统计,trackid idx 为数组下标 RtcpCounter _rtcp_counter[2];
Ticker _aRtcpTicker[2]; //rtcp发送时间,trackid idx 为数组下标 //rtcp发送时间,trackid idx 为数组下标
Ticker _rtcp_send_ticker[2];
//是否为rtsp点播
bool _is_play_back;
//是否为性能测试模式 //是否为性能测试模式
bool _benchmark_mode = false; bool _benchmark_mode = false;
//服务器支持的命令
set<string> _supported_cmd;
}; };
} /* namespace mediakit */ } /* namespace mediakit */
......
...@@ -221,8 +221,7 @@ void RtspSession::handleReq_ANNOUNCE(const Parser &parser) { ...@@ -221,8 +221,7 @@ void RtspSession::handleReq_ANNOUNCE(const Parser &parser) {
auto src = dynamic_pointer_cast<RtmpMediaSource>(MediaSource::find(RTSP_SCHEMA, auto src = dynamic_pointer_cast<RtmpMediaSource>(MediaSource::find(RTSP_SCHEMA,
_mediaInfo._vhost, _mediaInfo._vhost,
_mediaInfo._app, _mediaInfo._app,
_mediaInfo._streamid, _mediaInfo._streamid));
false));
if(src){ if(src){
sendRtspResponse("406 Not Acceptable", {"Content-Type", "text/plain"}, "Already publishing."); sendRtspResponse("406 Not Acceptable", {"Content-Type", "text/plain"}, "Already publishing.");
string err = StrPrinter << "ANNOUNCE:" string err = StrPrinter << "ANNOUNCE:"
...@@ -240,6 +239,12 @@ void RtspSession::handleReq_ANNOUNCE(const Parser &parser) { ...@@ -240,6 +239,12 @@ void RtspSession::handleReq_ANNOUNCE(const Parser &parser) {
_mediaInfo.parse(full_url); _mediaInfo.parse(full_url);
} }
if(_mediaInfo._app.empty() || _mediaInfo._streamid.empty()){
//推流rtsp url必须最少两级(rtsp://host/app/stream_id),不允许莫名其妙的推流url
sendRtspResponse("403 Forbidden", {"Content-Type", "text/plain"}, "rtsp推流url非法,最少确保两级rtsp url");
throw SockException(Err_shutdown,StrPrinter << "rtsp推流url非法:" << full_url);
}
SdpParser sdpParser(parser.Content()); SdpParser sdpParser(parser.Content());
_strSession = makeRandStr(12); _strSession = makeRandStr(12);
_aTrackInfo = sdpParser.getAvailableTrack(); _aTrackInfo = sdpParser.getAvailableTrack();
...@@ -312,39 +317,81 @@ void RtspSession::handleReq_RECORD(const Parser &parser){ ...@@ -312,39 +317,81 @@ void RtspSession::handleReq_RECORD(const Parser &parser){
} }
} }
void RtspSession::handleReq_Describe(const Parser &parser) { void RtspSession::emitOnPlay(){
weak_ptr<RtspSession> weakSelf = dynamic_pointer_cast<RtspSession>(shared_from_this()); weak_ptr<RtspSession> weakSelf = dynamic_pointer_cast<RtspSession>(shared_from_this());
//url鉴权回调
auto onRes = [weakSelf](const string &err) {
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
return;
}
if (!err.empty()) {
//播放url鉴权失败
strongSelf->sendRtspResponse("401 Unauthorized", {"Content-Type", "text/plain"}, err);
strongSelf->shutdown(SockException(Err_shutdown, StrPrinter << "401 Unauthorized:" << err));
return;
}
strongSelf->onAuthSuccess();
};
Broadcast::AuthInvoker invoker = [weakSelf, onRes](const string &err) {
auto strongSelf = weakSelf.lock();
if (!strongSelf) {
return;
}
strongSelf->async([onRes, err, weakSelf]() {
onRes(err);
});
};
//广播通用播放url鉴权事件
auto flag = _emit_on_play ? false : NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastMediaPlayed, _mediaInfo, invoker, static_cast<SockInfo &>(*this));
if (!flag) {
//该事件无人监听,默认不鉴权
onRes("");
}
//已经鉴权过了
_emit_on_play = true;
}
void RtspSession::handleReq_Describe(const Parser &parser) {
//该请求中的认证信息 //该请求中的认证信息
auto authorization = parser["Authorization"]; auto authorization = parser["Authorization"];
onGetRealm invoker = [weakSelf,authorization](const string &realm){ weak_ptr<RtspSession> weakSelf = dynamic_pointer_cast<RtspSession>(shared_from_this());
//rtsp专属鉴权是否开启事件回调
onGetRealm invoker = [weakSelf, authorization](const string &realm) {
auto strongSelf = weakSelf.lock(); auto strongSelf = weakSelf.lock();
if(!strongSelf){ if (!strongSelf) {
//本对象已经销毁 //本对象已经销毁
return; return;
} }
//切换到自己的线程然后执行 //切换到自己的线程然后执行
strongSelf->async([weakSelf,realm,authorization](){ strongSelf->async([weakSelf, realm, authorization]() {
auto strongSelf = weakSelf.lock(); auto strongSelf = weakSelf.lock();
if(!strongSelf){ if (!strongSelf) {
//本对象已经销毁 //本对象已经销毁
return; return;
} }
if(realm.empty()){ if (realm.empty()) {
//无需认证,回复sdp //无需rtsp专属认证, 那么继续url通用鉴权认证(on_play)
strongSelf->onAuthSuccess(); strongSelf->emitOnPlay();
return; return;
} }
//该流需要认证 //该流需要rtsp专属认证,开启rtsp专属认证后,将不再触发url通用鉴权认证(on_play)
strongSelf->onAuthUser(realm,authorization); strongSelf->_rtsp_realm = realm;
strongSelf->onAuthUser(realm, authorization);
}); });
}; };
//广播是否需要认证事件 if(_rtsp_realm.empty()){
if(!NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastOnGetRtspRealm,_mediaInfo,invoker,static_cast<SockInfo &>(*this))){ //广播是否需要rtsp专属认证事件
if (!NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastOnGetRtspRealm, _mediaInfo, invoker, static_cast<SockInfo &>(*this))) {
//无人监听此事件,说明无需认证 //无人监听此事件,说明无需认证
invoker(""); invoker("");
} }
}else{
invoker(_rtsp_realm);
}
} }
void RtspSession::onAuthSuccess() { void RtspSession::onAuthSuccess() {
TraceP(this); TraceP(this);
...@@ -627,21 +674,18 @@ void RtspSession::handleReq_Setup(const Parser &parser) { ...@@ -627,21 +674,18 @@ void RtspSession::handleReq_Setup(const Parser &parser) {
} }
break; break;
case Rtsp::RTP_UDP: { case Rtsp::RTP_UDP: {
//我们用trackIdx区分rtp和rtcp包 std::pair<Socket::Ptr, Socket::Ptr> pr;
auto pSockRtp = std::make_shared<Socket>(_sock->getPoller()); try{
if (!pSockRtp->bindUdpSock(0,get_local_ip().data())) { pr = makeSockPair(_sock->getPoller(), get_local_ip());
}catch(std::exception &ex) {
//分配端口失败 //分配端口失败
send_NotAcceptable(); send_NotAcceptable();
throw SockException(Err_shutdown, "open rtp socket failed"); throw SockException(Err_shutdown, ex.what());
} }
auto pSockRtcp = std::make_shared<Socket>(_sock->getPoller());
if (!pSockRtcp->bindUdpSock(pSockRtp->get_local_port() + 1,get_local_ip().data())) { _apRtpSock[trackIdx] = pr.first;
//分配端口失败 _apRtcpSock[trackIdx] = pr.second;
send_NotAcceptable();
throw SockException(Err_shutdown, "open rtcp socket failed");
}
_apRtpSock[trackIdx] = pSockRtp;
_apRtcpSock[trackIdx] = pSockRtcp;
//设置客户端内网端口信息 //设置客户端内网端口信息
string strClientPort = FindField(parser["Transport"].data(), "client_port=", NULL); string strClientPort = FindField(parser["Transport"].data(), "client_port=", NULL);
uint16_t ui16RtpPort = atoi( FindField(strClientPort.data(), NULL, "-").data()); uint16_t ui16RtpPort = atoi( FindField(strClientPort.data(), NULL, "-").data());
...@@ -653,23 +697,23 @@ void RtspSession::handleReq_Setup(const Parser &parser) { ...@@ -653,23 +697,23 @@ void RtspSession::handleReq_Setup(const Parser &parser) {
peerAddr.sin_port = htons(ui16RtpPort); peerAddr.sin_port = htons(ui16RtpPort);
peerAddr.sin_addr.s_addr = inet_addr(get_peer_ip().data()); peerAddr.sin_addr.s_addr = inet_addr(get_peer_ip().data());
bzero(&(peerAddr.sin_zero), sizeof peerAddr.sin_zero); bzero(&(peerAddr.sin_zero), sizeof peerAddr.sin_zero);
pSockRtp->setSendPeerAddr((struct sockaddr *)(&peerAddr)); pr.first->setSendPeerAddr((struct sockaddr *)(&peerAddr));
//设置rtcp发送目标地址 //设置rtcp发送目标地址
peerAddr.sin_family = AF_INET; peerAddr.sin_family = AF_INET;
peerAddr.sin_port = htons(ui16RtcpPort); peerAddr.sin_port = htons(ui16RtcpPort);
peerAddr.sin_addr.s_addr = inet_addr(get_peer_ip().data()); peerAddr.sin_addr.s_addr = inet_addr(get_peer_ip().data());
bzero(&(peerAddr.sin_zero), sizeof peerAddr.sin_zero); bzero(&(peerAddr.sin_zero), sizeof peerAddr.sin_zero);
pSockRtcp->setSendPeerAddr((struct sockaddr *)(&peerAddr)); pr.second->setSendPeerAddr((struct sockaddr *)(&peerAddr));
//尝试获取客户端nat映射地址 //尝试获取客户端nat映射地址
startListenPeerUdpData(trackIdx); startListenPeerUdpData(trackIdx);
//InfoP(this) << "分配端口:" << srv_port; //InfoP(this) << "分配端口:" << srv_port;
sendRtspResponse("200 OK", sendRtspResponse("200 OK",
{"Transport",StrPrinter << "RTP/AVP/UDP;unicast;" {"Transport", StrPrinter << "RTP/AVP/UDP;unicast;"
<< "client_port=" << strClientPort << ";" << "client_port=" << strClientPort << ";"
<< "server_port=" << pSockRtp->get_local_port() << "-" << pSockRtcp->get_local_port() << ";" << "server_port=" << pr.first->get_local_port() << "-" << pr.second->get_local_port() << ";"
<< "ssrc=" << printSSRC(trackRef->_ssrc) << "ssrc=" << printSSRC(trackRef->_ssrc)
}); });
} }
...@@ -720,18 +764,8 @@ void RtspSession::handleReq_Setup(const Parser &parser) { ...@@ -720,18 +764,8 @@ void RtspSession::handleReq_Setup(const Parser &parser) {
void RtspSession::handleReq_Play(const Parser &parser) { void RtspSession::handleReq_Play(const Parser &parser) {
if (_aTrackInfo.empty() || parser["Session"] != _strSession) { if (_aTrackInfo.empty() || parser["Session"] != _strSession) {
send_SessionNotFound(); send_SessionNotFound();
throw SockException(Err_shutdown,_aTrackInfo.empty() ? "can not find any availabe track when play" : "session not found when play"); throw SockException(Err_shutdown,_aTrackInfo.empty() ? "can not find any available track when play" : "session not found when play");
} }
auto strRange = parser["Range"];
auto onRes = [this,strRange](const string &err){
bool authSuccess = err.empty();
if(!authSuccess){
//第一次play是播放,否则是恢复播放。只对播放鉴权
sendRtspResponse("401 Unauthorized", {"Content-Type", "text/plain"}, err);
shutdown(SockException(Err_shutdown,StrPrinter << "401 Unauthorized:" << err));
return;
}
auto pMediaSrc = _pMediaSrc.lock(); auto pMediaSrc = _pMediaSrc.lock();
if(!pMediaSrc){ if(!pMediaSrc){
send_StreamNotFound(); send_StreamNotFound();
...@@ -742,7 +776,8 @@ void RtspSession::handleReq_Play(const Parser &parser) { ...@@ -742,7 +776,8 @@ void RtspSession::handleReq_Play(const Parser &parser) {
bool useBuf = true; bool useBuf = true;
_enableSendRtp = false; _enableSendRtp = false;
float iStartTime = 0; float iStartTime = 0;
if (strRange.size() && !_bFirstPlay) { auto strRange = parser["Range"];
if (strRange.size()) {
//这个是seek操作 //这个是seek操作
auto strStart = FindField(strRange.data(), "npt=", "-"); auto strStart = FindField(strRange.data(), "npt=", "-");
if (strStart == "now") { if (strStart == "now") {
...@@ -751,17 +786,16 @@ void RtspSession::handleReq_Play(const Parser &parser) { ...@@ -751,17 +786,16 @@ void RtspSession::handleReq_Play(const Parser &parser) {
iStartTime = 1000 * atof(strStart.data()); iStartTime = 1000 * atof(strStart.data());
InfoP(this) << "rtsp seekTo(ms):" << iStartTime; InfoP(this) << "rtsp seekTo(ms):" << iStartTime;
useBuf = !pMediaSrc->seekTo(iStartTime); useBuf = !pMediaSrc->seekTo(iStartTime);
}else if(pMediaSrc->totalReaderCount() == 0){ } else if (pMediaSrc->totalReaderCount() == 0) {
//第一个消费者 //第一个消费者
pMediaSrc->seekTo(0); pMediaSrc->seekTo(0);
} }
_bFirstPlay = false;
_StrPrinter rtp_info; _StrPrinter rtp_info;
for(auto &track : _aTrackInfo){ for (auto &track : _aTrackInfo) {
if (track->_inited == false) { if (track->_inited == false) {
//还有track没有setup //还有track没有setup
shutdown(SockException(Err_shutdown,"track not setuped")); shutdown(SockException(Err_shutdown, "track not setuped"));
return; return;
} }
track->_ssrc = pMediaSrc->getSsrc(track->_type); track->_ssrc = pMediaSrc->getSsrc(track->_type);
...@@ -770,11 +804,10 @@ void RtspSession::handleReq_Play(const Parser &parser) { ...@@ -770,11 +804,10 @@ void RtspSession::handleReq_Play(const Parser &parser) {
rtp_info << "url=" << _strContentBase << "/" << track->_control_surffix << ";" rtp_info << "url=" << _strContentBase << "/" << track->_control_surffix << ";"
<< "seq=" << track->_seq << ";" << "seq=" << track->_seq << ";"
<< "rtptime=" << (int)(track->_time_stamp * (track->_samplerate / 1000)) << ","; << "rtptime=" << (int) (track->_time_stamp * (track->_samplerate / 1000)) << ",";
} }
rtp_info.pop_back(); rtp_info.pop_back();
sendRtspResponse("200 OK", sendRtspResponse("200 OK",
{"Range", StrPrinter << "npt=" << setiosflags(ios::fixed) << setprecision(2) << (useBuf? pMediaSrc->getTimeStamp(TrackInvalid) / 1000.0 : iStartTime / 1000), {"Range", StrPrinter << "npt=" << setiosflags(ios::fixed) << setprecision(2) << (useBuf? pMediaSrc->getTimeStamp(TrackInvalid) / 1000.0 : iStartTime / 1000),
"RTP-Info",rtp_info "RTP-Info",rtp_info
...@@ -785,51 +818,24 @@ void RtspSession::handleReq_Play(const Parser &parser) { ...@@ -785,51 +818,24 @@ void RtspSession::handleReq_Play(const Parser &parser) {
if (!_pRtpReader && _rtpType != Rtsp::RTP_MULTICAST) { if (!_pRtpReader && _rtpType != Rtsp::RTP_MULTICAST) {
weak_ptr<RtspSession> weakSelf = dynamic_pointer_cast<RtspSession>(shared_from_this()); weak_ptr<RtspSession> weakSelf = dynamic_pointer_cast<RtspSession>(shared_from_this());
_pRtpReader = pMediaSrc->getRing()->attach(getPoller(),useBuf); _pRtpReader = pMediaSrc->getRing()->attach(getPoller(), useBuf);
_pRtpReader->setDetachCB([weakSelf]() { _pRtpReader->setDetachCB([weakSelf]() {
auto strongSelf = weakSelf.lock(); auto strongSelf = weakSelf.lock();
if(!strongSelf) { if (!strongSelf) {
return; return;
} }
strongSelf->shutdown(SockException(Err_shutdown,"rtsp ring buffer detached")); strongSelf->shutdown(SockException(Err_shutdown, "rtsp ring buffer detached"));
}); });
_pRtpReader->setReadCB([weakSelf](const RtspMediaSource::RingDataType &pack) { _pRtpReader->setReadCB([weakSelf](const RtspMediaSource::RingDataType &pack) {
auto strongSelf = weakSelf.lock(); auto strongSelf = weakSelf.lock();
if(!strongSelf) { if (!strongSelf) {
return; return;
} }
if(strongSelf->_enableSendRtp) { if (strongSelf->_enableSendRtp) {
strongSelf->sendRtpPacket(pack); strongSelf->sendRtpPacket(pack);
} }
}); });
} }
};
weak_ptr<RtspSession> weakSelf = dynamic_pointer_cast<RtspSession>(shared_from_this());
Broadcast::AuthInvoker invoker = [weakSelf,onRes](const string &err){
auto strongSelf = weakSelf.lock();
if(!strongSelf){
return;
}
strongSelf->async([weakSelf,onRes,err](){
auto strongSelf = weakSelf.lock();
if(!strongSelf){
return;
}
onRes(err);
});
};
if(_bFirstPlay){
//第一次收到play命令,需要鉴权
auto flag = NoticeCenter::Instance().emitEvent(Broadcast::kBroadcastMediaPlayed,_mediaInfo,invoker,static_cast<SockInfo &>(*this));
if(!flag){
//该事件无人监听,默认不鉴权
onRes("");
}
}else{
//后面是seek或恢复命令,不需要鉴权
onRes("");
}
} }
void RtspSession::handleReq_Pause(const Parser &parser) { void RtspSession::handleReq_Pause(const Parser &parser) {
......
...@@ -160,6 +160,8 @@ private: ...@@ -160,6 +160,8 @@ private:
void onAuthBasic(const string &realm,const string &strBase64); void onAuthBasic(const string &realm,const string &strBase64);
//校验md5方式的认证加密 //校验md5方式的认证加密
void onAuthDigest(const string &realm,const string &strMd5); void onAuthDigest(const string &realm,const string &strMd5);
//触发url鉴权事件
void emitOnPlay();
//发送rtp给客户端 //发送rtp给客户端
void sendRtpPacket(const RtspMediaSource::RingDataType &pkt); void sendRtpPacket(const RtspMediaSource::RingDataType &pkt);
...@@ -179,8 +181,10 @@ private: ...@@ -179,8 +181,10 @@ private:
string _strContentBase; string _strContentBase;
//Session号 //Session号
string _strSession; string _strSession;
//是否第一次播放,第一次播放需要鉴权,第二次播放属于暂停恢复 //记录是否需要rtsp专属鉴权,防止重复触发事件
bool _bFirstPlay = true; string _rtsp_realm;
//是否已经触发on_play事件
bool _emit_on_play = false;
//url解析后保存的相关信息 //url解析后保存的相关信息
MediaInfo _mediaInfo; MediaInfo _mediaInfo;
//rtsp播放器绑定的直播源 //rtsp播放器绑定的直播源
......
...@@ -10,10 +10,29 @@ ...@@ -10,10 +10,29 @@
#include <cstdlib> #include <cstdlib>
#include "RtspSplitter.h" #include "RtspSplitter.h"
#include "Util/logger.h"
#include "Util/util.h"
namespace mediakit{ namespace mediakit{
const char *RtspSplitter::onSearchPacketTail(const char *data, int len) { const char *RtspSplitter::onSearchPacketTail(const char *data, int len) {
auto ret = onSearchPacketTail_l(data, len);
if(ret){
return ret;
}
if (len > 256 * 1024) {
//rtp大于256KB
ret = (char *) memchr(data, '$', len);
if (!ret) {
WarnL << "rtp缓存溢出:" << hexdump(data, 1024);
reset();
}
}
return ret;
}
const char *RtspSplitter::onSearchPacketTail_l(const char *data, int len) {
if(!_enableRecvRtp || data[0] != '$'){ if(!_enableRecvRtp || data[0] != '$'){
//这是rtsp包 //这是rtsp包
_isRtpPacket = false; _isRtpPacket = false;
......
...@@ -48,6 +48,7 @@ protected: ...@@ -48,6 +48,7 @@ protected:
virtual int64_t getContentLength(Parser &parser); virtual int64_t getContentLength(Parser &parser);
protected: protected:
const char *onSearchPacketTail(const char *data,int len) override ; const char *onSearchPacketTail(const char *data,int len) override ;
const char *onSearchPacketTail_l(const char *data,int len) ;
int64_t onRecvHeader(const char *data,uint64_t len) override; int64_t onRecvHeader(const char *data,uint64_t len) override;
void onRecvContent(const char *data,uint64_t len) override; void onRecvContent(const char *data,uint64_t len) override;
private: private:
......
...@@ -13,6 +13,7 @@ ...@@ -13,6 +13,7 @@
#include <string> #include <string>
#include <memory> #include <memory>
#include <stdexcept> #include <stdexcept>
#include "Extension/Frame.h"
#ifdef __cplusplus #ifdef __cplusplus
extern "C" { extern "C" {
#endif #endif
...@@ -27,14 +28,24 @@ using namespace std; ...@@ -27,14 +28,24 @@ using namespace std;
namespace mediakit { namespace mediakit {
class H264Decoder class FFMpegDecoder{
{
public: public:
H264Decoder(void){ FFMpegDecoder(int codec_id){
auto ff_codec_id = AV_CODEC_ID_H264;
switch (codec_id){
case CodecH264:
ff_codec_id = AV_CODEC_ID_H264;
break;
case CodecH265:
ff_codec_id = AV_CODEC_ID_H265;
break;
default:
throw std::invalid_argument("不支持该编码格式");
}
avcodec_register_all(); avcodec_register_all();
AVCodec *pCodec = avcodec_find_decoder(AV_CODEC_ID_H264); AVCodec *pCodec = avcodec_find_decoder(ff_codec_id);
if (!pCodec) { if (!pCodec) {
throw std::runtime_error("未找到H264解码器"); throw std::runtime_error("未找到解码器");
} }
m_pContext.reset(avcodec_alloc_context3(pCodec), [](AVCodecContext *pCtx) { m_pContext.reset(avcodec_alloc_context3(pCodec), [](AVCodecContext *pCtx) {
avcodec_close(pCtx); avcodec_close(pCtx);
...@@ -57,7 +68,7 @@ public: ...@@ -57,7 +68,7 @@ public:
throw std::runtime_error("创建帧缓存失败"); throw std::runtime_error("创建帧缓存失败");
} }
} }
virtual ~H264Decoder(void){} virtual ~FFMpegDecoder(void){}
bool inputVideo(unsigned char* data,unsigned int dataSize,uint32_t ui32Stamp,AVFrame **ppFrame){ bool inputVideo(unsigned char* data,unsigned int dataSize,uint32_t ui32Stamp,AVFrame **ppFrame){
AVPacket pkt; AVPacket pkt;
av_init_packet(&pkt); av_init_packet(&pkt);
......
...@@ -61,40 +61,6 @@ public: ...@@ -61,40 +61,6 @@ public:
} }
}; };
void get_file_path(const char *path, const char *file_name, char *file_path) {
strcpy(file_path, path);
if (file_path[strlen(file_path) - 1] != '/') {
strcat(file_path, "/");
}
strcat(file_path, file_name);
}
template <typename FUNC>
void for_each_file(const char *path, FUNC &&func){
DIR *dir;
dirent *dir_info;
char file_path[PATH_MAX];
if (File::is_file(path)) {
func(path);
return;
}
if (File::is_dir(path)) {
if ((dir = opendir(path)) == NULL) {
closedir(dir);
return;
}
while ((dir_info = readdir(dir)) != NULL) {
if (File::is_special_dir(dir_info->d_name)) {
continue;
}
get_file_path(path, dir_info->d_name, file_path);
for_each_file(file_path,std::forward<FUNC>(func));
}
closedir(dir);
return;
}
}
static const char s_bom[] = "\xEF\xBB\xBF"; static const char s_bom[] = "\xEF\xBB\xBF";
void add_or_rm_bom(const char *file,bool rm_bom){ void add_or_rm_bom(const char *file,bool rm_bom){
...@@ -159,23 +125,26 @@ int main(int argc, char *argv[]) { ...@@ -159,23 +125,26 @@ int main(int argc, char *argv[]) {
bool no_filter = filter_set.find("*") != filter_set.end(); bool no_filter = filter_set.find("*") != filter_set.end();
//设置日志 //设置日志
Logger::Instance().add(std::make_shared<ConsoleChannel>()); Logger::Instance().add(std::make_shared<ConsoleChannel>());
path = File::absolutePath(path, ""); File::scanDir(path, [&](const string &path, bool isDir) {
for_each_file(path.data(),[&](const char *path){ if (isDir) {
if(!no_filter){ return true;
}
if (!no_filter) {
//开启了过滤器 //开启了过滤器
auto pos = strstr(path,"."); auto pos = strstr(path.data(), ".");
if(pos == nullptr){ if (pos == nullptr) {
//没有后缀 //没有后缀
return; return true;
} }
auto ext = pos + 1; auto ext = pos + 1;
if(filter_set.find(ext) == filter_set.end()){ if (filter_set.find(ext) == filter_set.end()) {
//后缀不匹配 //后缀不匹配
return; return true;
} }
} }
//该文件匹配 //该文件匹配
process_file(path,rm_bom); process_file(path.data(), rm_bom);
}); return true;
}, true);
return 0; return 0;
} }
...@@ -128,8 +128,6 @@ int main(int argc, char *argv[]) { ...@@ -128,8 +128,6 @@ int main(int argc, char *argv[]) {
bool no_filter = filter_set.find("*") != filter_set.end(); bool no_filter = filter_set.find("*") != filter_set.end();
//设置日志 //设置日志
Logger::Instance().add(std::make_shared<ConsoleChannel>()); Logger::Instance().add(std::make_shared<ConsoleChannel>());
path = File::absolutePath(path, "");
DebugL << path;
File::scanDir(path, [&](const string &path, bool isDir) { File::scanDir(path, [&](const string &path, bool isDir) {
if (isDir) { if (isDir) {
return true; return true;
......
...@@ -44,6 +44,10 @@ int main(int argc, char *argv[]) { ...@@ -44,6 +44,10 @@ int main(int argc, char *argv[]) {
auto playerCnt = atoi(argv[1]);//启动的播放器个数 auto playerCnt = atoi(argv[1]);//启动的播放器个数
atomic_int alivePlayerCnt(0); atomic_int alivePlayerCnt(0);
//由于所有播放器都是再一个timer里面创建的,默认情况下所有播放器会绑定该timer所在的poller线程
//为了提高性能,poller分配策略关闭优先返回当前线程的策略
EventPollerPool::Instance().preferCurrentThread(false);
//每隔若干毫秒启动一个播放器(如果一次性全部启动,服务器和客户端可能都承受不了) //每隔若干毫秒启动一个播放器(如果一次性全部启动,服务器和客户端可能都承受不了)
Timer timer0(atoi(argv[2])/1000.0f,[&]() { Timer timer0(atoi(argv[2])/1000.0f,[&]() {
MediaPlayer::Ptr player(new MediaPlayer()); MediaPlayer::Ptr player(new MediaPlayer());
......
...@@ -12,13 +12,12 @@ ...@@ -12,13 +12,12 @@
#include "Util/util.h" #include "Util/util.h"
#include "Util/logger.h" #include "Util/logger.h"
#include <iostream> #include <iostream>
#include "Poller/EventPoller.h"
#include "Rtsp/UDPServer.h" #include "Rtsp/UDPServer.h"
#include "Player/MediaPlayer.h" #include "Player/MediaPlayer.h"
#include "Util/onceToken.h" #include "Util/onceToken.h"
#include "H264Decoder.h" #include "FFMpegDecoder.h"
#include "YuvDisplayer.h" #include "YuvDisplayer.h"
#include "Network/sockutil.h" #include "Extension/H265.h"
using namespace std; using namespace std;
using namespace toolkit; using namespace toolkit;
...@@ -111,36 +110,34 @@ int main(int argc, char *argv[]) { ...@@ -111,36 +110,34 @@ int main(int argc, char *argv[]) {
} }
auto viedoTrack = strongPlayer->getTrack(TrackVideo); auto viedoTrack = strongPlayer->getTrack(TrackVideo);
if (!viedoTrack || viedoTrack->getCodecId() != CodecH264) { if (!viedoTrack) {
WarnL << "没有视频或者视频不是264编码!"; WarnL << "没有视频!";
return; return;
} }
AnyStorage::Ptr storage(new AnyStorage); AnyStorage::Ptr storage(new AnyStorage);
viedoTrack->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([storage](const Frame::Ptr &frame) { viedoTrack->addDelegate(std::make_shared<FrameWriterInterfaceHelper>([storage](const Frame::Ptr &frame_in) {
auto frame = Frame::getCacheAbleFrame(frame_in);
SDLDisplayerHelper::Instance().doTask([frame,storage]() { SDLDisplayerHelper::Instance().doTask([frame,storage]() {
auto &decoder = (*storage)["decoder"]; auto &decoder = (*storage)["decoder"];
auto &displayer = (*storage)["displayer"]; auto &displayer = (*storage)["displayer"];
auto &merger = (*storage)["merger"]; auto &merger = (*storage)["merger"];
if(!decoder){ if(!decoder){
decoder.set<H264Decoder>(); decoder.set<FFMpegDecoder>(frame->getCodecId());
} }
if(!displayer){ if(!displayer){
displayer.set<YuvDisplayer>(nullptr,url); displayer.set<YuvDisplayer>(nullptr,url);
} }
if(!merger){ if(!merger){
merger.set<FrameMerger>(); merger.set<FrameMerger>();
}; }
merger.get<FrameMerger>().inputFrame(frame,[&](uint32_t dts,uint32_t pts,const Buffer::Ptr &buffer){ merger.get<FrameMerger>().inputFrame(frame,[&](uint32_t dts,uint32_t pts,const Buffer::Ptr &buffer){
AVFrame *pFrame = nullptr; AVFrame *pFrame = nullptr;
bool flag = decoder.get<H264Decoder>().inputVideo((unsigned char *) buffer->data(), buffer->size(), dts, &pFrame); bool flag = decoder.get<FFMpegDecoder>().inputVideo((unsigned char *) buffer->data(), buffer->size(), dts, &pFrame);
if (flag) { if (flag) {
displayer.get<YuvDisplayer>().displayYUV(pFrame); displayer.get<YuvDisplayer>().displayYUV(pFrame);
} }
}); });
return true; return true;
}); });
})); }));
......
/* /*
* Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved. * Copyright (c) 2016 The ZLMediaKit project authors. All Rights Reserved.
* *
* This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit). * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
...@@ -58,7 +58,7 @@ static bool loadFile(const char *path){ ...@@ -58,7 +58,7 @@ static bool loadFile(const char *path){
RtpSelector::Instance().inputRtp(nullptr,rtp,len, &addr,&timeStamp); RtpSelector::Instance().inputRtp(nullptr,rtp,len, &addr,&timeStamp);
if(timeStamp_last){ if(timeStamp_last){
auto diff = timeStamp - timeStamp_last; auto diff = timeStamp - timeStamp_last;
if(diff > 0){ if(diff > 0 && diff < 500){
usleep(diff * 1000); usleep(diff * 1000);
} }
} }
......
...@@ -59,10 +59,11 @@ int main(int argc, char *argv[]) { ...@@ -59,10 +59,11 @@ int main(int argc, char *argv[]) {
Logger::Instance().add(std::make_shared<ConsoleChannel>()); Logger::Instance().add(std::make_shared<ConsoleChannel>());
Logger::Instance().setWriter(std::make_shared<AsyncLogWriter>()); Logger::Instance().setWriter(std::make_shared<AsyncLogWriter>());
{
WebSocketClient<EchoTcpClient>::Ptr client = std::make_shared<WebSocketClient<EchoTcpClient> >(); WebSocketClient<EchoTcpClient>::Ptr client = std::make_shared<WebSocketClient<EchoTcpClient> >();
client->startConnect("121.40.165.18",8800); client->startConnect("127.0.0.1", 80);
sem.wait(); sem.wait();
}
return 0; return 0;
} }
...@@ -96,26 +96,28 @@ int main(int argc, char *argv[]) { ...@@ -96,26 +96,28 @@ int main(int argc, char *argv[]) {
SSL_Initor::Instance().loadCertificate((exeDir() + "ssl.p12").data()); SSL_Initor::Instance().loadCertificate((exeDir() + "ssl.p12").data());
{
TcpServer::Ptr httpSrv(new TcpServer()); TcpServer::Ptr httpSrv(new TcpServer());
//http服务器,支持websocket //http服务器,支持websocket
httpSrv->start<WebSocketSessionBase<EchoSessionCreator,HttpSession> >(80);//默认80 httpSrv->start<WebSocketSessionBase<EchoSessionCreator, HttpSession> >(80);//默认80
TcpServer::Ptr httpsSrv(new TcpServer()); TcpServer::Ptr httpsSrv(new TcpServer());
//https服务器,支持websocket //https服务器,支持websocket
httpsSrv->start<WebSocketSessionBase<EchoSessionCreator,HttpsSession> >(443);//默认443 httpsSrv->start<WebSocketSessionBase<EchoSessionCreator, HttpsSession> >(443);//默认443
TcpServer::Ptr httpSrvOld(new TcpServer()); TcpServer::Ptr httpSrvOld(new TcpServer());
//兼容之前的代码(但是不支持根据url选择生成TcpSession类型) //兼容之前的代码(但是不支持根据url选择生成TcpSession类型)
httpSrvOld->start<WebSocketSession<EchoSession,HttpSession> >(8080); httpSrvOld->start<WebSocketSession<EchoSession, HttpSession> >(8080);
DebugL << "请打开网页:http://www.websocket-test.com/,进行测试"; DebugL << "请打开网页:http://www.websocket-test.com/,进行测试";
DebugL << "连接 ws://127.0.0.1/xxxx,ws://127.0.0.1/ 测试的效果将不同,支持根据url选择不同的处理逻辑"; DebugL << "连接 ws://127.0.0.1/xxxx,ws://127.0.0.1/ 测试的效果将不同,支持根据url选择不同的处理逻辑";
//设置退出信号处理函数 //设置退出信号处理函数
static semaphore sem; static semaphore sem;
signal(SIGINT, [](int) { sem.post(); });// 设置退出信号 signal(SIGINT, [](int) { sem.post(); });// 设置退出信号
sem.wait(); sem.wait();
}
return 0; return 0;
} }
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论