ZLMediaKit · Commit 9736badc
authored May 25, 2022 by ziyue
Optimize codec-related code
parent 0739b1dd
Showing 9 changed files with 106 additions and 81 deletions (+106 -81):

player/test_player.cpp      +2   -2
src/Codec/H264Encoder.cpp   +29  -34
src/Codec/H264Encoder.h     +9   -10
src/Codec/Transcode.cpp     +0   -0
src/Codec/Transcode.h       +42  -20
src/Common/Device.cpp       +6   -6
src/Common/Device.h         +9   -8
src/Common/config.cpp       +5   -1
src/Common/config.h         +4   -0
player/test_player.cpp
@@ -84,7 +84,7 @@ int main(int argc, char *argv[]) {
         });
     });
     auto delegate = std::make_shared<FrameWriterInterfaceHelper>([decoder](const Frame::Ptr &frame) {
-        return decoder->inputFrame(frame, false);
+        return decoder->inputFrame(frame, false, true);
     });
     videoTrack->addDelegate(delegate);
 }
@@ -106,7 +106,7 @@ int main(int argc, char *argv[]) {
         audio_player->playPCM((const char *)(pcm->get()->data[0]), MIN(len, frame->get()->linesize[0]));
     });
     auto audio_delegate = std::make_shared<FrameWriterInterfaceHelper>([decoder](const Frame::Ptr &frame) {
-        return decoder->inputFrame(frame, false);
+        return decoder->inputFrame(frame, false, true);
     });
     audioTrack->addDelegate(audio_delegate);
 }
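Both hunks add a third argument to the decoder call inside the delegate. A minimal annotated sketch of the call site; the parameter names are taken from the new FFmpegDecoder::inputFrame declaration in src/Codec/Transcode.h further down in this commit, so treat the annotations as an interpretation rather than documented semantics:

    // New signature (src/Codec/Transcode.h):
    // bool inputFrame(const Frame::Ptr &frame, bool live, bool async, bool enable_merge = true);
    auto delegate = std::make_shared<FrameWriterInterfaceHelper>([decoder](const Frame::Ptr &frame) {
        return decoder->inputFrame(frame, false /* live */, true /* async */);
    });
    videoTrack->addDelegate(delegate);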
src/Codec/H264Encoder.cpp
@@ -9,17 +9,14 @@
  */
 #ifdef ENABLE_X264
 #include "H264Encoder.h"
 #include "Util/TimeTicker.h"

 using namespace toolkit;

 namespace mediakit {

-H264Encoder::H264Encoder() {
-}
+H264Encoder::H264Encoder() {}

 H264Encoder::~H264Encoder() {
     //* clean up the picture buffers
@@ -39,7 +36,6 @@ H264Encoder::~H264Encoder() {
     }
 }

 /*typedef struct x264_param_t
 {
     CPU flags
@@ -212,7 +208,7 @@
     void (*param_free)( void* );
 } x264_param_t;*/

-bool H264Encoder::init(int iWidth, int iHeight, int iFps) {
+bool H264Encoder::init(int iWidth, int iHeight, int iFps, int iBitRate) {
     if (_pX264Handle) {
         return true;
     }
@@ -222,7 +218,7 @@ bool H264Encoder::init(int iWidth, int iHeight, int iFps) {
     x264_param_default_preset(pX264Param, "ultrafast", "zerolatency");

     //* cpuFlags
     pX264Param->i_threads = X264_SYNC_LOOKAHEAD_AUTO; //* guarantees that reusing an empty buffer cannot deadlock
     //* video Properties
     pX264Param->i_width = iWidth;   //* width
     pX264Param->i_height = iHeight; //* height
@@ -230,21 +226,21 @@ bool H264Encoder::init(int iWidth, int iHeight, int iFps) {
     pX264Param->i_keyint_max = iFps * 3; // ffmpeg:gop_size, maximum keyframe interval
     pX264Param->i_keyint_min = iFps * 1; // ffmpeg:keyint_min, minimum keyframe interval
     //* Rate control Parameters
-    pX264Param->rc.i_bitrate = 5000;            //* bitrate, in Kbps
+    pX264Param->rc.i_bitrate = iBitRate / 1000; //* bitrate, in Kbps
     pX264Param->rc.i_qp_step = 1;  // ffmpeg:max_qdiff, maximum quantizer change between consecutive frames
     pX264Param->rc.i_qp_min = 10;  // ffmpeg:qmin, minimum quantizer; range 1-51, 10-30 recommended
     pX264Param->rc.i_qp_max = 41;  // ffmpeg:qmax, maximum quantizer; range 1-51, 10-30 recommended
     pX264Param->rc.f_qcompress = 0.6; // ffmpeg:qcompress, quantizer compression 0-1; lower keeps the bitrate closer to constant, higher keeps the quantizer closer to constant
     pX264Param->analyse.i_me_range = 16; // ffmpeg:me_range, motion estimation search radius
     pX264Param->i_frame_reference = 3;   // ffmpeg:refs, number of reference frames for B and P frames; range 1-16.
                                          // Does not affect decoding speed, but larger values need more decoder memory;
                                          // bigger is generally better, with little gain beyond 6.
     pX264Param->analyse.i_trellis = 1;   // ffmpeg:trellis
     //pX264Param->analyse.i_me_method=X264_ME_DIA; // ffmpeg:me_method ME_ZERO, motion estimation method
     pX264Param->rc.f_qblur = 0.5; // ffmpeg:qblur
     //* bitstream parameters
     /*open-GOP
@@ -268,7 +264,7 @@ bool H264Encoder::init(int iWidth, int iHeight, int iFps) {
     Since B-frames compress better than P-frames, open-GOP performs slightly better than closed-GOP,
     but for compatibility and fewer headaches it is better to keep open-GOP disabled. */
     pX264Param->b_open_gop = 0;
     pX264Param->i_bframe = 0; // maximum number of B-frames
     pX264Param->i_bframe_pyramid = 0;
     pX264Param->i_bframe_adaptive = X264_B_ADAPT_TRELLIS;

     //* Log
@@ -304,20 +300,19 @@ bool H264Encoder::init(int iWidth, int iHeight, int iFps) {
     return true;
 }

-int H264Encoder::inputData(char *apcYuv[3], int aiYuvLen[3], int64_t i64Pts, H264Frame **ppFrame) {
+int H264Encoder::inputData(char *yuv[3], int linesize[3], int64_t cts, H264Frame **out_frame) {
     //TimeTicker1(5);
-    _pPicIn->img.i_stride[0] = aiYuvLen[0];
-    _pPicIn->img.i_stride[1] = aiYuvLen[1];
-    _pPicIn->img.i_stride[2] = aiYuvLen[2];
-    _pPicIn->img.plane[0] = (uint8_t *)apcYuv[0];
-    _pPicIn->img.plane[1] = (uint8_t *)apcYuv[1];
-    _pPicIn->img.plane[2] = (uint8_t *)apcYuv[2];
-    _pPicIn->i_pts = i64Pts;
+    _pPicIn->img.i_stride[0] = linesize[0];
+    _pPicIn->img.i_stride[1] = linesize[1];
+    _pPicIn->img.i_stride[2] = linesize[2];
+    _pPicIn->img.plane[0] = (uint8_t *)yuv[0];
+    _pPicIn->img.plane[1] = (uint8_t *)yuv[1];
+    _pPicIn->img.plane[2] = (uint8_t *)yuv[2];
+    _pPicIn->i_pts = cts;
     int iNal;
     x264_nal_t *pNals;
     int iResult = x264_encoder_encode(_pX264Handle, &pNals, &iNal, _pPicIn, _pPicOut);
     if (iResult <= 0) {
         return 0;
     }
@@ -327,7 +322,7 @@
         _aFrames[i].iLength = pNal.i_payload;
         _aFrames[i].pucData = pNal.p_payload;
     }
-    *ppFrame = _aFrames;
+    *out_frame = _aFrames;
     return iNal;
 }
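One detail worth noting in the init() change: rc.i_bitrate is no longer hard-coded to 5000 but derived from the new iBitRate argument. x264's rc.i_bitrate is expressed in Kbps, so a value passed in bits per second (which the VideoInfo::iBitRate default added below in src/Common/Device.h suggests) is divided by 1000. A quick check of that default, as an illustration only:

    #include <cstdio>

    int main() {
        int iBitRate = 2 * 1024 * 1024; // default VideoInfo::iBitRate added below, in bits per second
        int kbps = iBitRate / 1000;     // what ends up in pX264Param->rc.i_bitrate
        std::printf("%d Kbps\n", kbps); // prints 2097
        return 0;
    }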
src/Codec/H264Encoder.h
@@ -7,13 +7,10 @@
  * LICENSE file in the root of the source tree. All contributing project authors
  * may be found in the AUTHORS file in the root of the source tree.
  */

 #ifndef CODEC_H264ENCODER_H_
 #define CODEC_H264ENCODER_H_

 #include <cstdint>

 #ifdef __cplusplus
 extern "C" {
 #endif //__cplusplus
@@ -32,14 +29,16 @@ public:
         uint8_t *pucData;
     } H264Frame;

-    H264Encoder(void);
-    virtual ~H264Encoder(void);
-    bool init(int iWidth, int iHeight, int iFps);
-    int inputData(char *apcYuv[3], int aiYuvLen[3], int64_t i64Pts, H264Frame **ppFrame);
+    H264Encoder();
+    ~H264Encoder();
+    bool init(int iWidth, int iHeight, int iFps, int iBitRate);
+    int inputData(char *yuv[3], int linesize[3], int64_t cts, H264Frame **out_frame);

 private:
     x264_t *_pX264Handle = nullptr;
     x264_picture_t *_pPicIn = nullptr;
     x264_picture_t *_pPicOut = nullptr;
     H264Frame _aFrames[10];
 };
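For reference, a minimal sketch of driving the reworked H264Encoder interface, based only on the declarations above; resolution, frame rate, bitrate, and the YUV buffers are placeholders:

    #ifdef ENABLE_X264
    #include "H264Encoder.h"
    using namespace mediakit;

    // Sketch: initialize the encoder once and push one yuv420p frame through it.
    static void encodeOneFrame(char *yuv[3], int linesize[3], int64_t cts) {
        static H264Encoder enc;
        static bool ok = enc.init(1280, 720, 25, 2 * 1024 * 1024); // width, height, fps, bitrate
        if (!ok) {
            return;
        }
        H264Encoder::H264Frame *out_frame = nullptr;
        int nal_count = enc.inputData(yuv, linesize, cts, &out_frame);
        for (int i = 0; i < nal_count; ++i) {
            // out_frame[i].pucData / out_frame[i].iLength hold one encoded NAL unit
        }
    }
    #endif // ENABLE_X264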
src/Codec/Transcode.cpp
Diff is collapsed.
src/Codec/Transcode.h
@@ -8,23 +8,29 @@
  * may be found in the AUTHORS file in the root of the source tree.
  */

-#ifndef FFMpegDecoder_H_
-#define FFMpegDecoder_H_
+#ifndef ZLMEDIAKIT_TRANSCODE_H
+#define ZLMEDIAKIT_TRANSCODE_H
+
+#if defined(ENABLE_FFMPEG)

 #include "Util/TimeTicker.h"
 #include "Common/MediaSink.h"
-#if defined(ENABLE_FFMPEG)

 #ifdef __cplusplus
 extern "C" {
 #endif
 #include "libswscale/swscale.h"
 #include "libavutil/avutil.h"
 #include "libavutil/pixdesc.h"
 #include "libavcodec/avcodec.h"
 #include "libswresample/swresample.h"
 #include "libavutil/audio_fifo.h"
 #ifdef __cplusplus
 }
 #endif

 namespace mediakit {

 class FFmpegFrame {
 public:
     using Ptr = std::shared_ptr<FFmpegFrame>;
@@ -33,6 +39,7 @@ public:
     ~FFmpegFrame();

     AVFrame *get() const;
     void fillPicture(AVPixelFormat target_format, int target_width, int target_height);

 private:
     char *_data = nullptr;
@@ -45,7 +52,6 @@ public:
     FFmpegSwr(AVSampleFormat output, int channel, int channel_layout, int samplerate);
     ~FFmpegSwr();
     FFmpegFrame::Ptr inputFrame(const FFmpegFrame::Ptr &frame);

 private:
@@ -59,19 +65,19 @@ private:
 class TaskManager {
 public:
     TaskManager() = default;
-    ~TaskManager();
+    virtual ~TaskManager();

     void setMaxTaskSize(size_t size);
+    void stopThread(bool drop_task);

 protected:
     void startThread(const std::string &name);
-    void stopThread();
-    void addEncodeTask(std::function<void()> task);
-    void addDecodeTask(bool key_frame, std::function<void()> task);
+    bool addEncodeTask(std::function<void()> task);
+    bool addDecodeTask(bool key_frame, std::function<void()> task);
     bool isEnabled() const;

 private:
     void onThreadRun(const std::string &name);
     void pushExit();

 private:
     class ThreadExitException : public std::runtime_error {
@@ -83,39 +89,55 @@ private:
 private:
     bool _decode_drop_start = false;
     bool _exit = false;
     size_t _max_task = 30;
     std::mutex _task_mtx;
     toolkit::semaphore _sem;
     toolkit::List<std::function<void()> > _task;
     std::shared_ptr<std::thread> _thread;
 };

-class FFmpegDecoder : private TaskManager {
+class FFmpegDecoder : public TaskManager {
 public:
     using Ptr = std::shared_ptr<FFmpegDecoder>;
     using onDec = std::function<void(const FFmpegFrame::Ptr &)>;

-    FFmpegDecoder(const mediakit::Track::Ptr &track);
-    ~FFmpegDecoder();
+    FFmpegDecoder(const Track::Ptr &track, int thread_num = 2);
+    ~FFmpegDecoder() override;

-    bool inputFrame(const mediakit::Frame::Ptr &frame, bool may_async = true);
+    bool inputFrame(const Frame::Ptr &frame, bool live, bool async, bool enable_merge = true);
     void setOnDecode(onDec cb);
     void flush();
     const AVCodecContext *getContext() const;

 private:
     void onDecode(const FFmpegFrame::Ptr &frame);
-    bool inputFrame_l(const mediakit::Frame::Ptr &frame);
-    bool decodeFrame(const char *data, size_t size, uint32_t dts, uint32_t pts);
+    bool inputFrame_l(const Frame::Ptr &frame, bool live, bool enable_merge);
+    bool decodeFrame(const char *data, size_t size, uint32_t dts, uint32_t pts, bool live);

 private:
     bool _do_merger = false;
     toolkit::Ticker _ticker;
     onDec _cb;
     std::shared_ptr<AVCodecContext> _context;
-    mediakit::FrameMerger _merger{mediakit::FrameMerger::h264_prefix};
+    FrameMerger _merger{FrameMerger::h264_prefix};
 };

-#endif// ENABLE_FFMPEG
-#endif /* FFMpegDecoder_H_ */
+class FFmpegSws {
+public:
+    using Ptr = std::shared_ptr<FFmpegSws>;
+
+    FFmpegSws(AVPixelFormat output, int width, int height);
+    ~FFmpegSws();
+    FFmpegFrame::Ptr inputFrame(const FFmpegFrame::Ptr &frame);
+    int inputFrame(const FFmpegFrame::Ptr &frame, uint8_t *data);
+
+private:
+    int _target_width;
+    int _target_height;
+    SwsContext *_ctx = nullptr;
+    AVPixelFormat _target_format;
+};
+
+}//namespace mediakit
+#endif// ENABLE_FFMPEG
+#endif //ZLMEDIAKIT_TRANSCODE_H
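As a usage reference, a minimal sketch of wiring the revised FFmpegDecoder together with the new FFmpegSws scaler, assuming a video Track::Ptr is already available; pixel format, size, and thread count are illustrative only:

    #if defined(ENABLE_FFMPEG)
    #include "Codec/Transcode.h"
    using namespace mediakit;

    // Sketch: decode incoming frames on a worker thread and convert each
    // decoded picture to YUV420P at a fixed size.
    static void setupDecoder(const Track::Ptr &video_track) {
        auto decoder = std::make_shared<FFmpegDecoder>(video_track, 2);        // 2 decode threads
        auto sws = std::make_shared<FFmpegSws>(AV_PIX_FMT_YUV420P, 1280, 720); // target format/size

        decoder->setOnDecode([sws](const FFmpegFrame::Ptr &frame) {
            FFmpegFrame::Ptr yuv = sws->inputFrame(frame); // rescale/convert the decoded AVFrame
            (void)yuv; // consume yuv->get() (an AVFrame *) here
        });

        // Feeding frames later, e.g. from a track delegate:
        // decoder->inputFrame(frame, false /* live */, true /* async */);
    }
    #endif // ENABLE_FFMPEG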
src/Common/Device.cpp
@@ -28,22 +28,22 @@ using namespace std;

 namespace mediakit {

-bool DevChannel::inputYUV(char *apcYuv[3], int aiYuvLen[3], uint32_t uiStamp) {
+bool DevChannel::inputYUV(char *yuv[3], int linesize[3], uint32_t cts) {
 #ifdef ENABLE_X264
     //TimeTicker1(50);
     if (!_pH264Enc) {
         _pH264Enc.reset(new H264Encoder());
-        if (!_pH264Enc->init(_video->iWidth, _video->iHeight, _video->iFrameRate)) {
+        if (!_pH264Enc->init(_video->iWidth, _video->iHeight, _video->iFrameRate, _video->iBitRate)) {
             _pH264Enc.reset();
             WarnL << "H264Encoder init failed!";
         }
     }
     if (_pH264Enc) {
-        H264Encoder::H264Frame *pOut;
-        int iFrames = _pH264Enc->inputData(apcYuv, aiYuvLen, uiStamp, &pOut);
+        H264Encoder::H264Frame *out_frames;
+        int frames = _pH264Enc->inputData(yuv, linesize, cts, &out_frames);
         bool ret = false;
-        for (int i = 0; i < iFrames; i++) {
-            ret = inputH264((char *)pOut[i].pucData, pOut[i].iLength, uiStamp) ? true : ret;
+        for (int i = 0; i < frames; i++) {
+            ret = inputH264((char *)out_frames[i].pucData, out_frames[i].iLength, cts) ? true : ret;
         }
         return ret;
     }
src/Common/Device.h
@@ -29,6 +29,7 @@ public:
     int iWidth;
     int iHeight;
     float iFrameRate;
+    int iBitRate = 2 * 1024 * 1024;
 };

 class AudioInfo {
@@ -104,19 +105,19 @@ public:
     /**
      * Input a yuv420p video frame; it is encoded internally and then inputH264 is called
-     * @param apcYuv
-     * @param aiYuvLen
-     * @param uiStamp
+     * @param yuv yuv420p plane pointers
+     * @param linesize linesize of each yuv420p plane
+     * @param cts capture timestamp, in milliseconds
      */
-    bool inputYUV(char *apcYuv[3], int aiYuvLen[3], uint32_t uiStamp);
+    bool inputYUV(char *yuv[3], int linesize[3], uint32_t cts);

     /**
      * Input pcm data; it is encoded internally and then inputAAC is called
-     * @param pcData
-     * @param iDataLen
-     * @param uiStamp
+     * @param data pcm data pointer, int16 samples
+     * @param len pcm data length
+     * @param cts capture timestamp, in milliseconds
      */
-    bool inputPCM(char *pcData, int iDataLen, uint32_t uiStamp);
+    bool inputPCM(char *data, int len, uint32_t cts);

 private:
     MediaOriginType getOriginType(MediaSource &sender) const override;
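A minimal sketch of feeding raw media into DevChannel with the renamed parameters, assuming the channel's video and audio parameters have already been initialized; buffer management is omitted:

    // Sketch only: yuv/linesize and pcm buffers are assumed to come from a capture source.
    static void pushCapturedData(mediakit::DevChannel &channel,
                                 char *yuv[3], int linesize[3],
                                 char *pcm, int pcm_len,
                                 uint32_t cts /* capture timestamp, ms */) {
        channel.inputYUV(yuv, linesize, cts); // encoded to H.264 internally, then inputH264() is called
        channel.inputPCM(pcm, pcm_len, cts);  // encoded to AAC internally, then inputAAC() is called
    }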
src/Common/config.cpp
@@ -42,7 +42,7 @@ bool loadIniConfig(const char *ini_path){
 namespace Broadcast {
 const string kBroadcastMediaChanged = "kBroadcastMediaChanged";
 const string kBroadcastRecordMP4 = "kBroadcastRecordMP4";
-const string kBroadcastRecordTs = "kBroadcastRecoredTs";
+const string kBroadcastRecordTs = "kBroadcastRecordTs";
 const string kBroadcastHttpRequest = "kBroadcastHttpRequest";
 const string kBroadcastHttpAccess = "kBroadcastHttpAccess";
 const string kBroadcastOnGetRtspRealm = "kBroadcastOnGetRtspRealm";
@@ -77,6 +77,8 @@ const string kRtmpDemand = GENERAL_FIELD"rtmp_demand";
 const string kTSDemand = GENERAL_FIELD"ts_demand";
 const string kFMP4Demand = GENERAL_FIELD"fmp4_demand";
 const string kEnableAudio = GENERAL_FIELD"enable_audio";
+const string kCheckNvidiaDev = GENERAL_FIELD"check_nvidia_dev";
+const string kEnableFFmpegLog = GENERAL_FIELD"enable_ffmpeg_log";
 const string kWaitTrackReadyMS = GENERAL_FIELD"wait_track_ready_ms";
 const string kWaitAddTrackMS = GENERAL_FIELD"wait_add_track_ms";
 const string kUnreadyFrameCache = GENERAL_FIELD"unready_frame_cache";
@@ -100,6 +102,8 @@ static onceToken token([](){
     mINI::Instance()[kTSDemand] = 0;
     mINI::Instance()[kFMP4Demand] = 0;
     mINI::Instance()[kEnableAudio] = 1;
+    mINI::Instance()[kCheckNvidiaDev] = 1;
+    mINI::Instance()[kEnableFFmpegLog] = 0;
     mINI::Instance()[kWaitTrackReadyMS] = 10000;
     mINI::Instance()[kWaitAddTrackMS] = 3000;
     mINI::Instance()[kUnreadyFrameCache] = 100;
src/Common/config.h
@@ -183,6 +183,10 @@ extern const std::string kTSDemand;
 extern const std::string kFMP4Demand;
 // whether audio is globally enabled or ignored when converting protocols
 extern const std::string kEnableAudio;
+// in a docker environment, the presence of the NVIDIA driver cannot be used to decide whether hardware transcoding is supported
+extern const std::string kCheckNvidiaDev;
+// whether to enable ffmpeg logging
+extern const std::string kEnableFFmpegLog;
 // wait at most 10 seconds for uninitialized Tracks; after the timeout, uninitialized Tracks are ignored
 extern const std::string kWaitTrackReadyMS;
 // if a live stream has only a single Track, wait at most 3 seconds; if no data from another Track arrives before the timeout, treat it as single-Track
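The two new switches default to check_nvidia_dev = 1 and enable_ffmpeg_log = 0 (see src/Common/config.cpp above). A sketch of reading them at runtime through mINI, as the config code itself does; the General namespace and the as<bool>() accessor are assumptions about the surrounding config/toolkit API:

    #include "Common/config.h"

    // Sketch: query the new general-section switches at runtime.
    static bool shouldCheckNvidiaDev() {
        return toolkit::mINI::Instance()[mediakit::General::kCheckNvidiaDev].as<bool>();
    }

    static bool ffmpegLogEnabled() {
        return toolkit::mINI::Instance()[mediakit::General::kEnableFFmpegLog].as<bool>();
    }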