Commit 360eba2c by zqsong

MP4录制添加H265支持

parent fbf0469e
......@@ -36,6 +36,7 @@ set(ENABLE_MYSQL true)
set(ENABLE_MP4V2 true)
set(ENABLE_FAAC true)
set(ENABLE_X264 true)
set(MP4_H265RECORD true)
#添加两个静态库
if(ENABLE_HLS)
......@@ -47,6 +48,12 @@ else()
set(LINK_LIB_LIST zlmediakit zltoolkit)
endif()
#MP4 H.265 recording support: export the compile-time switch to the C++ code
#and link against the media-server "mov"/"flv" static libraries that are
#built further down in this file.
if(MP4_H265RECORD)
message(STATUS "MP4_H265RECORD defined")
add_definitions(-DMP4_H265RECORD)
#Root of the bundled ireader/media-server sources (MP4/FLV muxers)
set(MediaServer_Root ${CMAKE_SOURCE_DIR}/3rdpart/media-server)
list(APPEND LINK_LIB_LIST mov flv)
endif()
#查找openssl是否安装
find_package(OpenSSL QUIET)
if (OPENSSL_FOUND AND ENABLE_OPENSSL)
......@@ -111,6 +118,21 @@ if(ENABLE_HLS)
endif(WIN32)
endif()
#Build the bundled media-server libmov/libflv sources as the static
#libraries "mov" and "flv" that LINK_LIB_LIST references above.
if(MP4_H265RECORD)
#NOTE(review): aux_source_directory on the include/ directories only adds
#files if compilable sources live there - presumably a no-op; verify against
#the 3rdpart/media-server layout.
aux_source_directory(${MediaServer_Root}/libmov/include src_mov)
aux_source_directory(${MediaServer_Root}/libmov/source src_mov)
include_directories(${MediaServer_Root}/libmov/include)
aux_source_directory(${MediaServer_Root}/libflv/include src_flv)
aux_source_directory(${MediaServer_Root}/libflv/source src_flv)
include_directories(${MediaServer_Root}/libflv/include)
add_library(mov STATIC ${src_mov})
add_library(flv STATIC ${src_flv})
if(WIN32)
#NOTE(review): "VS_FALGS" matches the variable name used elsewhere in this
#file (e.g. for zltoolkit), so the spelling is kept as-is.
set_target_properties(mov flv PROPERTIES COMPILE_FLAGS ${VS_FALGS} )
endif(WIN32)
endif()
if (WIN32)
list(APPEND LINK_LIB_LIST WS2_32 Iphlpapi shlwapi)
set_target_properties(zltoolkit PROPERTIES COMPILE_FLAGS ${VS_FALGS} )
......
......@@ -76,8 +76,14 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
if (strcasecmp(track->_codec.data(), "h265") == 0) {
//a=fmtp:96 sprop-sps=QgEBAWAAAAMAsAAAAwAAAwBdoAKAgC0WNrkky/AIAAADAAgAAAMBlQg=; sprop-pps=RAHA8vA8kAA=
int pt;
int pt, id;
char sprop_vps[128] = {0},sprop_sps[128] = {0},sprop_pps[128] = {0};
if (5 == sscanf(track->_fmtp.data(), "%d profile-id=%d; sprop-sps=%127[^;]; sprop-pps=%127[^;]; sprop-vps=%127[^;]", &pt, &id, sprop_sps,sprop_pps, sprop_vps)) {
auto vps = decodeBase64(sprop_vps);
auto sps = decodeBase64(sprop_sps);
auto pps = decodeBase64(sprop_pps);
return std::make_shared<H265Track>(vps,sps,pps,0,0,0);
}
if (4 == sscanf(track->_fmtp.data(), "%d sprop-vps=%127[^;]; sprop-sps=%127[^;]; sprop-pps=%127[^;]", &pt, sprop_vps,sprop_sps, sprop_pps)) {
auto vps = decodeBase64(sprop_vps);
auto sps = decodeBase64(sprop_sps);
......
......@@ -23,6 +23,13 @@
#define INT_MAX 65535
#endif //INT_MAX
/* FFmpeg-style min/max helpers.
 * NOTE: both arguments are evaluated twice - do not pass expressions with
 * side effects (e.g. FFMIN(i++, n)). */
#ifndef FFMIN
#define FFMIN(a,b) ((a) > (b) ? (b) : (a))
#endif
#ifndef FFMAX
#define FFMAX(a,b) ((a) > (b) ? (a) : (b))
#endif
/* report level */
#define RPT_ERR (1) // error, system error
#define RPT_WRN (2) // warning, maybe wrong, maybe OK
......@@ -122,6 +129,108 @@ const uint8_t g_au8FfZigzagDirect[64] = {
};
/* SubWidthC / SubHeightC indexed by chroma_format_idc
 * (0 = monochrome, 1 = 4:2:0, 2 = 4:2:2, 3 = 4:4:4): the horizontal /
 * vertical chroma-to-luma subsampling factors (H.265 Table 6-1).  Used to
 * scale conformance/display-window offsets to luma samples. */
static const uint8_t hevc_sub_width_c[] = {
    1, 2, 2, 1
};
static const uint8_t hevc_sub_height_c[] = {
    1, 2, 1, 1
};
/* Default 8x8 scaling lists for intra and inter blocks (H.265 7.4.5);
 * also used (upsampled) for the 16x16 and 32x32 sizes.  Installed by
 * setDefaultScalingListData() and optionally overridden by
 * scalingListData(). */
static const uint8_t default_scaling_list_intra[] = {
    16, 16, 16, 16, 17, 18, 21, 24,
    16, 16, 16, 16, 17, 19, 22, 25,
    16, 16, 17, 18, 20, 22, 25, 29,
    16, 16, 18, 21, 24, 27, 31, 36,
    17, 17, 20, 24, 30, 35, 41, 47,
    18, 19, 22, 27, 35, 44, 54, 65,
    21, 22, 25, 31, 41, 54, 70, 88,
    24, 25, 29, 36, 47, 65, 88, 115
};
static const uint8_t default_scaling_list_inter[] = {
    16, 16, 16, 16, 17, 18, 20, 24,
    16, 16, 16, 17, 18, 20, 24, 25,
    16, 16, 17, 18, 20, 24, 25, 28,
    16, 17, 18, 20, 24, 25, 28, 33,
    17, 18, 20, 24, 25, 28, 33, 41,
    18, 20, 24, 25, 28, 33, 41, 54,
    20, 24, 25, 28, 33, 41, 54, 71,
    24, 25, 28, 33, 41, 54, 71, 91
};
/* Up-right diagonal scan order for 4x4 and 8x8 coefficient blocks
 * (H.265 6.5.3): the i-th scanned coefficient sits at column
 * ..._x[i], row ..._y[i].  scalingListData() uses these to place
 * delta-coded scaling-list entries. */
const uint8_t ff_hevc_diag_scan4x4_x[16] = {
    0, 0, 1, 0,
    1, 2, 0, 1,
    2, 3, 1, 2,
    3, 2, 3, 3,
};
const uint8_t ff_hevc_diag_scan4x4_y[16] = {
    0, 1, 0, 2,
    1, 0, 3, 2,
    1, 0, 3, 2,
    1, 3, 2, 3,
};
const uint8_t ff_hevc_diag_scan8x8_x[64] = {
    0, 0, 1, 0,
    1, 2, 0, 1,
    2, 3, 0, 1,
    2, 3, 4, 0,
    1, 2, 3, 4,
    5, 0, 1, 2,
    3, 4, 5, 6,
    0, 1, 2, 3,
    4, 5, 6, 7,
    1, 2, 3, 4,
    5, 6, 7, 2,
    3, 4, 5, 6,
    7, 3, 4, 5,
    6, 7, 4, 5,
    6, 7, 5, 6,
    7, 6, 7, 7,
};
const uint8_t ff_hevc_diag_scan8x8_y[64] = {
    0, 1, 0, 2,
    1, 0, 3, 2,
    1, 0, 4, 3,
    2, 1, 0, 5,
    4, 3, 2, 1,
    0, 6, 5, 4,
    3, 2, 1, 0,
    7, 6, 5, 4,
    3, 2, 1, 0,
    7, 6, 5, 4,
    3, 2, 1, 7,
    6, 5, 4, 3,
    2, 7, 6, 5,
    4, 3, 7, 6,
    5, 4, 7, 6,
    5, 7, 6, 7,
};
/* Sample aspect ratios indexed by aspect_ratio_idc (H.265 Table E-1).
 * Index 0 means unspecified; index 255 (EXTENDED_SAR) is not in this table,
 * its num/den are read explicitly from the bitstream in decodeVui(). */
static const T_AVRational vui_sar[] = {
    {  0,   1 },
    {  1,   1 },
    { 12,  11 },
    { 10,  11 },
    { 16,  11 },
    { 40,  33 },
    { 24,  11 },
    { 20,  11 },
    { 32,  11 },
    { 80,  33 },
    { 18,  11 },
    { 15,  11 },
    { 64,  33 },
    { 160, 99 },
    {  4,   3 },
    {  3,   2 },
    {  2,   1 },
};
static inline int getBitsLeft(void *pvHandle)
{
......@@ -253,6 +362,87 @@ exit:
}
/**
 * Show (peek) 1-25 bits from the bitstream WITHOUT advancing the read
 * position: unlike getBits(), iBitPos/iCurBitPos are left untouched.
 *
 * @param pvHandle  pointer to a T_GetBitContext
 * @param iN        number of bits to peek; clamped to MAX_LEN and to the
 *                  number of bits remaining after iBitPos
 * @return the peeked bits right-aligned in an unsigned int, or 0 on error
 *         (NULL handle, or fewer than iN bits left in the buffer)
 */
static inline unsigned int showBits(void *pvHandle, int iN)
{
    T_GetBitContext *ptPtr = (T_GetBitContext *)pvHandle;
    uint8_t au8Temp[5] = {0};
    uint8_t *pu8CurChar = NULL;
    uint8_t u8Nbyte;
    uint8_t u8Shift;
    uint32_t u32Val = 0;
    uint32_t u32Result = 0;
    int iResoLen = 0;

    if(NULL == ptPtr)
    {
        RPT(RPT_ERR, "NULL pointer");
        goto exit;
    }

    if(iN > MAX_LEN)
    {
        iN = MAX_LEN;
    }

    /* Not enough bits left: signal failure by returning 0. */
    iResoLen = getBitsLeft(ptPtr);
    if(iResoLen < iN)
    {
        goto exit;
    }

    if((ptPtr->iBitPos + iN) > ptPtr->iTotalBit)
    {
        iN = ptPtr->iTotalBit - ptPtr->iBitPos;
    }

    pu8CurChar = ptPtr->pu8Buf + (ptPtr->iBitPos >> 3);
    u8Nbyte = (ptPtr->iCurBitPos + iN + 7) >> 3;       /* bytes covering the field */
    u8Shift = (8 - (ptPtr->iCurBitPos + iN)) & 0x07;   /* right-align shift */

    if(iN == MAX_LEN)
    {
        RPT(RPT_DBG, "12(ptPtr->iBitPos(:%d) + iN(:%d)) > ptPtr->iTotalBit(:%d)!!! ",\
            ptPtr->iBitPos, iN, ptPtr->iTotalBit);
        RPT(RPT_DBG, "0x%x 0x%x 0x%x 0x%x", (*pu8CurChar), *(pu8CurChar+1),*(pu8CurChar+2),*(pu8CurChar+3));
    }

    /* Copy the covered bytes into the tail of a 5-byte scratch buffer and
     * extract the field from its low 32 bits.  Unsigned arithmetic replaces
     * the original signed ">>" (implementation-defined when bit 31 was set)
     * and the two dead assignments that built a value from au8Temp[0] only
     * to overwrite it on the next line. */
    memcpy(&au8Temp[5-u8Nbyte], pu8CurChar, u8Nbyte);
    u32Val = ((uint32_t)au8Temp[1] << 24) | ((uint32_t)au8Temp[2] << 16)
           | ((uint32_t)au8Temp[3] << 8)  |  (uint32_t)au8Temp[4];
    u32Result = (uint32_t)((u32Val >> u8Shift) & (((uint64_t)1 << iN) - 1));

exit:
    return u32Result;
}
/**
 * Show 0-32 bits without advancing the read position.
 *
 * Delegates unconditionally to showBits(), which clamps the request to
 * MAX_LEN internally.  The original version only returned when iN <= 32 and
 * fell off the end of the function otherwise - undefined behavior for a
 * value-returning function; this always returns a defined value.
 *
 * @param pvHandle  pointer to a T_GetBitContext
 * @param iN        number of bits to peek
 * @return the peeked bits right-aligned, or 0 on error (see showBits()).
 */
static inline unsigned int showBitsLong(void *pvHandle, int iN)
{
    T_GetBitContext *ptPtr = (T_GetBitContext *)pvHandle;
    return showBits(ptPtr, iN);
}
/**
* @brief Function parseCodenum() Exp-Golomb entropy decoding; see section 9 of the H.264 standard
......@@ -938,6 +1128,967 @@ exit:
return iRet;
}
/**
 * Parse the common part of a profile_tier_level() structure (general or
 * per-sub-layer PTL, H.265 spec 7.3.3) into *ptl.
 *
 * @param pvBuf  bit reader handle
 * @param ptl    destination PTL-common structure
 * @return 0 on success, -1 if fewer bits remain than the fixed-size header
 *         requires (the caller treats this as a truncated stream).
 */
static int decodeProfileTierLevel(T_GetBitContext *pvBuf, T_PTLCommon *ptl)
{
    int i;

    /* 2 profile_space + 1 tier_flag + 5 profile_idc + 32 compatibility
     * flags + 4 constraint flags + 44 reserved bits. */
    if (getBitsLeft(pvBuf) < 2+1+5 + 32 + 4 + 16 + 16 + 12)
        return -1;

    ptl->profile_space = getBits(pvBuf, 2);
    ptl->tier_flag = getOneBit(pvBuf);
    ptl->profile_idc = getBits(pvBuf, 5);
    if (ptl->profile_idc == T_PROFILE_HEVC_MAIN)
        RPT(RPT_DBG, "Main profile bitstream\n");
    else if (ptl->profile_idc == T_PROFILE_HEVC_MAIN_10)
        RPT(RPT_DBG, "Main 10 profile bitstream\n");
    else if (ptl->profile_idc == T_PROFILE_HEVC_MAIN_STILL_PICTURE)
        RPT(RPT_DBG, "Main Still Picture profile bitstream\n");
    else if (ptl->profile_idc == T_PROFILE_HEVC_REXT)
        RPT(RPT_DBG, "Range Extension profile bitstream\n");
    else
        RPT(RPT_WRN, "Unknown HEVC profile: %d\n", ptl->profile_idc);

    for (i = 0; i < 32; i++) {
        ptl->profile_compatibility_flag[i] = getOneBit(pvBuf);

        /* When profile_idc is 0, infer it from the compatibility flags. */
        if (ptl->profile_idc == 0 && i > 0 && ptl->profile_compatibility_flag[i])
            ptl->profile_idc = i;
    }

    ptl->progressive_source_flag    = getOneBit(pvBuf);
    ptl->interlaced_source_flag     = getOneBit(pvBuf);
    ptl->non_packed_constraint_flag = getOneBit(pvBuf);
    ptl->frame_only_constraint_flag = getOneBit(pvBuf);

    getBits(pvBuf, 16); // XXX_reserved_zero_44bits[0..15]
    getBits(pvBuf, 16); // XXX_reserved_zero_44bits[16..31]
    getBits(pvBuf, 12); // XXX_reserved_zero_44bits[32..43]

    return 0;
}
/**
 * Parse a full profile_tier_level() structure: the general PTL, its
 * level_idc, then the per-sub-layer presence flags and PTLs
 * (H.265 spec 7.3.3).
 *
 * @param pvBuf               bit reader handle
 * @param ptl                 destination PTL
 * @param max_num_sub_layers  sps_max_sub_layers_minus1 + 1
 * @return 0 on success, -1 on truncated input.
 */
static int parsePtl(T_GetBitContext *pvBuf, T_PTL *ptl, int max_num_sub_layers)
{
    int i;
    if (decodeProfileTierLevel(pvBuf, &ptl->general_ptl) < 0 ||
        getBitsLeft(pvBuf) < 8 + (8*2 * (max_num_sub_layers - 1 > 0))) {
        RPT(RPT_ERR, "PTL information too short\n");
        return -1;
    }

    ptl->general_ptl.level_idc = getBits(pvBuf, 8);

    for (i = 0; i < max_num_sub_layers - 1; i++) {
        ptl->sub_layer_profile_present_flag[i] = getOneBit(pvBuf);
        ptl->sub_layer_level_present_flag[i]   = getOneBit(pvBuf);
    }

    /* Alignment bits mandated by the spec when any sub-layers exist. */
    if (max_num_sub_layers - 1> 0)
        for (i = max_num_sub_layers - 1; i < 8; i++)
            getBits(pvBuf, 2); // reserved_zero_2bits[i]

    for (i = 0; i < max_num_sub_layers - 1; i++) {
        if (ptl->sub_layer_profile_present_flag[i] &&
            decodeProfileTierLevel(pvBuf, &ptl->sub_layer_ptl[i]) < 0) {
            RPT(RPT_ERR,
                "PTL information for sublayer %i too short\n", i);
            return -1;
        }
        if (ptl->sub_layer_level_present_flag[i]) {
            if (getBitsLeft(pvBuf) < 8) {
                RPT(RPT_ERR,
                    "Not enough data for sublayer %i level_idc\n", i);
                return -1;
            } else
                ptl->sub_layer_ptl[i].level_idc = getBits(pvBuf, 8);
        }
    }

    return 0;
}
/**
 * Install the spec-defined default scaling lists into *sl
 * (H.265 spec 7.4.5): all-16 flat lists for the 4x4 size and for the
 * 16x16/32x32 DC coefficients, and the default intra/inter 8x8 lists for
 * every larger size.  scalingListData() may later override these.
 */
static void setDefaultScalingListData(T_ScalingList *sl)
{
    int sizeId;
    int matrixId;

    /* 4x4 lists default to all-16; so do the DC values of 16x16/32x32. */
    for (matrixId = 0; matrixId < 6; matrixId++) {
        memset(sl->sl[0][matrixId], 16, 16);
        sl->sl_dc[0][matrixId] = 16;
        sl->sl_dc[1][matrixId] = 16;
    }

    /* 8x8, 16x16 and 32x32 share the same 64-entry defaults:
     * matrices 0-2 are intra, matrices 3-5 are inter. */
    for (sizeId = 1; sizeId < 4; sizeId++) {
        for (matrixId = 0; matrixId < 6; matrixId++) {
            const uint8_t *defaults = (matrixId < 3) ? default_scaling_list_intra
                                                     : default_scaling_list_inter;
            memcpy(sl->sl[sizeId][matrixId], defaults, 64);
        }
    }
}
/**
 * Parse scaling_list_data() (H.265 spec 7.3.4) into *sl, which must already
 * contain the default lists (see setDefaultScalingListData()).
 *
 * @return 0 on success, -1 on an invalid prediction delta.
 */
static int scalingListData(T_GetBitContext *pvBuf, T_ScalingList *sl, T_HEVCSPS *sps)
{
    uint8_t scaling_list_pred_mode_flag;
    int32_t scaling_list_dc_coef[2][6];
    int size_id, matrix_id, pos;
    int i;

    /* size_id: 0=4x4 .. 3=32x32; for 32x32 only matrices 0 and 3 exist,
     * hence the += 3 stride. */
    for (size_id = 0; size_id < 4; size_id++)
        for (matrix_id = 0; matrix_id < 6; matrix_id += ((size_id == 3) ? 3 : 1)) {
            scaling_list_pred_mode_flag = getOneBit(pvBuf);
            if (!scaling_list_pred_mode_flag) {
                /* List predicted from an earlier matrix of the same size. */
                unsigned int delta = parseUe(pvBuf);
                /* Only need to handle non-zero delta. Zero means default,
                 * which should already be in the arrays. */
                if (delta) {
                    // Copy from previous array.
                    delta *= (size_id == 3) ? 3 : 1;
                    if (matrix_id < delta) {
                        RPT(RPT_ERR,
                            "Invalid delta in scaling list data: %d.\n", delta);
                        return -1;
                    }

                    memcpy(sl->sl[size_id][matrix_id],
                           sl->sl[size_id][matrix_id - delta],
                           size_id > 0 ? 64 : 16);
                    if (size_id > 1)
                        sl->sl_dc[size_id - 2][matrix_id] = sl->sl_dc[size_id - 2][matrix_id - delta];
                }
            } else {
                /* Explicitly coded: optional DC coefficient (16x16/32x32),
                 * then delta-coded entries placed in diagonal scan order. */
                int next_coef, coef_num;
                int32_t scaling_list_delta_coef;

                next_coef = 8;
                coef_num = FFMIN(64, 1 << (4 + (size_id << 1)));
                if (size_id > 1) {
                    scaling_list_dc_coef[size_id - 2][matrix_id] = parseSe(pvBuf) + 8;
                    next_coef = scaling_list_dc_coef[size_id - 2][matrix_id];
                    sl->sl_dc[size_id - 2][matrix_id] = next_coef;
                }
                for (i = 0; i < coef_num; i++) {
                    if (size_id == 0)
                        pos = 4 * ff_hevc_diag_scan4x4_y[i] +
                              ff_hevc_diag_scan4x4_x[i];
                    else
                        pos = 8 * ff_hevc_diag_scan8x8_y[i] +
                              ff_hevc_diag_scan8x8_x[i];

                    scaling_list_delta_coef = parseSe(pvBuf);
                    next_coef = (next_coef + 256U + scaling_list_delta_coef) % 256;
                    sl->sl[size_id][matrix_id][pos] = next_coef;
                }
            }
        }

    /* For 4:4:4 chroma, the 32x32 chroma matrices mirror the 16x16 ones. */
    if (sps->chroma_format_idc == 3) {
        for (i = 0; i < 64; i++) {
            sl->sl[3][1][i] = sl->sl[2][1][i];
            sl->sl[3][2][i] = sl->sl[2][2][i];
            sl->sl[3][4][i] = sl->sl[2][4][i];
            sl->sl[3][5][i] = sl->sl[2][5][i];
        }
        sl->sl_dc[1][1] = sl->sl_dc[0][1];
        sl->sl_dc[1][2] = sl->sl_dc[0][2];
        sl->sl_dc[1][4] = sl->sl_dc[0][4];
        sl->sl_dc[1][5] = sl->sl_dc[0][5];
    }

    return 0;
}
/**
 * Parse a st_ref_pic_set() short-term reference picture set
 * (H.265 spec 7.3.7) into *rps.
 *
 * @param pvBuf            bit reader handle
 * @param rps              destination RPS (one of sps->st_rps[], or a
 *                         slice-header-local RPS)
 * @param sps              SPS the RPS belongs to
 * @param is_slice_header  nonzero when called from a slice header, where
 *                         delta_idx_minus1 selects the prediction source
 * @return 0 on success, -1 on invalid syntax values.
 */
int hevcDecodeShortTermRps(T_GetBitContext *pvBuf,
                           T_ShortTermRPS *rps, const T_HEVCSPS *sps, int is_slice_header)
{
    uint8_t rps_predict = 0;
    int delta_poc;
    int k0 = 0;  /* count of negative delta POCs collected */
    int k1 = 0;  /* count of non-negative delta POCs collected */
    int k = 0;
    int i;

    /* inter_ref_pic_set_prediction_flag is only coded for RPS other than
     * the first one of the SPS. */
    if (rps != sps->st_rps && sps->nb_st_rps)
        rps_predict = getOneBit(pvBuf);

    if (rps_predict) {
        /* Predict this RPS from a previously decoded one (rps_ridx). */
        const T_ShortTermRPS *rps_ridx;
        int delta_rps;
        unsigned abs_delta_rps;
        uint8_t use_delta_flag = 0;
        uint8_t delta_rps_sign;

        if (is_slice_header) {
            unsigned int delta_idx = parseUe(pvBuf) + 1;
            if (delta_idx > sps->nb_st_rps) {
                RPT(RPT_ERR,
                    "Invalid value of delta_idx in slice header RPS: %d > %d.\n",
                    delta_idx, sps->nb_st_rps);
                return -1;
            }
            rps_ridx = &sps->st_rps[sps->nb_st_rps - delta_idx];
            rps->rps_idx_num_delta_pocs = rps_ridx->num_delta_pocs;
        } else
            rps_ridx = &sps->st_rps[rps - sps->st_rps - 1];

        delta_rps_sign = getOneBit(pvBuf);
        abs_delta_rps = parseUe(pvBuf) + 1;
        if (abs_delta_rps < 1 || abs_delta_rps > 32768) {
            RPT(RPT_ERR,
                "Invalid value of abs_delta_rps: %d\n",
                abs_delta_rps);
            return -1;
        }
        delta_rps = (1 - (delta_rps_sign << 1)) * abs_delta_rps;
        for (i = 0; i <= rps_ridx->num_delta_pocs; i++) {
            int used = rps->used[k] = getOneBit(pvBuf);

            if (!used)
                use_delta_flag = getOneBit(pvBuf);

            if (used || use_delta_flag) {
                if (i < rps_ridx->num_delta_pocs)
                    delta_poc = delta_rps + rps_ridx->delta_poc[i];
                else
                    delta_poc = delta_rps;
                rps->delta_poc[k] = delta_poc;
                if (delta_poc < 0)
                    k0++;
                else
                    k1++;
                k++;
            }
        }

        /* NOTE(review): this bound is only checked after the loop above has
         * already written rps->used[k]/delta_poc[k] - presumably the arrays
         * are sized to tolerate it; verify T_ShortTermRPS dimensions. */
        if (k >= FF_ARRAY_ELEMS(rps->used)) {
            RPT(RPT_ERR,
                "Invalid num_delta_pocs: %d\n", k);
            return -1;
        }

        rps->num_delta_pocs = k;
        rps->num_negative_pics = k0;
        // sort in increasing order (smallest first)
        if (rps->num_delta_pocs != 0) {
            int used, tmp;
            /* Insertion sort of (delta_poc, used) pairs. */
            for (i = 1; i < rps->num_delta_pocs; i++) {
                delta_poc = rps->delta_poc[i];
                used = rps->used[i];
                for (k = i - 1; k >= 0; k--) {
                    tmp = rps->delta_poc[k];
                    if (delta_poc < tmp) {
                        rps->delta_poc[k + 1] = tmp;
                        rps->used[k + 1] = rps->used[k];
                        rps->delta_poc[k] = delta_poc;
                        rps->used[k] = used;
                    }
                }
            }
        }
        if ((rps->num_negative_pics >> 1) != 0) {
            int used;
            k = rps->num_negative_pics - 1;
            // flip the negative values to largest first
            for (i = 0; i < rps->num_negative_pics >> 1; i++) {
                delta_poc = rps->delta_poc[i];
                used = rps->used[i];
                rps->delta_poc[i] = rps->delta_poc[k];
                rps->used[i] = rps->used[k];
                rps->delta_poc[k] = delta_poc;
                rps->used[k] = used;
                k--;
            }
        }
    } else {
        /* RPS coded explicitly: negative pictures first, then positive,
         * each as a running sum of delta_poc_minus1 + 1 values. */
        unsigned int prev, nb_positive_pics;
        rps->num_negative_pics = parseUe(pvBuf);
        nb_positive_pics = parseUe(pvBuf);

        if (rps->num_negative_pics >= HEVC_MAX_REFS ||
            nb_positive_pics >= HEVC_MAX_REFS) {
            RPT(RPT_ERR, "Too many refs in a short term RPS.\n");
            return -1;
        }

        rps->num_delta_pocs = rps->num_negative_pics + nb_positive_pics;
        if (rps->num_delta_pocs) {
            prev = 0;
            for (i = 0; i < rps->num_negative_pics; i++) {
                delta_poc = parseUe(pvBuf) + 1;
                if (delta_poc < 1 || delta_poc > 32768) {
                    RPT(RPT_ERR,
                        "Invalid value of delta_poc: %d\n",
                        delta_poc);
                    return -1;
                }
                prev -= delta_poc;
                rps->delta_poc[i] = prev;
                rps->used[i] = getOneBit(pvBuf);
            }
            prev = 0;
            for (i = 0; i < nb_positive_pics; i++) {
                delta_poc = parseUe(pvBuf) + 1;
                if (delta_poc < 1 || delta_poc > 32768) {
                    RPT(RPT_ERR,
                        "Invalid value of delta_poc: %d\n",
                        delta_poc);
                    return -1;
                }
                prev += delta_poc;
                rps->delta_poc[rps->num_negative_pics + i] = prev;
                rps->used[rps->num_negative_pics + i] = getOneBit(pvBuf);
            }
        }
    }
    return 0;
}
/**
 * Skip a sub_layer_hrd_parameters() structure (H.265 spec E.2.3).
 * All fields are parsed and discarded; nothing is stored.
 *
 * @param pvBuf                  bit reader handle
 * @param nb_cpb                 cpb_cnt_minus1 + 1, number of CPB entries
 * @param subpic_params_present  sub_pic_hrd_params_present_flag
 */
static void decodeSublayerHrd(T_GetBitContext *pvBuf, unsigned int nb_cpb,
                              int subpic_params_present)
{
    unsigned int i;  /* unsigned: avoids signed/unsigned comparison with nb_cpb */

    for (i = 0; i < nb_cpb; i++) {
        parseUe(pvBuf); // bit_rate_value_minus1
        parseUe(pvBuf); // cpb_size_value_minus1

        if (subpic_params_present) {
            parseUe(pvBuf); // cpb_size_du_value_minus1
            parseUe(pvBuf); // bit_rate_du_value_minus1
        }

        getOneBit(pvBuf); // cbr_flag
    }
}
/**
 * Parse an hrd_parameters() structure (H.265 spec E.2.2).  Values are
 * consumed and validated but not stored.
 *
 * @param pvBuf               bit reader handle
 * @param common_inf_present  commonInfPresentFlag
 * @param max_sublayers       maxNumSubLayersMinus1 + 1
 * @return 0 on success, -1 if cpb_cnt_minus1 is out of range.
 */
static int decodeHrd(T_GetBitContext *pvBuf, int common_inf_present,
                     int max_sublayers)
{
    int nal_params_present = 0, vcl_params_present = 0;
    int subpic_params_present = 0;
    int i;

    if (common_inf_present) {
        nal_params_present = getOneBit(pvBuf);
        vcl_params_present = getOneBit(pvBuf);

        if (nal_params_present || vcl_params_present) {
            subpic_params_present = getOneBit(pvBuf);

            if (subpic_params_present) {
                getBits(pvBuf, 8); // tick_divisor_minus2
                getBits(pvBuf, 5); // du_cpb_removal_delay_increment_length_minus1
                getBits(pvBuf, 1); // sub_pic_cpb_params_in_pic_timing_sei_flag
                getBits(pvBuf, 5); // dpb_output_delay_du_length_minus1
            }

            getBits(pvBuf, 4); // bit_rate_scale
            getBits(pvBuf, 4); // cpb_size_scale

            if (subpic_params_present)
                getBits(pvBuf, 4); // cpb_size_du_scale

            getBits(pvBuf, 5); // initial_cpb_removal_delay_length_minus1
            getBits(pvBuf, 5); // au_cpb_removal_delay_length_minus1
            getBits(pvBuf, 5); // dpb_output_delay_length_minus1
        }
    }

    /* Per-sub-layer HRD parameters. */
    for (i = 0; i < max_sublayers; i++) {
        int low_delay = 0;
        unsigned int nb_cpb = 1;
        int fixed_rate = getOneBit(pvBuf); // fixed_pic_rate_general_flag

        if (!fixed_rate)
            fixed_rate = getOneBit(pvBuf); // fixed_pic_rate_within_cvs_flag

        if (fixed_rate)
            parseUe(pvBuf); // elemental_duration_in_tc_minus1
        else
            low_delay = getOneBit(pvBuf); // low_delay_hrd_flag

        if (!low_delay) {
            nb_cpb = parseUe(pvBuf) + 1; // cpb_cnt_minus1 + 1
            if (nb_cpb < 1 || nb_cpb > 32) {
                RPT(RPT_ERR, "nb_cpb %d invalid\n", nb_cpb);
                return -1;
            }
        }

        if (nal_params_present)
            decodeSublayerHrd(pvBuf, nb_cpb, subpic_params_present);
        if (vcl_params_present)
            decodeSublayerHrd(pvBuf, nb_cpb, subpic_params_present);
    }

    return 0;
}
/**
 * Parse vui_parameters() (H.265 spec E.2.1) into sps->vui.
 *
 * Keeps a backup of both the bit reader and the partially-filled VUI so it
 * can rewind and retry from the timing information (the "alt" path) when a
 * stream uses a known broken/alternate VUI layout.
 */
static void decodeVui(T_GetBitContext *pvBuf, T_HEVCSPS *sps)
{
    T_VUI backup_vui, *vui = &sps->vui;
    T_GetBitContext backup;
    int sar_present, alt = 0;

    RPT(RPT_DBG, "Decoding VUI\n");

    /* Sample aspect ratio: table index, or explicit num/den when idx==255. */
    sar_present = getOneBit(pvBuf);
    if (sar_present) {
        uint8_t sar_idx = getBits(pvBuf, 8);
        if (sar_idx < FF_ARRAY_ELEMS(vui_sar))
            vui->sar = vui_sar[sar_idx];
        else if (sar_idx == 255) {
            vui->sar.num = getBits(pvBuf, 16);
            vui->sar.den = getBits(pvBuf, 16);
        } else
            RPT(RPT_WRN,
                "Unknown SAR index: %u.\n", sar_idx);
    }

    vui->overscan_info_present_flag = getOneBit(pvBuf);
    if (vui->overscan_info_present_flag)
        vui->overscan_appropriate_flag = getOneBit(pvBuf);

    /* Video format and colour description. */
    vui->video_signal_type_present_flag = getOneBit(pvBuf);
    if (vui->video_signal_type_present_flag) {
        vui->video_format                    = getBits(pvBuf, 3);
        vui->video_full_range_flag           = getOneBit(pvBuf);
        vui->colour_description_present_flag = getOneBit(pvBuf);
//        if (vui->video_full_range_flag && sps->pix_fmt == AV_PIX_FMT_YUV420P)
//            sps->pix_fmt = AV_PIX_FMT_YUVJ420P;
        if (vui->colour_description_present_flag) {
            vui->colour_primaries        = getBits(pvBuf, 8);
            vui->transfer_characteristic = getBits(pvBuf, 8);
            vui->matrix_coeffs           = getBits(pvBuf, 8);
#if 0
            // Set invalid values to "unspecified"
            if (!av_color_primaries_name(vui->colour_primaries))
                vui->colour_primaries = AVCOL_PRI_UNSPECIFIED;
            if (!av_color_transfer_name(vui->transfer_characteristic))
                vui->transfer_characteristic = AVCOL_TRC_UNSPECIFIED;
            if (!av_color_space_name(vui->matrix_coeffs))
                vui->matrix_coeffs = AVCOL_SPC_UNSPECIFIED;

            if (vui->matrix_coeffs == AVCOL_SPC_RGB) {
                switch (sps->pix_fmt) {
                case AV_PIX_FMT_YUV444P:
                    sps->pix_fmt = AV_PIX_FMT_GBRP;
                    break;
                case AV_PIX_FMT_YUV444P10:
                    sps->pix_fmt = AV_PIX_FMT_GBRP10;
                    break;
                case AV_PIX_FMT_YUV444P12:
                    sps->pix_fmt = AV_PIX_FMT_GBRP12;
                    break;
                }
            }
#endif
        }
    }

    vui->chroma_loc_info_present_flag = getOneBit(pvBuf);
    if (vui->chroma_loc_info_present_flag) {
        vui->chroma_sample_loc_type_top_field    = parseUe(pvBuf);
        vui->chroma_sample_loc_type_bottom_field = parseUe(pvBuf);
    }

    vui->neutra_chroma_indication_flag = getOneBit(pvBuf);
    vui->field_seq_flag                = getOneBit(pvBuf);
    vui->frame_field_info_present_flag = getOneBit(pvBuf);

    // Backup context in case an alternate header is detected
    memcpy(&backup, pvBuf, sizeof(backup));
    memcpy(&backup_vui, vui, sizeof(backup_vui));

    /* Peeking 0x100000 here flags the known broken layout where timing
     * information sits where def_disp_win is expected. */
    if (getBitsLeft(pvBuf) >= 68 && showBitsLong(pvBuf, 21) == 0x100000) {
        vui->default_display_window_flag = 0;
        RPT(RPT_WRN, "Invalid default display window\n");
    } else
        vui->default_display_window_flag = getOneBit(pvBuf);

    if (vui->default_display_window_flag) {
        /* Offsets are coded in chroma units; scale to luma samples. */
        int vert_mult  = hevc_sub_height_c[sps->chroma_format_idc];
        int horiz_mult = hevc_sub_width_c[sps->chroma_format_idc];
        vui->def_disp_win.left_offset   = parseUe(pvBuf) * horiz_mult;
        vui->def_disp_win.right_offset  = parseUe(pvBuf) * horiz_mult;
        vui->def_disp_win.top_offset    = parseUe(pvBuf) * vert_mult;
        vui->def_disp_win.bottom_offset = parseUe(pvBuf) * vert_mult;
    }

timing_info:
    vui->vui_timing_info_present_flag = getOneBit(pvBuf);

    if (vui->vui_timing_info_present_flag) {
        if( getBitsLeft(pvBuf) < 66 && !alt) {
            // The alternate syntax seem to have timing info located
            // at where def_disp_win is normally located
            RPT(RPT_WRN,
                "Strange VUI timing information, retrying...\n");
            memcpy(vui, &backup_vui, sizeof(backup_vui));
            memcpy(pvBuf, &backup, sizeof(backup));
            alt = 1;
            goto timing_info;
        }
        vui->vui_num_units_in_tick = getBits(pvBuf, 32);
        vui->vui_time_scale        = getBits(pvBuf, 32);
        if (alt) {
            RPT(RPT_INF, "Retry got %u/%u fps\n",
                vui->vui_time_scale, vui->vui_num_units_in_tick);
        }
        vui->vui_poc_proportional_to_timing_flag = getOneBit(pvBuf);
        if (vui->vui_poc_proportional_to_timing_flag)
            vui->vui_num_ticks_poc_diff_one_minus1 = parseUe(pvBuf);
        vui->vui_hrd_parameters_present_flag = getOneBit(pvBuf);
        if (vui->vui_hrd_parameters_present_flag)
            decodeHrd(pvBuf, 1, sps->max_sub_layers);
    }

    vui->bitstream_restriction_flag = getOneBit(pvBuf);
    if (vui->bitstream_restriction_flag) {
        if (getBitsLeft(pvBuf) < 8 && !alt) {
            RPT(RPT_WRN,
                "Strange VUI bitstream restriction information, retrying"
                " from timing information...\n");
            memcpy(vui, &backup_vui, sizeof(backup_vui));
            memcpy(pvBuf, &backup, sizeof(backup));
            alt = 1;
            goto timing_info;
        }
        vui->tiles_fixed_structure_flag              = getOneBit(pvBuf);
        vui->motion_vectors_over_pic_boundaries_flag = getOneBit(pvBuf);
        vui->restricted_ref_pic_lists_flag           = getOneBit(pvBuf);
        vui->min_spatial_segmentation_idc            = parseUe(pvBuf);
        vui->max_bytes_per_pic_denom                 = parseUe(pvBuf);
        vui->max_bits_per_min_cu_denom               = parseUe(pvBuf);
        vui->log2_max_mv_length_horizontal           = parseUe(pvBuf);
        vui->log2_max_mv_length_vertical             = parseUe(pvBuf);
    }

    if (getBitsLeft(pvBuf) < 1 && !alt) {
        // XXX: Alternate syntax when sps_range_extension_flag != 0?
        RPT(RPT_WRN,
            "Overread in VUI, retrying from timing information...\n");
        memcpy(vui, &backup_vui, sizeof(backup_vui));
        memcpy(pvBuf, &backup, sizeof(backup));
        alt = 1;
        goto timing_info;
    }
}
int h265DecSeqParameterSet( void *pvBufSrc, T_HEVCSPS *sps )
{
T_HEVCWindow *ow;
// int ret = 0;
int log2_diff_max_min_transform_block_size;
int bit_depth_chroma, start, vui_present, sublayer_ordering_info;
int i;
int iRet = 0;
void *pvBuf = NULL;
if(NULL == pvBufSrc || NULL == sps)
{
RPT(RPT_ERR,"ERR null pointer\n");
iRet = -1;
goto exit;
}
memset((void *)sps, 0, sizeof(T_HEVCSPS));
pvBuf = deEmulationPrevention(pvBufSrc);
if(NULL == pvBuf)
{
RPT(RPT_ERR,"ERR null pointer\n");
iRet = -1;
goto exit;
}
// Coded parameters
sps->vps_id = getBits(pvBuf, 4);
if (sps->vps_id >= HEVC_MAX_VPS_COUNT) {
RPT(RPT_ERR, "VPS id out of range: %d\n", sps->vps_id);
iRet = -1;
goto exit;
}
#if 0
if (vps_list && !vps_list[sps->vps_id]) {
RPT(RPT_ERR, "VPS %d does not exist\n",
sps->vps_id);
return AVERROR_INVALIDDATA;
}
#endif
sps->max_sub_layers = getBits(pvBuf, 3) + 1;
if (sps->max_sub_layers > HEVC_MAX_SUB_LAYERS) {
RPT(RPT_ERR, "sps_max_sub_layers out of range: %d\n",
sps->max_sub_layers);
iRet = -1;
goto exit;
}
sps->temporal_id_nesting_flag = getBits(pvBuf, 1);
if ((iRet = parsePtl(pvBuf, &sps->ptl, sps->max_sub_layers)) < 0)
goto exit;
int sps_id = parseUe(pvBuf);
if (sps_id >= HEVC_MAX_SPS_COUNT) {
RPT(RPT_ERR, "SPS id out of range: %d\n", sps_id);
iRet = -1;
goto exit;
}
sps->chroma_format_idc = parseUe(pvBuf);
if (sps->chroma_format_idc > 3U) {
RPT(RPT_ERR, "chroma_format_idc %d is invalid\n", sps->chroma_format_idc);
iRet = -1;
goto exit;
}
if (sps->chroma_format_idc == 3)
sps->separate_colour_plane_flag = getOneBit(pvBuf);
if (sps->separate_colour_plane_flag)
sps->chroma_format_idc = 0;
sps->width = parseUe(pvBuf);
sps->height = parseUe(pvBuf);
// if ((iRet = av_image_check_size(sps->width,
// sps->height, 0, avctx)) < 0)
// goto exit;
if (getOneBit(pvBuf)) { // pic_conformance_flag
int vert_mult = hevc_sub_height_c[sps->chroma_format_idc];
int horiz_mult = hevc_sub_width_c[sps->chroma_format_idc];
sps->pic_conf_win.left_offset = parseUe(pvBuf) * horiz_mult;
sps->pic_conf_win.right_offset = parseUe(pvBuf) * horiz_mult;
sps->pic_conf_win.top_offset = parseUe(pvBuf) * vert_mult;
sps->pic_conf_win.bottom_offset = parseUe(pvBuf) * vert_mult;
sps->output_window = sps->pic_conf_win;
}
sps->bit_depth = parseUe(pvBuf) + 8;
bit_depth_chroma = parseUe(pvBuf) + 8;
if (sps->chroma_format_idc && bit_depth_chroma != sps->bit_depth) {
RPT(RPT_ERR,
"Luma bit depth (%d) is different from chroma bit depth (%d), "
"this is unsupported.\n",
sps->bit_depth, bit_depth_chroma);
iRet = -1;
goto exit;
}
sps->bit_depth_chroma = bit_depth_chroma;
#if 0
iRet = map_pixel_format(avctx, sps);
if (iRet < 0)
goto exit;
#endif
#if 0
sps->log2_max_poc_lsb = parseUe(pvBuf) + 4;
if (sps->log2_max_poc_lsb > 16) {
RPT(RPT_ERR, "log2_max_pic_order_cnt_lsb_minus4 out range: %d\n",
sps->log2_max_poc_lsb - 4);
iRet = -1;
goto exit;
}
sublayer_ordering_info = getOneBit(pvBuf);
start = sublayer_ordering_info ? 0 : sps->max_sub_layers - 1;
for (i = start; i < sps->max_sub_layers; i++) {
sps->temporal_layer[i].max_dec_pic_buffering = parseUe(pvBuf) + 1;
sps->temporal_layer[i].num_reorder_pics = parseUe(pvBuf);
sps->temporal_layer[i].max_latency_increase = parseUe(pvBuf) - 1;
if (sps->temporal_layer[i].max_dec_pic_buffering > (unsigned)HEVC_MAX_DPB_SIZE) {
RPT(RPT_ERR, "sps_max_dec_pic_buffering_minus1 out of range: %d\n",
sps->temporal_layer[i].max_dec_pic_buffering - 1U);
iRet = -1;
goto exit;
}
if (sps->temporal_layer[i].num_reorder_pics > sps->temporal_layer[i].max_dec_pic_buffering - 1) {
RPT(RPT_WRN, "sps_max_num_reorder_pics out of range: %d\n",
sps->temporal_layer[i].num_reorder_pics);
if (sps->temporal_layer[i].num_reorder_pics > HEVC_MAX_DPB_SIZE - 1) {
iRet = -1;
goto exit;
}
sps->temporal_layer[i].max_dec_pic_buffering = sps->temporal_layer[i].num_reorder_pics + 1;
}
}
if (!sublayer_ordering_info) {
for (i = 0; i < start; i++) {
sps->temporal_layer[i].max_dec_pic_buffering = sps->temporal_layer[start].max_dec_pic_buffering;
sps->temporal_layer[i].num_reorder_pics = sps->temporal_layer[start].num_reorder_pics;
sps->temporal_layer[i].max_latency_increase = sps->temporal_layer[start].max_latency_increase;
}
}
sps->log2_min_cb_size = parseUe(pvBuf) + 3;
sps->log2_diff_max_min_coding_block_size = parseUe(pvBuf);
sps->log2_min_tb_size = parseUe(pvBuf) + 2;
log2_diff_max_min_transform_block_size = parseUe(pvBuf);
sps->log2_max_trafo_size = log2_diff_max_min_transform_block_size +
sps->log2_min_tb_size;
if (sps->log2_min_cb_size < 3 || sps->log2_min_cb_size > 30) {
RPT(RPT_ERR, "Invalid value %d for log2_min_cb_size", sps->log2_min_cb_size);
iRet = -1;
goto exit;
}
if (sps->log2_diff_max_min_coding_block_size > 30) {
RPT(RPT_ERR, "Invalid value %d for log2_diff_max_min_coding_block_size", sps->log2_diff_max_min_coding_block_size);
iRet = -1;
goto exit;
}
if (sps->log2_min_tb_size >= sps->log2_min_cb_size || sps->log2_min_tb_size < 2) {
RPT(RPT_ERR, "Invalid value for log2_min_tb_size");
iRet = -1;
goto exit;
}
if (log2_diff_max_min_transform_block_size < 0 || log2_diff_max_min_transform_block_size > 30) {
RPT(RPT_ERR, "Invalid value %d for log2_diff_max_min_transform_block_size", log2_diff_max_min_transform_block_size);
iRet = -1;
goto exit;
}
sps->max_transform_hierarchy_depth_inter = parseUe(pvBuf);
sps->max_transform_hierarchy_depth_intra = parseUe(pvBuf);
sps->scaling_list_enable_flag = getOneBit(pvBuf);
printf("sps->scaling_list_enable_flag: %d\n",sps->scaling_list_enable_flag);
if (sps->scaling_list_enable_flag) {
setDefaultScalingListData(&sps->scaling_list);
if (getOneBit(pvBuf)) {
iRet = scalingListData(pvBuf, &sps->scaling_list, sps);
if (iRet < 0)
goto exit;
}
}
sps->amp_enabled_flag = getOneBit(pvBuf);
sps->sao_enabled = getOneBit(pvBuf);
sps->pcm_enabled_flag = getOneBit(pvBuf);
printf("sps->pcm_enabled_flag: %d\n",sps->pcm_enabled_flag);
if (sps->pcm_enabled_flag) {
sps->pcm.bit_depth = getBits(pvBuf, 4) + 1;
sps->pcm.bit_depth_chroma = getBits(pvBuf, 4) + 1;
sps->pcm.log2_min_pcm_cb_size = parseUe(pvBuf) + 3;
sps->pcm.log2_max_pcm_cb_size = sps->pcm.log2_min_pcm_cb_size +
parseUe(pvBuf);
if (FFMAX(sps->pcm.bit_depth, sps->pcm.bit_depth_chroma) > sps->bit_depth) {
RPT(RPT_ERR,
"PCM bit depth (%d, %d) is greater than normal bit depth (%d)\n",
sps->pcm.bit_depth, sps->pcm.bit_depth_chroma, sps->bit_depth);
iRet = -1;
goto exit;
}
sps->pcm.loop_filter_disable_flag = getOneBit(pvBuf);
}
sps->nb_st_rps = parseUe(pvBuf);
if (sps->nb_st_rps > HEVC_MAX_SHORT_TERM_REF_PIC_SETS) {
RPT(RPT_ERR, "Too many short term RPS: %d.\n",
sps->nb_st_rps);
iRet = -1;
goto exit;
}
printf("sps->nb_st_rps: %d\n",sps->nb_st_rps);
for (i = 0; i < sps->nb_st_rps; i++) {
if ((iRet = hevcDecodeShortTermRps(pvBuf, &sps->st_rps[i],
sps, 0)) < 0)
goto exit;
}
sps->long_term_ref_pics_present_flag = getOneBit(pvBuf);
if (sps->long_term_ref_pics_present_flag) {
sps->num_long_term_ref_pics_sps = parseUe(pvBuf);
if (sps->num_long_term_ref_pics_sps > HEVC_MAX_LONG_TERM_REF_PICS) {
RPT(RPT_ERR, "Too many long term ref pics: %d.\n",
sps->num_long_term_ref_pics_sps);
iRet = -1;
goto exit;
}
for (i = 0; i < sps->num_long_term_ref_pics_sps; i++) {
sps->lt_ref_pic_poc_lsb_sps[i] = getBits(pvBuf, sps->log2_max_poc_lsb);
sps->used_by_curr_pic_lt_sps_flag[i] = getOneBit(pvBuf);
}
}
sps->sps_temporal_mvp_enabled_flag = getOneBit(pvBuf);
sps->sps_strong_intra_smoothing_enable_flag = getOneBit(pvBuf);
sps->vui.sar = (T_AVRational){0, 1};
vui_present = getOneBit(pvBuf);
if (vui_present==1)
decodeVui(pvBuf, sps);
#endif
#if 0
if (getOneBit(pvBuf)) { // sps_extension_flag
int sps_range_extension_flag = getOneBit(pvBuf);
getBits(pvBuf, 7); //sps_extension_7bits = getBits(pvBuf, 7);
if (sps_range_extension_flag) {
int extended_precision_processing_flag;
int cabac_bypass_alignment_enabled_flag;
sps->transform_skip_rotation_enabled_flag = getOneBit(pvBuf);
sps->transform_skip_context_enabled_flag = getOneBit(pvBuf);
sps->implicit_rdpcm_enabled_flag = getOneBit(pvBuf);
sps->explicit_rdpcm_enabled_flag = getOneBit(pvBuf);
extended_precision_processing_flag = getOneBit(pvBuf);
if (extended_precision_processing_flag)
RPT(RPT_WRN,
"extended_precision_processing_flag not yet implemented\n");
sps->intra_smoothing_disabled_flag = getOneBit(pvBuf);
sps->high_precision_offsets_enabled_flag = getOneBit(pvBuf);
if (sps->high_precision_offsets_enabled_flag)
RPT(RPT_WRN,
"high_precision_offsets_enabled_flag not yet implemented\n");
sps->persistent_rice_adaptation_enabled_flag = getOneBit(pvBuf);
cabac_bypass_alignment_enabled_flag = getOneBit(pvBuf);
if (cabac_bypass_alignment_enabled_flag)
RPT(RPT_WRN,
"cabac_bypass_alignment_enabled_flag not yet implemented\n");
}
}
if (apply_defdispwin) {
sps->output_window.left_offset += sps->vui.def_disp_win.left_offset;
sps->output_window.right_offset += sps->vui.def_disp_win.right_offset;
sps->output_window.top_offset += sps->vui.def_disp_win.top_offset;
sps->output_window.bottom_offset += sps->vui.def_disp_win.bottom_offset;
}
ow = &sps->output_window;
if (ow->left_offset >= INT_MAX - ow->right_offset ||
ow->top_offset >= INT_MAX - ow->bottom_offset ||
ow->left_offset + ow->right_offset >= sps->width ||
ow->top_offset + ow->bottom_offset >= sps->height) {
RPT(RPT_WRN, "Invalid cropping offsets: %u/%u/%u/%u\n",
ow->left_offset, ow->right_offset, ow->top_offset, ow->bottom_offset);
if (avctx->err_recognition & AV_EF_EXPLODE) {
return AVERROR_INVALIDDATA;
}
RPT(RPT_WRN,
"Displaying the whole video surface.\n");
memset(ow, 0, sizeof(*ow));
memset(&sps->pic_conf_win, 0, sizeof(sps->pic_conf_win));
}
// Inferred parameters
sps->log2_ctb_size = sps->log2_min_cb_size +
sps->log2_diff_max_min_coding_block_size;
sps->log2_min_pu_size = sps->log2_min_cb_size - 1;
if (sps->log2_ctb_size > HEVC_MAX_LOG2_CTB_SIZE) {
RPT(RPT_ERR, "CTB size out of range: 2^%d\n", sps->log2_ctb_size);
iRet = -1;
iRet = -1;
goto exit;
}
if (sps->log2_ctb_size < 4) {
RPT(RPT_ERR,
"log2_ctb_size %d differs from the bounds of any known profile\n",
sps->log2_ctb_size);
avpriv_request_sample(avctx, "log2_ctb_size %d", sps->log2_ctb_size);
iRet = -1;
iRet = -1;
goto exit;
}
sps->ctb_width = (sps->width + (1 << sps->log2_ctb_size) - 1) >> sps->log2_ctb_size;
sps->ctb_height = (sps->height + (1 << sps->log2_ctb_size) - 1) >> sps->log2_ctb_size;
sps->ctb_size = sps->ctb_width * sps->ctb_height;
sps->min_cb_width = sps->width >> sps->log2_min_cb_size;
sps->min_cb_height = sps->height >> sps->log2_min_cb_size;
sps->min_tb_width = sps->width >> sps->log2_min_tb_size;
sps->min_tb_height = sps->height >> sps->log2_min_tb_size;
sps->min_pu_width = sps->width >> sps->log2_min_pu_size;
sps->min_pu_height = sps->height >> sps->log2_min_pu_size;
sps->tb_mask = (1 << (sps->log2_ctb_size - sps->log2_min_tb_size)) - 1;
sps->qp_bd_offset = 6 * (sps->bit_depth - 8);
if (av_mod_uintp2(sps->width, sps->log2_min_cb_size) ||
av_mod_uintp2(sps->height, sps->log2_min_cb_size)) {
RPT(RPT_ERR, "Invalid coded frame dimensions.\n");
iRet = -1;
goto exit;
}
if (sps->max_transform_hierarchy_depth_inter > sps->log2_ctb_size - sps->log2_min_tb_size) {
RPT(RPT_ERR, "max_transform_hierarchy_depth_inter out of range: %d\n",
sps->max_transform_hierarchy_depth_inter);
iRet = -1;
goto exit;
}
if (sps->max_transform_hierarchy_depth_intra > sps->log2_ctb_size - sps->log2_min_tb_size) {
RPT(RPT_ERR, "max_transform_hierarchy_depth_intra out of range: %d\n",
sps->max_transform_hierarchy_depth_intra);
iRet = -1;
goto exit;
}
if (sps->log2_max_trafo_size > FFMIN(sps->log2_ctb_size, 5)) {
RPT(RPT_ERR,
"max transform block size out of range: %d\n",
sps->log2_max_trafo_size);
iRet = -1;
goto exit;
}
if (getBitsLeft(pvBuf) < 0) {
RPT(RPT_ERR,
"Overread SPS by %d bits\n", -getBitsLeft(pvBuf));
iRet = -1;
goto exit;
}
#endif
exit:
getBitContextFree(pvBuf);
return iRet;
}
void h264GetWidthHeight(T_SPS *ptSps, int *piWidth, int *piHeight)
{
// ¿í¸ß¼ÆË㹫ʽ
......@@ -1036,3 +2187,32 @@ void h264GeFramerate(T_SPS *ptSps, float *pfFramerate)
/**
 * Derive the displayable picture size from a parsed HEVC SPS.
 *
 * Output (display) size = coded luma size minus the conformance/cropping
 * window offsets signalled in the SPS (ITU-T H.265 7.4.3.2.1).  The parser
 * validated the offsets against width/height before storing them, so the
 * subtraction cannot go negative here.
 *
 * @param ptSps    parsed SPS (in)
 * @param piWidth  receives the cropped display width (out)
 * @param piHeight receives the cropped display height (out)
 */
void h265GetWidthHeight(T_HEVCSPS *ptSps, int *piWidth, int *piHeight)
{
    int iCodeWidth   = ptSps->width;   // coded luma width (pre-crop)
    int iCodedHeight = ptSps->height;  // coded luma height (pre-crop)

    *piWidth  = ptSps->width  - ptSps->pic_conf_win.left_offset - ptSps->pic_conf_win.right_offset;
    *piHeight = ptSps->height - ptSps->pic_conf_win.top_offset  - ptSps->pic_conf_win.bottom_offset;

    RPT(RPT_DBG, "iCodeWidth:%d, iCodedHeight:%d\n", iCodeWidth, iCodedHeight);
    RPT(RPT_DBG, "*piWidth:%d, *piHeight:%d\n", *piWidth, *piHeight);
}
/* Stub: extracting the frame rate from the HEVC SPS (VUI timing info,
 * vui_time_scale / vui_num_units_in_tick) is not implemented yet.
 * *pfFramerate is left untouched by this call — callers must not rely on it. */
void h265GeFramerate(T_HEVCSPS *ptSps, float *pfFramerate)
{
}
......@@ -7,6 +7,15 @@
#define QP_MAX_NUM (51 + 6*6) // The maximum supported qp
// Upper bound on num_short_term_ref_pic_sets (H.265 7.4.3.2.1: [0, 64]).
#define HEVC_MAX_SHORT_TERM_RPS_COUNT 64

/* general_profile_idc values, ITU-T H.265 Annex A profiles. */
#define T_PROFILE_HEVC_MAIN 1
#define T_PROFILE_HEVC_MAIN_10 2
#define T_PROFILE_HEVC_MAIN_STILL_PICTURE 3
#define T_PROFILE_HEVC_REXT 4
/**
* Chromaticity coordinates of the source primaries.
*/
......@@ -67,6 +76,62 @@ enum T_AVColorSpace {
};
/* Hard bounds on HEVC syntax elements; each entry cites the ITU-T H.265
 * clause that imposes it.  Used when validating parsed VPS/SPS/PPS data. */
enum {
    // 7.4.3.1: vps_max_layers_minus1 is in [0, 62].
    HEVC_MAX_LAYERS = 63,
    // 7.4.3.1: vps_max_sub_layers_minus1 is in [0, 6].
    HEVC_MAX_SUB_LAYERS = 7,
    // 7.4.3.1: vps_num_layer_sets_minus1 is in [0, 1023].
    HEVC_MAX_LAYER_SETS = 1024,

    // 7.4.2.1: vps_video_parameter_set_id is u(4).
    HEVC_MAX_VPS_COUNT = 16,
    // 7.4.3.2.1: sps_seq_parameter_set_id is in [0, 15].
    HEVC_MAX_SPS_COUNT = 16,
    // 7.4.3.3.1: pps_pic_parameter_set_id is in [0, 63].
    HEVC_MAX_PPS_COUNT = 64,

    // A.4.2: MaxDpbSize is bounded above by 16.
    HEVC_MAX_DPB_SIZE = 16,
    // 7.4.3.1: vps_max_dec_pic_buffering_minus1[i] is in [0, MaxDpbSize - 1].
    HEVC_MAX_REFS = HEVC_MAX_DPB_SIZE,

    // 7.4.3.2.1: num_short_term_ref_pic_sets is in [0, 64].
    HEVC_MAX_SHORT_TERM_REF_PIC_SETS = 64,
    // 7.4.3.2.1: num_long_term_ref_pics_sps is in [0, 32].
    HEVC_MAX_LONG_TERM_REF_PICS = 32,

    // A.3: all profiles require that CtbLog2SizeY is in [4, 6].
    HEVC_MIN_LOG2_CTB_SIZE = 4,
    HEVC_MAX_LOG2_CTB_SIZE = 6,

    // E.3.2: cpb_cnt_minus1[i] is in [0, 31].
    HEVC_MAX_CPB_CNT = 32,

    // A.4.1: in table A.6 the highest level allows a MaxLumaPs of 35 651 584.
    HEVC_MAX_LUMA_PS = 35651584,

    // A.4.1: pic_width_in_luma_samples and pic_height_in_luma_samples are
    // constrained to be not greater than sqrt(MaxLumaPs * 8). Hence height/
    // width are bounded above by sqrt(8 * 35651584) = 16888.2 samples.
    HEVC_MAX_WIDTH = 16888,
    HEVC_MAX_HEIGHT = 16888,

    // A.4.1: table A.6 allows at most 22 tile rows for any level.
    HEVC_MAX_TILE_ROWS = 22,
    // A.4.1: table A.6 allows at most 20 tile columns for any level.
    HEVC_MAX_TILE_COLUMNS = 20,

    // 7.4.7.1: in the worst case (tiles_enabled_flag and
    // entropy_coding_sync_enabled_flag are both set), entry points can be
    // placed at the beginning of every Ctb row in every tile, giving an
    // upper bound of (num_tile_columns_minus1 + 1) * PicHeightInCtbsY - 1.
    // Only a stream with very high resolution and perverse parameters could
    // get near that, though, so set a lower limit here with the maximum
    // possible value for 4K video (at most 135 16x16 Ctb rows).
    HEVC_MAX_ENTRY_POINT_OFFSETS = HEVC_MAX_TILE_COLUMNS * 135,
};
/**
* rational number numerator/denominator
*/
......@@ -170,6 +235,189 @@ typedef struct T_PPS {
int iChromaQpDiff;
} T_PPS;
/* A rectangular cropping window, expressed as offsets (in luma samples —
 * TODO confirm chroma scaling is applied by the parser) from each edge of
 * the coded picture.  Used for the SPS conformance window and the VUI
 * default display window. */
typedef struct T_HEVCWindow {
    unsigned int left_offset;
    unsigned int right_offset;
    unsigned int top_offset;
    unsigned int bottom_offset;
} T_HEVCWindow;
/* Video Usability Information, as parsed from the SPS (H.265 Annex E).
 * Field names mirror the spec's syntax element names. */
typedef struct T_VUI {
    T_AVRational sar;  // sample aspect ratio

    int overscan_info_present_flag;
    int overscan_appropriate_flag;

    // Colour description (video_signal_type_present_flag gates the rest).
    int video_signal_type_present_flag;
    int video_format;
    int video_full_range_flag;
    int colour_description_present_flag;
    uint8_t colour_primaries;
    uint8_t transfer_characteristic;
    uint8_t matrix_coeffs;

    int chroma_loc_info_present_flag;
    int chroma_sample_loc_type_top_field;
    int chroma_sample_loc_type_bottom_field;
    int neutra_chroma_indication_flag;

    int field_seq_flag;
    int frame_field_info_present_flag;

    int default_display_window_flag;
    T_HEVCWindow def_disp_win;  // default display window (crop applied for display)

    // Timing info; frame rate is vui_time_scale / vui_num_units_in_tick.
    int vui_timing_info_present_flag;
    uint32_t vui_num_units_in_tick;
    uint32_t vui_time_scale;
    int vui_poc_proportional_to_timing_flag;
    int vui_num_ticks_poc_diff_one_minus1;
    int vui_hrd_parameters_present_flag;

    // Bitstream restriction info.
    int bitstream_restriction_flag;
    int tiles_fixed_structure_flag;
    int motion_vectors_over_pic_boundaries_flag;
    int restricted_ref_pic_lists_flag;
    int min_spatial_segmentation_idc;
    int max_bytes_per_pic_denom;
    int max_bits_per_min_cu_denom;
    int log2_max_mv_length_horizontal;
    int log2_max_mv_length_vertical;
} T_VUI;
/* One profile_tier_level() entry (H.265 7.3.3) — shared layout for the
 * general level and each sub-layer. */
typedef struct T_PTLCommon {
    uint8_t profile_space;
    uint8_t tier_flag;                         // 0 = Main tier, 1 = High tier
    uint8_t profile_idc;                       // see T_PROFILE_HEVC_* above
    uint8_t profile_compatibility_flag[32];    // one flag per profile_idc value
    uint8_t level_idc;                         // 30 * level number
    uint8_t progressive_source_flag;
    uint8_t interlaced_source_flag;
    uint8_t non_packed_constraint_flag;
    uint8_t frame_only_constraint_flag;
} T_PTLCommon;
/* Full profile_tier_level(): the general PTL plus optional per-sub-layer
 * PTLs, with presence flags for each sub-layer. */
typedef struct T_PTL {
    T_PTLCommon general_ptl;
    T_PTLCommon sub_layer_ptl[HEVC_MAX_SUB_LAYERS];

    uint8_t sub_layer_profile_present_flag[HEVC_MAX_SUB_LAYERS];
    uint8_t sub_layer_level_present_flag[HEVC_MAX_SUB_LAYERS];
} T_PTL;
/* Quantization scaling lists (H.265 7.3.4), indexed [sizeId][matrixId]. */
typedef struct T_ScalingList {
    /* This is a little wasteful, since sizeID 0 only needs 8 coeffs,
     * and size ID 3 only has 2 arrays, not 6. */
    uint8_t sl[4][6][64];
    uint8_t sl_dc[2][6];  // DC coefficients, only for sizeIDs 2 and 3
} T_ScalingList;
/* One short-term reference picture set (H.265 7.3.7), stored as resolved
 * POC deltas rather than raw syntax elements. */
typedef struct T_ShortTermRPS {
    unsigned int num_negative_pics;
    int num_delta_pocs;
    int rps_idx_num_delta_pocs;
    int32_t delta_poc[32];   // POC delta of each entry relative to current pic
    uint8_t used[32];        // used_by_curr_pic flag per entry
} T_ShortTermRPS;
/* A fully parsed HEVC Sequence Parameter Set (H.265 7.3.2.2), plus the
 * values this parser derives from it (cropped size, CTB geometry, etc.). */
typedef struct T_HEVCSPS {
    unsigned vps_id;
    int chroma_format_idc;
    uint8_t separate_colour_plane_flag;

    ///< output (i.e. cropped) values
    int output_width, output_height;
    T_HEVCWindow output_window;

    T_HEVCWindow pic_conf_win;  // conformance window straight from the SPS

    int bit_depth;
    int bit_depth_chroma;
    int pixel_shift;
    // enum AVPixelFormat pix_fmt;

    unsigned int log2_max_poc_lsb;
    int pcm_enabled_flag;

    int max_sub_layers;
    // Per-sub-layer DPB requirements (7.4.3.2.1).
    struct {
        int max_dec_pic_buffering;
        int num_reorder_pics;
        int max_latency_increase;
    } temporal_layer[HEVC_MAX_SUB_LAYERS];
    uint8_t temporal_id_nesting_flag;

    T_VUI vui;
    T_PTL ptl;

    uint8_t scaling_list_enable_flag;
    T_ScalingList scaling_list;

    unsigned int nb_st_rps;
    T_ShortTermRPS st_rps[HEVC_MAX_SHORT_TERM_RPS_COUNT];

    uint8_t amp_enabled_flag;
    uint8_t sao_enabled;

    uint8_t long_term_ref_pics_present_flag;
    uint16_t lt_ref_pic_poc_lsb_sps[32];
    uint8_t used_by_curr_pic_lt_sps_flag[32];
    uint8_t num_long_term_ref_pics_sps;

    // PCM (raw sample) coding parameters; valid when pcm_enabled_flag is set.
    struct {
        uint8_t bit_depth;
        uint8_t bit_depth_chroma;
        unsigned int log2_min_pcm_cb_size;
        unsigned int log2_max_pcm_cb_size;
        uint8_t loop_filter_disable_flag;
    } pcm;
    uint8_t sps_temporal_mvp_enabled_flag;
    uint8_t sps_strong_intra_smoothing_enable_flag;

    // Block-size syntax elements (log2 of min/max CB, TB, CTB, PU sizes).
    unsigned int log2_min_cb_size;
    unsigned int log2_diff_max_min_coding_block_size;
    unsigned int log2_min_tb_size;
    unsigned int log2_max_trafo_size;
    unsigned int log2_ctb_size;
    unsigned int log2_min_pu_size;

    int max_transform_hierarchy_depth_inter;
    int max_transform_hierarchy_depth_intra;

    // Range-extension (REXT) tool flags.
    int transform_skip_rotation_enabled_flag;
    int transform_skip_context_enabled_flag;
    int implicit_rdpcm_enabled_flag;
    int explicit_rdpcm_enabled_flag;
    int intra_smoothing_disabled_flag;
    int persistent_rice_adaptation_enabled_flag;

    ///< coded frame dimension in various units
    int width;
    int height;
    int ctb_width;
    int ctb_height;
    int ctb_size;
    int min_cb_width;
    int min_cb_height;
    int min_tb_width;
    int min_tb_height;
    int min_pu_width;
    int min_pu_height;
    int tb_mask;

    int hshift[3];  // per-plane chroma subsampling shifts
    int vshift[3];

    int qp_bd_offset;  // 6 * (bit_depth - 8)

    // Raw SPS bytes as parsed, for later re-emission/inspection.
    uint8_t data[4096];
    int data_size;
}T_HEVCSPS;
typedef struct T_GetBitContext{
uint8_t *pu8Buf; /*Ö¸ÏòSPS start*/
int iBufSize; /*SPS ³¤¶È*/
......@@ -180,8 +428,13 @@ typedef struct T_GetBitContext{
/* Parse an H.264 SPS from pvBuf into ptSps; returns 0 on success, negative on error. */
int h264DecSeqParameterSet(void *pvBuf, T_SPS *ptSps);
/* Parse an H.265/HEVC SPS from pvBufSrc into p_sps; returns 0 on success, negative on error. */
int h265DecSeqParameterSet( void *pvBufSrc, T_HEVCSPS *p_sps );
/* Display width/height derived from a parsed SPS (cropping applied). */
void h264GetWidthHeight(T_SPS *ptSps, int *piWidth, int *piHeight);
void h265GetWidthHeight(T_HEVCSPS *ptSps, int *piWidth, int *piHeight);
/* Frame rate from SPS timing info (H.265 variant is currently a stub). */
void h264GeFramerate(T_SPS *ptSps, float *pfFramerate);
void h265GeFramerate(T_HEVCSPS *ptSps, float *pfFramerate);
#if defined (__cplusplus)
}
......
......@@ -35,9 +35,57 @@
#include "Util/util.h"
#include "Util/NoticeCenter.h"
#include "Extension/H264.h"
#include "Extension/H265.h"
#include "Extension/AAC.h"
#include "Thread/WorkThreadPool.h"
#ifdef MP4_H265RECORD
#include "mov-buffer.h"
#include "mov-format.h"
#if defined(_WIN32) || defined(_WIN64)
#define fseek64 _fseeki64
#define ftell64 _ftelli64
#else
#define fseek64 fseek
#define ftell64 ftell
#endif
/**
 * libmov read callback backed by stdio.
 *
 * @param fp    FILE* passed through mov_writer_create() as opaque context
 * @param data  destination buffer, must hold at least `bytes`
 * @param bytes exact number of bytes required
 * @return 0 when all `bytes` were read; the stream's error indicator on a
 *         read error; -1 on a short read with no error (end of file)
 */
static int mov_file_read(void* fp, void* data, uint64_t bytes)
{
    FILE* file = (FILE*)fp;
    size_t got = fread(data, 1, bytes, file);
    if ((uint64_t)got == bytes)
        return 0;
    int err = ferror(file);
    return 0 != err ? err : -1; /* short read without a stream error => EOF */
}
/**
 * libmov write callback backed by stdio.
 *
 * @param fp    FILE* passed through mov_writer_create() as opaque context
 * @param data  bytes to append at the current file position
 * @param bytes number of bytes to write
 * @return 0 when fully written, otherwise the stream's error indicator
 */
static int mov_file_write(void* fp, const void* data, uint64_t bytes)
{
    FILE* file = (FILE*)fp;
    size_t written = fwrite(data, 1, bytes, file);
    if ((uint64_t)written == bytes)
        return 0;
    return ferror(file);
}
/* libmov seek callback: absolute reposition (SEEK_SET) via the fseek64 alias
 * defined above (_fseeki64 on Windows, plain fseek elsewhere).  Returns 0 on
 * success, non-zero on failure.
 * NOTE(review): on 32-bit non-Windows targets fseek takes a long, so offsets
 * beyond 2 GiB would truncate — consider fseeko; confirm target platforms. */
static int mov_file_seek(void* fp, uint64_t offset)
{
    return fseek64((FILE*)fp, offset, SEEK_SET);
}
/* libmov tell callback: current file position via the ftell64 alias
 * (_ftelli64 on Windows, plain ftell elsewhere).
 * NOTE(review): ftell returns long (and -1 on error), which this cast turns
 * into a huge uint64_t — confirm callers treat that as failure. */
static uint64_t mov_file_tell(void* fp)
{
    return ftell64((FILE*)fp);
}
/* Returns the stdio-backed I/O vtable handed to mov_writer_create().
 * The initializer order must match struct mov_buffer_t: read, write,
 * seek, tell.  The vtable is a function-local static, so one shared
 * instance serves every recorder. */
const struct mov_buffer_t* mov_file_buffer(void)
{
    static struct mov_buffer_t s_io = {
        mov_file_read,
        mov_file_write,
        mov_file_seek,
        mov_file_tell,
    };
    return &s_io;
}
#endif
using namespace toolkit;
namespace mediakit {
......@@ -101,17 +149,46 @@ void Mp4Maker::inputH264(void *pData, uint32_t ui32Length, uint32_t ui32TimeStam
break;
}
}
/**
 * Feed one H.265 NAL unit into the MP4 recorder.
 *
 * The NAL header is read at byte offset 4, i.e. this assumes an Annex-B
 * 4-byte start code (00 00 00 01) prefix — TODO confirm 3-byte prefixes
 * never reach this path.
 *
 * Strategy: non-slice NALs (type > 19: VPS/SPS/PPS/SEI, per H.265 Table 7-1)
 * are accumulated in _strLastVideo; when the next IDR_W_RADL slice (type 19)
 * arrives, the buffered parameter sets are prepended to it so inputH265_l()
 * sees parameter sets + keyframe together.  Other slice types (<= 19) are
 * passed straight through.
 * NOTE(review): IDR_N_LP (20) and CRA (21) slices fall into the "buffer"
 * branch rather than being written — confirm this is intended for the
 * streams being recorded.
 *
 * @param pData        NAL unit including its start code
 * @param ui32Length   byte length of pData
 * @param ui32TimeStamp timestamp in ms (1 kHz reference clock)
 */
void Mp4Maker::inputH265(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp){
#ifdef MP4_H265RECORD
    auto iType = H265_TYPE(((uint8_t*)pData)[4]);
    if (iType <= 19 ){
        if (_strLastVideo.size() && iType == 19){
            // Flush buffered VPS/SPS/PPS together with this IDR frame.
            _strLastVideo.append((char *)pData,ui32Length);
            inputH265_l((char *) _strLastVideo.data(), _strLastVideo.size(), ui32TimeStamp);
            _strLastVideo = "";
            _ui32LastVideoTime = ui32TimeStamp;
        }else
            inputH265_l((char *) pData, ui32Length, ui32TimeStamp);
    }else{
        // Non-slice NAL: hold it until the next IDR arrives.
        _strLastVideo.append((char *)pData,ui32Length);
        _ui32LastVideoTime = ui32TimeStamp;
    }
#endif
}
void Mp4Maker::inputAAC(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp){
if (_strLastAudio.size()) {
int64_t iTimeInc = (int64_t)ui32TimeStamp - (int64_t)_ui32LastAudioTime;
iTimeInc = MAX(0,MIN(iTimeInc,500));
if(iTimeInc == 0 || iTimeInc == 500){
WarnL << "abnormal time stamp increment:" << ui32TimeStamp << " " << _ui32LastAudioTime;
#ifdef MP4_H265RECORD
if (_h265Record){
inputAAC_l((char *) pData, ui32Length, ui32TimeStamp);
}else
#endif
{
if (_strLastAudio.size()) {
int64_t iTimeInc = (int64_t)ui32TimeStamp - (int64_t)_ui32LastAudioTime;
iTimeInc = MAX(0,MIN(iTimeInc,500));
if(iTimeInc == 0 || iTimeInc == 500){
WarnL << "abnormal time stamp increment:" << ui32TimeStamp << " " << _ui32LastAudioTime;
}
inputAAC_l((char *) _strLastAudio.data(), _strLastAudio.size(), iTimeInc);
}
inputAAC_l((char *) _strLastAudio.data(), _strLastAudio.size(), iTimeInc);
_strLastAudio.assign((char *)pData, ui32Length);
_ui32LastAudioTime = ui32TimeStamp;
}
_strLastAudio.assign((char *)pData, ui32Length);
_ui32LastAudioTime = ui32TimeStamp;
}
void Mp4Maker::inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui32Duration) {
......@@ -127,17 +204,79 @@ void Mp4Maker::inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui32Durati
}
}
void Mp4Maker::inputAAC_l(void *pData, uint32_t ui32Length, uint32_t ui32Duration) {
void Mp4Maker::inputH265_l(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp) {
GET_CONFIG(uint32_t,recordSec,Record::kFileSecond);
if (!_haveVideo && (_hMp4 == MP4_INVALID_FILE_HANDLE || _ticker.elapsedTime() > recordSec * 1000)) {
#ifdef MP4_H265RECORD
int32_t compositionTime;
auto iType = H265_TYPE(((uint8_t*)pData)[4]);
if( iType >= H265Frame::NAL_IDR_W_RADL && (_movH265info.pMov == NULL || _ticker.elapsedTime() > recordSec * 1000)){
//在I帧率处新建MP4文件
//如果文件未创建或者文件超过10分钟则创建新文件
_h265Record = 1;
createFile();
}
if (_hAudio != MP4_INVALID_TRACK_ID) {
auto duration = ui32Duration * _audioSampleRate /1000.0;
MP4WriteSample(_hMp4, _hAudio, (uint8_t*)pData, ui32Length,duration,0,false);
char *pNualData = (char *)pData;
if (/*iType <= 31 && */_movH265info.pMov!=NULL){
int vcl;
//media-server新版的api使用h265_annexbtomp4
//int n = h265_annexbtomp4(&_movH265info.hevc, pData, ui32Length, _sBbuffer, sizeof(_sBbuffer), &vcl);
int n = hevc_annexbtomp4(&_movH265info.hevc, pData, ui32Length, _sBbuffer, sizeof(_sBbuffer));
if (_movH265info.videoTrack < 0){
if (_movH265info.hevc.numOfArrays < 1){
return; // waiting for vps/sps/pps
}
uint8_t sExtraData[64 * 1024];
int extraDataSize = mpeg4_hevc_decoder_configuration_record_save(&_movH265info.hevc, sExtraData, sizeof(sExtraData));
if (extraDataSize <= 0){
// invalid HVCC
return;
}
// TODO: waiting for key frame ???
_movH265info.videoTrack = mov_writer_add_video(_movH265info.pMov, MOV_OBJECT_HEVC, _movH265info.width, _movH265info.height, sExtraData, extraDataSize);
if (_movH265info.videoTrack < 0)
return;
}
mov_writer_write(_movH265info.pMov,
_movH265info.videoTrack,
_sBbuffer,
n,
ui32TimeStamp,
ui32TimeStamp,
(iType >= 16 && iType <= 23) ? MOV_AV_FLAG_KEYFREAME : 0 );
// mov_writer_write(_movH265info.pMov, _movH265info.videoTrack, _sBbuffer, n, ui32TimeStamp, ui32TimeStamp, 1 == vcl ? MOV_AV_FLAG_KEYFREAME : 0);
}
#endif
}
/**
 * Write one AAC frame to whichever recorder backend is active.
 *
 * With MP4_H265RECORD and an H.265 recording in progress the frame goes to
 * the media-server mov writer; otherwise it goes to the legacy MP4v2 track.
 * In either mode, if no video has been seen and the file is missing or has
 * exceeded the configured segment length, a new file is started first.
 *
 * @param pData        raw AAC frame
 * @param ui32Length   byte length of pData
 * @param ui32Duration frame duration in ms (1 kHz reference clock)
 *                     NOTE(review): the mov branch passes this as both pts
 *                     and dts to mov_writer_write, while the MP4v2 branch
 *                     treats it as a duration — confirm the timestamps the
 *                     mov branch produces are what's intended.
 */
void Mp4Maker::inputAAC_l(void *pData, uint32_t ui32Length, uint32_t ui32Duration) {
    GET_CONFIG(uint32_t,recordSec,Record::kFileSecond);

#ifdef MP4_H265RECORD
    if ( _h265Record )
    {
        // Roll the file if it doesn't exist yet or the segment is full.
        if (!_haveVideo && (_movH265info.pMov == NULL || _ticker.elapsedTime() > recordSec * 1000)) {
            createFile();
        }
        if (-1 != _movH265info.audioTrack && _movH265info.pMov != NULL){
            mov_writer_write(_movH265info.pMov, _movH265info.audioTrack, (uint8_t*)pData, ui32Length, ui32Duration, ui32Duration, 0);
        }
    }else
#endif
    {
        if (!_haveVideo && (_hMp4 == MP4_INVALID_FILE_HANDLE || _ticker.elapsedTime() > recordSec * 1000)) {
            // Create a new MP4 file at a keyframe boundary:
            // if no file exists yet, or the current one exceeds the segment length.
            createFile();
        }
        if (_hAudio != MP4_INVALID_TRACK_ID) {
            // Convert ms duration to audio-sample-rate ticks for MP4v2.
            auto duration = ui32Duration * _audioSampleRate /1000.0;
            MP4WriteSample(_hMp4, _hAudio, (uint8_t*)pData, ui32Length,duration,0,false);
        }
    }
}
......@@ -169,60 +308,120 @@ void Mp4Maker::createFile() {
#else
File::createfile_path(strFileTmp.data(), 0);
#endif
_hMp4 = MP4Create(strFileTmp.data());
if (_hMp4 == MP4_INVALID_FILE_HANDLE) {
WarnL << "创建MP4文件失败:" << strFileTmp;
return;
#ifdef MP4_H265RECORD
if ( _h265Record ){
memset(&_movH265info, 0, sizeof(_movH265info));
_movH265info.videoTrack = -1;
_movH265info.audioTrack = -1;
_movH265info.width = 0;
_movH265info.height = 0;
_movH265info.ptr = NULL;
_movH265info.pFile = fopen(strFileTmp.data(), "wb+");
_movH265info.pMov = mov_writer_create(mov_file_buffer(), _movH265info.pFile, 0/*MOV_FLAG_FASTSTART*/);
}else
#endif
{
_hMp4 = MP4Create(strFileTmp.data(),MP4_CREATE_64BIT_DATA);
if (_hMp4 == MP4_INVALID_FILE_HANDLE) {
WarnL << "创建MP4文件失败:" << strFileTmp;
return;
}
}
//MP4SetTimeScale(_hMp4, 90000);
_strFileTmp = strFileTmp;
_strFile = strFile;
_ticker.resetTime();
auto videoTrack = dynamic_pointer_cast<H264Track>(getTrack(TrackVideo));
if(videoTrack){
auto &sps = videoTrack->getSps();
auto &pps = videoTrack->getPps();
_hVideo = MP4AddH264VideoTrack(_hMp4,
90000,
MP4_INVALID_DURATION,
videoTrack->getVideoWidth(),
videoTrack->getVideoHeight(),
sps[1],
sps[2],
sps[3],
3);
if(_hVideo != MP4_INVALID_TRACK_ID){
MP4AddH264SequenceParameterSet(_hMp4, _hVideo, (uint8_t *)sps.data(), sps.size());
MP4AddH264PictureParameterSet(_hMp4, _hVideo, (uint8_t *)pps.data(), pps.size());
}else{
WarnL << "添加视频通道失败:" << strFileTmp;
if ( _h265Record ){
auto videoTrack = dynamic_pointer_cast<H265Track>(getTrack(TrackVideo));
#ifdef MP4_H265RECORD
if(videoTrack){
_movH265info.width = videoTrack->getVideoWidth();
_movH265info.height = videoTrack->getVideoHeight();
}
#endif
}else {
auto videoTrack = dynamic_pointer_cast<H264Track>(getTrack(TrackVideo));
if(videoTrack){
auto &sps = videoTrack->getSps();
auto &pps = videoTrack->getPps();
_hVideo = MP4AddH264VideoTrack(_hMp4,
90000,
MP4_INVALID_DURATION,
videoTrack->getVideoWidth(),
videoTrack->getVideoHeight(),
sps[1],
sps[2],
sps[3],
3);
if(_hVideo != MP4_INVALID_TRACK_ID){
MP4AddH264SequenceParameterSet(_hMp4, _hVideo, (uint8_t *)sps.data(), sps.size());
MP4AddH264PictureParameterSet(_hMp4, _hVideo, (uint8_t *)pps.data(), pps.size());
}else{
WarnL << "添加视频通道失败:" << strFileTmp;
}
}
}
auto audioTrack = dynamic_pointer_cast<AACTrack>(getTrack(TrackAudio));
if(audioTrack){
_audioSampleRate = audioTrack->getAudioSampleRate();
_hAudio = MP4AddAudioTrack(_hMp4, _audioSampleRate, MP4_INVALID_DURATION, MP4_MPEG4_AUDIO_TYPE);
if (_hAudio != MP4_INVALID_TRACK_ID) {
auto &cfg = audioTrack->getAacCfg();
MP4SetTrackESConfiguration(_hMp4, _hAudio,(uint8_t *)cfg.data(), cfg.size());
}else{
WarnL << "添加音频通道失败:" << strFileTmp;
_audioChannel = audioTrack->getAudioChannel();
#ifdef MP4_H265RECORD
uint8_t extra_data[64 * 1024];
if ( _h265Record ){
_movH265info.audioTrack = mov_writer_add_audio(_movH265info.pMov, MOV_OBJECT_AAC, _audioChannel, 16, _audioSampleRate, audioTrack->getAacCfg().data(), 2);
if (-1 == _movH265info.audioTrack)
WarnL << "添加音频通道失败:" << strFileTmp;
}else
#endif
{
_hAudio = MP4AddAudioTrack(_hMp4, _audioSampleRate, MP4_INVALID_DURATION, MP4_MPEG4_AUDIO_TYPE);
if (_hAudio != MP4_INVALID_TRACK_ID) {
auto &cfg = audioTrack->getAacCfg();
MP4SetTrackESConfiguration(_hMp4, _hAudio,(uint8_t *)cfg.data(), cfg.size());
}else{
WarnL << "添加音频通道失败:" << strFileTmp;
}
}
}
}
void Mp4Maker::asyncClose() {
auto hMp4 = _hMp4;
// auto hMp4 = (_h265Record==0)?_hMp4:_movH265info.pMov;
auto strFileTmp = _strFileTmp;
auto strFile = _strFile;
auto info = _info;
WorkThreadPool::Instance().getExecutor()->async([hMp4,strFileTmp,strFile,info]() {
int h265Record = _h265Record;
#ifdef MP4_H265RECORD
FILE *pFile = (_h265Record)?_movH265info.pFile:NULL;
void * hMp4 = (_h265Record)?(void*)_movH265info.pMov:(void*)_hMp4;
#else
auto hMp4 = _hMp4;
FILE *pFile = NULL;
#endif
WorkThreadPool::Instance().getExecutor()->async([hMp4,strFileTmp,strFile,info,pFile,h265Record]() {
//获取文件录制时间,放在MP4Close之前是为了忽略MP4Close执行时间
const_cast<Mp4Info&>(info).ui64TimeLen = ::time(NULL) - info.ui64StartedTime;
//MP4Close非常耗时,所以要放在后台线程执行
MP4Close(hMp4,MP4_CLOSE_DO_NOT_COMPUTE_BITRATE);
#ifdef MP4_H265RECORD
if (h265Record){
mov_writer_destroy((mov_writer_t*)hMp4);
fclose(pFile);
}else
#endif
{
MP4Close(hMp4,MP4_CLOSE_DO_NOT_COMPUTE_BITRATE);
}
//临时文件名改成正式文件名,防止mp4未完成时被访问
rename(strFileTmp.data(),strFile.data());
//获取文件大小
......@@ -235,11 +434,20 @@ void Mp4Maker::asyncClose() {
}
void Mp4Maker::closeFile() {
if (_hMp4 != MP4_INVALID_FILE_HANDLE) {
asyncClose();
_hMp4 = MP4_INVALID_FILE_HANDLE;
_hVideo = MP4_INVALID_TRACK_ID;
_hAudio = MP4_INVALID_TRACK_ID;
#ifdef MP4_H265RECORD
if (_h265Record){
if (_movH265info.pMov != NULL) {
asyncClose();
}
}else
#endif
{
if (_hMp4 != MP4_INVALID_FILE_HANDLE) {
asyncClose();
_hMp4 = MP4_INVALID_FILE_HANDLE;
_hVideo = MP4_INVALID_TRACK_ID;
_hAudio = MP4_INVALID_TRACK_ID;
}
}
}
......@@ -253,7 +461,10 @@ void Mp4Maker::onTrackFrame(const Frame::Ptr &frame) {
inputAAC(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize(),frame->stamp());
}
break;
case CodecH265:{
inputH265(frame->data() , frame->size(),frame->stamp());
}
break;
default:
break;
}
......
......@@ -40,6 +40,12 @@
#include "Common/MediaSink.h"
#include "Extension/Track.h"
#ifdef MP4_H265RECORD
#include "mov-writer.h"
#include "mpeg4-hevc.h"
#endif
using namespace toolkit;
namespace mediakit {
......@@ -57,6 +63,22 @@ public:
string strStreamId;//流ID
string strVhost;//vhost
};
/**
 * State for one in-progress H.265 MP4 recording via the media-server
 * libmov writer.  Members carry default initializers so that checks such
 * as `pMov != NULL` (e.g. in Mp4Maker::closeFile) are well defined even
 * before createFile() has ever populated the struct — previously they were
 * read uninitialized on that path.
 */
class MovH265Info {
public:
#ifdef MP4_H265RECORD
    mov_writer_t* pMov = nullptr;     // libmov writer handle; NULL until createFile()
    struct mpeg4_hevc_t hevc{};       // accumulated VPS/SPS/PPS for the hvcC box
    int videoTrack = -1;              // mov video track id; -1 = not yet added
    int audioTrack = -1;              // mov audio track id; -1 = not yet added
    int width = 0;                    // video width taken from the H265 track
    int height = 0;                   // video height taken from the H265 track
    const uint8_t* ptr = nullptr;     // NOTE(review): appears unused in this file — confirm against callers
    FILE * pFile = nullptr;           // backing stdio stream given to mov_file_buffer()
#endif
};
class Mp4Maker : public MediaSink{
public:
typedef std::shared_ptr<Mp4Maker> Ptr;
......@@ -82,14 +104,22 @@ private:
void asyncClose();
//时间戳:参考频率1000
void inputH264(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
void inputH264(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
void inputH265(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
//时间戳:参考频率1000
void inputAAC(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
void inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui64Duration);
void inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui64Duration);
void inputH265_l(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
void inputAAC_l(void *pData, uint32_t ui32Length, uint32_t ui64Duration);
private:
MovH265Info _movH265info;
int _h265Record = 0;
uint8_t _sBbuffer[2 * 1024 * 1024];
MP4FileHandle _hMp4 = MP4_INVALID_FILE_HANDLE;
MP4TrackId _hVideo = MP4_INVALID_TRACK_ID;
MP4TrackId _hAudio = MP4_INVALID_TRACK_ID;
string _strPath;
......@@ -106,6 +136,7 @@ private:
bool _haveVideo = false;
int _audioSampleRate;
int _audioChannel;
};
} /* namespace mediakit */
......
......@@ -180,7 +180,7 @@ void initEventListener() {
NoticeCenter::Instance().addListener(nullptr, Broadcast::kBroadcastMediaChanged, [](BroadcastMediaChangedArgs) {
if (schema == RTMP_SCHEMA && app == "live") {
lock_guard<mutex> lck(s_mtxFlvRecorder);
if (bRegist) {
if (/*bRegist*/0) {
DebugL << "开始录制RTMP:" << schema << " " << vhost << " " << app << " " << stream;
GET_CONFIG(string, http_root, Http::kRootPath);
auto path =
......@@ -239,8 +239,9 @@ int main(int argc,char *argv[]) {
//这里是拉流地址,支持rtmp/rtsp协议,负载必须是H264+AAC
//如果是其他不识别的音视频将会被忽略(譬如说h264+adpcm转发后会去除音频)
auto urlList = {"rtmp://live.hkstv.hk.lxdns.com/live/hks1",
"rtmp://live.hkstv.hk.lxdns.com/live/hks2"
auto urlList = {
// "rtsp://admin:admin123@192.168.5.82/",
"rtsp://192.168.5.24/live/chn0",
//rtsp链接支持输入用户名密码
/*"rtsp://admin:jzan123456@192.168.0.122/"*/};
map<string, PlayerProxy::Ptr> proxyMap;
......@@ -259,7 +260,7 @@ int main(int argc,char *argv[]) {
//rtsp://127.0.0.1/record/live/0/2017-04-11/11-09-38.mp4
//rtmp://127.0.0.1/record/live/0/2017-04-11/11-09-38.mp4
PlayerProxy::Ptr player(new PlayerProxy(DEFAULT_VHOST, "live", to_string(i).data()));
PlayerProxy::Ptr player(new PlayerProxy(DEFAULT_VHOST, "live", to_string(i).data(),true,true,true,true));
//指定RTP over TCP(播放rtsp时有效)
(*player)[kRtpType] = Rtsp::RTP_TCP;
//开始播放,如果播放失败或者播放中止,将会自动重试若干次,重试次数在配置文件中配置,默认一直重试
......@@ -276,7 +277,7 @@ int main(int argc,char *argv[]) {
" http-flv地址 : http://127.0.0.1/live/0.flv\n"
" rtsp地址 : rtsp://127.0.0.1/live/0\n"
" rtmp地址 : rtmp://127.0.0.1/live/0";
#if 1
//加载证书,证书包含公钥和私钥
SSL_Initor::Instance().loadCertificate((exeDir() + "ssl.p12").data());
//信任某个自签名证书
......@@ -293,12 +294,12 @@ int main(int argc,char *argv[]) {
//简单的telnet服务器,可用于服务器调试,但是不能使用23端口,否则telnet上了莫名其妙的现象
//测试方法:telnet 127.0.0.1 9000
TcpServer::Ptr shellSrv(new TcpServer());
// TcpServer::Ptr shellSrv(new TcpServer());
TcpServer::Ptr rtspSrv(new TcpServer());
TcpServer::Ptr rtmpSrv(new TcpServer());
TcpServer::Ptr httpSrv(new TcpServer());
shellSrv->start<ShellSession>(shellPort);
// shellSrv->start<ShellSession>(shellPort);
rtspSrv->start<RtspSession>(rtspPort);//默认554
rtmpSrv->start<RtmpSession>(rtmpPort);//默认1935
//http服务器,支持websocket
......@@ -312,15 +313,17 @@ int main(int argc,char *argv[]) {
//支持ssl加密的rtsp服务器,可用于诸如亚马逊echo show这样的设备访问
TcpServer::Ptr rtspSSLSrv(new TcpServer());
rtspSSLSrv->start<RtspSessionWithSSL>(rtspsPort);//默认322
//服务器支持动态切换端口(不影响现有连接)
NoticeCenter::Instance().addListener(ReloadConfigTag,Broadcast::kBroadcastReloadConfig,[&](BroadcastReloadConfigArgs){
//重新创建服务器
#if 0
if(shellPort != mINI::Instance()[Shell::kPort].as<uint16_t>()){
shellPort = mINI::Instance()[Shell::kPort];
shellSrv->start<ShellSession>(shellPort);
InfoL << "重启shell服务器:" << shellPort;
}
#endif
if(rtspPort != mINI::Instance()[Rtsp::kPort].as<uint16_t>()){
rtspPort = mINI::Instance()[Rtsp::kPort];
rtspSrv->start<RtspSession>(rtspPort);
......@@ -331,6 +334,7 @@ int main(int argc,char *argv[]) {
rtmpSrv->start<RtmpSession>(rtmpPort);
InfoL << "重启rtmp服务器" << rtmpPort;
}
#if 1
if(httpPort != mINI::Instance()[Http::kPort].as<uint16_t>()){
httpPort = mINI::Instance()[Http::kPort];
httpSrv->start<EchoWebSocketSession>(httpPort);
......@@ -341,6 +345,7 @@ int main(int argc,char *argv[]) {
httpsSrv->start<SSLEchoWebSocketSession>(httpsPort);
InfoL << "重启https服务器" << httpsPort;
}
#endif
if(rtspsPort != mINI::Instance()[Rtsp::kSSLPort].as<uint16_t>()){
rtspsPort = mINI::Instance()[Rtsp::kSSLPort];
......@@ -348,7 +353,7 @@ int main(int argc,char *argv[]) {
InfoL << "重启rtsps服务器" << rtspsPort;
}
});
#endif
//设置退出信号处理函数
static semaphore sem;
signal(SIGINT, [](int) { sem.post(); });// 设置退出信号
......
Markdown 格式
0%
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论