diff --git a/3rdpart/ZLToolKit b/3rdpart/ZLToolKit
index 936d3c0..2bb2340 160000
--- a/3rdpart/ZLToolKit
+++ b/3rdpart/ZLToolKit
@@ -1 +1 @@
-Subproject commit 936d3c05b183cba279bb348f8eac9eca0cc810c2
+Subproject commit 2bb234006c852b1d1a61a0e9a7f39dde7105fe34
diff --git a/3rdpart/media-server b/3rdpart/media-server
index e399b93..6df71e0 160000
--- a/3rdpart/media-server
+++ b/3rdpart/media-server
@@ -1 +1 @@
-Subproject commit e399b93802610dcf574ff64bcb7677572cd028c1
+Subproject commit 6df71e01c174cdfe69e597cc4acb766a20b28620
diff --git a/Android/app/src/main/cpp/native-lib.cpp b/Android/app/src/main/cpp/native-lib.cpp
index 79538cf..91faeb2 100644
--- a/Android/app/src/main/cpp/native-lib.cpp
+++ b/Android/app/src/main/cpp/native-lib.cpp
@@ -1,4 +1,4 @@
-#include <jni.h>
+#include <jni.h>
 #include <string>
 #include "test_server.cpp"
 
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 4528654..e7683d8 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -36,6 +36,7 @@ set(ENABLE_MYSQL true)
 set(ENABLE_MP4V2 true)
 set(ENABLE_FAAC true)
 set(ENABLE_X264 true)
+set(ENABLE_MP4RECORD true)
 
 #添加两个静态库
 if(ENABLE_HLS)
@@ -47,6 +48,12 @@ else()
     set(LINK_LIB_LIST zlmediakit zltoolkit)
 endif()
 
+if(ENABLE_MP4RECORD)
+    message(STATUS "ENABLE_MP4RECORD defined")
+    add_definitions(-DENABLE_MP4RECORD)
+    set(MediaServer_Root ${CMAKE_SOURCE_DIR}/3rdpart/media-server)
+    list(APPEND LINK_LIB_LIST mov flv)
+endif()
 #查找openssl是否安装
 find_package(OpenSSL QUIET)
 if (OPENSSL_FOUND AND ENABLE_OPENSSL)
@@ -111,6 +118,21 @@ if(ENABLE_HLS)
 	endif(WIN32)
 endif()
 
+if(ENABLE_MP4RECORD)
+    aux_source_directory(${MediaServer_Root}/libmov/include src_mov)
+    aux_source_directory(${MediaServer_Root}/libmov/source src_mov)
+    include_directories(${MediaServer_Root}/libmov/include)
+    aux_source_directory(${MediaServer_Root}/libflv/include src_flv)
+    aux_source_directory(${MediaServer_Root}/libflv/source src_flv)
+    include_directories(${MediaServer_Root}/libflv/include)
+    add_library(mov STATIC ${src_mov})
+    add_library(flv STATIC ${src_flv})
+    if(WIN32)
+        set_target_properties(mov flv PROPERTIES COMPILE_FLAGS ${VS_FALGS})
+    endif(WIN32)
+endif()
+
+
 if (WIN32)
     list(APPEND LINK_LIB_LIST WS2_32 Iphlpapi shlwapi)
 	set_target_properties(zltoolkit PROPERTIES COMPILE_FLAGS ${VS_FALGS} )
diff --git a/README.md b/README.md
index bfe534f..4e09398 100644
--- a/README.md
+++ b/README.md
@@ -63,7 +63,7 @@
 |         RTMP --> RTSP[S]         |  Y   |  N   |  Y   |   N   |
 |         RTSP[S] --> HLS          |  Y   |  Y   |  Y   |   N   |
 |           RTMP --> HLS           |  Y   |  N   |  Y   |   N   |
-|         RTSP[S] --> MP4          |  Y   |  N   |  Y   |   N   |
+|         RTSP[S] --> MP4          |  Y   |  Y   |  Y   |   N   |
 |           RTMP --> MP4           |  Y   |  N   |  Y   |   N   |
 |         MP4 --> RTSP[S]          |  Y   |  N   |  Y   |   N   |
 |           MP4 --> RTMP           |  Y   |  N   |  Y   |   N   |
@@ -73,9 +73,9 @@
 | feature/codec | H264 | H265 | AAC  | other |
 | :-----------: | :--: | :--: | :--: | :---: |
 | RTSP[S] push  |  Y   |  Y   |  Y   |   Y   |
-|  RTSP proxy   |  Y   |  Y   |  Y   |   N   |
+|  RTSP proxy   |  Y   |  Y   |  Y   |   Y   |
 |   RTMP push   |  Y   |  Y   |  Y   |   Y   |
-|  RTMP proxy   |  Y   |  N   |  Y   |   N   |
+|  RTMP proxy   |  Y   |  Y   |  Y   |   Y   |
 
 - RTP transport:
 
diff --git a/README_CN.md b/README_CN.md
index bc27736..866c5b3 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -78,7 +78,7 @@
     |         RTMP --> RTSP[S]         |  Y   |  N   |  Y   |   N   |
     |         RTSP[S] --> HLS          |  Y   |  Y   |  Y   |   N   |
     |           RTMP --> HLS           |  Y   |  N   |  Y   |   N   |
-    |         RTSP[S] --> MP4          |  Y   |  N   |  Y   |   N   |
+    |         RTSP[S] --> MP4          |  Y   |  Y   |  Y   |   N   |
     |           RTMP --> MP4           |  Y   |  N   |  Y   |   N   |
     |         MP4 --> RTSP[S]          |  Y   |  N   |  Y   |   N   |
     |           MP4 --> RTMP           |  Y   |  N   |  Y   |   N   |
@@ -88,9 +88,9 @@
   |          功能/编码格式             | H264 | H265 | AAC  | other |
   | :------------------------------: | :--: | :--: | :--: | :---: |
   | RTSP[S]推流 |  Y   |  Y  |  Y   |   Y   |
-  |         RTSP拉流代理         |  Y   |  Y  |  Y   |   N   |
+  |         RTSP拉流代理         |  Y   |  Y  |  Y   |   Y   |
   |   RTMP推流    |  Y   |  Y   |  Y   |   Y   |
-  | RTMP拉流代理  |  Y   |  N   |  Y   |   N   |
+  | RTMP拉流代理  |  Y   |  Y   |  Y   |   Y   |
 
 - RTP传输方式:
 
diff --git a/server/FFmpegSource.cpp b/server/FFmpegSource.cpp
index c05c8dc..90805cd 100644
--- a/server/FFmpegSource.cpp
+++ b/server/FFmpegSource.cpp
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/server/FFmpegSource.h b/server/FFmpegSource.h
index 9607bc1..6be92fc 100644
--- a/server/FFmpegSource.h
+++ b/server/FFmpegSource.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/server/Process.cpp b/server/Process.cpp
index bde961b..984657a 100644
--- a/server/Process.cpp
+++ b/server/Process.cpp
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/server/Process.h b/server/Process.h
index c86ef67..1276cf2 100644
--- a/server/Process.h
+++ b/server/Process.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/server/System.cpp b/server/System.cpp
index c74c369..0a0e3dd 100644
--- a/server/System.cpp
+++ b/server/System.cpp
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/server/System.h b/server/System.h
index 84cd46a..c8857a7 100644
--- a/server/System.h
+++ b/server/System.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/server/WebApi.h b/server/WebApi.h
index e758abe..c3ffecf 100644
--- a/server/WebApi.h
+++ b/server/WebApi.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/server/WebHook.cpp b/server/WebHook.cpp
index 5e41319..dffaf73 100644
--- a/server/WebHook.cpp
+++ b/server/WebHook.cpp
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/server/WebHook.h b/server/WebHook.h
index ce3b49b..14af597 100644
--- a/server/WebHook.h
+++ b/server/WebHook.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/server/main.cpp b/server/main.cpp
index e796d06..cb64df2 100644
--- a/server/main.cpp
+++ b/server/main.cpp
@@ -127,6 +127,14 @@ public:
                              "日志等级,LTrace~LError(0~4)",/*该选项说明文字*/
                              nullptr);
 
+        (*_parser) << Option('m',/*该选项简称,如果是\x00则说明无简称*/
+                             "max_day",/*该选项全称,每个选项必须有全称;不得为null或空字符串*/
+                             Option::ArgRequired,/*该选项后面必须跟值*/
+                             "7",/*该选项默认值*/
+                             false,/*该选项是否必须赋值,如果没有默认值且为ArgRequired时用户必须提供该参数否则将抛异常*/
+                             "日志最多保存天数",/*该选项说明文字*/
+                             nullptr);
+
         (*_parser) << Option('c',/*该选项简称,如果是\x00则说明无简称*/
                              "config",/*该选项全称,每个选项必须有全称;不得为null或空字符串*/
                              Option::ArgRequired,/*该选项后面必须跟值*/
@@ -216,11 +224,10 @@ int main(int argc,char *argv[]) {
 
         //设置日志
         Logger::Instance().add(std::make_shared<ConsoleChannel>("ConsoleChannel", logLevel));
-#if defined(__linux__) || defined(__linux)
-        Logger::Instance().add(std::make_shared<SysLogChannel>("SysLogChannel",logLevel));
-#else
-        Logger::Instance().add(std::make_shared<FileChannel>("FileChannel", exePath() + ".log", logLevel));
-#endif
+        auto fileChannel = std::make_shared<FileChannel>("FileChannel", exeDir() + "log/", logLevel);
+        //日志最多保存天数
+        fileChannel->setMaxDay(cmd_main["max_day"]);
+        Logger::Instance().add(fileChannel);
 
 #if !defined(_WIN32)
         if (bDaemon) {
diff --git a/src/Common/MediaSink.cpp b/src/Common/MediaSink.cpp
index a83fc85..4d45f82 100644
--- a/src/Common/MediaSink.cpp
+++ b/src/Common/MediaSink.cpp
@@ -41,7 +41,7 @@ void MediaSink::addTrack(const Track::Ptr &track_in) {
         if(!strongSelf){
             return;
         }
-        if(strongSelf->_allTrackReady){
+        if(!strongSelf->_anyTrackUnReady){
             strongSelf->onTrackFrame(frame);
         }
     }));
@@ -53,6 +53,7 @@ void MediaSink::addTrack(const Track::Ptr &track_in) {
     if(track->ready()){
         lam();
     }else{
+        _anyTrackUnReady = true;
         _allTrackReady = false;
         _trackReadyCallback[codec_id] = lam;
         _ticker.resetTime();
@@ -79,7 +80,7 @@ void MediaSink::inputFrame(const Frame::Ptr &frame) {
 
     if(!_allTrackReady && (_trackReadyCallback.empty() || _ticker.elapsedTime() > MAX_WAIT_MS)){
         _allTrackReady = true;
-
+        _anyTrackUnReady = false;
         if(!_trackReadyCallback.empty()){
             //这是超时强制忽略未准备好的Track
             _trackReadyCallback.clear();
diff --git a/src/Common/MediaSink.h b/src/Common/MediaSink.h
index 7dc9fff..a45b963 100644
--- a/src/Common/MediaSink.h
+++ b/src/Common/MediaSink.h
@@ -99,6 +99,7 @@ private:
     map<int,Track::Ptr> _track_map;
     map<int,function<void()> > _trackReadyCallback;
     bool _allTrackReady = false;
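+    //true while a newly added track has not become ready yet; frames are not forwarded to onTrackFrame until all tracks are ready (or the wait times out)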
+    bool _anyTrackUnReady = false;
     Ticker _ticker;
 };
 
diff --git a/src/Common/Parser.cpp b/src/Common/Parser.cpp
index 58bb8ff..e24fa63 100644
--- a/src/Common/Parser.cpp
+++ b/src/Common/Parser.cpp
@@ -1,4 +1,4 @@
-//
+//
 // Created by xzl on 2019/6/28.
 //
 
diff --git a/src/Common/Parser.h b/src/Common/Parser.h
index bfe2cee..bf9854a 100644
--- a/src/Common/Parser.h
+++ b/src/Common/Parser.h
@@ -1,4 +1,4 @@
-//
+//
 // Created by xzl on 2019/6/28.
 //
 
diff --git a/src/Common/config.cpp b/src/Common/config.cpp
index 42aedb2..1b4bf79 100644
--- a/src/Common/config.cpp
+++ b/src/Common/config.cpp
@@ -260,11 +260,15 @@ const string kFileSecond = RECORD_FIELD"fileSecond";
 #define RECORD_FILE_PATH HTTP_ROOT_PATH
 const string kFilePath = RECORD_FIELD"filePath";
 
+//mp4文件写缓存大小
+const string kFileBufSize = RECORD_FIELD"fileBufSize";
+
 onceToken token([](){
 	mINI::Instance()[kAppName] = RECORD_APP_NAME;
 	mINI::Instance()[kSampleMS] = RECORD_SAMPLE_MS;
 	mINI::Instance()[kFileSecond] = RECORD_FILE_SECOND;
 	mINI::Instance()[kFilePath] = RECORD_FILE_PATH;
+	mINI::Instance()[kFileBufSize] = 64 * 1024;
 },nullptr);
 
 } //namespace Record
diff --git a/src/Common/config.h b/src/Common/config.h
index 8807d17..6817e02 100644
--- a/src/Common/config.h
+++ b/src/Common/config.h
@@ -255,6 +255,8 @@ extern const string kSampleMS;
 extern const string kFileSecond;
 //录制文件路径
 extern const string kFilePath;
+//mp4文件写缓存大小
+extern const string kFileBufSize;
 } //namespace Record
 
 ////////////HLS相关配置///////////
diff --git a/src/Extension/AAC.h b/src/Extension/AAC.h
index e445b7f..2af513f 100644
--- a/src/Extension/AAC.h
+++ b/src/Extension/AAC.h
@@ -79,6 +79,10 @@ public:
     bool keyFrame() const override {
         return false;
     }
+
+    bool configFrame() const override{
+        return false;
+    }
 public:
     unsigned int syncword = 0; //12 bslbf 同步字The bit string ‘1111 1111 1111’,说明一个ADTS帧的开始
     unsigned int id;        //1 bslbf   MPEG 标示符, 设置为1
@@ -127,6 +131,10 @@ public:
     bool keyFrame() const override {
         return false;
     }
+
+    bool configFrame() const override{
+        return false;
+    }
 } ;
 
 
diff --git a/src/Extension/Factory.cpp b/src/Extension/Factory.cpp
index ced72b6..36dbc77 100644
--- a/src/Extension/Factory.cpp
+++ b/src/Extension/Factory.cpp
@@ -36,10 +36,10 @@ namespace mediakit{
 Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
     if (strcasecmp(track->_codec.data(), "mpeg4-generic") == 0) {
         string aac_cfg_str = FindField(track->_fmtp.data(), "config=", nullptr);
-        if (aac_cfg_str.size() != 4) {
+        if (aac_cfg_str.empty()) {
             aac_cfg_str = FindField(track->_fmtp.data(), "config=", ";");
         }
-        if (aac_cfg_str.size() != 4) {
+        if (aac_cfg_str.empty()) {
             //延后获取adts头
             return std::make_shared<AACTrack>();
         }
@@ -76,8 +76,14 @@ Track::Ptr Factory::getTrackBySdp(const SdpTrack::Ptr &track) {
 
     if (strcasecmp(track->_codec.data(), "h265") == 0) {
         //a=fmtp:96 sprop-sps=QgEBAWAAAAMAsAAAAwAAAwBdoAKAgC0WNrkky/AIAAADAAgAAAMBlQg=; sprop-pps=RAHA8vA8kAA=
-        int pt;
+        int pt, id;
         char sprop_vps[128] = {0},sprop_sps[128] = {0},sprop_pps[128] = {0};
+        if (5 == sscanf(track->_fmtp.data(), "%d profile-id=%d; sprop-sps=%127[^;]; sprop-pps=%127[^;]; sprop-vps=%127[^;]", &pt, &id, sprop_sps,sprop_pps, sprop_vps)) {
+            auto vps = decodeBase64(sprop_vps);
+            auto sps = decodeBase64(sprop_sps);
+            auto pps = decodeBase64(sprop_pps);
+            return std::make_shared<H265Track>(vps,sps,pps,0,0,0);
+        }
         if (4 == sscanf(track->_fmtp.data(), "%d sprop-vps=%127[^;]; sprop-sps=%127[^;]; sprop-pps=%127[^;]", &pt, sprop_vps,sprop_sps, sprop_pps)) {
             auto vps = decodeBase64(sprop_vps);
             auto sps = decodeBase64(sprop_sps);
diff --git a/src/Extension/Frame.h b/src/Extension/Frame.h
index 5d49753..79b5d02 100644
--- a/src/Extension/Frame.h
+++ b/src/Extension/Frame.h
@@ -118,6 +118,12 @@ public:
     virtual bool keyFrame() const = 0;
 
     /**
+     * 是否为配置帧,譬如sps pps vps
+     * @return
+     */
+    virtual bool configFrame() const = 0;
+
+    /**
      * 是否可以缓存
      */
     virtual bool cacheAble() const { return true; }
@@ -371,6 +377,7 @@ public:
         _trackType = frame->getTrackType();
         _codec = frame->getCodecId();
         _key = frame->keyFrame();
+        _config = frame->configFrame();
     }
 
     virtual ~FrameCacheAble() = default;
@@ -394,12 +401,17 @@ public:
     bool keyFrame() const override{
         return _key;
     }
+
+    bool configFrame() const override{
+        return _config;
+    }
 private:
     Frame::Ptr _frame;
     BufferRaw::Ptr _buffer;
     TrackType _trackType;
     CodecId _codec;
     bool _key;
+    bool _config;
 };
 
 
diff --git a/src/Extension/H264.cpp b/src/Extension/H264.cpp
index 8333624..2d93db3 100644
--- a/src/Extension/H264.cpp
+++ b/src/Extension/H264.cpp
@@ -32,9 +32,6 @@ using namespace toolkit;
 
 namespace mediakit{
 
-bool getAVCInfo(const string& strSps,int &iVideoWidth, int &iVideoHeight, float  &iVideoFps) {
-    return getAVCInfo(strSps.data(),strSps.size(),iVideoWidth,iVideoHeight,iVideoFps);
-}
 bool getAVCInfo(const char * sps,int sps_len,int &iVideoWidth, int &iVideoHeight, float  &iVideoFps){
     T_GetBitContext tGetBitBuf;
     T_SPS tH264SpsInfo;
@@ -51,6 +48,9 @@ bool getAVCInfo(const char * sps,int sps_len,int &iVideoWidth, int &iVideoHeight
     return true;
 }
 
+bool getAVCInfo(const string& strSps,int &iVideoWidth, int &iVideoHeight, float  &iVideoFps) {
+    return getAVCInfo(strSps.data(),strSps.size(),iVideoWidth,iVideoHeight,iVideoFps);
+}
 
 const char *memfind(const char *buf, int len, const char *subbuf, int sublen) {
     for (auto i = 0; i < len - sublen; ++i) {
diff --git a/src/Extension/H264.h b/src/Extension/H264.h
index 400f547..df14031 100644
--- a/src/Extension/H264.h
+++ b/src/Extension/H264.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
@@ -36,7 +36,6 @@ using namespace toolkit;
 namespace mediakit{
 
 bool getAVCInfo(const string &strSps,int &iVideoWidth, int &iVideoHeight, float  &iVideoFps);
-bool getAVCInfo(const char * sps,int sps_len,int &iVideoWidth, int &iVideoHeight, float  &iVideoFps);
 void splitH264(const char *ptr, int len, const std::function<void(const char *, int)> &cb);
 
 /**
@@ -80,13 +79,21 @@ public:
     }
 
     bool keyFrame() const override {
-        return type == NAL_IDR;
+        return H264_TYPE(buffer[iPrefixSize]) == H264Frame::NAL_IDR;
+    }
+
+    bool configFrame() const override{
+        switch(H264_TYPE(buffer[iPrefixSize]) ){
+            case H264Frame::NAL_SPS:
+            case H264Frame::NAL_PPS:
+                return true;
+            default:
+                return false;
+        }
     }
 public:
-    uint16_t sequence;
     uint32_t timeStamp;
     uint32_t ptsStamp = 0;
-    unsigned char type;
     string buffer;
     uint32_t iPrefixSize = 4;
 };
@@ -120,6 +127,16 @@ public:
     bool keyFrame() const override {
         return H264_TYPE(_ptr[_prefixSize]) == H264Frame::NAL_IDR;
     }
+
+    bool configFrame() const override{
+        switch(H264_TYPE(_ptr[_prefixSize])){
+            case H264Frame::NAL_SPS:
+            case H264Frame::NAL_PPS:
+                return true;
+            default:
+                return false;
+        }
+    }
 };
 
 /**
@@ -326,7 +343,6 @@ private:
 
         if(!_sps.empty()){
             auto spsFrame = std::make_shared<H264Frame>();
-            spsFrame->type = H264Frame::NAL_SPS;
             spsFrame->iPrefixSize = 4;
             spsFrame->buffer.assign("\x0\x0\x0\x1",4);
             spsFrame->buffer.append(_sps);
@@ -336,7 +352,6 @@ private:
 
         if(!_pps.empty()){
             auto ppsFrame = std::make_shared<H264Frame>();
-            ppsFrame->type = H264Frame::NAL_PPS;
             ppsFrame->iPrefixSize = 4;
             ppsFrame->buffer.assign("\x0\x0\x0\x1",4);
             ppsFrame->buffer.append(_pps);
diff --git a/src/Extension/H264Rtmp.cpp b/src/Extension/H264Rtmp.cpp
index 9ff10e2..2f56b9e 100644
--- a/src/Extension/H264Rtmp.cpp
+++ b/src/Extension/H264Rtmp.cpp
@@ -80,7 +80,6 @@ bool H264RtmpDecoder::decodeRtmp(const RtmpPacket::Ptr &pkt) {
 
 inline void H264RtmpDecoder::onGetH264(const char* pcData, int iLen, uint32_t dts,uint32_t pts) {
 #if 1
-    _h264frame->type = H264_TYPE(pcData[0]);
     _h264frame->timeStamp = dts;
     _h264frame->ptsStamp = pts;
     _h264frame->buffer.assign("\x0\x0\x0\x1", 4);  //添加264头
diff --git a/src/Extension/H264Rtp.cpp b/src/Extension/H264Rtp.cpp
index 7dd58c4..51581c6 100644
--- a/src/Extension/H264Rtp.cpp
+++ b/src/Extension/H264Rtp.cpp
@@ -100,12 +100,10 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
         //a full frame
         _h264frame->buffer.assign("\x0\x0\x0\x1", 4);
         _h264frame->buffer.append((char *)frame, length);
-        _h264frame->type = nal.type;
         _h264frame->timeStamp = rtppack->timeStamp;
-        _h264frame->sequence = rtppack->sequence;
-        auto isIDR = _h264frame->type == H264Frame::NAL_IDR;
+        auto key = _h264frame->keyFrame();
         onGetH264(_h264frame);
-        return (isIDR); //i frame
+        return (key); //i frame
     }
 
     switch (nal.type){
@@ -131,9 +129,7 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
                     MakeNalu(ptr[0], nal);
                     _h264frame->buffer.assign("\x0\x0\x0\x1", 4);
                     _h264frame->buffer.append((char *)ptr, len);
-                    _h264frame->type = nal.type;
                     _h264frame->timeStamp = rtppack->timeStamp;
-                    _h264frame->sequence = rtppack->sequence;
                     if(nal.type == H264Frame::NAL_IDR){
                         haveIDR = true;
                     }
@@ -148,35 +144,39 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
             //FU-A
             FU fu;
             MakeFU(frame[1], fu);
-            if (fu.S == 1) {
-                //FU-A start
+            if (fu.S) {
+                //该帧的第一个rtp包
                 char tmp = (nal.forbidden_zero_bit << 7 | nal.nal_ref_idc << 5 | fu.type);
                 _h264frame->buffer.assign("\x0\x0\x0\x1", 4);
                 _h264frame->buffer.push_back(tmp);
                 _h264frame->buffer.append((char *)frame + 2, length - 2);
-                _h264frame->type = fu.type;
                 _h264frame->timeStamp = rtppack->timeStamp;
-                _h264frame->sequence = rtppack->sequence;
-                return (_h264frame->type == H264Frame::NAL_IDR); //i frame
+                //该函数return时,保存下当前sequence,以便下次对比seq是否连续
+                _lastSeq = rtppack->sequence;
+                return _h264frame->keyFrame();
             }
 
-            if (rtppack->sequence != (uint16_t)(_h264frame->sequence + 1)) {
+            if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) {
+                //中间的或末尾的rtp包,其seq必须连续(如果回环了则判定为连续),否则说明rtp丢包,那么该帧不完整,必须得丢弃
                 _h264frame->buffer.clear();
-                WarnL << "丢包,帧废弃:" << rtppack->sequence << "," << _h264frame->sequence;
+                WarnL << "rtp sequence不连续: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
                 return false;
             }
-            _h264frame->sequence = rtppack->sequence;
-            if (fu.E == 1) {
-                //FU-A end
+
+            if (!fu.E) {
+                //该帧的中间rtp包
                 _h264frame->buffer.append((char *)frame + 2, length - 2);
-                _h264frame->timeStamp = rtppack->timeStamp;
-                auto isIDR = _h264frame->type == H264Frame::NAL_IDR;
-                onGetH264(_h264frame);
-                return isIDR;
+                //该函数return时,保存下当前sequence,以便下次对比seq是否连续
+                _lastSeq = rtppack->sequence;
+                return false;
             }
-            //FU-A mid
+
+            //该帧最后一个rtp包
             _h264frame->buffer.append((char *)frame + 2, length - 2);
-            return false;
+            _h264frame->timeStamp = rtppack->timeStamp;
+            auto key = _h264frame->keyFrame();
+            onGetH264(_h264frame);
+            return key;
         }
 
         default:{
@@ -195,10 +195,8 @@ bool H264RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
 
 void H264RtpDecoder::onGetH264(const H264Frame::Ptr &frame) {
     //写入环形缓存
-    auto lastSeq = _h264frame->sequence;
     RtpCodec::inputFrame(frame);
     _h264frame = obtainFrame();
-    _h264frame->sequence = lastSeq;
 }
 
 
diff --git a/src/Extension/H264Rtp.h b/src/Extension/H264Rtp.h
index 4719fe3..09f2346 100644
--- a/src/Extension/H264Rtp.h
+++ b/src/Extension/H264Rtp.h
@@ -64,6 +64,7 @@ private:
     H264Frame::Ptr obtainFrame();
 private:
     H264Frame::Ptr _h264frame;
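+    //sequence number of the last RTP packet received for the frame being reassembled, used to detect packet loss between FU-A fragments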
+    int _lastSeq = 0;
 };
 
 /**
diff --git a/src/Extension/H265.cpp b/src/Extension/H265.cpp
index 6894264..fc7fd8f 100644
--- a/src/Extension/H265.cpp
+++ b/src/Extension/H265.cpp
@@ -25,9 +25,47 @@
  */
 
 #include "H265.h"
+#include "SPSParser.h"
+#include "Util/logger.h"
 
 namespace mediakit{
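+//extract width/height/fps from H265 VPS/SPS NAL units (start code already stripped; the 2-byte NAL header is skipped before parsing)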
 
+bool getHEVCInfo(const char *vps, int vps_len, const char *sps, int sps_len, int &iVideoWidth, int &iVideoHeight, float &iVideoFps) {
+    T_GetBitContext tGetBitBuf;
+    T_HEVCSPS tH265SpsInfo;
+    T_HEVCVPS tH265VpsInfo;
+    memset(&tH265SpsInfo, 0, sizeof(tH265SpsInfo));
+    memset(&tH265VpsInfo, 0, sizeof(tH265VpsInfo));
+
+    if (vps_len > 2) {
+        memset(&tGetBitBuf, 0, sizeof(tGetBitBuf));
+        tGetBitBuf.pu8Buf = (uint8_t *) vps + 2;
+        tGetBitBuf.iBufSize = vps_len - 2;
+        if (0 != h265DecVideoParameterSet((void *) &tGetBitBuf, &tH265VpsInfo)) {
+            return false;
+        }
+    }
+
+    if (sps_len > 2) {
+        memset(&tGetBitBuf, 0, sizeof(tGetBitBuf));
+        tGetBitBuf.pu8Buf = (uint8_t *) sps + 2;
+        tGetBitBuf.iBufSize = sps_len - 2;
+        if (0 != h265DecSeqParameterSet((void *) &tGetBitBuf, &tH265SpsInfo)) {
+            return false;
+        }
+    } else {
+        return false;
+    }
+
+    h265GetWidthHeight(&tH265SpsInfo, &iVideoWidth, &iVideoHeight);
+    iVideoFps = 0;
+    h265GeFramerate(&tH265VpsInfo, &tH265SpsInfo, &iVideoFps);
+    return true;
+}
+
+bool getHEVCInfo(const string &strVps, const string &strSps, int &iVideoWidth, int &iVideoHeight, float &iVideoFps) {
+    return getHEVCInfo(strVps.data(), strVps.size(), strSps.data(), strSps.size(), iVideoWidth, iVideoHeight, iVideoFps);
+}
+
 Sdp::Ptr H265Track::getSdp() {
     if(!ready()){
         WarnL << "H265 Track未准备好";
diff --git a/src/Extension/H265.h b/src/Extension/H265.h
index cd367b6..295a98a 100644
--- a/src/Extension/H265.h
+++ b/src/Extension/H265.h
@@ -36,6 +36,8 @@ using namespace toolkit;
 
 namespace mediakit {
 
+bool getHEVCInfo(const string &strVps, const string &strSps, int &iVideoWidth, int &iVideoHeight, float &iVideoFps);
+
 /**
 * 265帧类
 */
@@ -83,6 +85,10 @@ public:
         return timeStamp;
     }
 
+    uint32_t pts() const override {
+        return ptsStamp ? ptsStamp : timeStamp;
+    }
+
     uint32_t prefixSize() const override {
         return iPrefixSize;
     }
@@ -96,7 +102,18 @@ public:
     }
 
     bool keyFrame() const override {
-        return isKeyFrame(type);
+        return isKeyFrame(H265_TYPE(buffer[iPrefixSize]));
+    }
+
+    bool configFrame() const override{
+        switch(H265_TYPE(buffer[iPrefixSize])){
+            case H265Frame::NAL_VPS:
+            case H265Frame::NAL_SPS:
+            case H265Frame::NAL_PPS:
+                return true;
+            default:
+                return false;
+        }
     }
 
     static bool isKeyFrame(int type) {
@@ -114,9 +131,8 @@ public:
     }
 
 public:
-    uint16_t sequence;
     uint32_t timeStamp;
-    unsigned char type;
+    uint32_t ptsStamp = 0;
     string buffer;
     uint32_t iPrefixSize = 4;
 };
@@ -143,8 +159,18 @@ public:
     }
 
     bool keyFrame() const override {
-        int type = H265_TYPE(((uint8_t *) _ptr)[_prefixSize]);
-        return H265Frame::isKeyFrame(type);
+        return H265Frame::isKeyFrame(H265_TYPE(((uint8_t *) _ptr)[_prefixSize]));
+    }
+
+    bool configFrame() const override{
+        switch(H265_TYPE(((uint8_t *) _ptr)[_prefixSize])){
+            case H265Frame::NAL_VPS:
+            case H265Frame::NAL_SPS:
+            case H265Frame::NAL_PPS:
+                return true;
+            default:
+                return false;
+        }
     }
 };
 
@@ -176,6 +202,7 @@ public:
         _vps = vps.substr(vps_prefix_len);
         _sps = sps.substr(sps_prefix_len);
         _pps = pps.substr(pps_prefix_len);
+        onReady();
     }
 
     /**
@@ -206,6 +233,30 @@ public:
         return CodecH265;
     }
 
+    /**
+     * 返回视频高度
+     * @return
+     */
+    int getVideoHeight() const override{
+        return _height ;
+    }
+
+    /**
+     * 返回视频宽度
+     * @return
+     */
+    int getVideoWidth() const override{
+        return _width;
+    }
+
+    /**
+     * 返回视频fps
+     * @return
+     */
+    float getVideoFps() const override{
+        return _fps;
+    }
+
     bool ready() override {
         return !_vps.empty() && !_sps.empty() && !_pps.empty();
     }
@@ -280,6 +331,12 @@ private:
         }
     }
 
+    /**
+     * 解析sps获取宽高fps
+     */
+    void onReady(){
+        getHEVCInfo(_vps, _sps, _width, _height, _fps);
+    }
     Track::Ptr clone() override {
         return std::make_shared<std::remove_reference<decltype(*this)>::type>(*this);
     }
@@ -294,7 +351,6 @@ private:
         }
         if(!_vps.empty()){
             auto vpsFrame = std::make_shared<H265Frame>();
-            vpsFrame->type = H265Frame::NAL_VPS;
             vpsFrame->iPrefixSize = 4;
             vpsFrame->buffer.assign("\x0\x0\x0\x1", 4);
             vpsFrame->buffer.append(_vps);
@@ -303,7 +359,6 @@ private:
         }
         if (!_sps.empty()) {
             auto spsFrame = std::make_shared<H265Frame>();
-            spsFrame->type = H265Frame::NAL_SPS;
             spsFrame->iPrefixSize = 4;
             spsFrame->buffer.assign("\x0\x0\x0\x1", 4);
             spsFrame->buffer.append(_sps);
@@ -313,7 +368,6 @@ private:
 
         if (!_pps.empty()) {
             auto ppsFrame = std::make_shared<H265Frame>();
-            ppsFrame->type = H265Frame::NAL_PPS;
             ppsFrame->iPrefixSize = 4;
             ppsFrame->buffer.assign("\x0\x0\x0\x1", 4);
             ppsFrame->buffer.append(_pps);
@@ -325,6 +379,9 @@ private:
     string _vps;
     string _sps;
     string _pps;
+    int _width = 0;
+    int _height = 0;
+    float _fps = 0;
     bool _last_frame_is_idr = false;
 };
 
diff --git a/src/Extension/H265Rtp.cpp b/src/Extension/H265Rtp.cpp
index 9d34d92..4fc0004 100644
--- a/src/Extension/H265Rtp.cpp
+++ b/src/Extension/H265Rtp.cpp
@@ -99,56 +99,56 @@ bool H265RtpDecoder::decodeRtp(const RtpPacket::Ptr &rtppack) {
             // fragmentation unit (FU)
             FU fu;
             MakeFU(frame[2], fu);
-            if (fu.S == 1) {
-                //FU-A start
+            if (fu.S) {
+                //该帧的第一个rtp包
                 _h265frame->buffer.assign("\x0\x0\x0\x1", 4);
                 _h265frame->buffer.push_back(fu.type << 1);
                 _h265frame->buffer.push_back(0x01);
                 _h265frame->buffer.append((char *) frame + 3, length - 3);
-                _h265frame->type = fu.type;
                 _h265frame->timeStamp = rtppack->timeStamp;
-                _h265frame->sequence = rtppack->sequence;
+                //该函数return时,保存下当前sequence,以便下次对比seq是否连续
+                _lastSeq = rtppack->sequence;
                 return (_h265frame->keyFrame()); //i frame
             }
 
-            if (rtppack->sequence != (uint16_t) (_h265frame->sequence + 1)) {
+            if (rtppack->sequence != _lastSeq + 1 && rtppack->sequence != 0) {
+                //中间的或末尾的rtp包,其seq必须连续(如果回环了则判定为连续),否则说明rtp丢包,那么该帧不完整,必须得丢弃
                 _h265frame->buffer.clear();
-                WarnL << "丢包,帧废弃:" << rtppack->sequence << "," << _h265frame->sequence;
+                WarnL << "rtp sequence不连续: " << rtppack->sequence << " != " << _lastSeq << " + 1,该帧被废弃";
                 return false;
             }
-            _h265frame->sequence = rtppack->sequence;
-            if (fu.E == 1) {
-                //FU-A end
+
+            if (!fu.E) {
+                //该帧的中间rtp包
                 _h265frame->buffer.append((char *) frame + 3, length - 3);
-                _h265frame->timeStamp = rtppack->timeStamp;
-                auto isIDR = _h265frame->keyFrame();
-                onGetH265(_h265frame);
-                return isIDR;
+                //该函数return时,保存下当前sequence,以便下次对比seq是否连续
+                _lastSeq = rtppack->sequence;
+                return false;
             }
-            //FU-A mid
+
+            //该帧最后一个rtp包
             _h265frame->buffer.append((char *) frame + 3, length - 3);
-            return false;
+            _h265frame->timeStamp = rtppack->timeStamp;
+            auto key = _h265frame->keyFrame();
+            onGetH265(_h265frame);
+            return key;
         }
 
         default: // 4.4.1. Single NAL Unit Packets (p24)
             //a full frame
             _h265frame->buffer.assign("\x0\x0\x0\x1", 4);
             _h265frame->buffer.append((char *)frame, length);
-            _h265frame->type = nal;
             _h265frame->timeStamp = rtppack->timeStamp;
-            _h265frame->sequence = rtppack->sequence;
-            auto isIDR = _h265frame->keyFrame();
+            auto key = _h265frame->keyFrame();
             onGetH265(_h265frame);
-            return (isIDR); //i frame
+            return key;
     }
 }
 
 void H265RtpDecoder::onGetH265(const H265Frame::Ptr &frame) {
     //写入环形缓存
-    auto lastSeq = _h265frame->sequence;
     RtpCodec::inputFrame(frame);
     _h265frame = obtainFrame();
-    _h265frame->sequence = lastSeq;
 }
 
 
diff --git a/src/Extension/H265Rtp.h b/src/Extension/H265Rtp.h
index 9c42937..f243baf 100644
--- a/src/Extension/H265Rtp.h
+++ b/src/Extension/H265Rtp.h
@@ -65,6 +65,7 @@ private:
     H265Frame::Ptr obtainFrame();
 private:
     H265Frame::Ptr _h265frame;
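+    //sequence number of the last RTP packet received for the frame being reassembled, used to detect packet loss between FU fragments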
+    int _lastSeq = 0;
 };
 
 /**
diff --git a/src/Extension/SPSParser.c b/src/Extension/SPSParser.c
index 1389816..dbc90ed 100644
--- a/src/Extension/SPSParser.c
+++ b/src/Extension/SPSParser.c
@@ -23,6 +23,13 @@
 #define INT_MAX						65535
 #endif //INT_MAX
 
+#ifndef FFMIN
+#define FFMIN(a,b) ((a) > (b) ? (b) : (a))
+#endif
+#ifndef FFMAX
+#define FFMAX(a,b) ((a) > (b) ? (a) : (b))
+#endif
+
 /* report level */
 #define RPT_ERR (1) // error, system error
 #define RPT_WRN (2) // warning, maybe wrong, maybe OK
@@ -122,6 +129,108 @@ const uint8_t g_au8FfZigzagDirect[64] = {
 };
 
 
+static const uint8_t sg_au8HevcSubWidthC[] = {
+    1, 2, 2, 1
+};
+
+static const uint8_t sg_au8HevcSubHeightC[] = {
+    1, 2, 1, 1
+};
+
+static const uint8_t sg_au8DefaultScalingListIntra[] = {
+    16, 16, 16, 16, 17, 18, 21, 24,
+    16, 16, 16, 16, 17, 19, 22, 25,
+    16, 16, 17, 18, 20, 22, 25, 29,
+    16, 16, 18, 21, 24, 27, 31, 36,
+    17, 17, 20, 24, 30, 35, 41, 47,
+    18, 19, 22, 27, 35, 44, 54, 65,
+    21, 22, 25, 31, 41, 54, 70, 88,
+    24, 25, 29, 36, 47, 65, 88, 115
+};
+
+static const uint8_t sg_au8DefaultScalingListInter[] = {
+    16, 16, 16, 16, 17, 18, 20, 24,
+    16, 16, 16, 17, 18, 20, 24, 25,
+    16, 16, 17, 18, 20, 24, 25, 28,
+    16, 17, 18, 20, 24, 25, 28, 33,
+    17, 18, 20, 24, 25, 28, 33, 41,
+    18, 20, 24, 25, 28, 33, 41, 54,
+    20, 24, 25, 28, 33, 41, 54, 71,
+    24, 25, 28, 33, 41, 54, 71, 91
+};
+
+
+const uint8_t g_au8HevcDiagScan4x4X[16] = {
+    0, 0, 1, 0,
+    1, 2, 0, 1,
+    2, 3, 1, 2,
+    3, 2, 3, 3,
+};
+
+const uint8_t g_au8HevcDiagScan4x4Y[16] = {
+    0, 1, 0, 2,
+    1, 0, 3, 2,
+    1, 0, 3, 2,
+    1, 3, 2, 3,
+};
+
+const uint8_t g_au8HevcDiagScan8x8X[64] = {
+    0, 0, 1, 0,
+    1, 2, 0, 1,
+    2, 3, 0, 1,
+    2, 3, 4, 0,
+    1, 2, 3, 4,
+    5, 0, 1, 2,
+    3, 4, 5, 6,
+    0, 1, 2, 3,
+    4, 5, 6, 7,
+    1, 2, 3, 4,
+    5, 6, 7, 2,
+    3, 4, 5, 6,
+    7, 3, 4, 5,
+    6, 7, 4, 5,
+    6, 7, 5, 6,
+    7, 6, 7, 7,
+};
+
+const uint8_t g_au8HevcDiagScan8x8Y[64] = {
+    0, 1, 0, 2,
+    1, 0, 3, 2,
+    1, 0, 4, 3,
+    2, 1, 0, 5,
+    4, 3, 2, 1,
+    0, 6, 5, 4,
+    3, 2, 1, 0,
+    7, 6, 5, 4,
+    3, 2, 1, 0,
+    7, 6, 5, 4,
+    3, 2, 1, 7,
+    6, 5, 4, 3,
+    2, 7, 6, 5,
+    4, 3, 7, 6,
+    5, 4, 7, 6,
+    5, 7, 6, 7,
+};
+
+static const T_AVRational sg_atVuiSar[] = {
+    {  0,   1 },
+    {  1,   1 },
+    { 12,  11 },
+    { 10,  11 },
+    { 16,  11 },
+    { 40,  33 },
+    { 24,  11 },
+    { 20,  11 },
+    { 32,  11 },
+    { 80,  33 },
+    { 18,  11 },
+    { 15,  11 },
+    { 64,  33 },
+    { 160, 99 },
+    {  4,   3 },
+    {  3,   2 },
+    {  2,   1 },
+};
 
 static inline int getBitsLeft(void *pvHandle)
 {
@@ -253,6 +362,87 @@ exit:
 }
 
 
+/**
+ * Show 1-25 bits.
+ */
+static inline unsigned int showBits(void *pvHandle, int iN)
+{
+    T_GetBitContext *ptPtr = (T_GetBitContext *)pvHandle;
+    uint8_t au8Temp[5] = {0};
+    uint8_t *pu8CurChar = NULL;
+    uint8_t u8Nbyte;
+    uint8_t u8Shift;
+    uint32_t u32Result = 0;
+    int iRet = 0;
+    int iResoLen = 0;
+
+    if(NULL == ptPtr)
+    {
+        RPT(RPT_ERR, "NULL pointer");
+        iRet = -1;
+        goto exit;
+    }
+
+    if(iN > MAX_LEN)
+    {
+        iN = MAX_LEN;
+    }
+
+    iResoLen = getBitsLeft(ptPtr);
+    if(iResoLen < iN)
+    {
+        iRet = -1;
+        goto exit;
+    }
+
+
+    if((ptPtr->iBitPos + iN) > ptPtr->iTotalBit)
+    {
+        iN = ptPtr->iTotalBit- ptPtr->iBitPos;
+    }
+
+    pu8CurChar = ptPtr->pu8Buf+ (ptPtr->iBitPos>>3);
+    u8Nbyte = (ptPtr->iCurBitPos + iN + 7) >> 3;
+    u8Shift = (8 - (ptPtr->iCurBitPos + iN))& 0x07;
+
+    if(iN == MAX_LEN)
+    {
+        RPT(RPT_DBG, "12(ptPtr->iBitPos(:%d) + iN(:%d)) > ptPtr->iTotalBit(:%d)!!! ",\
+                ptPtr->iBitPos, iN, ptPtr->iTotalBit);
+        RPT(RPT_DBG, "0x%x 0x%x 0x%x 0x%x", (*pu8CurChar), *(pu8CurChar+1),*(pu8CurChar+2),*(pu8CurChar+3));
+    }
+
+    memcpy(&au8Temp[5-u8Nbyte], pu8CurChar, u8Nbyte);
+    iRet = (uint32_t)au8Temp[0] << 24;
+    iRet = iRet << 8;
+    iRet = ((uint32_t)au8Temp[1]<<24)|((uint32_t)au8Temp[2] << 16)\
+                        |((uint32_t)au8Temp[3] << 8)|au8Temp[4];
+
+    iRet = (iRet >> u8Shift) & (((uint64_t)1<<iN) - 1);
+
+    u32Result = iRet;
+//    ptPtr->iBitPos += iN;
+//    ptPtr->iCurBitPos = ptPtr->iBitPos & 0x7;
+
+exit:
+    return u32Result;
+}
+
+
+
+/**
+ * Show 0-32 bits.
+ */
+static inline unsigned int showBitsLong(void *pvHandle, int iN)
+{
+    T_GetBitContext *ptPtr = (T_GetBitContext *)pvHandle;
+
+    if (iN <= 32) {
+        return showBits(ptPtr, iN);
+    }
+    return 0;
+}
+
+
 
 /**
  *  @brief Function parseCodenum() Ö¸Êý¸çÂײ¼±àÂë½âÎö£¬²Î¿¼h264±ê×¼µÚ9½Ú
@@ -543,17 +733,17 @@ static inline int decodeVuiParameters(void *pvBuf, T_SPS *ptSps)
         ptSps->tSar.den = 0;
     }
 
-    if (getOneBit(pvBuf))      /* overscan_info_present_flag */
-        getOneBit(pvBuf);      /* overscan_appropriate_flag */
+    if (getOneBit(pvBuf))      /* iOverscanInfoPresentFlag */
+        getOneBit(pvBuf);      /* iOverscanAppropriateFlag */
 
     ptSps->iVideoSignalTypePresentFlag = getOneBit(pvBuf);
     if (ptSps->iVideoSignalTypePresentFlag) {
         getBits(pvBuf, 3);                 /* video_format */
-        ptSps->iFullRange = getOneBit(pvBuf); /* video_full_range_flag */
+        ptSps->iFullRange = getOneBit(pvBuf); /* iVideoFullRangeFlag */
 
         ptSps->iColourDescriptionPresentFlag = getOneBit(pvBuf);
         if (ptSps->iColourDescriptionPresentFlag) {
-            ptSps->tColorPrimaries = getBits(pvBuf, 8); /* colour_primaries */
+            ptSps->tColorPrimaries = getBits(pvBuf, 8); /* u8ColourPrimaries */
             ptSps->tColorTrc       = getBits(pvBuf, 8); /* transfer_characteristics */
             ptSps->tColorspace      = getBits(pvBuf, 8); /* matrix_coefficients */
             if (ptSps->tColorPrimaries >= AVCOL_PRI_NB)
@@ -610,7 +800,7 @@ static inline int decodeVuiParameters(void *pvBuf, T_SPS *ptSps)
 		return 0;
     ptSps->iBitstreamRestrictionFlag = getOneBit(pvBuf);
     if (ptSps->iBitstreamRestrictionFlag) {
-        getOneBit(pvBuf);     /* motion_vectors_over_pic_boundaries_flag */
+        getOneBit(pvBuf);     	 /* motion_vectors_over_pic_boundaries_flag */
 		parseUe(pvBuf);
         //get_ue_golomb(&h->gb); /* max_bytes_per_pic_denom */
         parseUe(pvBuf);
@@ -845,8 +1035,8 @@ int h264DecSeqParameterSet(void *pvBufSrc, T_SPS *ptSps)
         unsigned int uiCropRight  = parseUe(pvBuf);
         unsigned int uiCropTop    = parseUe(pvBuf);
         unsigned int uiCropBottom = parseUe(pvBuf);
-        int width  = 16 * ptSps->iMbWidth;
-        int height = 16 * ptSps->iMbHeight * (2 - ptSps->iFrameMbsOnlyFlag);
+        int iWidth  = 16 * ptSps->iMbWidth;
+        int iHeight = 16 * ptSps->iMbHeight * (2 - ptSps->iFrameMbsOnlyFlag);
 
 		if(1)
 		{
@@ -865,11 +1055,11 @@ int h264DecSeqParameterSet(void *pvBufSrc, T_SPS *ptSps)
                 uiCropRight > (unsigned)INT_MAX / 4 / step_x ||
                 uiCropTop   > (unsigned)INT_MAX / 4 / step_y ||
                 uiCropBottom> (unsigned)INT_MAX / 4 / step_y ||
-                (uiCropLeft + uiCropRight ) * step_x >= width ||
-                (uiCropTop  + uiCropBottom) * step_y >= height
+                (uiCropLeft + uiCropRight ) * step_x >= iWidth ||
+                (uiCropTop  + uiCropBottom) * step_y >= iHeight
             )
             {
-				RPT(RPT_ERR, "crop values invalid %d %d %d %d / %d %d\n", uiCropLeft, uiCropRight, uiCropTop, uiCropBottom, width, height);
+				RPT(RPT_ERR, "crop values invalid %d %d %d %d / %d %d\n", uiCropLeft, uiCropRight, uiCropTop, uiCropBottom, iWidth, iHeight);
 				iRet = -1;
 				goto exit;
             }
@@ -938,6 +1128,1041 @@ exit:
     return iRet;
 }
 
+
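+//parse one profile/tier entry of profile_tier_level(): the general or per-sub-layer part, without level_idc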
+static int decodeProfileTierLevel(T_GetBitContext *pvBuf, T_PTLCommon *tPtl)
+{
+    int i;
+
+    if (getBitsLeft(pvBuf) < 2+1+5 + 32 + 4 + 16 + 16 + 12)
+        return -1;
+
+    tPtl->u8ProfileSpace = getBits(pvBuf, 2);
+    tPtl->u8TierFlag     = getOneBit(pvBuf);
+    tPtl->u8ProfileIdc   = getBits(pvBuf, 5);
+    if (tPtl->u8ProfileIdc == T_PROFILE_HEVC_MAIN)
+        RPT(RPT_DBG, "Main profile bitstream\n");
+    else if (tPtl->u8ProfileIdc == T_PROFILE_HEVC_MAIN_10)
+        RPT(RPT_DBG, "Main 10 profile bitstream\n");
+    else if (tPtl->u8ProfileIdc == T_PROFILE_HEVC_MAIN_STILL_PICTURE)
+        RPT(RPT_DBG, "Main Still Picture profile bitstream\n");
+    else if (tPtl->u8ProfileIdc == T_PROFILE_HEVC_REXT)
+        RPT(RPT_DBG, "Range Extension profile bitstream\n");
+    else
+        RPT(RPT_WRN, "Unknown HEVC profile: %d\n", tPtl->u8ProfileIdc);
+
+    for (i = 0; i < 32; i++) {
+        tPtl->au8ProfileCompatibilityFlag[i] = getOneBit(pvBuf);
+
+        if (tPtl->u8ProfileIdc == 0 && i > 0 && tPtl->au8ProfileCompatibilityFlag[i])
+            tPtl->u8ProfileIdc = i;
+    }
+    tPtl->u8ProgressiveSourceFlag    = getOneBit(pvBuf);
+    tPtl->u8InterlacedSourceFlag     = getOneBit(pvBuf);
+    tPtl->u8NonPackedConstraintFlag  = getOneBit(pvBuf);
+    tPtl->u8FrameOnlyConstraintFlag  = getOneBit(pvBuf);
+
+    getBits(pvBuf, 16); // XXX_reserved_zero_44bits[0..15]
+    getBits(pvBuf, 16); // XXX_reserved_zero_44bits[16..31]
+    getBits(pvBuf, 12); // XXX_reserved_zero_44bits[32..43]
+
+    return 0;
+}
+
+
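+//parse a complete profile_tier_level(): general PTL and level_idc, then the optional per-sub-layer PTL/level entries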
+static int parsePtl(T_GetBitContext *pvBuf, T_PTL *tPtl, int max_num_sub_layers)
+{
+    int i;
+    if (decodeProfileTierLevel(pvBuf, &tPtl->tGeneralPtl) < 0 ||
+        getBitsLeft(pvBuf) < 8 + (8*2 * (max_num_sub_layers - 1 > 0))) {
+        RPT(RPT_ERR, "PTL information too short\n");
+        return -1;
+    }
+
+    tPtl->tGeneralPtl.u8LevelIdc = getBits(pvBuf, 8);
+
+    for (i = 0; i < max_num_sub_layers - 1; i++) {
+        tPtl->au8SubLayerProfilePresentFlag[i] = getOneBit(pvBuf);
+        tPtl->au8SubLayerLevelPresentFlag[i]   = getOneBit(pvBuf);
+    }
+
+    if (max_num_sub_layers - 1> 0)
+        for (i = max_num_sub_layers - 1; i < 8; i++)
+            getBits(pvBuf, 2); // reserved_zero_2bits[i]
+    for (i = 0; i < max_num_sub_layers - 1; i++) {
+        if (tPtl->au8SubLayerProfilePresentFlag[i] &&
+            decodeProfileTierLevel(pvBuf, &tPtl->atSubLayerPtl[i]) < 0) {
+            RPT(RPT_ERR,
+                   "PTL information for sublayer %i too short\n", i);
+            return -1;
+        }
+        if (tPtl->au8SubLayerLevelPresentFlag[i]) {
+            if (getBitsLeft(pvBuf) < 8) {
+                RPT(RPT_ERR,
+                       "Not enough data for sublayer %i level_idc\n", i);
+                return -1;
+            } else
+                tPtl->atSubLayerPtl[i].u8LevelIdc = getBits(pvBuf, 8);
+        }
+    }
+
+    return 0;
+}
+
+
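+//initialize the scaling lists with the HEVC default values (flat 16 for 4x4, default intra/inter matrices for the larger sizes)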
+static void setDefaultScalingListData(T_ScalingList *sl)
+{
+    int matrixId;
+
+    for (matrixId = 0; matrixId < 6; matrixId++) {
+        // 4x4 default is 16
+        memset(sl->aaau8Sl[0][matrixId], 16, 16);
+        sl->aau8SlDc[0][matrixId] = 16; // default for 16x16
+        sl->aau8SlDc[1][matrixId] = 16; // default for 32x32
+    }
+    memcpy(sl->aaau8Sl[1][0], sg_au8DefaultScalingListIntra, 64);
+    memcpy(sl->aaau8Sl[1][1], sg_au8DefaultScalingListIntra, 64);
+    memcpy(sl->aaau8Sl[1][2], sg_au8DefaultScalingListIntra, 64);
+    memcpy(sl->aaau8Sl[1][3], sg_au8DefaultScalingListInter, 64);
+    memcpy(sl->aaau8Sl[1][4], sg_au8DefaultScalingListInter, 64);
+    memcpy(sl->aaau8Sl[1][5], sg_au8DefaultScalingListInter, 64);
+    memcpy(sl->aaau8Sl[2][0], sg_au8DefaultScalingListIntra, 64);
+    memcpy(sl->aaau8Sl[2][1], sg_au8DefaultScalingListIntra, 64);
+    memcpy(sl->aaau8Sl[2][2], sg_au8DefaultScalingListIntra, 64);
+    memcpy(sl->aaau8Sl[2][3], sg_au8DefaultScalingListInter, 64);
+    memcpy(sl->aaau8Sl[2][4], sg_au8DefaultScalingListInter, 64);
+    memcpy(sl->aaau8Sl[2][5], sg_au8DefaultScalingListInter, 64);
+    memcpy(sl->aaau8Sl[3][0], sg_au8DefaultScalingListIntra, 64);
+    memcpy(sl->aaau8Sl[3][1], sg_au8DefaultScalingListIntra, 64);
+    memcpy(sl->aaau8Sl[3][2], sg_au8DefaultScalingListIntra, 64);
+    memcpy(sl->aaau8Sl[3][3], sg_au8DefaultScalingListInter, 64);
+    memcpy(sl->aaau8Sl[3][4], sg_au8DefaultScalingListInter, 64);
+    memcpy(sl->aaau8Sl[3][5], sg_au8DefaultScalingListInter, 64);
+}
+
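+//parse scaling_list_data(): each list is either copied from a previously signalled list or read as delta-coded coefficients in diagonal scan order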
+static int scalingListData(T_GetBitContext *pvBuf, T_ScalingList *sl, T_HEVCSPS *ptSps)
+{
+    uint8_t scaling_list_pred_mode_flag;
+    int32_t scaling_list_dc_coef[2][6];
+    int size_id, matrix_id, pos;
+    int i;
+
+    for (size_id = 0; size_id < 4; size_id++)
+        for (matrix_id = 0; matrix_id < 6; matrix_id += ((size_id == 3) ? 3 : 1)) {
+            scaling_list_pred_mode_flag = getOneBit(pvBuf);
+            if (!scaling_list_pred_mode_flag) {
+                unsigned int delta = parseUe(pvBuf);
+                /* Only need to handle non-zero delta. Zero means default,
+                 * which should already be in the arrays. */
+                if (delta) {
+                    // Copy from previous array.
+                    delta *= (size_id == 3) ? 3 : 1;
+                    if (matrix_id < delta) {
+                        RPT(RPT_ERR,
+                               "Invalid delta in scaling list data: %d.\n", delta);
+                        return -1;
+                    }
+
+                    memcpy(sl->aaau8Sl[size_id][matrix_id],
+                           sl->aaau8Sl[size_id][matrix_id - delta],
+                           size_id > 0 ? 64 : 16);
+                    if (size_id > 1)
+                        sl->aau8SlDc[size_id - 2][matrix_id] = sl->aau8SlDc[size_id - 2][matrix_id - delta];
+                }
+            } else {
+                int next_coef, coef_num;
+                int32_t scaling_list_delta_coef;
+
+                next_coef = 8;
+                coef_num  = FFMIN(64, 1 << (4 + (size_id << 1)));
+                if (size_id > 1) {
+                    scaling_list_dc_coef[size_id - 2][matrix_id] = parseSe(pvBuf) + 8;
+                    next_coef = scaling_list_dc_coef[size_id - 2][matrix_id];
+                    sl->aau8SlDc[size_id - 2][matrix_id] = next_coef;
+                }
+                for (i = 0; i < coef_num; i++) {
+                    if (size_id == 0)
+                        pos = 4 * g_au8HevcDiagScan4x4Y[i] +
+                                  g_au8HevcDiagScan4x4X[i];
+                    else
+                        pos = 8 * g_au8HevcDiagScan8x8Y[i] +
+                                  g_au8HevcDiagScan8x8X[i];
+
+                    scaling_list_delta_coef = parseSe(pvBuf);
+                    next_coef = (next_coef + 256U + scaling_list_delta_coef) % 256;
+                    sl->aaau8Sl[size_id][matrix_id][pos] = next_coef;
+                }
+            }
+        }
+
+    if (ptSps->iChromaFormatIdc == 3) {
+        for (i = 0; i < 64; i++) {
+            sl->aaau8Sl[3][1][i] = sl->aaau8Sl[2][1][i];
+            sl->aaau8Sl[3][2][i] = sl->aaau8Sl[2][2][i];
+            sl->aaau8Sl[3][4][i] = sl->aaau8Sl[2][4][i];
+            sl->aaau8Sl[3][5][i] = sl->aaau8Sl[2][5][i];
+        }
+        sl->aau8SlDc[1][1] = sl->aau8SlDc[0][1];
+        sl->aau8SlDc[1][2] = sl->aau8SlDc[0][2];
+        sl->aau8SlDc[1][4] = sl->aau8SlDc[0][4];
+        sl->aau8SlDc[1][5] = sl->aau8SlDc[0][5];
+    }
+
+
+    return 0;
+}
+
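+//parse a short_term_ref_pic_set(): either predicted from a previously decoded RPS or coded explicitly as negative/positive POC deltas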
+int hevcDecodeShortTermRps(T_GetBitContext *pvBuf,
+                                  T_ShortTermRPS *rps, const T_HEVCSPS *ptSps, int is_slice_header)
+{
+    uint8_t rps_predict = 0;
+    int au32DeltaPoc;
+    int k0 = 0;
+    int k1 = 0;
+    int k  = 0;
+    int i;
+
+    if (rps != ptSps->atStRps && ptSps->uiNbStRps)
+        rps_predict = getOneBit(pvBuf);
+
+    if (rps_predict) {
+        const T_ShortTermRPS *ptRpsRidx;
+        int iDeltaRps;
+        unsigned int uiAbsDeltaRps;
+        uint8_t u8UseDeltaFlag = 0;
+        uint8_t u8DeltaRpsSign;
+
+        if (is_slice_header) {
+            unsigned int uiDeltaIdx = parseUe(pvBuf) + 1;
+            if (uiDeltaIdx > ptSps->uiNbStRps) {
+                RPT(RPT_ERR,
+                       "Invalid value of delta_idx in slice header RPS: %d > %d.\n",
+                       uiDeltaIdx, ptSps->uiNbStRps);
+                return -1;
+            }
+            ptRpsRidx = &ptSps->atStRps[ptSps->uiNbStRps - uiDeltaIdx];
+            rps->iRpsIdxNumDeltaPocs = ptRpsRidx->iNumDeltaPocs;
+        } else
+            ptRpsRidx = &ptSps->atStRps[rps - ptSps->atStRps - 1];
+
+        u8DeltaRpsSign = getOneBit(pvBuf);
+        uiAbsDeltaRps  = parseUe(pvBuf) + 1;
+        if (uiAbsDeltaRps < 1 || uiAbsDeltaRps > 32768) {
+            RPT(RPT_ERR,
+                   "Invalid value of uiAbsDeltaRps: %d\n",
+                   uiAbsDeltaRps);
+            return -1;
+        }
+        iDeltaRps      = (1 - (u8DeltaRpsSign << 1)) * uiAbsDeltaRps;
+        for (i = 0; i <= ptRpsRidx->iNumDeltaPocs; i++) {
+            int used = rps->au8Used[k] = getOneBit(pvBuf);
+
+            if (!used)
+                u8UseDeltaFlag = getOneBit(pvBuf);
+
+            if (used || u8UseDeltaFlag) {
+                if (i < ptRpsRidx->iNumDeltaPocs)
+                    au32DeltaPoc = iDeltaRps + ptRpsRidx->au32DeltaPoc[i];
+                else
+                    au32DeltaPoc = iDeltaRps;
+                rps->au32DeltaPoc[k] = au32DeltaPoc;
+                if (au32DeltaPoc < 0)
+                    k0++;
+                else
+                    k1++;
+                k++;
+            }
+        }
+
+        if (k >= FF_ARRAY_ELEMS(rps->au8Used)) {
+            RPT(RPT_ERR,
+                   "Invalid iNumDeltaPocs: %d\n", k);
+            return -1;
+        }
+
+        rps->iNumDeltaPocs    = k;
+        rps->uiNumNegativePics = k0;
+        // sort in increasing order (smallest first)
+        if (rps->iNumDeltaPocs != 0) {
+            int used, tmp;
+            for (i = 1; i < rps->iNumDeltaPocs; i++) {
+                au32DeltaPoc = rps->au32DeltaPoc[i];
+                used      = rps->au8Used[i];
+                for (k = i - 1; k >= 0; k--) {
+                    tmp = rps->au32DeltaPoc[k];
+                    if (au32DeltaPoc < tmp) {
+                        rps->au32DeltaPoc[k + 1] = tmp;
+                        rps->au8Used[k + 1]      = rps->au8Used[k];
+                        rps->au32DeltaPoc[k]     = au32DeltaPoc;
+                        rps->au8Used[k]          = used;
+                    }
+                }
+            }
+        }
+        if ((rps->uiNumNegativePics >> 1) != 0) {
+            int used;
+            k = rps->uiNumNegativePics - 1;
+            // flip the negative values to largest first
+            for (i = 0; i < rps->uiNumNegativePics >> 1; i++) {
+                au32DeltaPoc         = rps->au32DeltaPoc[i];
+                used              = rps->au8Used[i];
+                rps->au32DeltaPoc[i] = rps->au32DeltaPoc[k];
+                rps->au8Used[i]      = rps->au8Used[k];
+                rps->au32DeltaPoc[k] = au32DeltaPoc;
+                rps->au8Used[k]      = used;
+                k--;
+            }
+        }
+    } else {
+        unsigned int uiPrev, uiNbPositivePics;
+        rps->uiNumNegativePics = parseUe(pvBuf);
+        uiNbPositivePics       = parseUe(pvBuf);
+
+        if (rps->uiNumNegativePics >= HEVC_MAX_REFS ||
+            uiNbPositivePics >= HEVC_MAX_REFS) {
+            RPT(RPT_ERR, "Too many refs in a short term RPS.\n");
+            return -1;
+        }
+
+        rps->iNumDeltaPocs = rps->uiNumNegativePics + uiNbPositivePics;
+        if (rps->iNumDeltaPocs) {
+            uiPrev = 0;
+            for (i = 0; i < rps->uiNumNegativePics; i++) {
+                au32DeltaPoc = parseUe(pvBuf) + 1;
+                if (au32DeltaPoc < 1 || au32DeltaPoc > 32768) {
+                    RPT(RPT_ERR,
+                        "Invalid value of au32DeltaPoc: %d\n",
+                        au32DeltaPoc);
+                    return -1;
+                }
+                uiPrev -= au32DeltaPoc;
+                rps->au32DeltaPoc[i] = uiPrev;
+                rps->au8Used[i]      = getOneBit(pvBuf);
+            }
+            uiPrev = 0;
+            for (i = 0; i < uiNbPositivePics; i++) {
+                au32DeltaPoc = parseUe(pvBuf) + 1;
+                if (au32DeltaPoc < 1 || au32DeltaPoc > 32768) {
+                    RPT(RPT_ERR,
+                        "Invalid value of au32DeltaPoc: %d\n",
+                        au32DeltaPoc);
+                    return -1;
+                }
+                uiPrev += au32DeltaPoc;
+                rps->au32DeltaPoc[rps->uiNumNegativePics + i] = uiPrev;
+                rps->au8Used[rps->uiNumNegativePics + i]      = getOneBit(pvBuf);
+            }
+        }
+    }
+    return 0;
+}
+
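+//parse (and discard) sub_layer_hrd_parameters(): bit rate / CPB size per CPB, plus the cbr flag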
+static void decodeSublayerHrd(T_GetBitContext *pvBuf, unsigned int nb_cpb,
+                                int iSubpicParamsPresent)
+{
+    int i;
+
+    for (i = 0; i < nb_cpb; i++) {
+        parseUe(pvBuf); // bit_rate_value_minus1
+        parseUe(pvBuf); // cpb_size_value_minus1
+
+        if (iSubpicParamsPresent) {
+            parseUe(pvBuf); // cpb_size_du_value_minus1
+            parseUe(pvBuf); // bit_rate_du_value_minus1
+        }
+        getOneBit(pvBuf); // cbr_flag
+    }
+}
+
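+//parse hrd_parameters(): optional common info followed by per-sub-layer CPB parameters; values are validated but not stored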
+static int decodeHrd(T_GetBitContext *pvBuf, int common_inf_present,
+                       int max_sublayers)
+{
+    int iNalParamsPresent = 0, iVclParamsPresent = 0;
+    int iSubpicParamsPresent = 0;
+    int i;
+
+    if (common_inf_present) {
+        iNalParamsPresent = getOneBit(pvBuf);
+        iVclParamsPresent = getOneBit(pvBuf);
+
+        if (iNalParamsPresent || iVclParamsPresent) {
+            iSubpicParamsPresent = getOneBit(pvBuf);
+
+            if (iSubpicParamsPresent) {
+                getBits(pvBuf, 8); // tick_divisor_minus2
+                getBits(pvBuf, 5); // du_cpb_removal_delay_increment_length_minus1
+                getBits(pvBuf, 1); // sub_pic_cpb_params_in_pic_timing_sei_flag
+                getBits(pvBuf, 5); // dpb_output_delay_du_length_minus1
+            }
+
+            getBits(pvBuf, 4); // bit_rate_scale
+            getBits(pvBuf, 4); // cpb_size_scale
+
+            if (iSubpicParamsPresent)
+                getBits(pvBuf, 4);  // cpb_size_du_scale
+
+            getBits(pvBuf, 5); // initial_cpb_removal_delay_length_minus1
+            getBits(pvBuf, 5); // au_cpb_removal_delay_length_minus1
+            getBits(pvBuf, 5); // dpb_output_delay_length_minus1
+        }
+    }
+
+    for (i = 0; i < max_sublayers; i++) {
+        int low_delay = 0;
+        unsigned int nb_cpb = 1;
+        int iFixedRate = getOneBit(pvBuf);
+
+        if (!iFixedRate)
+            iFixedRate = getOneBit(pvBuf);
+
+        if (iFixedRate)
+            parseUe(pvBuf);  // elemental_duration_in_tc_minus1
+        else
+            low_delay = getOneBit(pvBuf);
+
+        if (!low_delay) {
+            nb_cpb = parseUe(pvBuf) + 1;
+            if (nb_cpb < 1 || nb_cpb > 32) {
+                RPT(RPT_ERR, "nb_cpb %d invalid\n", nb_cpb);
+                return -1;
+            }
+        }
+
+        if (iNalParamsPresent)
+            decodeSublayerHrd(pvBuf, nb_cpb, iSubpicParamsPresent);
+        if (iVclParamsPresent)
+            decodeSublayerHrd(pvBuf, nb_cpb, iSubpicParamsPresent);
+    }
+    return 0;
+}
+
+
+
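+//parse vui_parameters(): aspect ratio, video signal type, chroma location, default display window, timing info and bitstream restrictions; keeps the fallback that re-parses when an alternate VUI layout is detected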
+static void decodeVui(T_GetBitContext *pvBuf, T_HEVCSPS *ptSps)
+{
+    T_VUI tBackupVui, *tVui = &ptSps->tVui;
+    T_GetBitContext tBackup;
+    int sar_present, alt = 0;
+
+    RPT(RPT_DBG, "Decoding VUI\n");
+
+    sar_present = getOneBit(pvBuf);
+    if (sar_present) {
+        uint8_t sar_idx = getBits(pvBuf, 8);
+        if (sar_idx < FF_ARRAY_ELEMS(sg_atVuiSar))
+            tVui->tSar = sg_atVuiSar[sar_idx];
+        else if (sar_idx == 255) {
+            tVui->tSar.num = getBits(pvBuf, 16);
+            tVui->tSar.den = getBits(pvBuf, 16);
+        } else
+            RPT(RPT_WRN,
+                   "Unknown SAR index: %u.\n", sar_idx);
+    }
+
+    tVui->iOverscanInfoPresentFlag = getOneBit(pvBuf);
+    if (tVui->iOverscanInfoPresentFlag)
+        tVui->iOverscanAppropriateFlag = getOneBit(pvBuf);
+
+    tVui->iVideoSignalTypePresentFlag = getOneBit(pvBuf);
+    if (tVui->iVideoSignalTypePresentFlag) {
+        tVui->iVideoFormat                    = getBits(pvBuf, 3);
+        tVui->iVideoFullRangeFlag           = getOneBit(pvBuf);
+        tVui->iColourDescriptionPresentFlag = getOneBit(pvBuf);
+//        if (tVui->iVideoFullRangeFlag && ptSps->pix_fmt == AV_PIX_FMT_YUV420P)
+//            ptSps->pix_fmt = AV_PIX_FMT_YUVJ420P;
+        if (tVui->iColourDescriptionPresentFlag) {
+            tVui->u8ColourPrimaries        = getBits(pvBuf, 8);
+            tVui->u8TransferCharacteristic = getBits(pvBuf, 8);
+            tVui->u8MatrixCoeffs           = getBits(pvBuf, 8);
+        }
+    }
+
+    tVui->iChromaLocInfoPresentFlag = getOneBit(pvBuf);
+    if (tVui->iChromaLocInfoPresentFlag) {
+        tVui->iChromaSampleLocTypeTopField    = parseUe(pvBuf);
+        tVui->iChromaSampleLocTypeBottomField = parseUe(pvBuf);
+    }
+
+    tVui->iNeutraChromaIndicationFlag = getOneBit(pvBuf);
+    tVui->iFieldSeqFlag               = getOneBit(pvBuf);
+    tVui->iFrameFieldInfoPresentFlag  = getOneBit(pvBuf);
+
+    // Backup context in case an alternate header is detected
+    memcpy(&tBackup, pvBuf, sizeof(tBackup));
+    memcpy(&tBackupVui, tVui, sizeof(tBackupVui));
+    if (getBitsLeft(pvBuf) >= 68 && showBitsLong(pvBuf, 21) == 0x100000) {
+        tVui->iDefaultDisplayWindowFlag = 0;
+        RPT(RPT_WRN, "Invalid default display window\n");
+    } else
+        tVui->iDefaultDisplayWindowFlag = getOneBit(pvBuf);
+
+    if (tVui->iDefaultDisplayWindowFlag) {
+        int vert_mult  = sg_au8HevcSubHeightC[ptSps->iChromaFormatIdc];
+        int horiz_mult = sg_au8HevcSubWidthC[ptSps->iChromaFormatIdc];
+        tVui->tDefDispWin.uiLeftOffset   = parseUe(pvBuf) * horiz_mult;
+        tVui->tDefDispWin.uiRightOffset  = parseUe(pvBuf) * horiz_mult;
+        tVui->tDefDispWin.uiTopOffset    = parseUe(pvBuf) *  vert_mult;
+        tVui->tDefDispWin.uiBottomOffset = parseUe(pvBuf) *  vert_mult;    
+    }
+
+timing_info:
+    tVui->iVuiTimingInfoPresentFlag = getOneBit(pvBuf);
+
+    if (tVui->iVuiTimingInfoPresentFlag) {
+        if( getBitsLeft(pvBuf) < 66 && !alt) {
+            // The alternate syntax seems to place the timing info
+            // where tDefDispWin is normally located
+            RPT(RPT_WRN,
+                   "Strange VUI timing information, retrying...\n");
+            memcpy(tVui, &tBackupVui, sizeof(tBackupVui));
+            memcpy(pvBuf, &tBackup, sizeof(tBackup));
+            alt = 1;
+            goto timing_info;
+        }
+        tVui->u32VuiNumUnitsInTick               = getBits(pvBuf, 32);
+        tVui->u32VuiTimeScale                      = getBits(pvBuf, 32);
+        if (alt) {
+            RPT(RPT_INF, "Retry got %u/%u fps\n",
+                   tVui->u32VuiTimeScale, tVui->u32VuiNumUnitsInTick);
+        }
+        tVui->iVuiPocProportionalToTimingFlag = getOneBit(pvBuf);
+        if (tVui->iVuiPocProportionalToTimingFlag)
+            tVui->iVuiNumTicksPocDiffOneMinus1 = parseUe(pvBuf);
+        tVui->iVuiHrdParametersPresentFlag = getOneBit(pvBuf);
+        if (tVui->iVuiHrdParametersPresentFlag)
+            decodeHrd(pvBuf, 1, ptSps->iMaxSubLayers);
+    }
+
+    tVui->iBitstreamRestrictionFlag = getOneBit(pvBuf);
+    if (tVui->iBitstreamRestrictionFlag) {
+        if (getBitsLeft(pvBuf) < 8 && !alt) {
+            RPT(RPT_WRN,
+                   "Strange VUI bitstream restriction information, retrying"
+                   " from timing information...\n");
+            memcpy(tVui, &tBackupVui, sizeof(tBackupVui));
+            memcpy(pvBuf, &tBackup, sizeof(tBackup));
+            alt = 1;
+            goto timing_info;
+        }
+        tVui->iTilesFixedStructureFlag              = getOneBit(pvBuf);
+        tVui->iMotionVectorsOverPicBoundariesFlag   = getOneBit(pvBuf);
+        tVui->iRestrictedRefPicListsFlag            = getOneBit(pvBuf);
+        tVui->iMinSpatialSegmentationIdc            = parseUe(pvBuf);
+        tVui->iMaxBytesPerPicDenom                 = parseUe(pvBuf);
+        tVui->iMaxBitsPerMinCuDenom               = parseUe(pvBuf);
+        tVui->iLog2MaxMvLengthHorizontal           = parseUe(pvBuf);
+        tVui->iLog2MaxMvLengthVertical             = parseUe(pvBuf);
+    }
+
+    if (getBitsLeft(pvBuf) < 1 && !alt) {
+        // XXX: Alternate syntax when iSpsRangeExtensionFlag != 0?
+        RPT(RPT_WRN,
+               "Overread in VUI, retrying from timing information...\n");
+        memcpy(tVui, &tBackupVui, sizeof(tBackupVui));
+        memcpy(pvBuf, &tBackup, sizeof(tBackup));
+        alt = 1;
+        goto timing_info;
+    }
+}
+
+static unsigned avModUintp2c(unsigned a, unsigned p)
+{
+    return a & ((1 << p) - 1);
+}
+
+
+int h265DecSeqParameterSet( void *pvBufSrc, T_HEVCSPS *ptSps )
+{
+    T_HEVCWindow *ow;
+    int iLog2DiffMaxMinTransformBlockSize;
+    int iBitDepthChroma, iStart, iSublayerOrderingInfo;
+    int i;
+    int iRet = 0;
+
+    void *pvBuf = NULL;
+    if(NULL == pvBufSrc || NULL == ptSps)
+    {
+        RPT(RPT_ERR,"ERR null pointer\n");
+        iRet = -1;
+        goto exit;
+    }
+
+    memset((void *)ptSps, 0, sizeof(T_HEVCSPS));
+
+    pvBuf = deEmulationPrevention(pvBufSrc);
+    if(NULL == pvBuf)
+    {
+        RPT(RPT_ERR,"ERR null pointer\n");
+        iRet = -1;
+        goto exit;
+    }
+
+    // Coded parameters
+
+    ptSps->uiVpsId = getBits(pvBuf, 4);
+    if (ptSps->uiVpsId >= HEVC_MAX_VPS_COUNT) {
+        RPT(RPT_ERR, "VPS id out of range: %d\n", ptSps->uiVpsId);
+		iRet = -1;
+		goto exit;
+    }
+
+    ptSps->iMaxSubLayers = getBits(pvBuf, 3) + 1;
+    if (ptSps->iMaxSubLayers > HEVC_MAX_SUB_LAYERS) {
+        RPT(RPT_ERR, "sps_max_sub_layers out of range: %d\n",
+               ptSps->iMaxSubLayers);
+		iRet = -1;
+		goto exit;
+    }
+
+    ptSps->u8temporalIdNestingFlag = getBits(pvBuf, 1);
+    if ((iRet = parsePtl(pvBuf, &ptSps->tPtl, ptSps->iMaxSubLayers)) < 0)
+        goto exit;
+
+    int sps_id = parseUe(pvBuf);
+    if (sps_id >= HEVC_MAX_SPS_COUNT) {
+        RPT(RPT_ERR, "SPS id out of range: %d\n", sps_id);
+		iRet = -1;
+		goto exit;
+    }
+
+    ptSps->iChromaFormatIdc = parseUe(pvBuf);
+    if (ptSps->iChromaFormatIdc > 3U) {
+        RPT(RPT_ERR, "iChromaFormatIdc %d is invalid\n", ptSps->iChromaFormatIdc);
+		iRet = -1;
+		goto exit;
+    }
+
+    if (ptSps->iChromaFormatIdc == 3)
+        ptSps->u8SeparateColourPlaneFlag = getOneBit(pvBuf);
+
+    if (ptSps->u8SeparateColourPlaneFlag)
+        ptSps->iChromaFormatIdc = 0;
+
+    ptSps->iWidth  = parseUe(pvBuf);
+    ptSps->iHeight = parseUe(pvBuf);
+
+    if (getOneBit(pvBuf)) { // pic_conformance_flag
+        int vert_mult  = sg_au8HevcSubHeightC[ptSps->iChromaFormatIdc];
+        int horiz_mult = sg_au8HevcSubWidthC[ptSps->iChromaFormatIdc];
+        ptSps->tPicConfWin.uiLeftOffset   = parseUe(pvBuf) * horiz_mult;
+        ptSps->tPicConfWin.uiRightOffset  = parseUe(pvBuf) * horiz_mult;
+        ptSps->tPicConfWin.uiTopOffset    = parseUe(pvBuf) *  vert_mult;
+        ptSps->tPicConfWin.uiBottomOffset = parseUe(pvBuf) *  vert_mult;
+
+        ptSps->tOutputWindow = ptSps->tPicConfWin;
+    }
+
+    ptSps->iBitDepth   = parseUe(pvBuf) + 8;
+    iBitDepthChroma = parseUe(pvBuf) + 8;
+	
+    if (ptSps->iChromaFormatIdc && iBitDepthChroma != ptSps->iBitDepth) {
+        RPT(RPT_ERR,
+               "Luma bit depth (%d) is different from chroma bit depth (%d), "
+               "this is unsupported.\n",
+               ptSps->iBitDepth, iBitDepthChroma);
+		iRet = -1;
+		goto exit;
+    }
+    ptSps->iBitDepthChroma = iBitDepthChroma;
+
+    ptSps->uiLog2MaxPocLsb = parseUe(pvBuf) + 4;
+    if (ptSps->uiLog2MaxPocLsb > 16) {
+        RPT(RPT_ERR, "log2_max_pic_order_cnt_lsb_minus4 out range: %d\n",
+               ptSps->uiLog2MaxPocLsb - 4);
+		iRet = -1;
+		goto exit;
+    }
+
+    iSublayerOrderingInfo = getOneBit(pvBuf);
+    iStart = iSublayerOrderingInfo ? 0 : ptSps->iMaxSubLayers - 1;
+    for (i = iStart; i < ptSps->iMaxSubLayers; i++) {
+        ptSps->stTemporalLayer[i].iMaxDecPicBuffering = parseUe(pvBuf) + 1;
+        ptSps->stTemporalLayer[i].iNumReorderPics      = parseUe(pvBuf);
+        ptSps->stTemporalLayer[i].iMaxLatencyIncrease  = parseUe(pvBuf) - 1;
+        if (ptSps->stTemporalLayer[i].iMaxDecPicBuffering > (unsigned)HEVC_MAX_DPB_SIZE) {
+            RPT(RPT_ERR, "sps_max_dec_pic_buffering_minus1 out of range: %d\n",
+                   ptSps->stTemporalLayer[i].iMaxDecPicBuffering - 1U);
+			iRet = -1;
+			goto exit;
+        }
+        if (ptSps->stTemporalLayer[i].iNumReorderPics > ptSps->stTemporalLayer[i].iMaxDecPicBuffering - 1) {
+            RPT(RPT_WRN, "sps_max_num_reorder_pics out of range: %d\n",
+                   ptSps->stTemporalLayer[i].iNumReorderPics);
+            if (ptSps->stTemporalLayer[i].iNumReorderPics > HEVC_MAX_DPB_SIZE - 1) {
+				iRet = -1;
+				goto exit;
+            }
+            ptSps->stTemporalLayer[i].iMaxDecPicBuffering = ptSps->stTemporalLayer[i].iNumReorderPics + 1;
+        }
+    }
+
+    if (!iSublayerOrderingInfo) {
+        for (i = 0; i < iStart; i++) {
+            ptSps->stTemporalLayer[i].iMaxDecPicBuffering  = ptSps->stTemporalLayer[iStart].iMaxDecPicBuffering;
+            ptSps->stTemporalLayer[i].iNumReorderPics      = ptSps->stTemporalLayer[iStart].iNumReorderPics;
+            ptSps->stTemporalLayer[i].iMaxLatencyIncrease  = ptSps->stTemporalLayer[iStart].iMaxLatencyIncrease;
+        }
+    }
+
+    ptSps->uiLog2MinCbSize                    = parseUe(pvBuf) + 3;
+    ptSps->uiLog2DiffMaxMinCodingBlockSize    = parseUe(pvBuf);
+    ptSps->uiLog2MinTbSize                    = parseUe(pvBuf) + 2;
+    iLog2DiffMaxMinTransformBlockSize   	  = parseUe(pvBuf);
+    ptSps->uiLog2MaxTrafoSize                 = iLog2DiffMaxMinTransformBlockSize +
+                                               ptSps->uiLog2MinTbSize;
+
+    if (ptSps->uiLog2MinCbSize < 3 || ptSps->uiLog2MinCbSize > 30) {
+        RPT(RPT_ERR, "Invalid value %d for uiLog2MinCbSize", ptSps->uiLog2MinCbSize);
+		iRet = -1;
+		goto exit;
+    }
+
+    if (ptSps->uiLog2DiffMaxMinCodingBlockSize > 30) {
+        RPT(RPT_ERR, "Invalid value %d for uiLog2DiffMaxMinCodingBlockSize", ptSps->uiLog2DiffMaxMinCodingBlockSize);
+		iRet = -1;
+		goto exit;
+    }
+
+    if (ptSps->uiLog2MinTbSize >= ptSps->uiLog2MinCbSize || ptSps->uiLog2MinTbSize < 2) {
+        RPT(RPT_ERR, "Invalid value for uiLog2MinTbSize");
+		iRet = -1;
+		goto exit;
+    }
+
+    if (iLog2DiffMaxMinTransformBlockSize < 0 || iLog2DiffMaxMinTransformBlockSize > 30) {
+        RPT(RPT_ERR, "Invalid value %d for iLog2DiffMaxMinTransformBlockSize", iLog2DiffMaxMinTransformBlockSize);
+		iRet = -1;
+		goto exit;
+    }
+
+    ptSps->iMaxTransformHierarchyDepthInter = parseUe(pvBuf);
+    ptSps->iMaxTransformHierarchyDepthIntra = parseUe(pvBuf);
+
+    ptSps->u8ScalingListEnableFlag = getOneBit(pvBuf);
+	
+    if (ptSps->u8ScalingListEnableFlag) {
+        setDefaultScalingListData(&ptSps->tScalingList);
+
+        if (getOneBit(pvBuf)) {
+            iRet = scalingListData(pvBuf, &ptSps->tScalingList, ptSps);
+            if (iRet < 0)
+                goto exit;
+        }
+    }
+
+    ptSps->u8AmpEnabledFlag = getOneBit(pvBuf);
+    ptSps->u8SaoEnabled      = getOneBit(pvBuf);
+
+    ptSps->iPcmEnabledFlag = getOneBit(pvBuf);
+	
+    if (ptSps->iPcmEnabledFlag) {
+        ptSps->pcm.u8BitDepth   = getBits(pvBuf, 4) + 1;
+        ptSps->pcm.u8BitDepthChroma = getBits(pvBuf, 4) + 1;
+        ptSps->pcm.uiLog2MinPcmCbSize = parseUe(pvBuf) + 3;
+        ptSps->pcm.uiLog2MaxPcmCbSize = ptSps->pcm.uiLog2MinPcmCbSize +
+                                        parseUe(pvBuf);
+        if (FFMAX(ptSps->pcm.u8BitDepth, ptSps->pcm.u8BitDepthChroma) > ptSps->iBitDepth) {
+            RPT(RPT_ERR,
+                   "PCM bit depth (%d, %d) is greater than normal bit depth (%d)\n",
+                   ptSps->pcm.u8BitDepth, ptSps->pcm.u8BitDepthChroma, ptSps->iBitDepth);
+			iRet = -1;
+			goto exit;
+        }
+
+        ptSps->pcm.u8LoopFilterDisableFlag = getOneBit(pvBuf);
+    }
+
+    ptSps->uiNbStRps = parseUe(pvBuf);
+    if (ptSps->uiNbStRps > HEVC_MAX_SHORT_TERM_REF_PIC_SETS) {
+        RPT(RPT_ERR, "Too many short term RPS: %d.\n",
+               ptSps->uiNbStRps);
+		iRet = -1;
+		goto exit;
+    }
+    for (i = 0; i < ptSps->uiNbStRps; i++) {
+        if ((iRet = hevcDecodeShortTermRps(pvBuf, &ptSps->atStRps[i],
+                                                 ptSps, 0)) < 0)
+            goto exit;
+    }
+
+    ptSps->u8LongTermRefPicsPresentFlag = getOneBit(pvBuf);
+    if (ptSps->u8LongTermRefPicsPresentFlag) {
+        ptSps->u8NumLongTermRefPicsSps = parseUe(pvBuf);
+        if (ptSps->u8NumLongTermRefPicsSps > HEVC_MAX_LONG_TERM_REF_PICS) {
+            RPT(RPT_ERR, "Too many long term ref pics: %d.\n",
+                   ptSps->u8NumLongTermRefPicsSps);
+			iRet = -1;
+			goto exit;
+        }
+        for (i = 0; i < ptSps->u8NumLongTermRefPicsSps; i++) {
+            ptSps->au16LtRefPicPocLsbSps[i]       = getBits(pvBuf, ptSps->uiLog2MaxPocLsb);
+            ptSps->au8UsedByCurrPicLtSpsFlag[i] = getOneBit(pvBuf);
+        }
+    }
+
+    ptSps->u8SpsTemporalMvpEnabledFlag          = getOneBit(pvBuf);
+    ptSps->u8SpsStrongIntraMmoothingEnableFlag = getOneBit(pvBuf);
+    ptSps->tVui.tSar = (T_AVRational){0, 1};
+    ptSps->iVuiPresent = getOneBit(pvBuf);
+    if (ptSps->iVuiPresent)
+        decodeVui(pvBuf, ptSps);
+
+
+    if (getOneBit(pvBuf)) { // sps_extension_flag
+        int iSpsRangeExtensionFlag = getOneBit(pvBuf);
+        getBits(pvBuf, 7); //sps_extension_7bits = getBits(pvBuf, 7);
+        if (iSpsRangeExtensionFlag) {
+            int iExtendedPrecisionProcessingFlag;
+            int iCabacBypassAlignmentEnabledFlag;
+
+            ptSps->iTransformSkipRotationEnabledFlag = getOneBit(pvBuf);
+            ptSps->iTransformSkipContextEnabledFlag  = getOneBit(pvBuf);
+            ptSps->iImplicitRdpcmEnabledFlag = getOneBit(pvBuf);
+
+            ptSps->iExplicitRdpcmEnabledFlag = getOneBit(pvBuf);
+
+            iExtendedPrecisionProcessingFlag = getOneBit(pvBuf);
+            if (iExtendedPrecisionProcessingFlag)
+                RPT(RPT_WRN,
+                   "iExtendedPrecisionProcessingFlag not yet implemented\n");
+
+            ptSps->iIntraSmoothingDisabledFlag       = getOneBit(pvBuf);
+            ptSps->iHighPrecisionOffsetsEnabledFlag = getOneBit(pvBuf);
+            if (ptSps->iHighPrecisionOffsetsEnabledFlag)
+                RPT(RPT_WRN,
+                   "iHighPrecisionOffsetsEnabledFlag not yet implemented\n");
+
+            ptSps->iPersistentRiceAdaptationEnabledFlag = getOneBit(pvBuf);
+
+            iCabacBypassAlignmentEnabledFlag  = getOneBit(pvBuf);
+            if (iCabacBypassAlignmentEnabledFlag)
+                RPT(RPT_WRN,
+                   "iCabacBypassAlignmentEnabledFlag not yet implemented\n");
+        }
+    }
+
+    ow = &ptSps->tOutputWindow;
+    if (ow->uiLeftOffset >= INT_MAX - ow->uiRightOffset     ||
+        ow->uiTopOffset  >= INT_MAX - ow->uiBottomOffset    ||
+        ow->uiLeftOffset + ow->uiRightOffset  >= ptSps->iWidth ||
+        ow->uiTopOffset  + ow->uiBottomOffset >= ptSps->iHeight) {
+        RPT(RPT_WRN, "Invalid cropping offsets: %u/%u/%u/%u\n",
+               ow->uiLeftOffset, ow->uiRightOffset, ow->uiTopOffset, ow->uiBottomOffset);
+        RPT(RPT_WRN,
+               "Displaying the whole video surface.\n");
+        memset(ow, 0, sizeof(*ow));
+        memset(&ptSps->tPicConfWin, 0, sizeof(ptSps->tPicConfWin));
+    }
+
+    // Inferred parameters
+    ptSps->uiLog2CtbSize = ptSps->uiLog2MinCbSize +
+                         ptSps->uiLog2DiffMaxMinCodingBlockSize;
+    ptSps->uiLog2MinPuSize = ptSps->uiLog2MinCbSize - 1;
+
+    if (ptSps->uiLog2CtbSize > HEVC_MAX_LOG2_CTB_SIZE) {
+        RPT(RPT_ERR, "CTB size out of range: 2^%d\n", ptSps->uiLog2CtbSize);
+        iRet = -1;
+		goto exit;
+    }
+    if (ptSps->uiLog2CtbSize < 4) {
+        RPT(RPT_ERR,
+               "uiLog2CtbSize %d differs from the bounds of any known profile\n",
+               ptSps->uiLog2CtbSize);
+        iRet = -1;
+		goto exit;
+    }
+
+    ptSps->iCtbWidth  = (ptSps->iWidth  + (1 << ptSps->uiLog2CtbSize) - 1) >> ptSps->uiLog2CtbSize;
+    ptSps->iCtbHeight = (ptSps->iHeight + (1 << ptSps->uiLog2CtbSize) - 1) >> ptSps->uiLog2CtbSize;
+    ptSps->iCtbSize   = ptSps->iCtbWidth * ptSps->iCtbHeight;
+
+    ptSps->iMinCbWidth  = ptSps->iWidth  >> ptSps->uiLog2MinCbSize;
+    ptSps->iMinCbHeight = ptSps->iHeight >> ptSps->uiLog2MinCbSize;
+    ptSps->iMinTbWidth  = ptSps->iWidth  >> ptSps->uiLog2MinTbSize;
+    ptSps->iMinTbHeight = ptSps->iHeight >> ptSps->uiLog2MinTbSize;
+    ptSps->iMinPuWidth  = ptSps->iWidth  >> ptSps->uiLog2MinPuSize;
+    ptSps->iMinPuHeight = ptSps->iHeight >> ptSps->uiLog2MinPuSize;
+    ptSps->iTbMask       = (1 << (ptSps->uiLog2CtbSize - ptSps->uiLog2MinTbSize)) - 1;
+
+    ptSps->iQpBdOffset = 6 * (ptSps->iBitDepth - 8);
+
+    if (avModUintp2c(ptSps->iWidth, ptSps->uiLog2MinCbSize) ||
+        avModUintp2c(ptSps->iHeight, ptSps->uiLog2MinCbSize)) {
+        RPT(RPT_ERR, "Invalid coded frame dimensions.\n");
+        iRet = -1;
+		goto exit;
+    }
+
+    if (ptSps->iMaxTransformHierarchyDepthInter > ptSps->uiLog2CtbSize - ptSps->uiLog2MinTbSize) {
+        RPT(RPT_ERR, "iMaxTransformHierarchyDepthInter out of range: %d\n",
+               ptSps->iMaxTransformHierarchyDepthInter);
+        iRet = -1;
+		goto exit;
+    }
+    if (ptSps->iMaxTransformHierarchyDepthIntra > ptSps->uiLog2CtbSize - ptSps->uiLog2MinTbSize) {
+        RPT(RPT_ERR, "iMaxTransformHierarchyDepthIntra out of range: %d\n",
+               ptSps->iMaxTransformHierarchyDepthIntra);
+        iRet = -1;
+		goto exit;
+    }
+    if (ptSps->uiLog2MaxTrafoSize > FFMIN(ptSps->uiLog2CtbSize, 5)) {
+        RPT(RPT_ERR,
+               "max transform block size out of range: %d\n",
+               ptSps->uiLog2MaxTrafoSize);
+        iRet = -1;
+		goto exit;
+    }
+
+    if (getBitsLeft(pvBuf) < 0) {
+        RPT(RPT_ERR,
+               "Overread SPS by %d bits\n", -getBitsLeft(pvBuf));
+        iRet = -1;
+		goto exit;
+    }
+
+	
+exit:
+
+	getBitContextFree(pvBuf);
+	return iRet;
+
+}
+
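For orientation, a hedged usage sketch of the new parser: it assumes the caller has already isolated an SPS NAL unit and skipped the 2-byte HEVC NAL header, and that T_GetBitContext only needs pu8Buf/iBufSize filled in (as its definition in SPSParser.h suggests); deEmulationPrevention() removes the 0x03 emulation bytes internally.

    /* Sketch, not part of this patch: parse an HEVC SPS and query the cropped
     * resolution. getHevcResolution() and its parameter names are invented
     * here for illustration. */
    #include <stdint.h>
    #include <string.h>
    #include "SPSParser.h"

    int getHevcResolution(const uint8_t *sps_payload, int payload_len,
                          int *piWidth, int *piHeight)
    {
        T_GetBitContext ctx;
        T_HEVCSPS sps;

        memset(&ctx, 0, sizeof(ctx));
        ctx.pu8Buf   = (uint8_t *)sps_payload; /* emulation bytes still present */
        ctx.iBufSize = payload_len;

        if (h265DecSeqParameterSet(&ctx, &sps) != 0) {
            return -1;
        }
        h265GetWidthHeight(&sps, piWidth, piHeight);
        return 0;
    }
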
+
+int h265DecVideoParameterSet( void *pvBufSrc, T_HEVCVPS *ptVps )
+{
+	int iRet = 0;
+    int i,j;
+    int uiVpsId = 0;
+	
+	void *pvBuf = NULL;
+	if(NULL == pvBufSrc || NULL == ptVps)
+	{
+		RPT(RPT_ERR,"ERR null pointer\n");
+		iRet = -1;
+		goto exit;
+	}
+
+	memset((void *)ptVps, 0, sizeof(T_HEVCVPS));
+
+	pvBuf = deEmulationPrevention(pvBufSrc);
+	if(NULL == pvBuf)
+	{
+		RPT(RPT_ERR,"ERR null pointer\n");
+		iRet = -1;
+		goto exit;
+	}
+
+	RPT(RPT_DBG, "Decoding VPS\n");
+
+	uiVpsId = getBits(pvBuf, 4);
+	if (uiVpsId >= HEVC_MAX_VPS_COUNT) {
+		RPT(RPT_ERR, "VPS id out of range: %d\n", uiVpsId);
+		iRet = -1;
+		goto exit;
+	}
+
+	if (getBits(pvBuf, 2) != 3) { // vps_reserved_three_2bits
+		RPT(RPT_ERR, "vps_reserved_three_2bits is not three\n");
+		iRet = -1;
+		goto exit;
+	}
+
+	ptVps->iVpsMaxLayers 			  = getBits(pvBuf, 6) + 1;
+	ptVps->iVpsMaxSubLayers 		  = getBits(pvBuf, 3) + 1;
+	ptVps->u8VpsTemporalIdNestingFlag = getOneBit(pvBuf);
+
+	if (getBits(pvBuf, 16) != 0xffff) { // vps_reserved_ffff_16bits
+		RPT(RPT_ERR, "vps_reserved_ffff_16bits is not 0xffff\n");
+		iRet = -1;
+		goto exit;
+	}
+
+	if (ptVps->iVpsMaxSubLayers > HEVC_MAX_SUB_LAYERS) {
+		RPT(RPT_ERR, "iVpsMaxSubLayers out of range: %d\n",
+			   ptVps->iVpsMaxSubLayers);
+		iRet = -1;
+		goto exit;
+	}
+
+	if (parsePtl(pvBuf, &ptVps->tPtl, ptVps->iVpsMaxSubLayers) < 0){
+		iRet = -1;
+		goto exit;
+	}
+
+	ptVps->iVpsSubLayerOrderingInfoPresentFlag = getOneBit(pvBuf);
+
+	i = ptVps->iVpsSubLayerOrderingInfoPresentFlag ? 0 : ptVps->iVpsMaxSubLayers - 1;
+	for (; i < ptVps->iVpsMaxSubLayers; i++) {
+		ptVps->uiVpsMaxDecPicBuffering[i] = parseUe(pvBuf) + 1;
+		ptVps->auiVpsNumReorderPics[i]	  = parseUe(pvBuf);
+		ptVps->auiVpsMaxLatencyIncrease[i]  = parseUe(pvBuf) - 1;
+
+		if (ptVps->uiVpsMaxDecPicBuffering[i] > HEVC_MAX_DPB_SIZE || !ptVps->uiVpsMaxDecPicBuffering[i]) {
+			RPT(RPT_ERR, "vps_max_dec_pic_buffering_minus1 out of range: %d\n",
+				   ptVps->uiVpsMaxDecPicBuffering[i] - 1);
+			iRet = -1;
+			goto exit;
+		}
+		if (ptVps->auiVpsNumReorderPics[i] > ptVps->uiVpsMaxDecPicBuffering[i] - 1) {
+			RPT(RPT_WRN, "vps_max_num_reorder_pics out of range: %d\n",
+				   ptVps->auiVpsNumReorderPics[i]);
+		}
+	}
+
+	ptVps->iVpsMaxLayerId	= getBits(pvBuf, 6);
+	ptVps->iVpsNumLayerSets = parseUe(pvBuf) + 1;
+	if (ptVps->iVpsNumLayerSets < 1 || ptVps->iVpsNumLayerSets > 1024 ||
+		(ptVps->iVpsNumLayerSets - 1LL) * (ptVps->iVpsMaxLayerId + 1LL) > getBitsLeft(pvBuf)) {
+		RPT(RPT_ERR, "too many layer_id_included_flags\n");
+		iRet = -1;
+		goto exit;
+	}
+
+	for (i = 1; i < ptVps->iVpsNumLayerSets; i++)
+		for (j = 0; j <= ptVps->iVpsMaxLayerId; j++)
+			getBits(pvBuf, 1);  // layer_id_included_flag[i][j]
+
+	ptVps->u8VpsTimingInfoPresentFlag = getOneBit(pvBuf);
+	if (ptVps->u8VpsTimingInfoPresentFlag) {
+		ptVps->u32VpsNumUnitsInTick				 = getBits(pvBuf, 32);
+		ptVps->u32VpsTimeScale 					 = getBits(pvBuf, 32);
+		ptVps->u8VpsPocProportionalToTimingFlag = getOneBit(pvBuf);
+		if (ptVps->u8VpsPocProportionalToTimingFlag)
+			ptVps->iVpsNumTicksPocDiffOne = parseUe(pvBuf) + 1;
+		ptVps->iVpsNumHrdParameters = parseUe(pvBuf);
+		if (ptVps->iVpsNumHrdParameters > (unsigned)ptVps->iVpsNumLayerSets) {
+			RPT(RPT_ERR,
+				   "iVpsNumHrdParameters %d is invalid\n", ptVps->iVpsNumHrdParameters);
+			iRet = -1;
+			goto exit;
+		}
+		for (i = 0; i < ptVps->iVpsNumHrdParameters; i++) {
+			int common_inf_present = 1;
+
+			parseUe(pvBuf); // hrd_layer_set_idx
+			if (i)
+				common_inf_present = getOneBit(pvBuf);
+			decodeHrd(pvBuf, common_inf_present, ptVps->iVpsMaxSubLayers);
+		}
+	}
+	getOneBit(pvBuf); /* vps_extension_flag */
+
+	if (getBitsLeft(pvBuf) < 0) {
+		RPT(RPT_ERR,
+			   "Overread VPS by %d bits\n", -getBitsLeft(pvBuf));
+		
+		iRet = -1;
+		goto exit;
+	}
+
+
+exit:
+
+	getBitContextFree(pvBuf);
+	return iRet;
+
+}
+	
 void h264GetWidthHeight(T_SPS *ptSps, int *piWidth, int *piHeight)
 {
 	// width/height calculation formula
@@ -1036,3 +2261,43 @@ void h264GeFramerate(T_SPS *ptSps, float *pfFramerate)
 
 
 
+
+void h265GetWidthHeight(T_HEVCSPS *ptSps, int *piWidth, int *piHeight)
+{
+#if 1
+	int iCodeWidth = 0;
+	int iCodedHeight = 0;
+	iCodeWidth	= ptSps->iWidth;
+	iCodedHeight = ptSps->iHeight;
+	*piWidth		 = ptSps->iWidth  - ptSps->tOutputWindow.uiLeftOffset - ptSps->tOutputWindow.uiRightOffset;
+	*piHeight		 = ptSps->iHeight - ptSps->tOutputWindow.uiTopOffset  - ptSps->tOutputWindow.uiBottomOffset;
+
+
+	RPT(RPT_DBG, "iCodeWidth:%d, iCodedHeight:%d\n", iCodeWidth, iCodedHeight);
+
+	RPT(RPT_DBG, "*piWidth:%d, *piHeight:%d\n", *piWidth, *piHeight);
+
+	RPT(RPT_DBG, "ptSps->tOutputWindow.uiRightOffset:%d, ptSps->tOutputWindow.uiLeftOffset:%d\n", ptSps->tOutputWindow.uiRightOffset, ptSps->tOutputWindow.uiLeftOffset);
+
+	RPT(RPT_DBG, "ptSps->tOutputWindow.uiTopOffset:%d, ptSps->tOutputWindow.uiBottomOffset:%d\n", ptSps->tOutputWindow.uiTopOffset, ptSps->tOutputWindow.uiBottomOffset);
+#endif
+
+}
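As a worked example of the cropping math: 1080p content is typically coded as 1920x1088 with a 4:2:0 conformance window of conf_win_bottom_offset = 4; because SubHeightC is 2 for 4:2:0, the parser stores uiBottomOffset = 8 and this function reports 1920 x (1088 - 8) = 1920x1080.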
+
+
+
+void h265GeFramerate(T_HEVCVPS *ptVps, T_HEVCSPS *ptSps,float *pfFramerate)
+{
+    if (ptVps && ptVps->u8VpsTimingInfoPresentFlag) {
+		*pfFramerate = (float)(ptVps->u32VpsTimeScale) / (float)(ptVps->u32VpsNumUnitsInTick);
+    } else if (ptSps && ptSps->tVui.iVuiTimingInfoPresentFlag && ptSps->iVuiPresent) {
+        *pfFramerate = (float)(ptSps->tVui.u32VuiTimeScale) / (float)(ptSps->tVui.u32VuiNumUnitsInTick);
+    } else {
+        //vps sps可能不包含帧率
+        *pfFramerate = 0.0F;
+        RPT(RPT_WRN, "frame rate: 0\n");
+    }
+}
+
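As a quick sanity check of the timing fields: a VPS carrying u32VpsTimeScale = 90000 and u32VpsNumUnitsInTick = 3600 yields 90000 / 3600 = 25 fps, while 90000 / 3000 = 30 fps. Streams whose VPS and SPS both omit timing information (common for live cameras) fall into the last branch and report 0, so callers should treat 0 as "unknown" rather than as a real frame rate.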
diff --git a/src/Extension/SPSParser.h b/src/Extension/SPSParser.h
index e25501a..1fb97ad 100644
--- a/src/Extension/SPSParser.h
+++ b/src/Extension/SPSParser.h
@@ -7,6 +7,15 @@
 
 #define QP_MAX_NUM (51 + 6*6)           // The maximum supported qp
 
+#define HEVC_MAX_SHORT_TERM_RPS_COUNT 64
+
+#define T_PROFILE_HEVC_MAIN                        1
+#define T_PROFILE_HEVC_MAIN_10                     2
+#define T_PROFILE_HEVC_MAIN_STILL_PICTURE          3
+#define T_PROFILE_HEVC_REXT                        4
+
+
+
 /**
   * Chromaticity coordinates of the source primaries.
   */
@@ -67,6 +76,62 @@ enum T_AVColorSpace {
 };
 
 
+enum {
+    // 7.4.3.1: vps_max_layers_minus1 is in [0, 62].
+    HEVC_MAX_LAYERS     = 63,
+    // 7.4.3.1: vps_max_sub_layers_minus1 is in [0, 6].
+    HEVC_MAX_SUB_LAYERS = 7,
+    // 7.4.3.1: vps_num_layer_sets_minus1 is in [0, 1023].
+    HEVC_MAX_LAYER_SETS = 1024,
+
+    // 7.4.2.1: vps_video_parameter_set_id is u(4).
+    HEVC_MAX_VPS_COUNT = 16,
+    // 7.4.3.2.1: sps_seq_parameter_set_id is in [0, 15].
+    HEVC_MAX_SPS_COUNT = 16,
+    // 7.4.3.3.1: pps_pic_parameter_set_id is in [0, 63].
+    HEVC_MAX_PPS_COUNT = 64,
+
+    // A.4.2: MaxDpbSize is bounded above by 16.
+    HEVC_MAX_DPB_SIZE = 16,
+    // 7.4.3.1: vps_max_dec_pic_buffering_minus1[i] is in [0, MaxDpbSize - 1].
+    HEVC_MAX_REFS     = HEVC_MAX_DPB_SIZE,
+
+    // 7.4.3.2.1: num_short_term_ref_pic_sets is in [0, 64].
+    HEVC_MAX_SHORT_TERM_REF_PIC_SETS = 64,
+    // 7.4.3.2.1: num_long_term_ref_pics_sps is in [0, 32].
+    HEVC_MAX_LONG_TERM_REF_PICS      = 32,
+
+    // A.3: all profiles require that CtbLog2SizeY is in [4, 6].
+    HEVC_MIN_LOG2_CTB_SIZE = 4,
+    HEVC_MAX_LOG2_CTB_SIZE = 6,
+
+    // E.3.2: cpb_cnt_minus1[i] is in [0, 31].
+    HEVC_MAX_CPB_CNT = 32,
+
+    // A.4.1: in table A.6 the highest level allows a MaxLumaPs of 35 651 584.
+    HEVC_MAX_LUMA_PS = 35651584,
+    // A.4.1: pic_width_in_luma_samples and pic_height_in_luma_samples are
+    // constrained to be not greater than sqrt(MaxLumaPs * 8).  Hence height/
+    // width are bounded above by sqrt(8 * 35651584) = 16888.2 samples.
+    HEVC_MAX_WIDTH  = 16888,
+    HEVC_MAX_HEIGHT = 16888,
+
+    // A.4.1: table A.6 allows at most 22 tile rows for any level.
+    HEVC_MAX_TILE_ROWS    = 22,
+    // A.4.1: table A.6 allows at most 20 tile columns for any level.
+    HEVC_MAX_TILE_COLUMNS = 20,
+
+    // 7.4.7.1: in the worst case (tiles_enabled_flag and
+    // entropy_coding_sync_enabled_flag are both set), entry points can be
+    // placed at the beginning of every Ctb row in every tile, giving an
+    // upper bound of (num_tile_columns_minus1 + 1) * PicHeightInCtbsY - 1.
+    // Only a stream with very high resolution and perverse parameters could
+    // get near that, though, so set a lower limit here with the maximum
+    // possible value for 4K video (at most 135 16x16 Ctb rows).
+    HEVC_MAX_ENTRY_POINT_OFFSETS = HEVC_MAX_TILE_COLUMNS * 135,
+};
+
+
 /**
  * rational number numerator/denominator
  */
@@ -170,6 +235,209 @@ typedef struct T_PPS {
     int iChromaQpDiff;
 } T_PPS;
 
+
+typedef struct T_HEVCWindow {
+    unsigned int uiLeftOffset;
+    unsigned int uiRightOffset;
+    unsigned int uiTopOffset;
+    unsigned int uiBottomOffset;
+} T_HEVCWindow;
+
+
+typedef struct T_VUI {
+    T_AVRational tSar;
+
+    int iOverscanInfoPresentFlag;
+    int iOverscanAppropriateFlag;
+
+    int iVideoSignalTypePresentFlag;
+    int iVideoFormat;
+    int iVideoFullRangeFlag;
+    int iColourDescriptionPresentFlag;
+    uint8_t u8ColourPrimaries;
+    uint8_t u8TransferCharacteristic;
+    uint8_t u8MatrixCoeffs;
+
+    int iChromaLocInfoPresentFlag;
+    int iChromaSampleLocTypeTopField;
+    int iChromaSampleLocTypeBottomField;
+    int iNeutraChromaIndicationFlag;
+
+    int iFieldSeqFlag;
+    int iFrameFieldInfoPresentFlag;
+
+    int iDefaultDisplayWindowFlag;
+    T_HEVCWindow tDefDispWin;
+
+    int iVuiTimingInfoPresentFlag;
+    uint32_t u32VuiNumUnitsInTick;
+    uint32_t u32VuiTimeScale;
+    int iVuiPocProportionalToTimingFlag;
+    int iVuiNumTicksPocDiffOneMinus1;
+    int iVuiHrdParametersPresentFlag;
+
+    int iBitstreamRestrictionFlag;
+    int iTilesFixedStructureFlag;
+    int iMotionVectorsOverPicBoundariesFlag;
+    int iRestrictedRefPicListsFlag;
+    int iMinSpatialSegmentationIdc;
+    int iMaxBytesPerPicDenom;
+    int iMaxBitsPerMinCuDenom;
+    int iLog2MaxMvLengthHorizontal;
+    int iLog2MaxMvLengthVertical;
+} T_VUI;
+
+typedef struct T_PTLCommon {
+    uint8_t u8ProfileSpace;
+    uint8_t u8TierFlag;
+    uint8_t u8ProfileIdc;
+    uint8_t au8ProfileCompatibilityFlag[32];
+    uint8_t u8LevelIdc;
+    uint8_t u8ProgressiveSourceFlag;
+    uint8_t u8InterlacedSourceFlag;
+    uint8_t u8NonPackedConstraintFlag;
+    uint8_t u8FrameOnlyConstraintFlag;
+} T_PTLCommon;
+
+typedef struct T_PTL {
+    T_PTLCommon tGeneralPtl;
+    T_PTLCommon atSubLayerPtl[HEVC_MAX_SUB_LAYERS];
+
+    uint8_t au8SubLayerProfilePresentFlag[HEVC_MAX_SUB_LAYERS];
+    uint8_t au8SubLayerLevelPresentFlag[HEVC_MAX_SUB_LAYERS];
+} T_PTL;
+
+typedef struct T_ScalingList {
+    /* This is a little wasteful, since sizeID 0 only needs 8 coeffs,
+     * and size ID 3 only has 2 arrays, not 6. */
+    uint8_t aaau8Sl[4][6][64];
+    uint8_t aau8SlDc[2][6];
+} T_ScalingList;
+
+typedef struct T_ShortTermRPS {
+    unsigned int uiNumNegativePics;
+    int iNumDeltaPocs;
+    int iRpsIdxNumDeltaPocs;
+    int32_t au32DeltaPoc[32];
+    uint8_t au8Used[32];
+} T_ShortTermRPS;
+
+
+typedef struct T_HEVCVPS {
+    uint8_t u8VpsTemporalIdNestingFlag;
+    int iVpsMaxLayers;
+    int iVpsMaxSubLayers; ///< vps_max_temporal_layers_minus1 + 1
+
+    T_PTL tPtl;
+    int iVpsSubLayerOrderingInfoPresentFlag;
+    unsigned int uiVpsMaxDecPicBuffering[HEVC_MAX_SUB_LAYERS];
+    unsigned int auiVpsNumReorderPics[HEVC_MAX_SUB_LAYERS];
+    unsigned int auiVpsMaxLatencyIncrease[HEVC_MAX_SUB_LAYERS];
+    int iVpsMaxLayerId;
+    int iVpsNumLayerSets; ///< vps_num_layer_sets_minus1 + 1
+    uint8_t u8VpsTimingInfoPresentFlag;
+    uint32_t u32VpsNumUnitsInTick;
+    uint32_t u32VpsTimeScale;
+    uint8_t u8VpsPocProportionalToTimingFlag;
+    int iVpsNumTicksPocDiffOne; ///< vps_num_ticks_poc_diff_one_minus1 + 1
+    int iVpsNumHrdParameters;
+
+} T_HEVCVPS;
+
+typedef struct T_HEVCSPS {
+    unsigned int  uiVpsId;
+    int iChromaFormatIdc;
+    uint8_t u8SeparateColourPlaneFlag;
+
+    ///< output (i.e. cropped) values
+    int iIutputWidth, iOutputHeight;
+    T_HEVCWindow tOutputWindow;
+
+    T_HEVCWindow tPicConfWin;
+
+    int iBitDepth;	
+    int iBitDepthChroma;
+    int iPixelShift;
+
+    unsigned int uiLog2MaxPocLsb;
+    int iPcmEnabledFlag;
+
+    int iMaxSubLayers;
+    struct {
+        int iMaxDecPicBuffering;
+        int iNumReorderPics;
+        int iMaxLatencyIncrease;
+    } stTemporalLayer[HEVC_MAX_SUB_LAYERS];
+    uint8_t u8temporalIdNestingFlag;
+
+    T_VUI tVui;
+    T_PTL tPtl;
+
+    uint8_t u8ScalingListEnableFlag;
+    T_ScalingList tScalingList;
+
+    unsigned int uiNbStRps;
+    T_ShortTermRPS atStRps[HEVC_MAX_SHORT_TERM_RPS_COUNT];
+
+    uint8_t u8AmpEnabledFlag;
+    uint8_t u8SaoEnabled;
+
+    uint8_t u8LongTermRefPicsPresentFlag;
+    uint16_t au16LtRefPicPocLsbSps[32];
+    uint8_t au8UsedByCurrPicLtSpsFlag[32];
+    uint8_t u8NumLongTermRefPicsSps;
+
+    struct {
+        uint8_t u8BitDepth;
+        uint8_t u8BitDepthChroma;
+        unsigned int uiLog2MinPcmCbSize;
+        unsigned int uiLog2MaxPcmCbSize;
+        uint8_t u8LoopFilterDisableFlag;
+    } pcm;
+    uint8_t u8SpsTemporalMvpEnabledFlag;
+    uint8_t u8SpsStrongIntraMmoothingEnableFlag;
+
+    unsigned int uiLog2MinCbSize;
+    unsigned int uiLog2DiffMaxMinCodingBlockSize;
+    unsigned int uiLog2MinTbSize;
+    unsigned int uiLog2MaxTrafoSize;
+    unsigned int uiLog2CtbSize;
+    unsigned int uiLog2MinPuSize;
+
+    int iMaxTransformHierarchyDepthInter;
+    int iMaxTransformHierarchyDepthIntra;
+
+    int iTransformSkipRotationEnabledFlag;
+    int iTransformSkipContextEnabledFlag;
+    int iImplicitRdpcmEnabledFlag;
+    int iExplicitRdpcmEnabledFlag;
+    int iIntraSmoothingDisabledFlag;
+    int iHighPrecisionOffsetsEnabledFlag;
+    int iPersistentRiceAdaptationEnabledFlag;
+
+    ///< coded frame dimension in various units
+    int iWidth;
+    int iHeight;
+    int iCtbWidth;
+    int iCtbHeight;
+    int iCtbSize;
+    int iMinCbWidth;
+    int iMinCbHeight;
+    int iMinTbWidth;
+    int iMinTbHeight;
+    int iMinPuWidth;
+    int iMinPuHeight;
+    int iTbMask;
+
+    int aiHshift[3];
+    int aiVshift[3];
+
+    int iQpBdOffset;
+
+	int iVuiPresent;
+}T_HEVCSPS;
+
+
 typedef struct T_GetBitContext{
     uint8_t *pu8Buf;         /*points to the SPS start*/
     int     iBufSize;     /*SPS length in bytes*/
@@ -180,8 +448,15 @@ typedef struct T_GetBitContext{
 
 
 int h264DecSeqParameterSet(void *pvBuf, T_SPS *ptSps);
+int h265DecSeqParameterSet( void *pvBufSrc, T_HEVCSPS *ptSps );
+int h265DecVideoParameterSet( void *pvBufSrc, T_HEVCVPS *ptVps );
+
+
 void h264GetWidthHeight(T_SPS *ptSps, int *piWidth, int *piHeight);
+void h265GetWidthHeight(T_HEVCSPS *ptSps, int *piWidth, int *piHeight);
+
 void h264GeFramerate(T_SPS *ptSps, float *pfFramerate);
+void h265GeFramerate(T_HEVCVPS *ptVps, T_HEVCSPS *ptSps,float *pfFramerate);
 
 #if defined (__cplusplus)
 }
diff --git a/src/Http/HttpClient.cpp b/src/Http/HttpClient.cpp
index 3f98c60..d9eafee 100644
--- a/src/Http/HttpClient.cpp
+++ b/src/Http/HttpClient.cpp
@@ -1,4 +1,4 @@
-/*
+/*
 * MIT License
 *
 * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/Http/HttpCookieManager.cpp b/src/Http/HttpCookieManager.cpp
index 46fbad6..8bd5da1 100644
--- a/src/Http/HttpCookieManager.cpp
+++ b/src/Http/HttpCookieManager.cpp
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/Http/HttpCookieManager.h b/src/Http/HttpCookieManager.h
index 0bf18c9..172f4b0 100644
--- a/src/Http/HttpCookieManager.h
+++ b/src/Http/HttpCookieManager.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/Http/HttpSession.cpp b/src/Http/HttpSession.cpp
index a32e050..367790c 100644
--- a/src/Http/HttpSession.cpp
+++ b/src/Http/HttpSession.cpp
@@ -212,11 +212,12 @@ inline bool HttpSession::checkWebSocket(){
 		headerOut["Sec-WebSocket-Protocol"] = _parser["Sec-WebSocket-Protocol"];
 	}
 	sendResponse("101 Switching Protocols",headerOut,"");
+    checkLiveFlvStream(true);
 	return true;
 }
 //http-flv 链接格式:http://vhost-url:port/app/streamid.flv?key1=value1&key2=value2
 //如果url(除去?以及后面的参数)后缀是.flv,那么表明该url是一个http-flv直播。
-inline bool HttpSession::checkLiveFlvStream(){
+inline bool HttpSession::checkLiveFlvStream(bool over_websocket){
 	auto pos = strrchr(_parser.Url().data(),'.');
 	if(!pos){
 		//未找到".flv"后缀
@@ -239,7 +240,7 @@ inline bool HttpSession::checkLiveFlvStream(){
     bool bClose = (strcasecmp(_parser["Connection"].data(),"close") == 0) || ( ++_iReqCnt > reqCnt);
 
     weak_ptr<HttpSession> weakSelf = dynamic_pointer_cast<HttpSession>(shared_from_this());
-    MediaSource::findAsync(_mediaInfo,weakSelf.lock(), true,[weakSelf,bClose,this](const MediaSource::Ptr &src){
+    MediaSource::findAsync(_mediaInfo,weakSelf.lock(), true,[weakSelf,bClose,this,over_websocket](const MediaSource::Ptr &src){
         auto strongSelf = weakSelf.lock();
         if(!strongSelf){
             //本对象已经销毁
@@ -248,29 +249,35 @@ inline bool HttpSession::checkLiveFlvStream(){
         auto rtmp_src = dynamic_pointer_cast<RtmpMediaSource>(src);
         if(!rtmp_src){
             //未找到该流
-            sendNotFound(bClose);
+            if(!over_websocket){
+                sendNotFound(bClose);
+            }
             if(bClose){
                 shutdown(SockException(Err_shutdown,"flv stream not found"));
             }
             return;
         }
         //找到流了
-        auto onRes = [this,rtmp_src](const string &err){
+        auto onRes = [this,rtmp_src,over_websocket](const string &err){
             bool authSuccess = err.empty();
             if(!authSuccess){
-                sendResponse("401 Unauthorized", makeHttpHeader(true,err.size()),err);
+                if(!over_websocket){
+                    sendResponse("401 Unauthorized", makeHttpHeader(true,err.size()),err);
+                }
                 shutdown(SockException(Err_shutdown,StrPrinter << "401 Unauthorized:" << err));
                 return ;
             }
 
-            //找到rtmp源,发送http头,负载后续发送
-            sendResponse("200 OK", makeHttpHeader(false,0,get_mime_type(".flv")), "");
+            if(!over_websocket) {
+                //找到rtmp源,发送http头,负载后续发送
+                sendResponse("200 OK", makeHttpHeader(false, 0, get_mime_type(".flv")), "");
+            }
 
             //开始发送rtmp负载
             //关闭tcp_nodelay ,优化性能
             SockUtil::setNoDelay(_sock->rawFD(),false);
             (*this) << SocketFlags(kSockFlags);
-
+            _flv_over_websocket = over_websocket;
             try{
                 start(getPoller(),rtmp_src);
             }catch (std::exception &ex){
@@ -473,7 +480,7 @@ inline void HttpSession::Handle_Req_GET(int64_t &content_len) {
 	}
 
     //再看看是否为http-flv直播请求
-	if(checkLiveFlvStream()){
+	if(checkLiveFlvStream(false)){
 		return;
 	}
 
@@ -936,8 +943,23 @@ inline void HttpSession::sendNotFound(bool bClose) {
 
 void HttpSession::onWrite(const Buffer::Ptr &buffer) {
 	_ticker.resetTime();
-	_ui64TotalBytes += buffer->size();
-	send(buffer);
+    if(!_flv_over_websocket){
+        _ui64TotalBytes += buffer->size();
+        send(buffer);
+        return;
+    }
+
+    WebSocketHeader header;
+    header._fin = true;
+    header._reserved = 0;
+    header._opcode = WebSocketHeader::BINARY;
+    header._mask_flag = false;
+    WebSocketSplitter::encode(header,(uint8_t *)buffer->data(),buffer->size());
+}
+
+void HttpSession::onWebSocketEncodeData(const uint8_t *ptr,uint64_t len){
+    _ui64TotalBytes += len;
+    SocketHelper::send((char *)ptr,len);
 }
 
 void HttpSession::onDetach() {
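For reference, the WebSocketHeader fields set in onWrite() map directly onto the RFC 6455 frame layout; a minimal sketch of what an unmasked, single-fragment binary frame header looks like on the wire (the actual serialization is done by ZLToolKit's WebSocketSplitter::encode, which then calls back into onWebSocketEncodeData) is:

    // Illustration of the RFC 6455 header only, not ZLToolKit's implementation.
    // Server-to-client frames are sent unmasked, matching _mask_flag = false.
    #include <cstdint>
    #include <vector>

    static std::vector<uint8_t> makeBinaryFrameHeader(uint64_t payload_len) {
        std::vector<uint8_t> hdr;
        hdr.push_back(0x80 | 0x02);                  // FIN = 1, opcode = 0x2 (binary)
        if (payload_len < 126) {
            hdr.push_back((uint8_t)payload_len);     // 7-bit length, mask bit = 0
        } else if (payload_len <= 0xFFFF) {
            hdr.push_back(126);                      // 16-bit extended length follows
            hdr.push_back((uint8_t)(payload_len >> 8));
            hdr.push_back((uint8_t)payload_len);
        } else {
            hdr.push_back(127);                      // 64-bit extended length follows
            for (int i = 7; i >= 0; --i) {
                hdr.push_back((uint8_t)(payload_len >> (8 * i)));
            }
        }
        return hdr;                                  // FLV payload bytes follow as-is
    }
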
diff --git a/src/Http/HttpSession.h b/src/Http/HttpSession.h
index 9cd4bfb..e1df8b1 100644
--- a/src/Http/HttpSession.h
+++ b/src/Http/HttpSession.h
@@ -102,10 +102,16 @@ protected:
         WebSocketSplitter::decode((uint8_t *)data,len);
     }
 
+	/**
+    * 发送数据进行websocket协议打包后回调
+    * @param ptr
+    * @param len
+    */
+	void onWebSocketEncodeData(const uint8_t *ptr,uint64_t len) override;
 private:
 	inline void Handle_Req_GET(int64_t &content_len);
 	inline void Handle_Req_POST(int64_t &content_len);
-	inline bool checkLiveFlvStream();
+	inline bool checkLiveFlvStream(bool over_websocket = false);
 	inline bool checkWebSocket();
 	inline bool emitHttpEvent(bool doInvoke);
 	inline void urlDecode(Parser &parser);
@@ -148,6 +154,7 @@ private:
     MediaInfo _mediaInfo;
     //处理content数据的callback
     function<bool (const char *data,uint64_t len) > _contentCallBack;
+	bool _flv_over_websocket = false;
 };
 
 
diff --git a/src/MediaFile/HlsMaker.cpp b/src/MediaFile/HlsMaker.cpp
index b3ea0ce..5e88b31 100644
--- a/src/MediaFile/HlsMaker.cpp
+++ b/src/MediaFile/HlsMaker.cpp
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/MediaFile/HlsMaker.h b/src/MediaFile/HlsMaker.h
index 69dc3c6..37f6d67 100644
--- a/src/MediaFile/HlsMaker.h
+++ b/src/MediaFile/HlsMaker.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/MediaFile/HlsMakerImp.cpp b/src/MediaFile/HlsMakerImp.cpp
index f1f3e80..ece25b7 100644
--- a/src/MediaFile/HlsMakerImp.cpp
+++ b/src/MediaFile/HlsMakerImp.cpp
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/MediaFile/HlsMakerImp.h b/src/MediaFile/HlsMakerImp.h
index 447aa12..d04de21 100644
--- a/src/MediaFile/HlsMakerImp.h
+++ b/src/MediaFile/HlsMakerImp.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/MediaFile/HlsRecorder.h b/src/MediaFile/HlsRecorder.h
index 1684f28..7b876ae 100644
--- a/src/MediaFile/HlsRecorder.h
+++ b/src/MediaFile/HlsRecorder.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/MediaFile/MP4Muxer.cpp b/src/MediaFile/MP4Muxer.cpp
new file mode 100644
index 0000000..92b0135
--- /dev/null
+++ b/src/MediaFile/MP4Muxer.cpp
@@ -0,0 +1,282 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
+ *
+ * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifdef ENABLE_MP4RECORD
+
+#include "MP4Muxer.h"
+#include "Util/File.h"
+#include "Common/config.h"
+
+namespace mediakit{
+
+#if defined(_WIN32) || defined(_WIN64)
+#define fseek64 _fseeki64
+#define ftell64 _ftelli64
+#else
+#define fseek64 fseek
+#define ftell64 ftell
+#endif
+
+void MP4MuxerBase::init(int flags) {
+    static struct mov_buffer_t s_io = {
+            [](void* ctx, void* data, uint64_t bytes) {
+                MP4MuxerBase *thiz = (MP4MuxerBase *)ctx;
+                return thiz->onRead(data,bytes);
+            },
+            [](void* ctx, const void* data, uint64_t bytes){
+                MP4MuxerBase *thiz = (MP4MuxerBase *)ctx;
+                return thiz->onWrite(data,bytes);
+            },
+            [](void* ctx, uint64_t offset) {
+                MP4MuxerBase *thiz = (MP4MuxerBase *)ctx;
+                return thiz->onSeek(offset);
+            },
+            [](void* ctx){
+                MP4MuxerBase *thiz = (MP4MuxerBase *)ctx;
+                return thiz->onTell();
+            }
+    };
+    _mov_writter.reset(mov_writer_create(&s_io,this,flags),[](mov_writer_t *ptr){
+        if(ptr){
+            mov_writer_destroy(ptr);
+        }
+    });
+}
+
+///////////////////////////////////
+
+void MP4Muxer::onTrackFrame(const Frame::Ptr &frame) {
+    if(frame->configFrame()){
+        //忽略配置帧
+        return;
+    }
+    auto it = _codec_to_trackid.find(frame->getCodecId());
+    if(it == _codec_to_trackid.end()){
+        //该Track不存在或初始化失败
+        return;
+    }
+
+    if(!_started){
+        //还没开始
+        if(frame->getTrackType() != TrackVideo || !frame->keyFrame()){
+            //如果首帧是音频或者是视频但是不是i帧,那么不能开始写文件
+            return;
+        }
+        //开始写文件
+        _started = true;
+    }
+
+    int with_nalu_size;
+    switch (frame->getCodecId()){
+        case CodecH264:
+        case CodecH265:
+            //我们输入264、265是没有头四个字节表明数据长度的
+            with_nalu_size = 0;
+            break;
+        default:
+            //aac或其他类型frame不用添加4个nalu_size的字节
+            with_nalu_size = 1;
+            break;
+    }
+
+    //mp4文件时间戳需要从0开始
+    auto &track_info = it->second;
+    int64_t dts_out, pts_out;
+    track_info.stamp.revise(frame->dts(),frame->pts(),dts_out,pts_out);
+
+    mov_writer_write_l(_mov_writter.get(),
+                       track_info.track_id,
+                       frame->data() + frame->prefixSize(),
+                       frame->size() - frame->prefixSize(),
+                       pts_out,
+                       dts_out,
+                       frame->keyFrame() ? MOV_AV_FLAG_KEYFREAME : 0,
+                       with_nalu_size);
+}
+
+void MP4Muxer::onTrackReady(const Track::Ptr &track) {
+    switch (track->getCodecId()) {
+        case CodecAAC: {
+            auto aac_track = dynamic_pointer_cast<AACTrack>(track);
+            if (!aac_track) {
+                WarnL << "不是AAC Track";
+                return;
+            }
+            auto track_id = mov_writer_add_audio(_mov_writter.get(),
+                                                 MOV_OBJECT_AAC,
+                                                 aac_track->getAudioChannel(),
+                                                 aac_track->getAudioSampleBit() * aac_track->getAudioChannel(),
+                                                 aac_track->getAudioSampleRate(),
+                                                 aac_track->getAacCfg().data(), 2);
+            if(track_id < 0){
+                WarnL << "添加AAC Track失败:" << track_id;
+                return;
+            }
+            track_info info;
+            info.track_id = track_id;
+            _codec_to_trackid[track->getCodecId()] = info;
+        }
+            break;
+        case CodecH264: {
+            auto h264_track = dynamic_pointer_cast<H264Track>(track);
+            if (!h264_track) {
+                WarnL << "不是H264 Track";
+                return;
+            }
+
+            struct mpeg4_avc_t avc;
+            string sps_pps = string("\x00\x00\x00\x01", 4) + h264_track->getSps() +
+                             string("\x00\x00\x00\x01", 4) + h264_track->getPps();
+            h264_annexbtomp4(&avc, sps_pps.data(), sps_pps.size(), NULL, 0, NULL);
+
+            uint8_t extra_data[1024];
+            int extra_data_size = mpeg4_avc_decoder_configuration_record_save(&avc, extra_data, sizeof(extra_data));
+            if (extra_data_size == -1) {
+                WarnL << "生成H264 extra_data 失败";
+                return;
+            }
+
+            auto track_id = mov_writer_add_video(_mov_writter.get(),
+                                                 MOV_OBJECT_H264,
+                                                 h264_track->getVideoWidth(),
+                                                 h264_track->getVideoHeight(),
+                                                 extra_data,
+                                                 extra_data_size);
+
+            if(track_id < 0){
+                WarnL << "添加H264 Track失败:" << track_id;
+                return;
+            }
+            track_info info;
+            info.track_id = track_id;
+            _codec_to_trackid[track->getCodecId()] = info;
+        }
+            break;
+        case CodecH265: {
+            auto h265_track = dynamic_pointer_cast<H265Track>(track);
+            if (!h265_track) {
+                WarnL << "不是H265 Track";
+                return;
+            }
+
+            struct mpeg4_hevc_t hevc;
+            string vps_sps_pps = string("\x00\x00\x00\x01", 4) + h265_track->getVps() +
+                                 string("\x00\x00\x00\x01", 4) + h265_track->getSps() +
+                                 string("\x00\x00\x00\x01", 4) + h265_track->getPps();
+            h265_annexbtomp4(&hevc, vps_sps_pps.data(), vps_sps_pps.size(), NULL, 0, NULL);
+
+            uint8_t extra_data[1024];
+            int extra_data_size = mpeg4_hevc_decoder_configuration_record_save(&hevc, extra_data, sizeof(extra_data));
+            if (extra_data_size == -1) {
+                WarnL << "生成H265 extra_data 失败";
+                return;
+            }
+
+            auto track_id = mov_writer_add_video(_mov_writter.get(),
+                                                 MOV_OBJECT_HEVC,
+                                                 h265_track->getVideoWidth(),
+                                                 h265_track->getVideoHeight(),
+                                                 extra_data,
+                                                 extra_data_size);
+            if(track_id < 0){
+                WarnL << "添加H265 Track失败:" << track_id;
+                return;
+            }
+            track_info info;
+            info.track_id = track_id;
+            _codec_to_trackid[track->getCodecId()] = info;
+        }
+            break;
+        default:
+            WarnL << "MP4录制不支持该编码格式:" << track->getCodecId();
+            break;
+    }
+}
+
+MP4MuxerFile::MP4MuxerFile(const char *file) {
+    //创建文件
+    auto fp = File::createfile_file(file,"wb+");
+    if(!fp){
+        throw std::runtime_error(string("打开文件失败:") + file);
+    }
+
+    GET_CONFIG(uint32_t,mp4BufSize,Record::kFileBufSize);
+
+    //新建文件io缓存
+    std::shared_ptr<char> file_buf(new char[mp4BufSize],[](char *ptr){
+        if(ptr){
+            delete [] ptr;
+        }
+    });
+
+    if(file_buf){
+        //设置文件io缓存
+        setvbuf(fp, file_buf.get(), _IOFBF, mp4BufSize);
+    }
+
+    //创建智能指针
+    _file.reset(fp,[file_buf](FILE *fp) {
+        fclose(fp);
+    });
+
+    init(MOV_FLAG_FASTSTART);
+}
+
+MP4MuxerFile::~MP4MuxerFile() {
+    _mov_writter = nullptr;
+}
+
+int MP4MuxerFile::onRead(void *data, uint64_t bytes) {
+    if (bytes == fread(data, 1, bytes, _file.get())){
+        return 0;
+    }
+    return 0 != ferror(_file.get()) ? ferror(_file.get()) : -1 /*EOF*/;
+}
+
+int MP4MuxerFile::onWrite(const void *data, uint64_t bytes) {
+    return bytes == fwrite(data, 1, bytes, _file.get()) ? 0 : ferror(_file.get());
+}
+
+
+int MP4MuxerFile::onSeek(uint64_t offset) {
+    return fseek64(_file.get(), offset, SEEK_SET);
+}
+
+uint64_t MP4MuxerFile::onTell() {
+    return ftell64(_file.get());
+}
+
+}//namespace mediakit
+
+#endif//#ifdef ENABLE_MP4RECORD
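The four virtual IO methods are the only contract mov_writer_create() needs, so MP4MuxerBase can back other storage targets without touching the muxing logic. A hypothetical RAM-backed variant (not part of this patch; the class name and members are invented for illustration) could look like:

    // Sketch only: grow a std::string as the mov writer seeks and writes.
    // Assumes MP4Muxer.h is included and that init(0), i.e. no
    // MOV_FLAG_FASTSTART, is acceptable for an in-memory target.
    #include <cstring>
    #include <string>

    class MP4MuxerMemory : public mediakit::MP4Muxer {
    public:
        MP4MuxerMemory() { init(0); }
        const std::string &buffer() const { return _buf; }

    protected:
        int onWrite(const void *data, uint64_t bytes) override {
            if (_offset + bytes > _buf.size()) {
                _buf.resize(_offset + bytes);
            }
            memcpy(&_buf[_offset], data, bytes);
            _offset += bytes;
            return 0;
        }
        int onRead(void *data, uint64_t bytes) override {
            if (_offset + bytes > _buf.size()) {
                return -1; // not enough buffered data
            }
            memcpy(data, _buf.data() + _offset, bytes);
            _offset += bytes;
            return 0;
        }
        int onSeek(uint64_t offset) override { _offset = offset; return 0; }
        uint64_t onTell() override { return _offset; }

    private:
        std::string _buf;
        uint64_t _offset = 0;
    };
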
diff --git a/src/MediaFile/MP4Muxer.h b/src/MediaFile/MP4Muxer.h
new file mode 100644
index 0000000..de15d4c
--- /dev/null
+++ b/src/MediaFile/MP4Muxer.h
@@ -0,0 +1,105 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
+ *
+ * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef ZLMEDIAKIT_MP4MUXER_H
+#define ZLMEDIAKIT_MP4MUXER_H
+
+#ifdef ENABLE_MP4RECORD
+
+#include "Common/MediaSink.h"
+#include "mov-writer.h"
+#include "mpeg4-hevc.h"
+#include "mpeg4-avc.h"
+#include "mpeg4-aac.h"
+#include "mov-buffer.h"
+#include "mov-format.h"
+#include "Extension/AAC.h"
+#include "Extension/H264.h"
+#include "Extension/H265.h"
+#include "Stamp.h"
+
+namespace mediakit{
+
+class MP4MuxerBase{
+public:
+    MP4MuxerBase() = default;
+    virtual ~MP4MuxerBase() = default;
+protected:
+    virtual int onRead(void* data, uint64_t bytes) = 0;
+    virtual int onWrite(const void* data, uint64_t bytes) = 0;
+    virtual int onSeek( uint64_t offset) = 0;
+    virtual uint64_t onTell() = 0;
+    void init(int flags);
+protected:
+    std::shared_ptr<mov_writer_t> _mov_writter;
+};
+
+class MP4Muxer : public MediaSink , public MP4MuxerBase{
+public:
+    MP4Muxer() = default;
+    ~MP4Muxer() override = default;
+protected:
+    /**
+     * 某track已经准备好,其ready()状态返回true,
+     * 此时代表可以获取其例如sps pps等相关信息了
+     * @param track
+     */
+    void onTrackReady(const Track::Ptr & track) override;
+
+    /**
+     * 某Track输出frame,在onAllTrackReady触发后才会调用此方法
+     * @param frame
+     */
+    void onTrackFrame(const Frame::Ptr &frame) override;
+private:
+    struct track_info{
+        int track_id = -1;
+        Stamp stamp;
+    };
+    unordered_map<int,track_info> _codec_to_trackid;
+    bool _started = false;
+};
+
+
+class MP4MuxerFile : public MP4Muxer {
+public:
+    typedef std::shared_ptr<MP4MuxerFile> Ptr;
+    MP4MuxerFile(const char *file);
+    ~MP4MuxerFile();
+protected:
+    int onRead(void* data, uint64_t bytes) override;
+    int onWrite(const void* data, uint64_t bytes) override;
+    int onSeek( uint64_t offset) override;
+    uint64_t onTell() override ;
+private:
+    std::shared_ptr<FILE> _file;
+};
+
+}//namespace mediakit
+
+#endif//#ifdef ENABLE_MP4RECORD
+
+#endif //ZLMEDIAKIT_MP4MUXER_H
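A hedged usage sketch of the file-backed muxer, assuming the MediaSink base exposes addTrack()/inputFrame() the way Mp4Maker drives it below; the track and frame objects stand for whatever the rest of ZLMediaKit supplies:

    // Sketch only: recordSketch() and its arguments are invented for
    // illustration; the constructor throws std::runtime_error if the file
    // cannot be created, as shown in MP4Muxer.cpp.
    #include <memory>
    #include "MediaFile/MP4Muxer.h"

    void recordSketch(const mediakit::Track::Ptr &video_track,
                      const mediakit::Frame::Ptr &frame) {
        auto muxer = std::make_shared<mediakit::MP4MuxerFile>("./record/demo.mp4");
        muxer->addTrack(video_track);  // tracks must be registered before frames
        muxer->inputFrame(frame);      // forwarded to onTrackFrame() once the track is ready
        // the MP4 is finalized when the muxer (and its mov_writer_t) is released
    }
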
diff --git a/src/MediaFile/MediaRecorder.cpp b/src/MediaFile/MediaRecorder.cpp
index be08867..71eac5e 100644
--- a/src/MediaFile/MediaRecorder.cpp
+++ b/src/MediaFile/MediaRecorder.cpp
@@ -106,11 +106,11 @@ void MediaRecorder::addTrack(const Track::Ptr &track) {
     }
 #endif //defined(ENABLE_HLS)
 
-#if defined(ENABLE_MP4V2)
+#if defined(ENABLE_MP4RECORD)
     if (_mp4Maker) {
         _mp4Maker->addTrack(track);
     }
-#endif //defined(ENABLE_MP4V2)
+#endif //defined(ENABLE_MP4RECORD)
 }
 
 } /* namespace mediakit */
diff --git a/src/MediaFile/MediaRecorder.h b/src/MediaFile/MediaRecorder.h
index b1b7211..da06885 100644
--- a/src/MediaFile/MediaRecorder.h
+++ b/src/MediaFile/MediaRecorder.h
@@ -64,9 +64,9 @@ private:
 	std::shared_ptr<HlsRecorder> _hlsMaker;
 #endif //defined(ENABLE_HLS)
 
-#if defined(ENABLE_MP4V2)
+#if defined(ENABLE_MP4RECORD)
 	std::shared_ptr<Mp4Maker> _mp4Maker;
-#endif //defined(ENABLE_MP4V2)
+#endif //defined(ENABLE_MP4RECORD)
 };
 
 } /* namespace mediakit */
diff --git a/src/MediaFile/Mp4Maker.cpp b/src/MediaFile/Mp4Maker.cpp
index 4afa291..3cf2a7e 100644
--- a/src/MediaFile/Mp4Maker.cpp
+++ b/src/MediaFile/Mp4Maker.cpp
@@ -24,18 +24,13 @@
  * SOFTWARE.
  */
 
-#ifdef ENABLE_MP4V2
+#ifdef ENABLE_MP4RECORD
 #include <ctime>
 #include <sys/stat.h>
 #include "Common/config.h"
 #include "Mp4Maker.h"
-#include "MediaRecorder.h"
-#include "Util/File.h"
-#include "Util/mini.h"
 #include "Util/util.h"
 #include "Util/NoticeCenter.h"
-#include "Extension/H264.h"
-#include "Extension/AAC.h"
 #include "Thread/WorkThreadPool.h"
 
 using namespace toolkit;
@@ -62,88 +57,19 @@ Mp4Maker::Mp4Maker(const string& strPath,
 				   const string &strVhost,
 				   const string &strApp,
 				   const string &strStreamId) {
-	DebugL << strPath;
 	_strPath = strPath;
-
 	/////record 业务逻辑//////
 	_info.strAppName = strApp;
 	_info.strStreamId = strStreamId;
 	_info.strVhost = strVhost;
 	_info.strFolder = strPath;
-	//----record 业务逻辑----//
 }
 Mp4Maker::~Mp4Maker() {
 	closeFile();
 }
 
-void Mp4Maker::inputH264(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp){
-	auto iType = H264_TYPE(((uint8_t*)pData)[0]);
-	switch (iType) {
-	case H264Frame::NAL_B_P: //P
-	case H264Frame::NAL_IDR: { //IDR
-		if (_strLastVideo.size()) {
-			int64_t iTimeInc = (int64_t)ui32TimeStamp - (int64_t)_ui32LastVideoTime;
-			iTimeInc = MAX(0,MIN(iTimeInc,500));
-			if(iTimeInc == 0 ||  iTimeInc == 500){
-				WarnL << "abnormal time stamp increment:" << ui32TimeStamp << " " << _ui32LastVideoTime;
-			}
-			inputH264_l((char *) _strLastVideo.data(), _strLastVideo.size(), iTimeInc);
-		}
-
-		uint32_t prefixe  = htonl(ui32Length);
-		_strLastVideo.assign((char *) &prefixe, 4);
-		_strLastVideo.append((char *)pData,ui32Length);
-
-		_ui32LastVideoTime = ui32TimeStamp;
-	}
-		break;
-	default:
-		break;
-	}
-}
-void Mp4Maker::inputAAC(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp){
-	if (_strLastAudio.size()) {
-		int64_t iTimeInc = (int64_t)ui32TimeStamp - (int64_t)_ui32LastAudioTime;
-		iTimeInc = MAX(0,MIN(iTimeInc,500));
-		if(iTimeInc == 0 ||  iTimeInc == 500){
-			WarnL << "abnormal time stamp increment:" << ui32TimeStamp << " " << _ui32LastAudioTime;
-		}
-		inputAAC_l((char *) _strLastAudio.data(), _strLastAudio.size(), iTimeInc);
-	}
-	_strLastAudio.assign((char *)pData, ui32Length);
-	_ui32LastAudioTime = ui32TimeStamp;
-}
-
-void Mp4Maker::inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui32Duration) {
-    GET_CONFIG(uint32_t,recordSec,Record::kFileSecond);
-	auto iType =  H264_TYPE(((uint8_t*)pData)[4]);
-	if(iType == H264Frame::NAL_IDR && (_hMp4 == MP4_INVALID_FILE_HANDLE || _ticker.elapsedTime() > recordSec * 1000)){
-		//在I帧率处新建MP4文件
-		//如果文件未创建或者文件超过10分钟则创建新文件
-		createFile();
-	}
-	if (_hVideo != MP4_INVALID_TRACK_ID) {
-		MP4WriteSample(_hMp4, _hVideo, (uint8_t *) pData, ui32Length,ui32Duration * 90,0,iType == 5);
-	}
-}
-
-void Mp4Maker::inputAAC_l(void *pData, uint32_t ui32Length, uint32_t ui32Duration) {
-    GET_CONFIG(uint32_t,recordSec,Record::kFileSecond);
-
-    if (!_haveVideo && (_hMp4 == MP4_INVALID_FILE_HANDLE || _ticker.elapsedTime() > recordSec * 1000)) {
-		//在I帧率处新建MP4文件
-		//如果文件未创建或者文件超过10分钟则创建新文件
-		createFile();
-	}
-	if (_hAudio != MP4_INVALID_TRACK_ID) {
-		auto duration = ui32Duration * _audioSampleRate /1000.0;
-		MP4WriteSample(_hMp4, _hAudio, (uint8_t*)pData, ui32Length,duration,0,false);
-	}
-}
-
 void Mp4Maker::createFile() {
 	closeFile();
-
 	auto strDate = timeStr("%Y-%m-%d");
 	auto strTime = timeStr("%H-%M-%S");
 	auto strFileTmp = _strPath + strDate + "/." + strTime + ".mp4";
@@ -153,76 +79,37 @@ void Mp4Maker::createFile() {
 	_info.ui64StartedTime = ::time(NULL);
 	_info.strFileName = strTime + ".mp4";
 	_info.strFilePath = strFile;
-
     GET_CONFIG(string,appName,Record::kAppName);
-
     _info.strUrl = appName + "/"
                    + _info.strAppName + "/"
                    + _info.strStreamId + "/"
                    + strDate + "/"
                    + strTime + ".mp4";
 
-	//----record 业务逻辑----//
-
-#if !defined(_WIN32)
-	File::createfile_path(strFileTmp.data(), S_IRWXO | S_IRWXG | S_IRWXU);
-#else
-	File::createfile_path(strFileTmp.data(), 0);
-#endif
-	_hMp4 = MP4Create(strFileTmp.data());
-	if (_hMp4 == MP4_INVALID_FILE_HANDLE) {
-		WarnL << "创建MP4文件失败:" << strFileTmp;
-		return;
-	}
-	//MP4SetTimeScale(_hMp4, 90000);
-	_strFileTmp = strFileTmp;
-	_strFile = strFile;
-	_ticker.resetTime();
-
-	auto videoTrack = dynamic_pointer_cast<H264Track>(getTrack(TrackVideo));
-	if(videoTrack){
-		auto &sps = videoTrack->getSps();
-		auto &pps = videoTrack->getPps();
-		_hVideo = MP4AddH264VideoTrack(_hMp4,
-									   90000,
-									   MP4_INVALID_DURATION,
-									   videoTrack->getVideoWidth(),
-									   videoTrack->getVideoHeight(),
-									   sps[1],
-									   sps[2],
-									   sps[3],
-									   3);
-		if(_hVideo != MP4_INVALID_TRACK_ID){
-			MP4AddH264SequenceParameterSet(_hMp4, _hVideo, (uint8_t *)sps.data(), sps.size());
-			MP4AddH264PictureParameterSet(_hMp4, _hVideo, (uint8_t *)pps.data(), pps.size());
-		}else{
-			WarnL << "添加视频通道失败:" << strFileTmp;
-		}
-	}
-
-	auto audioTrack = dynamic_pointer_cast<AACTrack>(getTrack(TrackAudio));
-	if(audioTrack){
-		_audioSampleRate = audioTrack->getAudioSampleRate();
-		_hAudio = MP4AddAudioTrack(_hMp4, _audioSampleRate, MP4_INVALID_DURATION, MP4_MPEG4_AUDIO_TYPE);
-		if (_hAudio != MP4_INVALID_TRACK_ID) {
-			auto &cfg =  audioTrack->getAacCfg();
-			MP4SetTrackESConfiguration(_hMp4, _hAudio,(uint8_t *)cfg.data(), cfg.size());
-		}else{
-			WarnL << "添加音频通道失败:" << strFileTmp;
+	try {
+		_muxer = std::make_shared<MP4MuxerFile>(strFileTmp.data());
+		for(auto &track :_tracks){
+            //add each cached track to the new muxer
+            _muxer->addTrack(track);
 		}
+		_strFileTmp = strFileTmp;
+		_strFile = strFile;
+		_createFileTicker.resetTime();
+	}catch(std::exception &ex) {
+		WarnL << ex.what();
 	}
 }
 
 void Mp4Maker::asyncClose() {
-	auto hMp4 = _hMp4;
+	auto muxer = _muxer;
 	auto strFileTmp = _strFileTmp;
 	auto strFile = _strFile;
 	auto info = _info;
-	WorkThreadPool::Instance().getExecutor()->async([hMp4,strFileTmp,strFile,info]() {
-		//获取文件录制时间,放在MP4Close之前是为了忽略MP4Close执行时间
+	WorkThreadPool::Instance().getExecutor()->async([muxer,strFileTmp,strFile,info]() {
+		//take the recording duration before closing the mp4 so the close time is not counted
 		const_cast<Mp4Info&>(info).ui64TimeLen = ::time(NULL) - info.ui64StartedTime;
-		//MP4Close非常耗时,所以要放在后台线程执行
-		MP4Close(hMp4,MP4_CLOSE_DO_NOT_COMPUTE_BITRATE);
+		//closing the mp4 is expensive, so do it on a background thread
+		const_cast<MP4MuxerFile::Ptr &>(muxer).reset();
 		//临时文件名改成正式文件名,防止mp4未完成时被访问
 		rename(strFileTmp.data(),strFile.data());
 		//获取文件大小
@@ -235,35 +122,38 @@ void Mp4Maker::asyncClose() {
 }
 
 void Mp4Maker::closeFile() {
-	if (_hMp4 != MP4_INVALID_FILE_HANDLE) {
+	if (_muxer) {
 		asyncClose();
-		_hMp4 = MP4_INVALID_FILE_HANDLE;
-		_hVideo = MP4_INVALID_TRACK_ID;
-		_hAudio = MP4_INVALID_TRACK_ID;
+		_muxer = nullptr;
 	}
 }
 
 void Mp4Maker::onTrackFrame(const Frame::Ptr &frame) {
-	switch (frame->getCodecId()){
-		case CodecH264:{
-			inputH264(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize(),frame->stamp());
-		}
-			break;
-		case CodecAAC:{
-			inputAAC(frame->data() + frame->prefixSize(), frame->size() - frame->prefixSize(),frame->stamp());
-		}
-			break;
+	GET_CONFIG(uint32_t,recordSec,Record::kFileSecond);
+	if(!_muxer || ((_createFileTicker.elapsedTime() > recordSec * 1000) &&
+			      (!_haveVideo || (_haveVideo && frame->keyFrame()))) ){
+		//create a new file when any of the following holds:
+		//1. _muxer does not exist yet
+		//2. the slice duration has elapsed and the stream is audio-only
+		//3. the slice duration has elapsed, the stream has video, and this frame is a video keyframe
+		createFile();
+	}
 
-		default:
-			break;
+	if(_muxer){
+		//write the frame into the mp4 file
+		_muxer->inputFrame(frame);
 	}
 }
 
-void Mp4Maker::onAllTrackReady() {
-	_haveVideo = getTrack(TrackVideo).operator bool();
+void Mp4Maker::onTrackReady(const Track::Ptr & track){
+	//cache every track so it can be added when the MP4MuxerFile is created
+	_tracks.emplace_back(track);
+	if(track->getTrackType() == TrackVideo){
+		_haveVideo = true;
+	}
 }
 
 } /* namespace mediakit */
 
 
-#endif //ENABLE_MP4V2
+#endif //ENABLE_MP4RECORD
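
Note on the rewritten Mp4Maker: it no longer writes samples through mp4v2 track handles; it caches tracks in onTrackReady, recreates an MP4MuxerFile whenever a slice boundary is reached, and forwards every frame to that muxer. The slice rule in onTrackFrame can be restated as the standalone sketch below; SliceDecider, FrameInfo and their fields are hypothetical stand-ins for _muxer, _haveVideo, _createFileTicker and the Record::kFileSecond setting, not code from this patch.

#include <cstdint>

struct FrameInfo {            // hypothetical stand-in for Frame::Ptr
    bool key_frame = false;
};

struct SliceDecider {
    bool muxer_open = false;  // stand-in for (bool)_muxer
    bool have_video = false;  // set once a video track is ready
    uint64_t elapsed_ms = 0;  // stand-in for _createFileTicker.elapsedTime()

    bool shouldStartNewFile(const FrameInfo &frame, uint32_t slice_sec) const {
        if (!muxer_open) {
            return true;                              // 1. no muxer yet
        }
        if (elapsed_ms <= uint64_t(slice_sec) * 1000) {
            return false;                             // current slice not finished
        }
        // 2. audio-only stream, or 3. video stream cut on a keyframe
        return !have_video || frame.key_frame;
    }
};

Cutting only on keyframes keeps every mp4 slice decodable from its first video sample; audio-only streams can be cut at any frame.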
diff --git a/src/MediaFile/Mp4Maker.h b/src/MediaFile/Mp4Maker.h
index f6e4114..cef6970 100644
--- a/src/MediaFile/Mp4Maker.h
+++ b/src/MediaFile/Mp4Maker.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
@@ -27,19 +27,17 @@
 #ifndef MP4MAKER_H_
 #define MP4MAKER_H_
 
-#ifdef ENABLE_MP4V2
+#ifdef ENABLE_MP4RECORD
 
 #include <mutex>
 #include <memory>
-#include <mp4v2/mp4v2.h>
 #include "Player/PlayerBase.h"
 #include "Util/util.h"
 #include "Util/logger.h"
 #include "Util/TimeTicker.h"
 #include "Util/TimeTicker.h"
 #include "Common/MediaSink.h"
-#include "Extension/Track.h"
-
+#include "MP4Muxer.h"
 using namespace toolkit;
 
 namespace mediakit {
@@ -72,44 +70,29 @@ private:
      */
 	void onTrackFrame(const Frame::Ptr &frame) override ;
 
-	/**
-	 * 所有Track准备好了
-	 */
-	void onAllTrackReady() override;
+    /**
+     * A track is ready, i.e. its ready() method returns true,
+     * meaning information such as its sps/pps can now be retrieved
+     * @param track
+     */
+    void onTrackReady(const Track::Ptr & track) override;
 private:
     void createFile();
     void closeFile();
     void asyncClose();
-
-	//时间戳:参考频率1000
-	void inputH264(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
-	//时间戳:参考频率1000
-	void inputAAC(void *pData, uint32_t ui32Length, uint32_t ui32TimeStamp);
-
-	void inputH264_l(void *pData, uint32_t ui32Length, uint32_t ui64Duration);
-    void inputAAC_l(void *pData, uint32_t ui32Length, uint32_t ui64Duration);
 private:
-	MP4FileHandle _hMp4 = MP4_INVALID_FILE_HANDLE;
-	MP4TrackId _hVideo = MP4_INVALID_TRACK_ID;
-	MP4TrackId _hAudio = MP4_INVALID_TRACK_ID;
 	string _strPath;
 	string _strFile;
 	string _strFileTmp;
-	Ticker _ticker;
-
-	string _strLastVideo;
-	string _strLastAudio;
-
-	uint32_t _ui32LastVideoTime = 0;
-	uint32_t _ui32LastAudioTime = 0;
+	Ticker _createFileTicker;
 	Mp4Info _info;
-
 	bool _haveVideo = false;
-	int _audioSampleRate;
+	MP4MuxerFile::Ptr _muxer;
+	list<Track::Ptr> _tracks;
 };
 
 } /* namespace mediakit */
 
-#endif ///ENABLE_MP4V2
+#endif ///ENABLE_MP4RECORD
 
 #endif /* MP4MAKER_H_ */
diff --git a/src/MediaFile/Stamp.cpp b/src/MediaFile/Stamp.cpp
new file mode 100644
index 0000000..d4b67df
--- /dev/null
+++ b/src/MediaFile/Stamp.cpp
@@ -0,0 +1,79 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
+ *
+ * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#include "Stamp.h"
+
+namespace mediakit {
+
+void Stamp::revise(uint32_t dts, uint32_t pts, int64_t &dts_out, int64_t &pts_out) {
+    if(_first){
+        //record the first timestamp so later increments can be computed against it
+        _start_dts = dts;
+        _first = false;
+        _ticker = std::make_shared<SmoothTicker>();
+    }
+    //difference between pts and dts
+    int pts_dts_diff = pts - dts;
+    if(_modifyStamp){
+        dts = _ticker->elapsedTime();
+    }
+
+    //timestamp relative to the start
+    dts_out = dts - _start_dts;
+    if(dts_out < _dts_inc){
+        //the current relative timestamp is smaller than the previous one?
+        if(dts_out < 0 || _dts_inc - dts_out > 0xFFFF){
+            //timestamp wrap-around: re-base so the next relative timestamps keep growing from this one
+            _start_dts = dts - _dts_inc;
+            //force this timestamp to equal the previous one
+            dts_out = _dts_inc;
+        }else{
+            //the timestamp went backwards, so reuse the previous one
+            dts_out = _dts_inc;
+        }
+    }
+
+    //keep this relative timestamp to detect wrap-around or reordering next time
+    _dts_inc = dts_out;
+
+    //////////////presentation timestamp (pts) computation below//////////////////
+    if(!pts){
+        //no presentation timestamp supplied, fall back to dts
+        pts = dts;
+    }
+
+    if(pts_dts_diff > 200 || pts_dts_diff < -200){
+        //if the gap exceeds 200 ms, assume a wrap-around scrambled the timestamps
+        pts_dts_diff = 0;
+    }
+    pts_out = dts_out + pts_dts_diff;
+    if(pts_out < 0){
+        //timestamps must not be negative
+        pts_out = 0;
+    }
+}
+
+}//namespace mediakit
\ No newline at end of file
diff --git a/src/MediaFile/Stamp.h b/src/MediaFile/Stamp.h
new file mode 100644
index 0000000..e6b2d3b
--- /dev/null
+++ b/src/MediaFile/Stamp.h
@@ -0,0 +1,51 @@
+/*
+ * MIT License
+ *
+ * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
+ *
+ * This file is part of ZLMediaKit(https://github.com/xiongziliang/ZLMediaKit).
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef ZLMEDIAKIT_STAMP_H
+#define ZLMEDIAKIT_STAMP_H
+
+#include "Util/TimeTicker.h"
+#include <cstdint>
+using namespace toolkit;
+
+namespace mediakit {
+
+class Stamp {
+public:
+    Stamp(bool modifyStamp = false) {_modifyStamp = modifyStamp;};
+    ~Stamp() = default;
+    void revise(uint32_t dts, uint32_t pts, int64_t &dts_out, int64_t &pts_out);
+private:
+    int64_t _start_dts = 0;
+    int64_t _dts_inc = 0;
+    bool _first = true;
+    bool _modifyStamp;
+    std::shared_ptr<SmoothTicker> _ticker;
+};
+
+}//namespace mediakit
+
+#endif //ZLMEDIAKIT_STAMP_H
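
Note on the new Stamp helper: revise() maps the raw dts/pts of a stream onto a zero-based, non-decreasing timeline and drops a pts/dts gap larger than 200 ms, which is treated as wrap-around damage. A minimal usage sketch, assuming the ZLMediaKit include paths are configured; the numeric inputs are invented for illustration only.

#include <cstdint>
#include <cstdio>
#include "Stamp.h"   // the header added by this patch

int main() {
    mediakit::Stamp stamp;            // default: keep the source timestamps
    int64_t dts_out = 0, pts_out = 0;

    // the first frame defines the zero point of the relative timeline
    stamp.revise(90000, 90040, dts_out, pts_out);   // -> dts_out = 0,   pts_out = 40
    std::printf("%lld %lld\n", (long long)dts_out, (long long)pts_out);

    // a later frame: relative dts keeps growing, the pts offset is preserved
    stamp.revise(90400, 90440, dts_out, pts_out);   // -> dts_out = 400, pts_out = 440
    std::printf("%lld %lld\n", (long long)dts_out, (long long)pts_out);

    // simulated wrap-around: the raw dts jumps far backwards, so dts_out is held
    // at the previous value (400) and the start point is re-based for later frames
    stamp.revise(10, 10, dts_out, pts_out);         // -> dts_out = 400, pts_out = 400
    std::printf("%lld %lld\n", (long long)dts_out, (long long)pts_out);
    return 0;
}

When constructed with modifyStamp = true, the incoming dts is ignored and replaced by a SmoothTicker's elapsed time, which helps when the source timestamps cannot be trusted at all.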
diff --git a/src/MediaFile/TsMuxer.cpp b/src/MediaFile/TsMuxer.cpp
index 2eabd14..6bd4840 100644
--- a/src/MediaFile/TsMuxer.cpp
+++ b/src/MediaFile/TsMuxer.cpp
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
@@ -41,54 +41,76 @@ TsMuxer::~TsMuxer() {
 
 void TsMuxer::addTrack(const Track::Ptr &track) {
     switch (track->getCodecId()){
-        case CodecH264:
-            _codecid_to_stream_id[CodecH264] = mpeg_ts_add_stream(_context,PSI_STREAM_H264, nullptr,0);
-            break;
-        case CodecH265:
-            _codecid_to_stream_id[CodecH265] = mpeg_ts_add_stream(_context,PSI_STREAM_H265, nullptr,0);
-            break;
-        case CodecAAC:
-            _codecid_to_stream_id[CodecAAC] = mpeg_ts_add_stream(_context,PSI_STREAM_AAC, nullptr,0);
-            break;
+        case CodecH264: {
+            track_info info;
+            info.track_id = mpeg_ts_add_stream(_context, PSI_STREAM_H264, nullptr, 0);
+            _codec_to_trackid[track->getCodecId()] = info;
+        } break;
+        case CodecH265: {
+            track_info info;
+            info.track_id = mpeg_ts_add_stream(_context, PSI_STREAM_H265, nullptr, 0);
+            _codec_to_trackid[track->getCodecId()] = info;
+        }break;
+        case CodecAAC: {
+            track_info info;
+            info.track_id = mpeg_ts_add_stream(_context, PSI_STREAM_AAC, nullptr, 0);
+            _codec_to_trackid[track->getCodecId()] = info;
+        }break;
         default:
             break;
     }
 }
 
 void TsMuxer::inputFrame(const Frame::Ptr &frame) {
-    auto it = _codecid_to_stream_id.find(frame->getCodecId());
-    if(it == _codecid_to_stream_id.end()){
+    auto it = _codec_to_trackid.find(frame->getCodecId());
+    if(it == _codec_to_trackid.end()){
         return;
     }
+    //timestamps written to the ts need to start from 0
+    auto &track_info = it->second;
+    int64_t dts_out, pts_out;
+
     switch (frame->getCodecId()){
         case CodecH265:
         case CodecH264: {
-            //这里的代码逻辑是让SPS、PPS、IDR这些时间戳相同的帧打包到一起当做一个帧处理,
-            if (!_frameCached.empty() && _frameCached.back()->dts() != frame->dts()) {
-                Frame::Ptr back = _frameCached.back();
-                Buffer::Ptr merged_frame = back;
-                if(_frameCached.size() != 1){
-                    string merged;
-                    _frameCached.for_each([&](const Frame::Ptr &frame){
-                        if(frame->prefixSize()){
-                            merged.append(frame->data(),frame->size());
-                        } else{
-                            merged.append("\x00\x00\x00\x01",4);
-                            merged.append(frame->data(),frame->size());
-                        }
-                    });
-                    merged_frame = std::make_shared<BufferString>(std::move(merged));
+
+            Buffer::Ptr merged_frame;
+            if(frame->configFrame()){
+                //config frame: cache it and stop here so it can be merged with the next keyframe
+                _config_frame_cache.append("\x00\x00\x00\x01",4);
+                _config_frame_cache.append(frame->data() + frame->prefixSize(),frame->size() - frame->prefixSize());
+                break;
+            }
+
+            if(frame->keyFrame()){
+                //keyframe
+                if(!_config_frame_cache.empty()){
+                    //a config frame is cached: merge it with the keyframe before ts packetizing
+                    _config_frame_cache.append("\x00\x00\x00\x01",4);
+                    _config_frame_cache.append(frame->data() + frame->prefixSize(),frame->size() - frame->prefixSize());
+                    merged_frame = std::make_shared<BufferString>(std::move(_config_frame_cache));
+                    _config_frame_cache.clear();
+                }else{
+                    //a keyframe with no pending config frame (h265 has several keyframe types)
+                    merged_frame = frame;
                 }
-                _timestamp = back->dts();
-                mpeg_ts_write(_context, it->second, back->keyFrame() ? 0x0001 : 0, back->pts() * 90LL, back->dts() * 90LL, merged_frame->data(),  merged_frame->size());
-                _frameCached.clear();
+            }else{
+                //ordinary frame, e.g. a B/P frame
+                merged_frame = frame;
+                //drop any cached config frames such as sps/pps
+                _config_frame_cache.clear();
             }
-            _frameCached.emplace_back(Frame::getCacheAbleFrame(frame));
+
+            //feed into the ts stream
+            track_info.stamp.revise(frame->dts(),frame->pts(),dts_out,pts_out);
+            _timestamp = dts_out;
+            mpeg_ts_write(_context, track_info.track_id, frame->keyFrame() ? 0x0001 : 0, pts_out * 90LL, dts_out * 90LL, merged_frame->data(),  merged_frame->size());
         }
             break;
         default: {
-            _timestamp = frame->dts();
-            mpeg_ts_write(_context, it->second, frame->keyFrame() ? 0x0001 : 0, frame->pts() * 90LL, frame->dts() * 90LL, frame->data(), frame->size());
+            track_info.stamp.revise(frame->dts(),frame->pts(),dts_out,pts_out);
+            _timestamp = dts_out;
+            mpeg_ts_write(_context, track_info.track_id, frame->keyFrame() ? 0x0001 : 0, pts_out * 90LL, dts_out * 90LL, frame->data(), frame->size());
         }
             break;
     }
@@ -124,7 +146,7 @@ void TsMuxer::uninit() {
         mpeg_ts_destroy(_context);
         _context = nullptr;
     }
-    _codecid_to_stream_id.clear();
+    _codec_to_trackid.clear();
 }
 
 }//namespace mediakit
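
Note on the TsMuxer change: inputFrame replaces the old same-dts frame packing with a simpler rule, where config frames (sps/pps/vps) are cached with a 4-byte Annex-B start code and prepended to the next keyframe, while ordinary frames flush the cache. The same rule in isolation, using a hypothetical helper class that is not part of the patch:

#include <cstddef>
#include <string>
#include <utility>

class ConfigFrameMerger {
public:
    // cache a config NAL unit (sps/pps/vps payload without its start code)
    void cacheConfig(const char *data, std::size_t size) {
        _cache.append("\x00\x00\x00\x01", 4);
        _cache.append(data, size);
    }

    // for a keyframe: return cached config frames + keyframe, or the keyframe alone
    std::string mergeKeyFrame(const char *data, std::size_t size) {
        if (_cache.empty()) {
            return std::string(data, size);
        }
        _cache.append("\x00\x00\x00\x01", 4);
        _cache.append(data, size);
        std::string merged = std::move(_cache);
        _cache.clear();
        return merged;
    }

    // ordinary B/P frames invalidate any pending config frames
    void onOrdinaryFrame() { _cache.clear(); }

private:
    std::string _cache;
};

Because each entry of _codec_to_trackid now carries its own Stamp, audio and video timestamps are re-based independently, but both timelines start from zero.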
diff --git a/src/MediaFile/TsMuxer.h b/src/MediaFile/TsMuxer.h
index 88dae72..476a56a 100644
--- a/src/MediaFile/TsMuxer.h
+++ b/src/MediaFile/TsMuxer.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
@@ -31,6 +31,9 @@
 #include "Extension/Frame.h"
 #include "Extension/Track.h"
 #include "Util/File.h"
+#include "Common/MediaSink.h"
+#include "Stamp.h"
+
 using namespace toolkit;
 
 namespace mediakit {
@@ -39,8 +42,8 @@ class TsMuxer {
 public:
     TsMuxer();
     virtual ~TsMuxer();
-    void addTrack(const Track::Ptr &track);
-    void inputFrame(const Frame::Ptr &frame);
+    void addTrack(const Track::Ptr &track) ;
+    void inputFrame(const Frame::Ptr &frame)  ;
 protected:
     virtual void onTs(const void *packet, int bytes,uint32_t timestamp,int flags) = 0;
     void resetTracks();
@@ -51,8 +54,13 @@ private:
     void  *_context = nullptr;
     char *_tsbuf[188];
     uint32_t _timestamp = 0;
-    unordered_map<int,int > _codecid_to_stream_id;
-    List<Frame::Ptr> _frameCached;
+
+    struct track_info{
+        int track_id = -1;
+        Stamp stamp;
+    };
+    unordered_map<int,track_info> _codec_to_trackid;
+    string _config_frame_cache;
 };
 
 }//namespace mediakit
diff --git a/src/Pusher/MediaPusher.cpp b/src/Pusher/MediaPusher.cpp
index d46f957..78475fb 100644
--- a/src/Pusher/MediaPusher.cpp
+++ b/src/Pusher/MediaPusher.cpp
@@ -1,4 +1,4 @@
-/*
+/*
 * MIT License
 *
 * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/Pusher/MediaPusher.h b/src/Pusher/MediaPusher.h
index c4b8b95..aba11f6 100644
--- a/src/Pusher/MediaPusher.h
+++ b/src/Pusher/MediaPusher.h
@@ -1,4 +1,4 @@
-/*
+/*
 * MIT License
 *
 * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/Pusher/PusherBase.cpp b/src/Pusher/PusherBase.cpp
index a8f47d9..7637e38 100644
--- a/src/Pusher/PusherBase.cpp
+++ b/src/Pusher/PusherBase.cpp
@@ -1,4 +1,4 @@
-/*
+/*
 * MIT License
 *
 * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/Pusher/PusherBase.h b/src/Pusher/PusherBase.h
index cf9b1d8..6fc86e5 100644
--- a/src/Pusher/PusherBase.h
+++ b/src/Pusher/PusherBase.h
@@ -1,4 +1,4 @@
-/*
+/*
 * MIT License
 *
 * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/Rtsp/RtspPusher.cpp b/src/Rtsp/RtspPusher.cpp
index bb5f943..d468ddc 100644
--- a/src/Rtsp/RtspPusher.cpp
+++ b/src/Rtsp/RtspPusher.cpp
@@ -1,4 +1,4 @@
-//
+//
 // Created by xzl on 2019/3/27.
 //
 
diff --git a/src/Rtsp/RtspPusher.h b/src/Rtsp/RtspPusher.h
index b01af46..aa8232b 100644
--- a/src/Rtsp/RtspPusher.h
+++ b/src/Rtsp/RtspPusher.h
@@ -1,4 +1,4 @@
-//
+//
 // Created by xzl on 2019/3/27.
 //
 
diff --git a/src/Rtsp/RtspSession.cpp b/src/Rtsp/RtspSession.cpp
index d3677ca..a744992 100644
--- a/src/Rtsp/RtspSession.cpp
+++ b/src/Rtsp/RtspSession.cpp
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/src/Rtsp/RtspSession.h b/src/Rtsp/RtspSession.h
index 3501988..51561c6 100644
--- a/src/Rtsp/RtspSession.h
+++ b/src/Rtsp/RtspSession.h
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/tests/test_httpClient.cpp b/tests/test_httpClient.cpp
index 4b6f12d..7aaf167 100644
--- a/tests/test_httpClient.cpp
+++ b/tests/test_httpClient.cpp
@@ -1,4 +1,4 @@
-/*
+/*
  * MIT License
  *
  * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com>
diff --git a/tests/test_server.cpp b/tests/test_server.cpp
index df30a0c..7aaac9f 100644
--- a/tests/test_server.cpp
+++ b/tests/test_server.cpp
@@ -239,8 +239,7 @@ int main(int argc,char *argv[]) {
 
     //这里是拉流地址,支持rtmp/rtsp协议,负载必须是H264+AAC
     //如果是其他不识别的音视频将会被忽略(譬如说h264+adpcm转发后会去除音频)
-    auto urlList = {"rtmp://live.hkstv.hk.lxdns.com/live/hks1",
-                    "rtmp://live.hkstv.hk.lxdns.com/live/hks2"
+    auto urlList = {"rtsp://184.72.239.149/vod/mp4:BigBuckBunny_115k.mov"
             //rtsp链接支持输入用户名密码
             /*"rtsp://admin:jzan123456@192.168.0.122/"*/};
     map<string, PlayerProxy::Ptr> proxyMap;