H265: Support HEVC over SRT. (#465) v6.0.20 (#3366)

* H265: Refine the VPS/SPS/PPS demux interface for SRT and GB.
* H265: Support HEVC over SRT. (#465) See the usage sketch below.
* UTest: Add HEVC VPS/SPS/PPS utests.
* SRT: Fix mpegts.js playback error for HEVC over HTTP-FLV.
* UTest: Add HTTP-TS and HTTP-FLV blackbox tests.
* Update release to v6.0.20.
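
For a quick manual check, the sketch below mirrors what the new blackbox tests do: publish an HEVC stream to a local SRS over SRT with FFmpeg, then probe the remuxed RTMP output with FFprobe and expect the video codec to be hevc. This is a minimal sketch, not code from this PR: the ports (SRT 10080, RTMP 1935), stream id and input file are assumptions, and it requires FFmpeg/FFprobe with libx265 and SRT support, plus SRS started with SRT and srt_to_rtmp enabled (see the config notes further down).

// Minimal usage sketch (not part of this PR). Assumes a local SRS with SRT on
// port 10080 and RTMP on 1935, and ffmpeg/ffprobe with libx265 + SRT support.
package main

import (
    "context"
    "fmt"
    "log"
    "os/exec"
    "time"
)

func main() {
    ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
    defer cancel()

    streamID := "livestream"
    publishURL := fmt.Sprintf("srt://127.0.0.1:10080?streamid=#!::r=live/%v,m=publish", streamID)
    playURL := fmt.Sprintf("rtmp://127.0.0.1:1935/live/%v", streamID)

    // Publish the input as HEVC in MPEG-TS over SRT, mirroring the blackbox tests.
    publish := exec.CommandContext(ctx, "ffmpeg",
        "-stream_loop", "-1", "-re", "-i", "avatar.flv",
        "-acodec", "copy", "-vcodec", "libx265", "-profile:v", "main",
        "-preset", "ultrafast", "-pes_payload_size", "0",
        "-f", "mpegts", publishURL,
    )
    if err := publish.Start(); err != nil {
        log.Fatalf("start ffmpeg: %v", err)
    }
    defer publish.Process.Kill()

    // Give SRS a moment to remux SRT to RTMP, then probe the video codec.
    time.Sleep(5 * time.Second)
    out, err := exec.CommandContext(ctx, "ffprobe",
        "-v", "error", "-select_streams", "v:0",
        "-show_entries", "stream=codec_name", "-of", "csv=p=0",
        playURL,
    ).Output()
    if err != nil {
        log.Fatalf("ffprobe: %v", err)
    }
    fmt.Printf("video codec: %s", out) // expect "hevc"
}

The regression tests below drive the same FFmpeg arguments through NewFFmpeg and additionally verify HTTP-FLV, HTTP-TS and HLS playback in the same way.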

Co-authored-by: Winlin <winlin@vip.126.com>
Co-authored-by: Haibo Chen <495810242@qq.com>
Co-authored-by: john <hondaxiao@tencent.com>
chundonglinlin 2 years ago committed by GitHub
parent 7922057467
commit ef90da352e

@@ -644,3 +644,392 @@ func TestSlow_RtmpPublish_DvrMp4_HEVC_Basic(t *testing.T) {
}
}
}
func TestSlow_SrtPublish_RtmpPlay_HEVC_Basic(t *testing.T) {
// This case is run in parallel.
t.Parallel()
// Setup the max timeout for this case.
ctx, cancel := context.WithTimeout(logger.WithContext(context.Background()), time.Duration(*srsTimeout)*time.Millisecond)
defer cancel()
// Only enable for github actions, ignore for darwin.
if runtime.GOOS == "darwin" {
logger.Tf(ctx, "Depends on FFmpeg(HEVC over RTMP), only available for GitHub actions")
return
}
// Check a set of errors.
var r0, r1, r2, r3, r4, r5, r6, r7 error
defer func(ctx context.Context) {
if err := filterTestError(ctx.Err(), r0, r1, r2, r3, r4, r5, r6, r7); err != nil {
t.Errorf("Fail for err %+v", err)
} else {
logger.Tf(ctx, "test done with err %+v", err)
}
}(ctx)
var wg sync.WaitGroup
defer wg.Wait()
// Start SRS server and wait for it to be ready.
svr := NewSRSServer(func(v *srsServer) {
v.envs = []string{
"SRS_SRT_SERVER_ENABLED=on",
"SRS_VHOST_SRT_ENABLED=on",
"SRS_VHOST_SRT_SRT_TO_RTMP=on",
}
})
wg.Add(1)
go func() {
defer wg.Done()
r0 = svr.Run(ctx, cancel)
}()
// Start FFmpeg to publish stream.
streamID := fmt.Sprintf("stream-%v-%v", os.Getpid(), rand.Int())
streamURL := fmt.Sprintf("srt://localhost:%v?streamid=#!::r=live/%v,m=publish", svr.SRTPort(), streamID)
ffmpeg := NewFFmpeg(func(v *ffmpegClient) {
v.args = []string{
// Use the fastest preset of x265, see https://x265.readthedocs.io/en/master/presets.html
"-stream_loop", "-1", "-re", "-i", *srsPublishAvatar, "-acodec", "copy", "-vcodec", "libx265",
"-profile:v", "main", "-preset", "ultrafast", "-pes_payload_size", "0", "-f", "mpegts", streamURL,
}
})
wg.Add(1)
go func() {
defer wg.Done()
<-svr.ReadyCtx().Done()
r1 = ffmpeg.Run(ctx, cancel)
}()
// Start FFprobe to detect and verify stream.
duration := time.Duration(*srsFFprobeDuration) * time.Millisecond
ffprobe := NewFFprobe(func(v *ffprobeClient) {
v.dvrFile = path.Join(svr.WorkDir(), "objs", fmt.Sprintf("srs-ffprobe-%v.ts", streamID))
v.streamURL = fmt.Sprintf("rtmp://localhost:%v/live/%v", svr.RTMPPort(), streamID)
v.duration, v.timeout = duration, time.Duration(*srsFFprobeTimeout)*time.Millisecond
})
wg.Add(1)
go func() {
defer wg.Done()
<-svr.ReadyCtx().Done()
r2 = ffprobe.Run(ctx, cancel)
}()
// Fast quit for probe done.
select {
case <-ctx.Done():
case <-ffprobe.ProbeDoneCtx().Done():
defer cancel()
str, m := ffprobe.Result()
if len(m.Streams) != 2 {
r3 = errors.Errorf("invalid streams=%v, %v, %v", len(m.Streams), m.String(), str)
}
// Note that HLS score is low, so we only check duration.
if dv := m.Duration(); dv < duration {
r5 = errors.Errorf("short duration=%v < %v, %v, %v", dv, duration, m.String(), str)
}
if v := m.Video(); v == nil {
r5 = errors.Errorf("no video %v, %v", m.String(), str)
} else if v.CodecName != "hevc" {
r6 = errors.Errorf("invalid video codec=%v, %v, %v", v.CodecName, m.String(), str)
}
}
}
func TestSlow_SrtPublish_HttpFlvPlay_HEVC_Basic(t *testing.T) {
// This case is run in parallel.
t.Parallel()
// Setup the max timeout for this case.
ctx, cancel := context.WithTimeout(logger.WithContext(context.Background()), time.Duration(*srsTimeout)*time.Millisecond)
defer cancel()
// Only enable for github actions, ignore for darwin.
if runtime.GOOS == "darwin" {
logger.Tf(ctx, "Depends on FFmpeg(HEVC over RTMP), only available for GitHub actions")
return
}
// Check a set of errors.
var r0, r1, r2, r3, r4, r5, r6, r7 error
defer func(ctx context.Context) {
if err := filterTestError(ctx.Err(), r0, r1, r2, r3, r4, r5, r6, r7); err != nil {
t.Errorf("Fail for err %+v", err)
} else {
logger.Tf(ctx, "test done with err %+v", err)
}
}(ctx)
var wg sync.WaitGroup
defer wg.Wait()
// Start SRS server and wait for it to be ready.
svr := NewSRSServer(func(v *srsServer) {
v.envs = []string{
"SRS_HTTP_SERVER_ENABLED=on",
"SRS_SRT_SERVER_ENABLED=on",
"SRS_VHOST_SRT_ENABLED=on",
"SRS_VHOST_SRT_SRT_TO_RTMP=on",
"SRS_VHOST_HTTP_REMUX_ENABLED=on",
}
})
wg.Add(1)
go func() {
defer wg.Done()
r0 = svr.Run(ctx, cancel)
}()
// Start FFmpeg to publish stream.
streamID := fmt.Sprintf("stream-%v-%v", os.Getpid(), rand.Int())
streamURL := fmt.Sprintf("srt://localhost:%v?streamid=#!::r=live/%v,m=publish", svr.SRTPort(), streamID)
ffmpeg := NewFFmpeg(func(v *ffmpegClient) {
v.args = []string{
// Use the fastest preset of x265, see https://x265.readthedocs.io/en/master/presets.html
"-stream_loop", "-1", "-re", "-i", *srsPublishAvatar, "-acodec", "copy", "-vcodec", "libx265",
"-profile:v", "main", "-preset", "ultrafast", "-pes_payload_size", "0", "-f", "mpegts", streamURL,
}
})
wg.Add(1)
go func() {
defer wg.Done()
<-svr.ReadyCtx().Done()
r1 = ffmpeg.Run(ctx, cancel)
}()
// Start FFprobe to detect and verify stream.
duration := time.Duration(*srsFFprobeDuration) * time.Millisecond
ffprobe := NewFFprobe(func(v *ffprobeClient) {
v.dvrFile = path.Join(svr.WorkDir(), "objs", fmt.Sprintf("srs-ffprobe-%v.ts", streamID))
v.streamURL = fmt.Sprintf("http://localhost:%v/live/%v.flv", svr.HTTPPort(), streamID)
v.duration, v.timeout = duration, time.Duration(*srsFFprobeTimeout)*time.Millisecond
})
wg.Add(1)
go func() {
defer wg.Done()
<-svr.ReadyCtx().Done()
r2 = ffprobe.Run(ctx, cancel)
}()
// Fast quit for probe done.
select {
case <-ctx.Done():
case <-ffprobe.ProbeDoneCtx().Done():
defer cancel()
str, m := ffprobe.Result()
if len(m.Streams) != 2 {
r3 = errors.Errorf("invalid streams=%v, %v, %v", len(m.Streams), m.String(), str)
}
// Note that HLS score is low, so we only check duration.
if dv := m.Duration(); dv < duration {
r5 = errors.Errorf("short duration=%v < %v, %v, %v", dv, duration, m.String(), str)
}
if v := m.Video(); v == nil {
r5 = errors.Errorf("no video %v, %v", m.String(), str)
} else if v.CodecName != "hevc" {
r6 = errors.Errorf("invalid video codec=%v, %v, %v", v.CodecName, m.String(), str)
}
}
}
func TestSlow_SrtPublish_HttpTsPlay_HEVC_Basic(t *testing.T) {
// This case is run in parallel.
t.Parallel()
// Setup the max timeout for this case.
ctx, cancel := context.WithTimeout(logger.WithContext(context.Background()), time.Duration(*srsTimeout)*time.Millisecond)
defer cancel()
// Only enable for github actions, ignore for darwin.
if runtime.GOOS == "darwin" {
logger.Tf(ctx, "Depends on FFmpeg(HEVC over RTMP), only available for GitHub actions")
return
}
// Check a set of errors.
var r0, r1, r2, r3, r4, r5, r6, r7 error
defer func(ctx context.Context) {
if err := filterTestError(ctx.Err(), r0, r1, r2, r3, r4, r5, r6, r7); err != nil {
t.Errorf("Fail for err %+v", err)
} else {
logger.Tf(ctx, "test done with err %+v", err)
}
}(ctx)
var wg sync.WaitGroup
defer wg.Wait()
// Start SRS server and wait for it to be ready.
svr := NewSRSServer(func(v *srsServer) {
v.envs = []string{
"SRS_HTTP_SERVER_ENABLED=on",
"SRS_SRT_SERVER_ENABLED=on",
"SRS_VHOST_SRT_ENABLED=on",
"SRS_VHOST_SRT_SRT_TO_RTMP=on",
"SRS_VHOST_HTTP_REMUX_ENABLED=on",
"SRS_VHOST_HTTP_REMUX_MOUNT=[vhost]/[app]/[stream].ts",
}
})
wg.Add(1)
go func() {
defer wg.Done()
r0 = svr.Run(ctx, cancel)
}()
// Start FFmpeg to publish stream.
streamID := fmt.Sprintf("stream-%v-%v", os.Getpid(), rand.Int())
streamURL := fmt.Sprintf("srt://localhost:%v?streamid=#!::r=live/%v,m=publish", svr.SRTPort(), streamID)
ffmpeg := NewFFmpeg(func(v *ffmpegClient) {
v.args = []string{
// Use the fastest preset of x265, see https://x265.readthedocs.io/en/master/presets.html
"-stream_loop", "-1", "-re", "-i", *srsPublishAvatar, "-acodec", "copy", "-vcodec", "libx265",
"-profile:v", "main", "-preset", "ultrafast", "-pes_payload_size", "0", "-f", "mpegts", streamURL,
}
})
wg.Add(1)
go func() {
defer wg.Done()
<-svr.ReadyCtx().Done()
r1 = ffmpeg.Run(ctx, cancel)
}()
// Start FFprobe to detect and verify stream.
duration := time.Duration(*srsFFprobeDuration) * time.Millisecond
ffprobe := NewFFprobe(func(v *ffprobeClient) {
v.dvrFile = path.Join(svr.WorkDir(), "objs", fmt.Sprintf("srs-ffprobe-%v.ts", streamID))
v.streamURL = fmt.Sprintf("http://localhost:%v/live/%v.ts", svr.HTTPPort(), streamID)
v.duration, v.timeout = duration, time.Duration(*srsFFprobeTimeout)*time.Millisecond
})
wg.Add(1)
go func() {
defer wg.Done()
<-svr.ReadyCtx().Done()
// wait for ffmpeg
time.Sleep(3 * time.Second)
r2 = ffprobe.Run(ctx, cancel)
}()
// Fast quit for probe done.
select {
case <-ctx.Done():
case <-ffprobe.ProbeDoneCtx().Done():
defer cancel()
str, m := ffprobe.Result()
if len(m.Streams) != 2 {
r3 = errors.Errorf("invalid streams=%v, %v, %v", len(m.Streams), m.String(), str)
}
// Note that HLS score is low, so we only check duration.
if dv := m.Duration(); dv < duration {
r5 = errors.Errorf("short duration=%v < %v, %v, %v", dv, duration, m.String(), str)
}
if v := m.Video(); v == nil {
r5 = errors.Errorf("no video %v, %v", m.String(), str)
} else if v.CodecName != "hevc" {
r6 = errors.Errorf("invalid video codec=%v, %v, %v", v.CodecName, m.String(), str)
}
}
}
func TestSlow_SrtPublish_HlsPlay_HEVC_Basic(t *testing.T) {
// This case is run in parallel.
t.Parallel()
// Setup the max timeout for this case.
ctx, cancel := context.WithTimeout(logger.WithContext(context.Background()), time.Duration(*srsTimeout)*time.Millisecond)
defer cancel()
// Check a set of errors.
var r0, r1, r2, r3, r4 error
defer func(ctx context.Context) {
if err := filterTestError(ctx.Err(), r0, r1, r2, r3, r4); err != nil {
t.Errorf("Fail for err %+v", err)
} else {
logger.Tf(ctx, "test done with err %+v", err)
}
}(ctx)
var wg sync.WaitGroup
defer wg.Wait()
// Start SRS server and wait for it to be ready.
svr := NewSRSServer(func(v *srsServer) {
v.envs = []string{
"SRS_HTTP_SERVER_ENABLED=on",
"SRS_SRT_SERVER_ENABLED=on",
"SRS_VHOST_SRT_ENABLED=on",
"SRS_VHOST_SRT_SRT_TO_RTMP=on",
"SRS_VHOST_HLS_ENABLED=on",
}
})
wg.Add(1)
go func() {
defer wg.Done()
r0 = svr.Run(ctx, cancel)
}()
// Start FFmpeg to publish stream.
streamID := fmt.Sprintf("stream-%v-%v", os.Getpid(), rand.Int())
streamURL := fmt.Sprintf("srt://localhost:%v?streamid=#!::r=live/%v,m=publish", svr.SRTPort(), streamID)
ffmpeg := NewFFmpeg(func(v *ffmpegClient) {
v.args = []string{
// Use the fastest preset of x265, see https://x265.readthedocs.io/en/master/presets.html
"-stream_loop", "-1", "-re", "-i", *srsPublishAvatar, "-acodec", "copy", "-vcodec", "libx265",
"-profile:v", "main", "-preset", "ultrafast", "-r", "25", "-g", "50", "-pes_payload_size", "0",
"-f", "mpegts", streamURL,
}
})
wg.Add(1)
go func() {
defer wg.Done()
<-svr.ReadyCtx().Done()
// wait for ffmpeg
time.Sleep(3 * time.Second)
r1 = ffmpeg.Run(ctx, cancel)
}()
// Start FFprobe to detect and verify stream.
duration := time.Duration(*srsFFprobeDuration) * time.Millisecond
ffprobe := NewFFprobe(func(v *ffprobeClient) {
v.dvrFile = path.Join(svr.WorkDir(), "objs", fmt.Sprintf("srs-ffprobe-%v.ts", streamID))
v.streamURL = fmt.Sprintf("http://localhost:%v/live/%v.m3u8", svr.HTTPPort(), streamID)
v.duration, v.timeout = duration, time.Duration(*srsFFprobeHEVCTimeout)*time.Millisecond
})
wg.Add(1)
go func() {
defer wg.Done()
<-svr.ReadyCtx().Done()
r2 = ffprobe.Run(ctx, cancel)
}()
// Fast quit for probe done.
select {
case <-ctx.Done():
case <-ffprobe.ProbeDoneCtx().Done():
defer cancel()
str, m := ffprobe.Result()
if len(m.Streams) != 2 {
r3 = errors.Errorf("invalid streams=%v, %v, %v", len(m.Streams), m.String(), str)
}
// Note that HLS score is low, so we only check duration. We only check half of the duration, because we
// might get only some of the segments.
if dv := m.Duration(); dv < duration/2 {
r4 = errors.Errorf("short duration=%v < %v, %v, %v", dv, duration/2, m.String(), str)
}
}
}

@@ -52,6 +52,7 @@ var srsFFprobeStdout *bool
var srsTimeout *int
var srsFFprobeDuration *int
var srsFFprobeTimeout *int
var srsFFprobeHEVCTimeout *int
var srsBinary *string
var srsFFmpeg *string
@@ -72,6 +73,7 @@ func prepareTest() (err error) {
srsFFmpeg = flag.String("srs-ffmpeg", "ffmpeg", "The FFmpeg tool")
srsFFprobe = flag.String("srs-ffprobe", "ffprobe", "The FFprobe tool")
srsPublishAvatar = flag.String("srs-publish-avatar", "avatar.flv", "The avatar file for publisher.")
srsFFprobeHEVCTimeout = flag.Int("srs-ffprobe-hevc-timeout", 30000, "For each case, the timeout for ffprobe in ms")
// Parse user options.
flag.Parse()

@@ -367,9 +367,13 @@ srt_server {
vhost srt.vhost.srs.com {
srt {
# Whether enable SRT on this vhost.
# Overwrite by env SRS_VHOST_SRT_ENABLED for all vhosts.
# Default: off
enabled on;
# Whether to convert SRT to RTMP stream.
# Overwrite by env SRS_VHOST_SRT_TO_RTMP for all vhosts.
# Default: on
srt_to_rtmp on;
}
}
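
The settings above can also be toggled from the environment, which is how the new blackbox tests bring SRS up for the HEVC-over-SRT cases. The following is a minimal sketch under that assumption, not code from this PR; the binary and config paths are illustrative.

// Minimal sketch: start SRS with SRT-to-RTMP enabled via environment overrides,
// as the new regression tests do. Binary and config paths are assumptions.
package main

import (
    "log"
    "os"
    "os/exec"
)

func main() {
    cmd := exec.Command("./objs/srs", "-c", "conf/srs.conf")
    cmd.Env = append(os.Environ(),
        "SRS_SRT_SERVER_ENABLED=on",    // accept SRT publishers
        "SRS_VHOST_SRT_ENABLED=on",     // enable SRT on the vhost
        "SRS_VHOST_SRT_SRT_TO_RTMP=on", // remux SRT (MPEG-TS) to RTMP, including HEVC
    )
    cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
    if err := cmd.Run(); err != nil {
        log.Fatalf("run srs: %v", err)
    }
}

Note that get_srt_to_rtmp() below also honors SRS_VHOST_SRT_TO_RTMP as an alias for the same option.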

@@ -8,6 +8,7 @@ The changelog for SRS.
## SRS 6.0 Changelog
* v6.0, 2023-01-19, Merge [#3366](https://github.com/ossrs/srs/pull/3366): H265: Support HEVC over SRT. v6.0.20 (#465) (#3366)
* v6.0, 2023-01-19, Merge [#3318](https://github.com/ossrs/srs/pull/3318): RTC: fix rtc publisher pli cid. v6.0.19 (#3318)
* v6.0, 2023-01-18, Merge [#3382](https://github.com/ossrs/srs/pull/3382): Rewrite research/api-server code by Go, remove Python. v6.0.18 (#3382)
* v6.0, 2023-01-18, Merge [#3386](https://github.com/ossrs/srs/pull/3386): SRT: fix crash when srt_to_rtmp off. v6.0.17 (#3386)

@@ -7938,6 +7938,7 @@ bool SrsConfig::get_srt_enabled(std::string vhost)
bool SrsConfig::get_srt_to_rtmp(std::string vhost)
{
SRS_OVERWRITE_BY_ENV_BOOL("srs.vhost.srt.srt_to_rtmp"); // SRS_VHOST_SRT_SRT_TO_RTMP
SRS_OVERWRITE_BY_ENV_BOOL("srs.vhost.srt.to_rtmp"); // SRS_VHOST_SRT_TO_RTMP
static bool DEFAULT = true;

@@ -338,7 +338,7 @@ srs_error_t SrsRtmpFromSrtBridge::on_ts_message(SrsTsMessage* msg)
}
// check supported codec
if (msg->channel->stream != SrsTsStreamVideoH264 && msg->channel->stream != SrsTsStreamAudioAAC) {
if (msg->channel->stream != SrsTsStreamVideoH264 && msg->channel->stream != SrsTsStreamVideoHEVC && msg->channel->stream != SrsTsStreamAudioAAC) {
return srs_error_new(ERROR_STREAM_CASTER_TS_CODEC, "ts: unsupported stream codec=%d", msg->channel->stream);
}
@@ -347,7 +347,7 @@ srs_error_t SrsRtmpFromSrtBridge::on_ts_message(SrsTsMessage* msg)
// publish audio or video.
if (msg->channel->stream == SrsTsStreamVideoH264) {
if ((err = on_ts_video(msg, &avs)) != srs_success) {
if ((err = on_ts_video_avc(msg, &avs)) != srs_success) {
return srs_error_wrap(err, "ts: consume video");
}
}
@@ -358,10 +358,18 @@ srs_error_t SrsRtmpFromSrtBridge::on_ts_message(SrsTsMessage* msg)
}
// TODO: FIXME: implement other codecs?
#ifdef SRS_H265
if (msg->channel->stream == SrsTsStreamVideoHEVC) {
if ((err = on_ts_video_hevc(msg, &avs)) != srs_success) {
return srs_error_wrap(err, "ts: consume hevc video");
}
}
#endif
return err;
}
srs_error_t SrsRtmpFromSrtBridge::on_ts_video(SrsTsMessage* msg, SrsBuffer* avs)
srs_error_t SrsRtmpFromSrtBridge::on_ts_video_avc(SrsTsMessage* msg, SrsBuffer* avs)
{
srs_error_t err = srs_success;
@@ -525,6 +533,194 @@ srs_error_t SrsRtmpFromSrtBridge::on_h264_frame(SrsTsMessage* msg, vector<pair<c
return err;
}
#ifdef SRS_H265
srs_error_t SrsRtmpFromSrtBridge::on_ts_video_hevc(SrsTsMessage *msg, SrsBuffer *avs)
{
srs_error_t err = srs_success;
vector<pair<char*, int> > ipb_frames;
SrsRawHEVCStream *hevc = new SrsRawHEVCStream();
SrsAutoFree(SrsRawHEVCStream, hevc);
// send each frame.
while (!avs->empty()) {
char* frame = NULL;
int frame_size = 0;
if ((err = hevc->annexb_demux(avs, &frame, &frame_size)) != srs_success) {
return srs_error_wrap(err, "demux hevc annexb");
}
if (frame == NULL || frame_size == 0) {
continue;
}
// for vps
if (hevc->is_vps(frame, frame_size)) {
std::string vps;
if ((err = hevc->vps_demux(frame, frame_size, vps)) != srs_success) {
return srs_error_wrap(err, "demux vps");
}
if (!vps.empty() && hevc_vps_ != vps) {
vps_sps_pps_change_ = true;
}
hevc_vps_ = vps;
continue;
}
// for sps
if (hevc->is_sps(frame, frame_size)) {
std::string sps;
if ((err = hevc->sps_demux(frame, frame_size, sps)) != srs_success) {
return srs_error_wrap(err, "demux sps");
}
if (!sps.empty() && hevc_sps_ != sps) {
vps_sps_pps_change_ = true;
}
hevc_sps_ = sps;
continue;
}
// for pps
if (hevc->is_pps(frame, frame_size)) {
std::string pps;
if ((err = hevc->pps_demux(frame, frame_size, pps)) != srs_success) {
return srs_error_wrap(err, "demux pps");
}
if (!pps.empty() && hevc_pps_ != pps) {
vps_sps_pps_change_ = true;
}
hevc_pps_ = pps;
continue;
}
ipb_frames.push_back(make_pair(frame, frame_size));
}
if ((err = check_vps_sps_pps_change(msg)) != srs_success) {
return srs_error_wrap(err, "check vps sps pps");
}
return on_hevc_frame(msg, ipb_frames);
}
srs_error_t SrsRtmpFromSrtBridge::check_vps_sps_pps_change(SrsTsMessage* msg)
{
srs_error_t err = srs_success;
if (!vps_sps_pps_change_) {
return err;
}
if (hevc_vps_.empty() || hevc_sps_.empty() || hevc_pps_.empty()) {
return srs_error_new(ERROR_SRT_TO_RTMP_EMPTY_SPS_PPS, "vps or sps or pps empty");
}
// vps/sps/pps changed, generate new video sh frame and dispatch it.
vps_sps_pps_change_ = false;
// ts tbn to flv tbn.
uint32_t dts = (uint32_t)(msg->dts / 90);
std::string sh;
SrsRawHEVCStream* hevc = new SrsRawHEVCStream();
SrsAutoFree(SrsRawHEVCStream, hevc);
if ((err = hevc->mux_sequence_header(hevc_vps_, hevc_sps_, hevc_pps_, sh)) != srs_success) {
return srs_error_wrap(err, "mux sequence header");
}
// h265 packet to flv packet.
char* flv = NULL;
int nb_flv = 0;
if ((err = hevc->mux_avc2flv(sh, SrsVideoAvcFrameTypeKeyFrame, SrsVideoAvcFrameTraitSequenceHeader, dts, dts, &flv, &nb_flv)) != srs_success) {
return srs_error_wrap(err, "avc to flv");
}
SrsMessageHeader header;
header.initialize_video(nb_flv, dts, video_streamid_);
SrsCommonMessage rtmp;
if ((err = rtmp.create(&header, flv, nb_flv)) != srs_success) {
return srs_error_wrap(err, "create rtmp");
}
if ((err = live_source_->on_video(&rtmp)) != srs_success) {
return srs_error_wrap(err, "srt to rtmp sps/pps");
}
return err;
}
srs_error_t SrsRtmpFromSrtBridge::on_hevc_frame(SrsTsMessage* msg, vector<pair<char*, int> >& ipb_frames)
{
srs_error_t err = srs_success;
if (ipb_frames.empty()) {
return srs_error_new(ERROR_SRT_CONN, "empty frame");
}
// ts tbn to flv tbn.
uint32_t dts = (uint32_t)(msg->dts / 90);
uint32_t pts = (uint32_t)(msg->pts / 90);
int32_t cts = pts - dts;
// for IDR frame, the frame is keyframe.
SrsVideoAvcFrameType frame_type = SrsVideoAvcFrameTypeInterFrame;
// 5bytes video tag header
int frame_size = 5;
for (size_t i = 0; i != ipb_frames.size(); ++i) {
// 4 bytes for nalu length.
frame_size += 4 + ipb_frames[i].second;
SrsHevcNaluType nalu_type = SrsHevcNaluTypeParse(ipb_frames[i].first[0]);
if ((nalu_type >= SrsHevcNaluType_CODED_SLICE_BLA) && (nalu_type <= SrsHevcNaluType_RESERVED_23)) {
frame_type = SrsVideoAvcFrameTypeKeyFrame;
}
}
SrsCommonMessage rtmp;
rtmp.header.initialize_video(frame_size, dts, video_streamid_);
rtmp.create_payload(frame_size);
rtmp.size = frame_size;
SrsBuffer payload(rtmp.payload, rtmp.size);
// Write 5bytes video tag header.
// @see: E.4.3 Video Tags, video_file_format_spec_v10_1.pdf, page 78
// Frame Type, Type of video frame.
// CodecID, Codec Identifier.
// set the rtmp header
payload.write_1bytes((frame_type << 4) | SrsVideoCodecIdHEVC);
// hevc_type: nalu
payload.write_1bytes(0x01);
// composition time
payload.write_3bytes(cts);
// Write video nalus.
for (size_t i = 0; i != ipb_frames.size(); ++i) {
char* nal = ipb_frames[i].first;
int nal_size = ipb_frames[i].second;
// write 4 bytes of nalu length.
payload.write_4bytes(nal_size);
// write nalu
payload.write_bytes(nal, nal_size);
}
if ((err = live_source_->on_video(&rtmp)) != srs_success) {
return srs_error_wrap(err ,"srt ts hevc video to rtmp");
}
return err;
}
#endif
srs_error_t SrsRtmpFromSrtBridge::on_ts_audio(SrsTsMessage* msg, SrsBuffer* avs)
{
srs_error_t err = srs_success;

@@ -117,12 +117,19 @@ public:
public:
virtual srs_error_t on_ts_message(SrsTsMessage* msg);
private:
srs_error_t on_ts_video(SrsTsMessage* msg, SrsBuffer* avs);
srs_error_t on_ts_video_avc(SrsTsMessage* msg, SrsBuffer* avs);
srs_error_t on_ts_audio(SrsTsMessage* msg, SrsBuffer* avs);
srs_error_t check_sps_pps_change(SrsTsMessage* msg);
srs_error_t on_h264_frame(SrsTsMessage* msg, std::vector<std::pair<char*, int> >& ipb_frames);
srs_error_t check_audio_sh_change(SrsTsMessage* msg, uint32_t pts);
srs_error_t on_aac_frame(SrsTsMessage* msg, uint32_t pts, char* frame, int frame_size);
#ifdef SRS_H265
srs_error_t on_ts_video_hevc(SrsTsMessage *msg, SrsBuffer *avs);
srs_error_t check_vps_sps_pps_change(SrsTsMessage *msg);
srs_error_t on_hevc_frame(SrsTsMessage *msg, std::vector<std::pair<char *, int>> &ipb_frames);
#endif
private:
SrsTsContext* ts_ctx_;
@@ -131,6 +138,13 @@ private:
std::string sps_;
std::string pps_;
#ifdef SRS_H265
bool vps_sps_pps_change_;
std::string hevc_vps_;
std::string hevc_sps_;
std::string hevc_pps_;
#endif
// Record whether the audio specific config has changed; if so, generate a new audio sh frame.
bool audio_sh_change_;
std::string audio_sh_;

@@ -9,6 +9,6 @@
#define VERSION_MAJOR 6
#define VERSION_MINOR 0
#define VERSION_REVISION 19
#define VERSION_REVISION 20
#endif

@@ -712,6 +712,10 @@ SrsFormat::~SrsFormat()
srs_error_t SrsFormat::initialize()
{
if (!vcodec) {
vcodec = new SrsVideoCodecConfig();
}
return srs_success;
}

@@ -274,7 +274,9 @@
XX(ERROR_HTTP_WITH_BODY , 3097, "HttpWithBody", "Failed for HTTP body") \
XX(ERROR_HEVC_DISABLED , 3098, "HevcDisabled", "HEVC is disabled") \
XX(ERROR_HEVC_DECODE_ERROR , 3099, "HevcDecode", "HEVC decode av stream failed") \
XX(ERROR_MP4_HVCC_CHANGE , 3100, "Mp4HvcCChange", "MP4 does not support video HvcC change")
XX(ERROR_MP4_HVCC_CHANGE , 3100, "Mp4HvcCChange", "MP4 does not support video HvcC change") \
XX(ERROR_HEVC_API_NO_PREFIXED , 3101, "HevcAnnexbPrefix", "No annexb prefix for HEVC decoder")
/**************************************************/
/* HTTP/StreamConverter protocol error. */
#define SRS_ERRNO_MAP_HTTP(XX) \
@@ -325,7 +327,12 @@
XX(ERROR_GB_CONFIG , 4052, "GbConfig", "Invalid configuration for GB28181") \
XX(ERROR_GB_TIMEOUT , 4053, "GbTimeout", "SIP or media connection timeout for GB28181") \
XX(ERROR_HEVC_NALU_UEV , 4054, "HevcNaluUev", "Failed to read UEV for HEVC NALU") \
XX(ERROR_HEVC_NALU_SEV , 4055, "HevcNaluSev", "Failed to read SEV for HEVC NALU")
XX(ERROR_HEVC_NALU_SEV , 4055, "HevcNaluSev", "Failed to read SEV for HEVC NALU") \
XX(ERROR_STREAM_CASTER_HEVC_VPS , 4054, "CasterTsHevcVps", "Invalid ts HEVC VPS for stream caster") \
XX(ERROR_STREAM_CASTER_HEVC_SPS , 4055, "CasterTsHevcSps", "Invalid ts HEVC SPS for stream caster") \
XX(ERROR_STREAM_CASTER_HEVC_PPS , 4056, "CasterTsHevcPps", "Invalid ts HEVC PPS for stream caster") \
XX(ERROR_STREAM_CASTER_HEVC_FORMAT , 4057, "CasterTsHevcFormat", "Invalid ts HEVC Format for stream caster")
/**************************************************/
/* RTC protocol error. */

@@ -263,6 +263,324 @@ srs_error_t SrsRawH264Stream::mux_avc2flv(string video, int8_t frame_type, int8_
return err;
}
#ifdef SRS_H265
SrsRawHEVCStream::SrsRawHEVCStream()
{
}
SrsRawHEVCStream::~SrsRawHEVCStream()
{
}
srs_error_t SrsRawHEVCStream::annexb_demux(SrsBuffer *stream, char **pframe, int *pnb_frame)
{
srs_error_t err = srs_success;
*pframe = NULL;
*pnb_frame = 0;
while (!stream->empty()) {
// each frame must prefixed by annexb format.
// @see B.2 Byte stream NAL unit syntax and semantics
// @doc ITU-T-H.265-2021.pdf, page 292.
int pnb_start_code = 0;
if (!srs_avc_startswith_annexb(stream, &pnb_start_code)) {
return srs_error_new(ERROR_HEVC_API_NO_PREFIXED, "hevc annexb start code");
}
int start = stream->pos() + pnb_start_code;
// find the last frame prefixed by annexb format.
stream->skip(pnb_start_code);
while (!stream->empty()) {
if (srs_avc_startswith_annexb(stream, NULL)) {
break;
}
stream->skip(1);
}
// demux the frame.
*pnb_frame = stream->pos() - start;
*pframe = stream->data() + start;
break;
}
return err;
}
// whether the frame is vps or sps or pps.
bool SrsRawHEVCStream::is_vps(char *frame, int nb_frame)
{
srs_assert(nb_frame > 0);
// 7bits, 7.4.2.2 NAL unit header semantics,
// @see Table 7-1 NAL unit type codes and NAL unit type classes
// @doc ITU-T-H.265-2021.pdf, page 86.
return SrsHevcNaluTypeParse(frame[0]) == SrsHevcNaluType_VPS;
}
bool SrsRawHEVCStream::is_sps(char *frame, int nb_frame)
{
srs_assert(nb_frame > 0);
// 7bits, 7.4.2.2 NAL unit header semantics,
// @see Table 7-1 NAL unit type codes and NAL unit type classes
// @doc ITU-T-H.265-2021.pdf, page 86.
return SrsHevcNaluTypeParse(frame[0]) == SrsHevcNaluType_SPS;
}
bool SrsRawHEVCStream::is_pps(char *frame, int nb_frame)
{
srs_assert(nb_frame > 0);
// 7bits, 7.4.2.2 NAL unit header semantics,
// @see Table 7-1 NAL unit type codes and NAL unit type classes
// @doc ITU-T-H.265-2021.pdf, page 86.
return SrsHevcNaluTypeParse(frame[0]) == SrsHevcNaluType_PPS;
}
srs_error_t SrsRawHEVCStream::vps_demux(char *frame, int nb_frame, std::string &vps)
{
srs_error_t err = srs_success;
if (nb_frame <= 0) {
return srs_error_new(ERROR_STREAM_CASTER_HEVC_VPS, "no hevc vps");
}
vps = string(frame, nb_frame);
return err;
}
srs_error_t SrsRawHEVCStream::sps_demux(char *frame, int nb_frame, std::string &sps)
{
srs_error_t err = srs_success;
// At least 4 bytes for SPS to decode the type, profile, constraint and level.
if (nb_frame < 4) {
return err;
}
sps = string(frame, nb_frame);
return err;
}
srs_error_t SrsRawHEVCStream::pps_demux(char *frame, int nb_frame, std::string &pps)
{
srs_error_t err = srs_success;
if (nb_frame <= 0) {
return srs_error_new(ERROR_STREAM_CASTER_HEVC_PPS, "no hevc pps");
}
pps = string(frame, nb_frame);
return err;
}
srs_error_t SrsRawHEVCStream::mux_sequence_header(std::string vps, std::string sps, std::string pps, std::string &hvcC)
{
srs_error_t err = srs_success;
// hevc header information:
// 23bytes header:
// configurationVersion, general_profile_space, general_tier_flag, general_profile_idc
// general_profile_compatibility_flags, general_constraint_indicator_flags,
// general_level_idc, min_spatial_segmentation_idc, parallelismType,
// chromaFormat, bitDepthLumaMinus8, bitDepthChromaMinus8,
// avgFrameRate, constantFrameRate, numTemporalLayers, temporalIdNested,
// lengthSizeMinusOne, numOfArrays
// 5bytes size of vps/sps/pps:
// array_completeness, nal_unit_type, numNalus, nalUnitLength,
// Nbytes of vps/sps/pps.
// sequenceParameterSetNALUnit
// use simple mode: nalu size + nalu data
int nb_packet = 23 + 5 + (int)vps.length() + 5 + (int)sps.length() + 5 + (int)pps.length();
char *packet = new char[nb_packet];
SrsAutoFreeA(char, packet);
// use stream to generate the hevc packet.
SrsBuffer stream(packet, nb_packet);
SrsFormat format;
if ((err = format.initialize()) != srs_success) {
return srs_error_new(ERROR_STREAM_CASTER_HEVC_FORMAT, "format failed");
}
// hevc_dec_conf_record
SrsHevcDecoderConfigurationRecord *hevc_info = &format.vcodec->hevc_dec_conf_record_;
if (true) {
// H265 VPS (video_parameter_set_rbsp()) NAL Unit.
// @see Section 7.3.2.1 ("Video parameter set RBSP syntax") of the H.265
// @doc ITU-T-H.265-2021.pdf, page 54.
SrsBuffer vps_stream((char*)vps.data(), vps.length());
if ((err = format.hevc_demux_vps(&vps_stream)) != srs_success) {
return srs_error_new(ERROR_STREAM_CASTER_HEVC_VPS, "vps demux failed, len=%d", vps.length());
}
// H265 SPS Nal Unit (seq_parameter_set_rbsp()) parser.
// @see Section 7.3.2.2 ("Sequence parameter set RBSP syntax") of the H.265
// @doc ITU-T-H.265-2021.pdf, page 55.
SrsBuffer sps_stream((char*)sps.data(), sps.length());
if ((err = format.hevc_demux_sps(&sps_stream)) != srs_success) {
return srs_error_new(ERROR_STREAM_CASTER_HEVC_SPS, "sps demux failed, len=%d",sps.length());
}
}
// configurationVersion
stream.write_1bytes(0x01);
uint8_t temp8bits = 0;
// general_profile_space(2bits), general_tier_flag(1bit), general_profile_idc(5bits)
temp8bits |= ((hevc_info->general_profile_space & 0x03) << 6);
temp8bits |= ((hevc_info->general_tier_flag & 0x01) << 5);
temp8bits |= (hevc_info->general_profile_idc & 0x1f);
stream.write_1bytes(temp8bits);
stream.write_4bytes(hevc_info->general_profile_compatibility_flags);
stream.write_2bytes((hevc_info->general_constraint_indicator_flags >> 32) & 0xffff);
stream.write_4bytes(hevc_info->general_constraint_indicator_flags & 0xffffffff);
stream.write_1bytes(hevc_info->general_level_idc);
stream.write_2bytes(0xf000 | (hevc_info->min_spatial_segmentation_idc & 0x0fff));
stream.write_1bytes(0xfc | (hevc_info->parallelism_type & 0x03));
stream.write_1bytes(0xfc | (hevc_info->chroma_format & 0x03));
stream.write_1bytes(0xf8 | (hevc_info->bit_depth_luma_minus8 & 0x07));
stream.write_1bytes(0xf8 | (hevc_info->bit_depth_chroma_minus8 & 0x07));
stream.write_2bytes(hevc_info->avg_frame_rate);
hevc_info->length_size_minus_one = 3;
temp8bits = 0;
//8bits: constant_frame_rate(2bits), num_temporal_layers(3bits),
// temporal_id_nested(1bit), length_size_minus_one(2bits)
temp8bits |= (hevc_info->constant_frame_rate << 6) | 0xc0;
temp8bits |= (hevc_info->num_temporal_layers << 3) | 0x38;
temp8bits |= (hevc_info->temporal_id_nested << 2) | 0x04;
temp8bits |= (hevc_info->length_size_minus_one & 0x03);
stream.write_1bytes(temp8bits);
// numOfArrays, default 3
stream.write_1bytes(0x03);
// vps
if (true) {
// nal_type
stream.write_1bytes(SrsHevcNaluType_VPS & 0x3f);
// numOfVideoParameterSets, always 1
stream.write_2bytes(0x01);
// videoParameterSetLength
stream.write_2bytes((int16_t)vps.length());
// videoParameterSetNALUnit
stream.write_string(vps);
}
// sps
if (true) {
// nal_type
stream.write_1bytes(SrsHevcNaluType_SPS & 0x3f);
// numOfSequenceParameterSets, always 1
stream.write_2bytes(0x01);
// sequenceParameterSetLength
stream.write_2bytes((int16_t)sps.length());
// sequenceParameterSetNALUnit
stream.write_string(sps);
}
// pps
if (true) {
// nal_type
stream.write_1bytes(SrsHevcNaluType_PPS & 0x3f);
// numOfPictureParameterSets, always 1
stream.write_2bytes(0x01);
// pictureParameterSetLength
stream.write_2bytes((int16_t)pps.length());
// pictureParameterSetNALUnit
stream.write_string(pps);
}
hvcC = string(packet, nb_packet);
return err;
}
srs_error_t SrsRawHEVCStream::mux_ipb_frame(char *frame, int nb_frame, std::string &ibp)
{
srs_error_t err = srs_success;
// 4bytes size of nalu:
// NALUnitLength
// Nbytes of nalu.
// NALUnit
int nb_packet = 4 + nb_frame;
char *packet = new char[nb_packet];
SrsAutoFreeA(char, packet);
// use stream to generate the h265 packet.
SrsBuffer stream(packet, nb_packet);
// 5.3.4.2.1 Syntax, ISO_IEC_14496-15-AVC-format-2012.pdf, page 16
// lengthSizeMinusOne, or NAL_unit_length, always use 4bytes size
uint32_t NAL_unit_length = nb_frame;
// mux the avc NALU in "ISO Base Media File Format"
// from ISO_IEC_14496-15-AVC-format-2012.pdf, page 20
// NALUnitLength
stream.write_4bytes(NAL_unit_length);
// NALUnit
stream.write_bytes(frame, nb_frame);
ibp = string(packet, nb_packet);
return err;
}
srs_error_t SrsRawHEVCStream::mux_avc2flv(std::string video, int8_t frame_type, int8_t avc_packet_type, uint32_t dts, uint32_t pts, char **flv, int *nb_flv)
{
srs_error_t err = srs_success;
// for h265 in RTMP video payload, there is 5bytes header:
// 1bytes, FrameType | CodecID
// 1bytes, AVCPacketType
// 3bytes, CompositionTime, the cts.
// @see: E.4.3 Video Tags, video_file_format_spec_v10_1.pdf, page 78
int size = (int)video.length() + 5;
char *data = new char[size];
char *p = data;
// @see: E.4.3 Video Tags, video_file_format_spec_v10_1.pdf, page 78
// Frame Type, Type of video frame.
// CodecID, Codec Identifier.
// set the rtmp header
*p++ = (frame_type << 4) | SrsVideoCodecIdHEVC;
// AVCPacketType
*p++ = avc_packet_type;
// CompositionTime
// pts = dts + cts, or
// cts = pts - dts.
// where cts is the header in rtmp video packet payload header.
uint32_t cts = pts - dts;
char *pp = (char *)&cts;
*p++ = pp[2];
*p++ = pp[1];
*p++ = pp[0];
// hevc raw data.
memcpy(p, video.data(), video.length());
*flv = data;
*nb_flv = size;
return err;
}
#endif
SrsRawAacStream::SrsRawAacStream()
{
}

@@ -53,6 +53,51 @@ public:
virtual srs_error_t mux_avc2flv(std::string video, int8_t frame_type, int8_t avc_packet_type, uint32_t dts, uint32_t pts, char** flv, int* nb_flv);
};
#ifdef SRS_H265
// The raw h.265 stream, in annexb.
class SrsRawHEVCStream
{
public:
SrsRawHEVCStream();
virtual ~SrsRawHEVCStream();
public:
// Demux the stream in annexb format.
// @param stream the input stream bytes.
// @param pframe the output hevc frame in stream. user should never free it.
// @param pnb_frame the output hevc frame size.
virtual srs_error_t annexb_demux(SrsBuffer *stream, char **pframe, int *pnb_frame);
// whether the frame is sps or pps or vps.
virtual bool is_sps(char *frame, int nb_frame);
virtual bool is_pps(char *frame, int nb_frame);
virtual bool is_vps(char *frame, int nb_frame);
// Demux the sps or pps or vps to string.
// @param sps/pps output the sps/pps/vps.
virtual srs_error_t sps_demux(char *frame, int nb_frame, std::string &sps);
virtual srs_error_t pps_demux(char *frame, int nb_frame, std::string &pps);
virtual srs_error_t vps_demux(char *frame, int nb_frame, std::string &vps);
public:
// The hevc raw data to hevc packet, without flv payload header.
// Mux the sps/pps/vps to flv sequence header packet.
// @param sh output the sequence header.
virtual srs_error_t mux_sequence_header(std::string vps, std::string sps, std::string pps, std::string &sh);
// The hevc raw data to hevc packet, without flv payload header.
// Mux the ibp to flv ibp packet.
// @param ibp output the packet.
// @param frame_type output the frame type.
virtual srs_error_t mux_ipb_frame(char *frame, int nb_frame, std::string &ibp);
// Mux the hevc video packet to flv video packet.
// @param frame_type, SrsVideoAvcFrameTypeKeyFrame or SrsVideoAvcFrameTypeInterFrame.
// @param avc_packet_type, SrsVideoAvcFrameTraitSequenceHeader or SrsVideoAvcFrameTraitNALU.
// @param video the hevc raw data.
// @param flv output the muxed flv packet.
// @param nb_flv output the muxed flv size.
virtual srs_error_t mux_avc2flv(std::string video, int8_t frame_type, int8_t avc_packet_type, uint32_t dts, uint32_t pts, char **flv, int *nb_flv);
};
#endif
// The header of adts sample.
struct SrsRawAacStreamCodec
{

@@ -4241,6 +4241,9 @@ VOID TEST(ConfigEnvTest, CheckEnvValuesVhostSrt)
SrsSetEnvConfig(srt_to_rtmp, "SRS_VHOST_SRT_SRT_TO_RTMP", "off");
EXPECT_FALSE(conf.get_srt_to_rtmp("__defaultVhost__"));
SrsSetEnvConfig(srt_to_rtmp2, "SRS_VHOST_SRT_TO_RTMP", "off");
EXPECT_FALSE(conf.get_srt_to_rtmp("__defaultVhost__"));
}
}
