Squash: Merge v4.0.203

pull/3059/head
winlin 3 years ago
parent e16830e989
commit 8576fa7052

@ -30,6 +30,8 @@ The changelog for SRS.
## SRS 4.0 Changelog
* v4.0, 2021-12-04, Merge [#2768](https://github.com/ossrs/srs/pull/2768): RTC: Fix bugs for RTC2RTMP. (#2768). v4.0.203
* v4.0, 2021-12-04, Merge [#2757](https://github.com/ossrs/srs/pull/2757): RTC: Ignore empty audio packet when transcoding (#2757). v4.0.202
* v4.0, 2021-12-01, Fix [#2762](https://github.com/ossrs/srs/pull/2762): RTC: Refine publish security error message (#2762). v4.0.200
* v4.0, 2021-11-25, Merge [#2751](https://github.com/ossrs/srs/pull/2751): RTC: Fix crash when pkt->payload() if pkt is nullptr (#2751). v4.0.199
* v4.0, 2021-11-15, For [#1708](https://github.com/ossrs/srs/pull/1708): ST: Print log when multiple thread stop one coroutine. (#1708). v4.0.198

@ -2930,7 +2930,6 @@ srs_error_t SrsConfig::check_number_connections()
int nb_canbe = max_open_files - nb_consumed_fds - 1;
// for each play connection, we open a pipe(2fds) to convert SrsConsumer to io,
// refine performance, @see: https://github.com/ossrs/srs/issues/194
if (nb_total >= max_open_files) {
srs_error("invalid max_connections=%d, required=%d, system limit to %d, "
"total=%d(max_connections=%d, nb_consumed_fds=%d). "

@ -322,8 +322,12 @@ srs_error_t SrsAudioTranscoder::decode_and_resample(SrsAudioFrame *pkt)
dec_packet_->data = (uint8_t *)pkt->samples[0].bytes;
dec_packet_->size = pkt->samples[0].size;
char err_buf[AV_ERROR_MAX_STRING_SIZE] = {0};
// Ignore empty packet, see https://github.com/ossrs/srs/pull/2757#discussion_r759797651
if (!dec_packet_->data || !dec_packet_->size){
return err;
}
char err_buf[AV_ERROR_MAX_STRING_SIZE] = {0};
int error = avcodec_send_packet(dec_, dec_packet_);
if (error < 0) {
return srs_error_new(ERROR_RTC_RTP_MUXER, "submit to dec(%d,%s)", error,

@ -1278,7 +1278,7 @@ SrsRtmpFromRtcBridger::SrsRtmpFromRtcBridger(SrsLiveSource *src)
is_first_audio = true;
is_first_video = true;
format = NULL;
key_frame_ts_ = -1;
rtp_key_frame_ts_ = -1;
header_sn_ = 0;
memset(cache_video_pkts_, 0, sizeof(cache_video_pkts_));
}
@ -1504,24 +1504,24 @@ srs_error_t SrsRtmpFromRtcBridger::packet_video_key_frame(SrsRtpPacket* pkt)
}
}
if (-1 == key_frame_ts_) {
key_frame_ts_ = pkt->get_avsync_time();
if (-1 == rtp_key_frame_ts_) {
rtp_key_frame_ts_ = pkt->header.get_timestamp();
header_sn_ = pkt->header.get_sequence();
lost_sn_ = header_sn_ + 1;
// Received key frame and clean cache of old p frame pkts
clear_cached_video();
srs_trace("set ts=%lld, header=%hu, lost=%hu", key_frame_ts_, header_sn_, lost_sn_);
} else if (key_frame_ts_ != pkt->get_avsync_time()) {
srs_trace("set ts=%u, header=%hu, lost=%hu", (uint32_t)rtp_key_frame_ts_, header_sn_, lost_sn_);
} else if (rtp_key_frame_ts_ != pkt->header.get_timestamp()) {
//new key frame, clean cache
int64_t old_ts = key_frame_ts_;
int64_t old_ts = rtp_key_frame_ts_;
uint16_t old_header_sn = header_sn_;
uint16_t old_lost_sn = lost_sn_;
key_frame_ts_ = pkt->get_avsync_time();
rtp_key_frame_ts_ = pkt->header.get_timestamp();
header_sn_ = pkt->header.get_sequence();
lost_sn_ = header_sn_ + 1;
clear_cached_video();
srs_trace("drop old ts=%lld, header=%hu, lost=%hu, set new ts=%lld, header=%hu, lost=%hu",
old_ts, old_header_sn, old_lost_sn, key_frame_ts_, header_sn_, lost_sn_);
srs_warn("drop old ts=%u, header=%hu, lost=%hu, set new ts=%u, header=%hu, lost=%hu",
(uint32_t)old_ts, old_header_sn, old_lost_sn, (uint32_t)rtp_key_frame_ts_, header_sn_, lost_sn_);
}
uint16_t index = cache_index(pkt->header.get_sequence());
@ -1561,9 +1561,10 @@ srs_error_t SrsRtmpFromRtcBridger::packet_video_rtmp(const uint16_t start, const
srs_error_t err = srs_success;
int nb_payload = 0;
uint16_t cnt = end - start + 1;
int16_t cnt = srs_rtp_seq_distance(start, end) + 1;
srs_assert(cnt >= 1);
for (uint16_t i = 0; i < cnt; ++i) {
for (uint16_t i = 0; i < (uint16_t)cnt; ++i) {
uint16_t sn = start + i;
uint16_t index = cache_index(sn);
SrsRtpPacket* pkt = cache_video_pkts_[index].pkt;
@ -1615,7 +1616,7 @@ srs_error_t SrsRtmpFromRtcBridger::packet_video_rtmp(const uint16_t start, const
SrsBuffer payload(rtmp.payload, rtmp.size);
if (pkt->is_keyframe()) {
payload.write_1bytes(0x17); // type(4 bits): key frame; code(4bits): avc
key_frame_ts_ = -1;
rtp_key_frame_ts_ = -1;
} else {
payload.write_1bytes(0x27); // type(4 bits): inter frame; code(4bits): avc
}
@ -1625,7 +1626,7 @@ srs_error_t SrsRtmpFromRtcBridger::packet_video_rtmp(const uint16_t start, const
payload.write_1bytes(0x0);
int nalu_len = 0;
for (uint16_t i = 0; i < cnt; ++i) {
for (uint16_t i = 0; i < (uint16_t)cnt; ++i) {
uint16_t index = cache_index((start + i));
SrsRtpPacket* pkt = cache_video_pkts_[index].pkt;
@ -1664,10 +1665,10 @@ srs_error_t SrsRtmpFromRtcBridger::packet_video_rtmp(const uint16_t start, const
if (stap_payload) {
for (int j = 0; j < (int)stap_payload->nalus.size(); ++j) {
SrsSample* sample = stap_payload->nalus.at(j);
if (sample->size > 0) {
payload.write_4bytes(sample->size);
if (sample->size > 0) {
payload.write_4bytes(sample->size);
payload.write_bytes(sample->bytes, sample->size);
}
}
}
srs_freep(pkt);
continue;
@ -1726,7 +1727,7 @@ int32_t SrsRtmpFromRtcBridger::find_next_lost_sn(uint16_t current_sn, uint16_t&
}
}
srs_error("the cache is mess. the packet count of video frame is more than %u", s_cache_size);
srs_error("cache overflow. the packet count of video frame is more than %u", s_cache_size);
return -2;
}
@ -1746,10 +1747,12 @@ void SrsRtmpFromRtcBridger::clear_cached_video()
bool SrsRtmpFromRtcBridger::check_frame_complete(const uint16_t start, const uint16_t end)
{
uint16_t cnt = (end - start + 1);
int16_t cnt = srs_rtp_seq_distance(start, end) + 1;
srs_assert(cnt >= 1);
uint16_t fu_s_c = 0;
uint16_t fu_e_c = 0;
for (uint16_t i = 0; i < cnt; ++i) {
for (uint16_t i = 0; i < (uint16_t)cnt; ++i) {
int index = cache_index((start + i));
SrsRtpPacket* pkt = cache_video_pkts_[index].pkt;
@ -2264,6 +2267,8 @@ SrsRtcRecvTrack::SrsRtcRecvTrack(SrsRtcConnection* session, SrsRtcTrackDescripti
last_sender_report_rtp_time_ = 0;
last_sender_report_rtp_time1_ = 0;
rate_ = 0.0;
last_sender_report_sys_time_ = 0;
}
@ -2299,40 +2304,41 @@ void SrsRtcRecvTrack::update_send_report_time(const SrsNtp& ntp, uint32_t rtp_ti
// TODO: FIXME: Use system wall clock.
last_sender_report_sys_time_ = srs_update_system_time();
}
int64_t SrsRtcRecvTrack::cal_avsync_time(uint32_t rtp_time)
{
// Have not received at least 2 sender reports, so we can't calculate the sync time.
// TODO: FIXME: use the sample rate from sdp.
if (last_sender_report_rtp_time1_ <= 0) {
return -1;
}
// WebRTC using sender report to sync audio/video timestamp, because audio video have different timebase,
// typical audio opus is 48000Hz, video is 90000Hz.
// We using two sender report point to calculate avsync timestamp(clock time) with any given rtp timestamp.
// For example, there are two history sender report of audio as below.
// sender_report1: rtp_time1 = 10000, ntp_time1 = 40000
// sender_report : rtp_time = 10960, ntp_time = 40020
// (rtp_time - rtp_time1) / (ntp_time - ntp_time1) = 960 / 20 = 48,
// Now we can calculate ntp time(ntp_x) of any given rtp timestamp(rtp_x),
// (rtp_x - rtp_time) / (ntp_x - ntp_time) = 48 => ntp_x = (rtp_x - rtp_time) / 48 + ntp_time;
double sys_time_elapsed = static_cast<double>(last_sender_report_ntp_.system_ms_) - static_cast<double>(last_sender_report_ntp1_.system_ms_);
if (last_sender_report_rtp_time1_ > 0) {
// WebRTC using sender report to sync audio/video timestamp, because audio video have different timebase,
// typical audio opus is 48000Hz, video is 90000Hz.
// We using two sender report point to calculate avsync timestamp(clock time) with any given rtp timestamp.
// For example, there are two history sender report of audio as below.
// sender_report1: rtp_time1 = 10000, ntp_time1 = 40000
// sender_report : rtp_time = 10960, ntp_time = 40020
// (rtp_time - rtp_time1) / (ntp_time - ntp_time1) = 960 / 20 = 48,
// Now we can calculate ntp time(ntp_x) of any given rtp timestamp(rtp_x),
// (rtp_x - rtp_time) / (ntp_x - ntp_time) = 48 => ntp_x = (rtp_x - rtp_time) / 48 + ntp_time;
double sys_time_elapsed = static_cast<double>(last_sender_report_ntp_.system_ms_) - static_cast<double>(last_sender_report_ntp1_.system_ms_);
// Check sys_time_elapsed is equal to zero.
if (fpclassify(sys_time_elapsed) == FP_ZERO) {
return;
}
double rtp_time_elpased = static_cast<double>(last_sender_report_rtp_time_) - static_cast<double>(last_sender_report_rtp_time1_);
double rate = round(rtp_time_elpased / sys_time_elapsed);
// Check sys_time_elapsed is equal to zero.
if (fpclassify(sys_time_elapsed) == FP_ZERO) {
return -1;
// TODO: FIXME: use the sample rate from sdp.
if (rate > 0) {
rate_ = rate;
}
}
double rtp_time_elpased = static_cast<double>(last_sender_report_rtp_time_) - static_cast<double>(last_sender_report_rtp_time1_);
int rate = round(rtp_time_elpased / sys_time_elapsed);
}
if (rate <= 0) {
int64_t SrsRtcRecvTrack::cal_avsync_time(uint32_t rtp_time)
{
if (rate_ < 0.001) {
return -1;
}
double delta = round((rtp_time - last_sender_report_rtp_time_) / rate);
double delta = round((rtp_time - last_sender_report_rtp_time_) / rate_);
int64_t avsync_time = delta + last_sender_report_ntp_.system_ms_;

@ -307,7 +307,7 @@ private:
RtcPacketCache cache_video_pkts_[s_cache_size];
uint16_t header_sn_;
uint16_t lost_sn_;
int64_t key_frame_ts_;
int64_t rtp_key_frame_ts_;
public:
SrsRtmpFromRtcBridger(SrsLiveSource *src);
virtual ~SrsRtmpFromRtcBridger();
@ -527,6 +527,7 @@ protected:
SrsNtp last_sender_report_ntp1_;
int64_t last_sender_report_rtp_time1_;
double rate_;
uint64_t last_sender_report_sys_time_;
public:
SrsRtcRecvTrack(SrsRtcConnection* session, SrsRtcTrackDescription* stream_descs, bool is_audio);

@ -743,7 +743,6 @@ srs_error_t SrsRtmpConn::do_playing(SrsLiveSource* source, SrsLiveConsumer* cons
#ifdef SRS_PERF_QUEUE_COND_WAIT
// wait for message to incoming.
// @see https://github.com/ossrs/srs/issues/251
// @see https://github.com/ossrs/srs/issues/257
consumer->wait(mw_msgs, mw_sleep);
#endif

@ -77,7 +77,6 @@ public:
#ifdef SRS_PERF_QUEUE_FAST_VECTOR
// To alloc and increase fixed space, fast remove and insert for msgs sender.
// @see https://github.com/ossrs/srs/issues/251
class SrsFastVector
{
private:
@ -177,7 +176,6 @@ private:
bool should_update_source_id;
#ifdef SRS_PERF_QUEUE_COND_WAIT
// The cond wait for mw.
// @see https://github.com/ossrs/srs/issues/251
srs_cond_t mw_wait;
bool mw_waiting;
int mw_min_msgs;

@ -54,7 +54,6 @@
* @remark this largely improve performance, from 3.5k+ to 7.5k+.
* the latency+ when cache+.
* @remark the socket send buffer default to 185KB, it large enough.
* @see https://github.com/ossrs/srs/issues/194
* @see SrsConfig::get_mw_sleep_ms()
* @remark the mw sleep and msgs to send, maybe:
* mw_sleep msgs iovs
@ -89,24 +88,20 @@
/**
* whether set the socket send buffer size.
* @see https://github.com/ossrs/srs/issues/251
*/
#define SRS_PERF_MW_SO_SNDBUF
/**
* whether set the socket recv buffer size.
* @see https://github.com/ossrs/srs/issues/251
*/
#undef SRS_PERF_MW_SO_RCVBUF
/**
* whether enable the fast vector for queue.
* @see https://github.com/ossrs/srs/issues/251
*/
#define SRS_PERF_QUEUE_FAST_VECTOR
/**
* whether use cond wait to send messages.
* @remark this improves performance for large connections.
* @see https://github.com/ossrs/srs/issues/251
*/
// TODO: FIXME: Should always enable it.
#define SRS_PERF_QUEUE_COND_WAIT

@ -9,6 +9,6 @@
#define VERSION_MAJOR 4
#define VERSION_MINOR 0
#define VERSION_REVISION 201
#define VERSION_REVISION 203
#endif

@ -79,7 +79,6 @@
#define SRS_CONSTS_RTMP_MAX_FMT3_HEADER_SIZE 5
// For performance issue,
// the iovs cache, @see https://github.com/ossrs/srs/issues/194
// iovs cache for multiple messages for each connections.
// suppose the chunk size is 64k, each message send in a chunk which needs only 2 iovec,
// so the iovs max should be (SRS_PERF_MW_MSGS * 2)
@ -87,7 +86,6 @@
// @remark, SRS will realloc when the iovs not enough.
#define SRS_CONSTS_IOVS_MAX (SRS_PERF_MW_MSGS * 2)
// For performance issue,
// the c0c3 cache, @see https://github.com/ossrs/srs/issues/194
// c0c3 cache for multiple messages for each connections.
// each c0 <= 16byes, suppose the chunk size is 64k,
// each message send in a chunk which needs only a c0 header,

@ -250,7 +250,6 @@ class SrsSharedPtrMessage
// 4.1. Message Header
public:
// The header can shared, only set the timestamp and stream id.
// @see https://github.com/ossrs/srs/issues/251
//SrsSharedMessageHeader header;
// Four-byte field that contains a timestamp of the message.
// The 4 bytes are packed in the big-endian order.
@ -278,7 +277,6 @@ private:
{
public:
// The shared message header.
// @see https://github.com/ossrs/srs/issues/251
SrsSharedMessageHeader header;
// The actual shared payload.
char* payload;

@ -1142,3 +1142,78 @@ VOID TEST(KernelRTCTest, SyncTimestampBySenderReportConsecutive)
}
}
}
VOID TEST(KernelRTCTest, SyncTimestampBySenderReportDuplicated)
{
SrsRtcConnection s(NULL, SrsContextId());
SrsRtcPublishStream publish(&s, SrsContextId());
SrsRtcTrackDescription video_ds;
video_ds.type_ = "video";
video_ds.id_ = "VMo22nfLDn122nfnDNL2";
video_ds.ssrc_ = 200;
SrsRtcVideoRecvTrack* video = new SrsRtcVideoRecvTrack(&s, &video_ds);
publish.video_tracks_.push_back(video);
publish.set_all_tracks_status(true);
SrsRtcSource* rtc_source = new SrsRtcSource();
SrsAutoFree(SrsRtcSource, rtc_source);
srand(time(NULL));
if (true)
{
SrsRtpPacket* video_rtp_pkt = new SrsRtpPacket();
SrsAutoFree(SrsRtpPacket, video_rtp_pkt);
uint32_t video_absolute_ts = srs_get_system_time();
uint32_t video_rtp_ts = random();
video_rtp_pkt->header.set_timestamp(video_rtp_ts);
video->on_rtp(rtc_source, video_rtp_pkt);
// Have not received any sender report, so we cannot calculate the absolute time; expect -1.
EXPECT_EQ(video_rtp_pkt->get_avsync_time(), -1);
SrsNtp ntp = SrsNtp::from_time_ms(video_absolute_ts);
SrsRtcpSR* video_sr = new SrsRtcpSR();
SrsAutoFree(SrsRtcpSR, video_sr);
video_sr->set_ssrc(200);
video_sr->set_ntp(ntp.ntp_);
video_sr->set_rtp_ts(video_rtp_ts);
publish.on_rtcp_sr(video_sr);
// Video timebase 90000, fps=25
video_rtp_ts += 3600;
video_absolute_ts += 40;
video_rtp_pkt->header.set_timestamp(video_rtp_ts);
video->on_rtp(rtc_source, video_rtp_pkt);
// Received one sender report, can not calculate absolute time, expect equal to -1.
EXPECT_EQ(video_rtp_pkt->get_avsync_time(), -1);
ntp = SrsNtp::from_time_ms(video_absolute_ts);
video_sr->set_ntp(ntp.ntp_);
video_sr->set_rtp_ts(video_rtp_ts);
publish.on_rtcp_sr(video_sr);
for (int i = 0; i <= 1000; ++i) {
// Video timebase 90000, fps=25
video_rtp_ts += 3600;
video_absolute_ts += 40;
video_rtp_pkt->header.set_timestamp(video_rtp_ts);
video->on_rtp(rtc_source, video_rtp_pkt);
EXPECT_NEAR(video_rtp_pkt->get_avsync_time(), video_absolute_ts, 1);
// Duplicate 3 sender report packets.
if (i % 3 == 0) {
ntp = SrsNtp::from_time_ms(video_absolute_ts);
video_sr->set_ntp(ntp.ntp_);
video_sr->set_rtp_ts(video_rtp_ts);
}
publish.on_rtcp_sr(video_sr);
}
}
}

Loading…
Cancel
Save